Started by user OpenShift CI Robot
[EnvInject] - Loading node environment variables.
Building in workspace /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2
[WS-CLEANUP] Deleting project workspace...
[workspace@2] $ /bin/bash /tmp/jenkins1764131639517138423.sh
########## STARTING STAGE: INSTALL THE ORIGIN-CI-TOOL ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]]
++ readlink /var/lib/jenkins/origin-ci-tool/latest
+ latest=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66
+ touch /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66
+ cp /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin/activate /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate
+ cat
+ source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66
++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config
+ mkdir -p /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config
+ rm -rf /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool
+ oct configure ansible-client verbosity 2
Option verbosity updated to be 2.
+ oct configure aws-client keypair_name libra
Option keypair_name updated to be libra.
+ oct configure aws-client private_key_path /var/lib/jenkins/.ssh/devenv.pem
Option private_key_path updated to be /var/lib/jenkins/.ssh/devenv.pem.
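For reference, the activation and configuration pattern exercised by the stage above can be reproduced with a short script. This is a minimal sketch, assuming a Jenkins-provided WORKSPACE variable; the virtualenv path, keypair name, and private-key path are the ones recorded in this log, and everything else is illustrative.

# Minimal sketch of the origin-ci-tool setup shown above (assumptions noted inline).
set -o errexit -o nounset -o pipefail

venv="/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66"
workspace="${WORKSPACE:-$(pwd)}"   # assumption: Jenkins exports WORKSPACE

# Reproduce the effect of the generated 'activate' file: put the venv on PATH
# and keep oct configuration isolated inside the job workspace.
export VIRTUAL_ENV="${venv}"
export PATH="${venv}/bin:/sbin:/usr/sbin:/bin:/usr/bin"
unset PYTHON_HOME
export OCT_CONFIG_HOME="${workspace}/.config"

mkdir -p "${OCT_CONFIG_HOME}"
rm -rf "${OCT_CONFIG_HOME}/origin-ci-tool"

# Configure the Ansible and AWS clients exactly as the stage does.
oct configure ansible-client verbosity 2
oct configure aws-client keypair_name libra
oct configure aws-client private_key_path /var/lib/jenkins/.ssh/devenv.pem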
+ set +o xtrace ########## FINISHED STAGE: SUCCESS: INSTALL THE ORIGIN-CI-TOOL [00h 00m 02s] ########## [workspace@2] $ /bin/bash /tmp/jenkins2697555124219808375.sh ########## STARTING STAGE: PROVISION CLOUD RESOURCES ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config + oct provision remote all-in-one --os rhel --stage base --provider aws --discrete-ssh-config --name test_pull_request_openshift_ansible_logging_37_97 PLAYBOOK: aws-up.yml *********************************************************** 2 plays in /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/aws-up.yml PLAY [ensure we have the parameters necessary to bring up the AWS EC2 instance] *** TASK [ensure all required variables are set] *********************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/aws-up.yml:9 skipping: [localhost] => (item=origin_ci_inventory_dir) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.228686", "item": "origin_ci_inventory_dir", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_ami_os) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.260206", "item": "origin_ci_aws_ami_os", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_region) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.261410", "item": "origin_ci_aws_region", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_private_key_path) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.262430", "item": "origin_ci_aws_private_key_path", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_keypair_name) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.263439", "item": "origin_ci_aws_keypair_name", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_ami_stage) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.282762", "item": "origin_ci_aws_ami_stage", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=openshift_node_labels) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.332758", "item": "openshift_node_labels", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=openshift_schedulable) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.333886", "item": 
"openshift_schedulable", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_ssh_config_strategy) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.334930", "item": "origin_ci_ssh_config_strategy", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_hostname) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.335983", "item": "origin_ci_aws_hostname", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_identifying_tag_key) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.337038", "item": "origin_ci_aws_identifying_tag_key", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_master_instance_type) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.338082", "item": "origin_ci_aws_master_instance_type", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_instance_name) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.339157", "item": "origin_ci_aws_instance_name", "skip_reason": "Conditional check failed", "skipped": true } TASK [ensure all required variables are set] *********************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/aws-up.yml:28 skipping: [localhost] => (item=origin_ci_aws_etcd_security_group) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.424335", "item": "origin_ci_aws_etcd_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_master_subnet) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.426131", "item": "origin_ci_aws_master_subnet", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_master_security_group) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.470614", "item": "origin_ci_aws_master_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_node_security_group) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.471874", "item": "origin_ci_aws_node_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_router_elb_security_group) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.483524", "item": "origin_ci_aws_router_elb_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_router_security_group) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.484716", "item": "origin_ci_aws_router_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_master_internal_elb_security_group) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.485831", "item": "origin_ci_aws_master_internal_elb_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_master_external_elb_security_group) => { "changed": false, "generated_timestamp": "2018-04-06 16:56:03.486912", "item": "origin_ci_aws_master_external_elb_security_group", "skip_reason": "Conditional check 
failed", "skipped": true } PLAY [provision an AWS EC2 instance] ******************************************* TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [inventory : initialize the inventory directory] ************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/inventory/tasks/main.yml:2 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 16:56:05.112773", "gid": 995, "group": "jenkins", "mode": "0755", "owner": "jenkins", "path": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory", "secontext": "system_u:object_r:var_lib_t:s0", "size": 6, "state": "directory", "uid": 997 } TASK [inventory : add the nested group mapping] ******************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/inventory/tasks/main.yml:7 changed: [localhost] => { "changed": true, "checksum": "18aaee00994df38cc3a63b635893175235331a9c", "dest": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/nested_group_mappings", "generated_timestamp": "2018-04-06 16:56:06.464324", "gid": 995, "group": "jenkins", "md5sum": "b30c3226ea63efa3ff9c5e346c14a16e", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 93, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1523048165.83-59155667679949/source", "state": "file", "uid": 997 } TASK [inventory : initialize the OSEv3 group variables directory] ************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/inventory/tasks/main.yml:12 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-04-06 16:56:06.884610", "gid": 995, "group": "jenkins", "mode": "0755", "owner": "jenkins", "path": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/group_vars/OSEv3", "secontext": "system_u:object_r:var_lib_t:s0", "size": 6, "state": "directory", "uid": 997 } TASK [inventory : initialize the host variables directory] ********************* task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/inventory/tasks/main.yml:17 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-04-06 16:56:07.286216", "gid": 995, "group": "jenkins", "mode": "0755", "owner": "jenkins", "path": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/host_vars", "secontext": "system_u:object_r:var_lib_t:s0", "size": 6, "state": "directory", "uid": 997 } TASK [inventory : add the default Origin installation configuration] *********** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/inventory/tasks/main.yml:22 changed: [localhost] => { "changed": true, "checksum": "4c06ba508f055c20f13426e8587342e8765a7b66", "dest": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/group_vars/OSEv3/general.yml", "generated_timestamp": 
"2018-04-06 16:56:08.010052", "gid": 995, "group": "jenkins", "md5sum": "8aec71c75f7d512b278ae7c6f2959b12", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 331, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1523048167.67-45692840476857/source", "state": "file", "uid": 997 } TASK [aws-up : determine if we are inside AWS EC2] ***************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:2 changed: [localhost] => { "changed": true, "cmd": [ "curl", "-s", "http://instance-data.ec2.internal" ], "delta": "0:00:00.036104", "end": "2018-04-06 16:56:08.490582", "failed": false, "failed_when_result": false, "generated_timestamp": "2018-04-06 16:56:08.528145", "rc": 0, "start": "2018-04-06 16:56:08.454478", "stderr": [], "stdout": [ "1.0", "2007-01-19", "2007-03-01", "2007-08-29", "2007-10-10", "2007-12-15", "2008-02-01", "2008-09-01", "2009-04-04", "2011-01-01", "2011-05-01", "2012-01-12", "2014-02-25", "2014-11-05", "2015-10-20", "2016-04-19", "2016-06-30", "2016-09-02", "latest" ], "warnings": [ "Consider using get_url or uri module rather than running curl" ] } [WARNING]: Consider using get_url or uri module rather than running curl TASK [aws-up : configure EC2 parameters for inventory when controlling from inside EC2] *** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:7 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_destination_variable": "private_dns_name", "origin_ci_aws_host_address_variable": "private_ip", "origin_ci_aws_vpc_destination_variable": "private_ip_address" }, "changed": false, "generated_timestamp": "2018-04-06 16:56:08.632770" } TASK [aws-up : determine where to put the AWS API cache] *********************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:14 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_cache_dir": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/.ec2_cache" }, "changed": false, "generated_timestamp": "2018-04-06 16:56:08.736748" } TASK [aws-up : ensure we have a place to put the AWS API cache] **************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:18 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-04-06 16:56:09.137639", "gid": 995, "group": "jenkins", "mode": "0755", "owner": "jenkins", "path": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/.ec2_cache", "secontext": "system_u:object_r:var_lib_t:s0", "size": 6, "state": "directory", "uid": 997 } TASK [aws-up : place the EC2 dynamic inventory script] ************************* task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:23 changed: [localhost] => { "changed": true, "checksum": "625b8af723189db3b96ba0026d0f997a0025bc47", "dest": 
"/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/ec2.py", "generated_timestamp": "2018-04-06 16:56:09.864457", "gid": 995, "group": "jenkins", "md5sum": "cac06c14065dac74904232b89d4ba24c", "mode": "0755", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 63725, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1523048169.57-50701607157302/source", "state": "file", "uid": 997 } TASK [aws-up : place the EC2 dynamic inventory configuration] ****************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:29 changed: [localhost] => { "changed": true, "checksum": "7d9ee2e26920a7dc41d0db5c523256daffde74b1", "dest": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/ec2.ini", "generated_timestamp": "2018-04-06 16:56:10.527693", "gid": 995, "group": "jenkins", "md5sum": "6932d049e6dbcfce4988709d1be72194", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 416, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1523048169.93-31843239729422/source", "state": "file", "uid": 997 } TASK [aws-up : place the EC2 tag to group mappings] **************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:34 changed: [localhost] => { "changed": true, "checksum": "b4205a33dc73f62bd4f77f35d045cf8e09ae62b0", "dest": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/tag_to_group_mappings", "generated_timestamp": "2018-04-06 16:56:11.541199", "gid": 995, "group": "jenkins", "md5sum": "bc3a567a1b6f342e1005182efc1b66be", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 287, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1523048171.03-210113702019953/source", "state": "file", "uid": 997 } TASK [aws-up : list available AMIs] ******************************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:40 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 16:56:20.243425", "results": [ { "ami_id": "ami-091038c2724a8834e", "architecture": "x86_64", "block_device_mapping": { "/dev/sda1": { "delete_on_termination": true, "encrypted": false, "size": 75, "snapshot_id": "snap-09042843a5714b710", "volume_type": "gp2" }, "/dev/sdb": { "delete_on_termination": true, "encrypted": false, "size": 50, "snapshot_id": "snap-03b526946b77517ea", "volume_type": "gp2" } }, "creationDate": "2018-03-05T18:40:09.000Z", "description": "OpenShift Origin development AMI on rhel at the base stage.", "hypervisor": "xen", "is_public": false, "location": "531415883065/ami_build_origin_int_rhel_base_611", "name": "ami_build_origin_int_rhel_base_611", "owner_id": "531415883065", "platform": null, "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "available", "tags": { "Name": "ami_build_origin_int_rhel_base_611", "image_stage": "base", "operating_system": "rhel", "ready": "yes" }, "virtualization_type": "hvm" }, { "ami_id": "ami-069c0ca6cc091e8fa", "architecture": "x86_64", "block_device_mapping": { 
"/dev/sda1": { "delete_on_termination": true, "encrypted": false, "size": 75, "snapshot_id": "snap-0d20c69b20a8b3f3d", "volume_type": "gp2" }, "/dev/sdb": { "delete_on_termination": true, "encrypted": false, "size": 50, "snapshot_id": "snap-012e3422f546895da", "volume_type": "gp2" } }, "creationDate": "2018-03-08T22:39:48.000Z", "description": "OpenShift Origin development AMI on rhel at the base stage.", "hypervisor": "xen", "is_public": false, "location": "531415883065/ami_build_origin_int_rhel_base_618", "name": "ami_build_origin_int_rhel_base_618", "owner_id": "531415883065", "platform": null, "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "available", "tags": { "Name": "ami_build_origin_int_rhel_base_618", "image_stage": "base", "operating_system": "rhel", "ready": "yes" }, "virtualization_type": "hvm" } ] } TASK [aws-up : choose appropriate AMIs for use] ******************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:52 ok: [localhost] => (item={u'ami_id': u'ami-091038c2724a8834e', u'root_device_type': u'ebs', u'description': u'OpenShift Origin development AMI on rhel at the base stage.', u'tags': {u'ready': u'yes', u'image_stage': u'base', u'Name': u'ami_build_origin_int_rhel_base_611', u'operating_system': u'rhel'}, u'hypervisor': u'xen', u'block_device_mapping': {u'/dev/sdb': {u'encrypted': False, u'snapshot_id': u'snap-03b526946b77517ea', u'delete_on_termination': True, u'volume_type': u'gp2', u'size': 50}, u'/dev/sda1': {u'encrypted': False, u'snapshot_id': u'snap-09042843a5714b710', u'delete_on_termination': True, u'volume_type': u'gp2', u'size': 75}}, u'architecture': u'x86_64', u'owner_id': u'531415883065', u'platform': None, u'state': u'available', u'location': u'531415883065/ami_build_origin_int_rhel_base_611', u'is_public': False, u'creationDate': u'2018-03-05T18:40:09.000Z', u'root_device_name': u'/dev/sda1', u'virtualization_type': u'hvm', u'name': u'ami_build_origin_int_rhel_base_611'}) => { "ansible_facts": { "origin_ci_aws_ami_id_candidate": "ami-091038c2724a8834e" }, "changed": false, "generated_timestamp": "2018-04-06 16:56:20.334348", "item": { "ami_id": "ami-091038c2724a8834e", "architecture": "x86_64", "block_device_mapping": { "/dev/sda1": { "delete_on_termination": true, "encrypted": false, "size": 75, "snapshot_id": "snap-09042843a5714b710", "volume_type": "gp2" }, "/dev/sdb": { "delete_on_termination": true, "encrypted": false, "size": 50, "snapshot_id": "snap-03b526946b77517ea", "volume_type": "gp2" } }, "creationDate": "2018-03-05T18:40:09.000Z", "description": "OpenShift Origin development AMI on rhel at the base stage.", "hypervisor": "xen", "is_public": false, "location": "531415883065/ami_build_origin_int_rhel_base_611", "name": "ami_build_origin_int_rhel_base_611", "owner_id": "531415883065", "platform": null, "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "available", "tags": { "Name": "ami_build_origin_int_rhel_base_611", "image_stage": "base", "operating_system": "rhel", "ready": "yes" }, "virtualization_type": "hvm" } } ok: [localhost] => (item={u'ami_id': u'ami-069c0ca6cc091e8fa', u'root_device_type': u'ebs', u'description': u'OpenShift Origin development AMI on rhel at the base stage.', u'tags': {u'ready': u'yes', u'image_stage': u'base', u'Name': u'ami_build_origin_int_rhel_base_618', u'operating_system': u'rhel'}, u'hypervisor': u'xen', 
u'block_device_mapping': {u'/dev/sdb': {u'encrypted': False, u'snapshot_id': u'snap-012e3422f546895da', u'delete_on_termination': True, u'volume_type': u'gp2', u'size': 50}, u'/dev/sda1': {u'encrypted': False, u'snapshot_id': u'snap-0d20c69b20a8b3f3d', u'delete_on_termination': True, u'volume_type': u'gp2', u'size': 75}}, u'architecture': u'x86_64', u'owner_id': u'531415883065', u'platform': None, u'state': u'available', u'location': u'531415883065/ami_build_origin_int_rhel_base_618', u'is_public': False, u'creationDate': u'2018-03-08T22:39:48.000Z', u'root_device_name': u'/dev/sda1', u'virtualization_type': u'hvm', u'name': u'ami_build_origin_int_rhel_base_618'}) => { "ansible_facts": { "origin_ci_aws_ami_id_candidate": "ami-069c0ca6cc091e8fa" }, "changed": false, "generated_timestamp": "2018-04-06 16:56:20.366426", "item": { "ami_id": "ami-069c0ca6cc091e8fa", "architecture": "x86_64", "block_device_mapping": { "/dev/sda1": { "delete_on_termination": true, "encrypted": false, "size": 75, "snapshot_id": "snap-0d20c69b20a8b3f3d", "volume_type": "gp2" }, "/dev/sdb": { "delete_on_termination": true, "encrypted": false, "size": 50, "snapshot_id": "snap-012e3422f546895da", "volume_type": "gp2" } }, "creationDate": "2018-03-08T22:39:48.000Z", "description": "OpenShift Origin development AMI on rhel at the base stage.", "hypervisor": "xen", "is_public": false, "location": "531415883065/ami_build_origin_int_rhel_base_618", "name": "ami_build_origin_int_rhel_base_618", "owner_id": "531415883065", "platform": null, "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "available", "tags": { "Name": "ami_build_origin_int_rhel_base_618", "image_stage": "base", "operating_system": "rhel", "ready": "yes" }, "virtualization_type": "hvm" } } TASK [aws-up : determine which AMI to use] ************************************* task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:58 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_ami_id": "ami-069c0ca6cc091e8fa" }, "changed": false, "generated_timestamp": "2018-04-06 16:56:20.468169" } TASK [aws-up : determine which subnets are available] ************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:63 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 16:56:21.599312", "subnets": [ { "availability_zone": "us-east-1d", "available_ip_address_count": 3862, "cidr_block": "172.18.0.0/20", "default_for_az": "false", "id": "subnet-cf57c596", "map_public_ip_on_launch": "true", "state": "available", "tags": { "Name": "devenv-subnet-1", "origin_ci_aws_cluster_component": "master_subnet" }, "vpc_id": "vpc-69705d0c" }, { "availability_zone": "us-east-1c", "available_ip_address_count": 4080, "cidr_block": "172.18.16.0/20", "default_for_az": "false", "id": "subnet-8bdb5ac2", "map_public_ip_on_launch": "true", "state": "available", "tags": { "Name": "devenv-subnet-2", "origin_ci_aws_cluster_component": "master_subnet" }, "vpc_id": "vpc-69705d0c" } ] } TASK [aws-up : determine which subnets to use for the master] ****************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:70 ok: [localhost] => { "ansible_facts": { 
"origin_ci_aws_master_subnet_ids": [ "subnet-cf57c596", "subnet-8bdb5ac2" ] }, "changed": false, "generated_timestamp": "2018-04-06 16:56:21.667265" } TASK [aws-up : determine which security groups are available] ****************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:75 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 16:56:23.159672", "security_groups": [ { "description": "default VPC security group", "group_id": "sg-7e73221a", "group_name": "default", "ip_permissions": [ { "ip_protocol": "-1", "ip_ranges": [], "ipv6_ranges": [], "prefix_list_ids": [], "user_id_group_pairs": [ { "group_id": "sg-7e73221a", "user_id": "531415883065" } ] }, { "from_port": 80, "ip_protocol": "tcp", "ip_ranges": [ { "cidr_ip": "54.241.19.245/32" }, { "cidr_ip": "97.65.119.184/29" }, { "cidr_ip": "107.20.219.35/32" }, { "cidr_ip": "108.166.48.153/32" }, { "cidr_ip": "212.199.177.64/27" }, { "cidr_ip": "212.72.208.162/32" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 443, "user_id_group_pairs": [] }, { "from_port": 53, "ip_protocol": "tcp", "ip_ranges": [ { "cidr_ip": "119.254.120.64/26" }, { "cidr_ip": "209.132.176.0/20" }, { "cidr_ip": "209.132.186.34/32" }, { "cidr_ip": "213.175.37.10/32" }, { "cidr_ip": "62.40.79.66/32" }, { "cidr_ip": "66.187.224.0/20" }, { "cidr_ip": "66.187.239.0/24" }, { "cidr_ip": "38.140.108.0/24" }, { "cidr_ip": "213.175.37.9/32" }, { "cidr_ip": "38.99.12.232/29" }, { "cidr_ip": "4.14.33.72/30" }, { "cidr_ip": "4.14.35.88/29" }, { "cidr_ip": "50.227.40.96/29" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 8444, "user_id_group_pairs": [] }, { "from_port": 22, "ip_protocol": "tcp", "ip_ranges": [ { "cidr_ip": "0.0.0.0/0" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 22, "user_id_group_pairs": [] }, { "from_port": 53, "ip_protocol": "udp", "ip_ranges": [ { "cidr_ip": "209.132.176.0/20" }, { "cidr_ip": "66.187.224.0/20" }, { "cidr_ip": "66.187.239.0/24" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 53, "user_id_group_pairs": [] }, { "from_port": 3389, "ip_protocol": "tcp", "ip_ranges": [ { "cidr_ip": "0.0.0.0/0" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 3389, "user_id_group_pairs": [] }, { "from_port": -1, "ip_protocol": "icmp", "ip_ranges": [ { "cidr_ip": "0.0.0.0/0" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": -1, "user_id_group_pairs": [] } ], "ip_permissions_egress": [ { "ip_protocol": "-1", "ip_ranges": [ { "cidr_ip": "0.0.0.0/0" } ], "ipv6_ranges": [], "prefix_list_ids": [], "user_id_group_pairs": [] } ], "owner_id": "531415883065", "tags": { "Name": "devenv-vpc", "origin_ci_aws_cluster_component": "master_security_group" }, "vpc_id": "vpc-69705d0c" } ] } TASK [aws-up : determine which security group to use] ************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:82 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_master_security_group_ids": [ "sg-7e73221a" ] }, "changed": false, "generated_timestamp": "2018-04-06 16:56:23.210019" } TASK [aws-up : provision an AWS EC2 instance] ********************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:87 changed: 
[localhost] => { "changed": true, "generated_timestamp": "2018-04-06 16:56:45.465590", "instance_ids": [ "i-0cd57e9e471604c34" ], "instances": [ { "ami_launch_index": "0", "architecture": "x86_64", "block_device_mapping": { "/dev/sda1": { "delete_on_termination": true, "status": "attached", "volume_id": "vol-079cf222217bb2e11" }, "/dev/sdb": { "delete_on_termination": true, "status": "attached", "volume_id": "vol-03213c54be8a41316" } }, "dns_name": "ec2-54-152-227-161.compute-1.amazonaws.com", "ebs_optimized": false, "groups": { "sg-7e73221a": "default" }, "hypervisor": "xen", "id": "i-0cd57e9e471604c34", "image_id": "ami-069c0ca6cc091e8fa", "instance_type": "m4.xlarge", "kernel": null, "key_name": "libra", "launch_time": "2018-04-06T20:56:24.000Z", "placement": "us-east-1d", "private_dns_name": "ip-172-18-1-211.ec2.internal", "private_ip": "172.18.1.211", "public_dns_name": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "ramdisk": null, "region": "us-east-1", "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "running", "state_code": 16, "tags": { "Name": "test_pull_request_openshift_ansible_logging_37_97", "openshift_etcd": "", "openshift_master": "", "openshift_node": "" }, "tenancy": "default", "virtualization_type": "hvm" } ], "tagged_instances": [] } TASK [aws-up : determine the host address] ************************************* task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:113 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_host": "172.18.1.211" }, "changed": false, "generated_timestamp": "2018-04-06 16:56:45.521615" } TASK [aws-up : determine the default user to use for SSH] ********************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:117 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 16:56:45.566217", "skip_reason": "Conditional check failed", "skipped": true } TASK [aws-up : determine the default user to use for SSH] ********************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:122 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_ssh_user": "origin" }, "changed": false, "generated_timestamp": "2018-04-06 16:56:45.627987" } TASK [aws-up : update variables for the host] ********************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:127 changed: [localhost] => { "changed": true, "checksum": "021cf62cf6f4613f18b04ce886a4b4efe520cdf2", "dest": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/host_vars/172.18.1.211.yml", "generated_timestamp": "2018-04-06 16:56:46.080491", "gid": 995, "group": "jenkins", "md5sum": "c66abfadf814ba54c1ae0a11b43194fc", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 687, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1523048205.88-114805800745599/source", "state": "file", "uid": 997 } TASK [aws-up : determine where updated SSH configuration should go] ************ task path: 
/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:144 ok: [localhost] => { "ansible_facts": { "origin_ci_ssh_config_files": [ "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/.ssh_config" ] }, "changed": false, "generated_timestamp": "2018-04-06 16:56:46.155814" } TASK [aws-up : determine where updated SSH configuration should go] ************ task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:149 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 16:56:46.204679", "skip_reason": "Conditional check failed", "skipped": true } TASK [aws-up : ensure the targeted SSH configuration file exists] ************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:154 changed: [localhost] => (item=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/.ssh_config) => { "changed": true, "dest": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/.ssh_config", "generated_timestamp": "2018-04-06 16:56:46.424520", "gid": 995, "group": "jenkins", "item": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/.ssh_config", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 0, "state": "file", "uid": 997 } TASK [aws-up : update the SSH configuration] *********************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:160 changed: [localhost] => (item=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/.ssh_config) => { "changed": true, "generated_timestamp": "2018-04-06 16:56:46.766315", "item": "/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config/origin-ci-tool/inventory/.ssh_config", "msg": "Block inserted" } TASK [aws-up : wait for SSH to be available] *********************************** task path: /var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:178 ok: [localhost] => { "changed": false, "elapsed": 33, "generated_timestamp": "2018-04-06 16:57:20.115024", "path": null, "port": 22, "search_regex": null, "state": "started" } PLAY RECAP ********************************************************************* localhost : ok=28 changed=13 unreachable=0 failed=0 + set +o xtrace ########## FINISHED STAGE: SUCCESS: PROVISION CLOUD RESOURCES [00h 01m 19s] ########## [workspace@2] $ /bin/bash /tmp/jenkins5349572268654952901.sh ########## STARTING STAGE: FORWARD GCS CREDENTIALS TO REMOTE HOST ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ 
VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66
++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config
+ scp -F ./.config/origin-ci-tool/inventory/.ssh_config /var/lib/jenkins/.config/gcloud/gcs-publisher-credentials.json openshiftdevel:/data/credentials.json
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: FORWARD GCS CREDENTIALS TO REMOTE HOST [00h 00m 01s] ##########
[workspace@2] $ /bin/bash /tmp/jenkins2779993466128149036.sh
########## STARTING STAGE: FORWARD PARAMETERS TO THE REMOTE HOST ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66
++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo chmod o+rw /etc/environment
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''JOB_SPEC={"type":"presubmit","job":"test_pull_request_openshift_ansible_logging_37","buildid":"dd7a388b-39dc-11e8-a837-0a58ac100475","refs":{"org":"openshift","repo":"openshift-ansible","base_ref":"release-3.7","base_sha":"e5443134210b0d49d376c183ca3472f66d71b7e9","pulls":[{"number":7844,"author":"mtnbikenc","sha":"8fe25956544e69773f179429374bfb7b527dd1f6"}]}}'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''buildId=dd7a388b-39dc-11e8-a837-0a58ac100475'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''BUILD_ID=dd7a388b-39dc-11e8-a837-0a58ac100475'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''REPO_OWNER=openshift'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''REPO_NAME=openshift-ansible'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''PULL_BASE_REF=release-3.7'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''PULL_BASE_SHA=e5443134210b0d49d376c183ca3472f66d71b7e9'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''PULL_REFS=release-3.7:e5443134210b0d49d376c183ca3472f66d71b7e9,7844:8fe25956544e69773f179429374bfb7b527dd1f6'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''PULL_NUMBER=7844'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''PULL_PULL_SHA=8fe25956544e69773f179429374bfb7b527dd1f6'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''JOB_SPEC={"type":"presubmit","job":"test_pull_request_openshift_ansible_logging_37","buildid":"dd7a388b-39dc-11e8-a837-0a58ac100475","refs":{"org":"openshift","repo":"openshift-ansible","base_ref":"release-3.7","base_sha":"e5443134210b0d49d376c183ca3472f66d71b7e9","pulls":[{"number":7844,"author":"mtnbikenc","sha":"8fe25956544e69773f179429374bfb7b527dd1f6"}]}}'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''BUILD_NUMBER=97'\'' >> /etc/environment'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''CLONEREFS_ARGS='\'' >> /etc/environment'
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: FORWARD PARAMETERS TO THE REMOTE HOST [00h 00m 04s] ##########
[workspace@2] $ /bin/bash /tmp/jenkins3683295167922359495.sh
########## STARTING STAGE: SYNC REPOSITORIES ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66
++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config
++ mktemp
+ script=/tmp/tmp.fSC07UpzEn
+ cat
+ chmod +x /tmp/tmp.fSC07UpzEn
+ scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.fSC07UpzEn openshiftdevel:/tmp/tmp.fSC07UpzEn
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 14400 /tmp/tmp.fSC07UpzEn"'
+ cd /home/origin
++ jq --compact-output .buildid
+ [[ "dd7a388b-39dc-11e8-a837-0a58ac100475" =~ ^\[0-9]\+$ ]]
Using BUILD_NUMBER
+ echo 'Using BUILD_NUMBER'
++ jq --compact-output '.buildid |= "97"'
+ JOB_SPEC='{"type":"presubmit","job":"test_pull_request_openshift_ansible_logging_37","buildid":"97","refs":{"org":"openshift","repo":"openshift-ansible","base_ref":"release-3.7","base_sha":"e5443134210b0d49d376c183ca3472f66d71b7e9","pulls":[{"number":7844,"author":"mtnbikenc","sha":"8fe25956544e69773f179429374bfb7b527dd1f6"}]}}'
+ for image in ''\''registry.svc.ci.openshift.org/ci/clonerefs:latest'\''' ''\''registry.svc.ci.openshift.org/ci/initupload:latest'\'''
+ (( i = 0 ))
+ (( i < 5 ))
+ docker pull registry.svc.ci.openshift.org/ci/clonerefs:latest
Trying to pull repository registry.svc.ci.openshift.org/ci/clonerefs ...
latest: Pulling from registry.svc.ci.openshift.org/ci/clonerefs
6d987f6f4279: Pulling fs layer
4cccebe844ee: Pulling fs layer
91f69e3a333d: Pulling fs layer
310bf9de3328: Pulling fs layer
310bf9de3328: Waiting
4cccebe844ee: Verifying Checksum
4cccebe844ee: Download complete
6d987f6f4279: Download complete
91f69e3a333d: Verifying Checksum
91f69e3a333d: Download complete
6d987f6f4279: Pull complete
4cccebe844ee: Pull complete
91f69e3a333d: Pull complete
error pulling image configuration: Get https://storage.googleapis.com/openshift-ci-infra-origin-ci-registry-bucket/docker/registry/v2/blobs/sha256/34/342bbd01df4dc41f9c247468a407fad0fe4b9e6fe86a12a7e228ba50afc57745/data?Expires=1523049447&GoogleAccessId=api-ci%40openshift-ci-infra.iam.gserviceaccount.com&Signature=Wzk0i%2Fo%2FWEX%2FppjVQWxadx49zRw6A6DIojRyofT3wctQ8%2BJF9oCz2eVplN3zVD%2B4BVYBMzrV9fvi%2FGhlSABf6xzkCG6KlsM9NhWvEP6IKH0G%2FjJibCYas76FtqlOS%2FQ0L69oaCXChdfdD15m4FZmtv87UwCr3Q5%2FRP%2FTKWKoeQpqlLmdOncEa6R9qAc9319oZ0Cjtid%2FDfnB%2FLRqcbJeZbcbLclnxA5uYyCnvOM2cvKQQWvDobqjmwxIZcgRg3df4qH0rKJlCDq8M2Iv4f1YtH80BznwmH06W2gKj72ENtR5dnUGbJDAFhb2rGABID2PfTaDCthptxaB09sTcEdRpg%3D%3D: dial tcp 172.217.13.240:443: i/o timeout
+ (( i++ ))
+ (( i < 5 ))
+ docker pull registry.svc.ci.openshift.org/ci/clonerefs:latest
Trying to pull repository registry.svc.ci.openshift.org/ci/clonerefs ...
latest: Pulling from registry.svc.ci.openshift.org/ci/clonerefs
6d987f6f4279: Pulling fs layer
4cccebe844ee: Pulling fs layer
91f69e3a333d: Pulling fs layer
310bf9de3328: Pulling fs layer
310bf9de3328: Waiting
91f69e3a333d: Waiting
310bf9de3328: Waiting
4cccebe844ee: Verifying Checksum
4cccebe844ee: Download complete
91f69e3a333d: Verifying Checksum
91f69e3a333d: Download complete
310bf9de3328: Verifying Checksum
310bf9de3328: Download complete
6d987f6f4279: Verifying Checksum
6d987f6f4279: Download complete
6d987f6f4279: Pull complete
4cccebe844ee: Pull complete
91f69e3a333d: Pull complete
310bf9de3328: Pull complete
Digest: sha256:4cbcb14dd1a77b8d4f810b84479a0b27781c7c0bdfd20c025efe7ead577f4775
Status: Downloaded newer image for registry.svc.ci.openshift.org/ci/clonerefs:latest
+ break
+ for image in ''\''registry.svc.ci.openshift.org/ci/clonerefs:latest'\''' ''\''registry.svc.ci.openshift.org/ci/initupload:latest'\'''
+ (( i = 0 ))
+ (( i < 5 ))
+ docker pull registry.svc.ci.openshift.org/ci/initupload:latest
Trying to pull repository registry.svc.ci.openshift.org/ci/initupload ...
latest: Pulling from registry.svc.ci.openshift.org/ci/initupload 6d987f6f4279: Already exists 4cccebe844ee: Already exists 23e4017c0ba8: Pulling fs layer 23e4017c0ba8: Verifying Checksum 23e4017c0ba8: Download complete 23e4017c0ba8: Pull complete Digest: sha256:d94fd5317f379ab83aa010ba35fe5f24b60814bd296b2a2e72bb825f8b9951b0 Status: Downloaded newer image for registry.svc.ci.openshift.org/ci/initupload:latest + break + clonerefs_args='--repo=openshift,origin=release-3.7 --repo=openshift,aos-cd-jobs=master --repo=openshift,origin-aggregated-logging=release-3.7 ' + docker run -e 'JOB_SPEC={"type":"presubmit","job":"test_pull_request_openshift_ansible_logging_37","buildid":"97","refs":{"org":"openshift","repo":"openshift-ansible","base_ref":"release-3.7","base_sha":"e5443134210b0d49d376c183ca3472f66d71b7e9","pulls":[{"number":7844,"author":"mtnbikenc","sha":"8fe25956544e69773f179429374bfb7b527dd1f6"}]}}' -v /data:/data:z registry.svc.ci.openshift.org/ci/clonerefs:latest --src-root=/data --log=/data/clone.json --repo=openshift,origin=release-3.7 --repo=openshift,aos-cd-jobs=master --repo=openshift,origin-aggregated-logging=release-3.7 {"component":"clonerefs","level":"info","msg":"Cloning refs","refs":{"org":"openshift","repo":"origin-aggregated-logging","base_ref":"release-3.7"},"time":"2018-04-06T20:59:07Z"} {"component":"clonerefs","level":"info","msg":"Cloning refs","refs":{"org":"openshift","repo":"origin","base_ref":"release-3.7"},"time":"2018-04-06T20:59:07Z"} {"component":"clonerefs","level":"info","msg":"Cloning refs","refs":{"org":"openshift","repo":"aos-cd-jobs","base_ref":"master"},"time":"2018-04-06T20:59:07Z"} {"component":"clonerefs","level":"info","msg":"Cloning refs","refs":{"org":"openshift","repo":"openshift-ansible","base_ref":"release-3.7","base_sha":"e5443134210b0d49d376c183ca3472f66d71b7e9","pulls":[{"number":7844,"author":"mtnbikenc","sha":"8fe25956544e69773f179429374bfb7b527dd1f6"}]},"time":"2018-04-06T20:59:07Z"} {"command":"os.MkdirAll(/data/src/github.com/openshift/origin, 0755)","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"os.MkdirAll(/data/src/github.com/openshift/aos-cd-jobs, 0755)","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"os.MkdirAll(/data/src/github.com/openshift/openshift-ansible, 0755)","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"os.MkdirAll(/data/src/github.com/openshift/origin-aggregated-logging, 0755)","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"git init","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Reinitialized existing shared Git repository in /data/src/github.com/openshift/origin/.git/\n","time":"2018-04-06T20:59:07Z"} {"command":"git init","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Reinitialized existing shared Git repository in /data/src/github.com/openshift/aos-cd-jobs/.git/\n","time":"2018-04-06T20:59:07Z"} {"command":"git config user.name ci-robot","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"git config user.email ci-robot@k8s.io","component":"clonerefs","error":null,"level":"info","msg":"Ran clone 
command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"git init","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Reinitialized existing shared Git repository in /data/src/github.com/openshift/origin-aggregated-logging/.git/\n","time":"2018-04-06T20:59:07Z"} {"command":"git init","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Reinitialized existing shared Git repository in /data/src/github.com/openshift/openshift-ansible/.git/\n","time":"2018-04-06T20:59:07Z"} {"command":"git config user.name ci-robot","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"git config user.email ci-robot@k8s.io","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"git config user.name ci-robot","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"git config user.email ci-robot@k8s.io","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"git config user.name ci-robot","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"git config user.email ci-robot@k8s.io","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:07Z"} {"command":"git fetch https://github.com/openshift/origin-aggregated-logging.git --tags --prune","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"From https://github.com/openshift/origin-aggregated-logging\n * branch HEAD -\u003e FETCH_HEAD\n * [new tag] v3.8.0 -\u003e v3.8.0\n * [new tag] v3.9.0 -\u003e v3.9.0\n","time":"2018-04-06T20:59:08Z"} {"command":"git fetch https://github.com/openshift/origin-aggregated-logging.git release-3.7","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"From https://github.com/openshift/origin-aggregated-logging\n * branch release-3.7 -\u003e FETCH_HEAD\n","time":"2018-04-06T20:59:09Z"} {"command":"git fetch https://github.com/openshift/aos-cd-jobs.git --tags --prune","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"From https://github.com/openshift/aos-cd-jobs\n * branch HEAD -\u003e FETCH_HEAD\n","time":"2018-04-06T20:59:09Z"} {"command":"git fetch https://github.com/openshift/aos-cd-jobs.git master","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"From https://github.com/openshift/aos-cd-jobs\n * branch master -\u003e FETCH_HEAD\n","time":"2018-04-06T20:59:09Z"} {"command":"git fetch https://github.com/openshift/openshift-ansible.git --tags --prune","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"From https://github.com/openshift/openshift-ansible\n * branch HEAD -\u003e FETCH_HEAD\n * [new tag] openshift-ansible-3.10.0-0.10.0 -\u003e openshift-ansible-3.10.0-0.10.0\n * [new tag] openshift-ansible-3.10.0-0.11.0 -\u003e openshift-ansible-3.10.0-0.11.0\n * [new tag] openshift-ansible-3.10.0-0.12.0 -\u003e openshift-ansible-3.10.0-0.12.0\n * [new tag] openshift-ansible-3.10.0-0.13.0 -\u003e openshift-ansible-3.10.0-0.13.0\n * [new tag] openshift-ansible-3.10.0-0.14.0 -\u003e openshift-ansible-3.10.0-0.14.0\n * [new tag] 
openshift-ansible-3.10.0-0.15.0 -\u003e openshift-ansible-3.10.0-0.15.0\n * [new tag] openshift-ansible-3.10.0-0.16.0 -\u003e openshift-ansible-3.10.0-0.16.0\n * [new tag] openshift-ansible-3.10.0-0.6.0 -\u003e openshift-ansible-3.10.0-0.6.0\n * [new tag] openshift-ansible-3.10.0-0.7.0 -\u003e openshift-ansible-3.10.0-0.7.0\n * [new tag] openshift-ansible-3.10.0-0.8.0 -\u003e openshift-ansible-3.10.0-0.8.0\n * [new tag] openshift-ansible-3.10.0-0.9.0 -\u003e openshift-ansible-3.10.0-0.9.0\n * [new tag] openshift-ansible-3.3.143-1 -\u003e openshift-ansible-3.3.143-1\n * [new tag] openshift-ansible-3.4.167-1 -\u003e openshift-ansible-3.4.167-1\n * [new tag] openshift-ansible-3.5.160-1 -\u003e openshift-ansible-3.5.160-1\n * [new tag] openshift-ansible-3.5.161-1 -\u003e openshift-ansible-3.5.161-1\n * [new tag] openshift-ansible-3.5.162-1 -\u003e openshift-ansible-3.5.162-1\n * [new tag] openshift-ansible-3.5.163-1 -\u003e openshift-ansible-3.5.163-1\n * [new tag] openshift-ansible-3.5.164-1 -\u003e openshift-ansible-3.5.164-1\n * [new tag] openshift-ansible-3.6.173.0.105-1 -\u003e openshift-ansible-3.6.173.0.105-1\n * [new tag] openshift-ansible-3.6.173.0.106-1 -\u003e openshift-ansible-3.6.173.0.106-1\n * [new tag] openshift-ansible-3.6.173.0.109-1 -\u003e openshift-ansible-3.6.173.0.109-1\n * [new tag] openshift-ansible-3.6.173.0.110-1 -\u003e openshift-ansible-3.6.173.0.110-1\n * [new tag] openshift-ansible-3.6.173.0.111-1 -\u003e openshift-ansible-3.6.173.0.111-1\n * [new tag] openshift-ansible-3.6.173.0.112-1 -\u003e openshift-ansible-3.6.173.0.112-1\n * [new tag] openshift-ansible-3.7.38-1 -\u003e openshift-ansible-3.7.38-1\n * [new tag] openshift-ansible-3.7.39-1 -\u003e openshift-ansible-3.7.39-1\n * [new tag] openshift-ansible-3.7.40-1 -\u003e openshift-ansible-3.7.40-1\n * [new tag] openshift-ansible-3.7.41-1 -\u003e openshift-ansible-3.7.41-1\n * [new tag] openshift-ansible-3.7.42-1 -\u003e openshift-ansible-3.7.42-1\n * [new tag] openshift-ansible-3.8.34-1 -\u003e openshift-ansible-3.8.34-1\n * [new tag] openshift-ansible-3.8.35-1 -\u003e openshift-ansible-3.8.35-1\n * [new tag] openshift-ansible-3.8.36-1 -\u003e openshift-ansible-3.8.36-1\n * [new tag] openshift-ansible-3.9.10-1 -\u003e openshift-ansible-3.9.10-1\n * [new tag] openshift-ansible-3.9.11-1 -\u003e openshift-ansible-3.9.11-1\n * [new tag] openshift-ansible-3.9.12-1 -\u003e openshift-ansible-3.9.12-1\n * [new tag] openshift-ansible-3.9.13-1 -\u003e openshift-ansible-3.9.13-1\n * [new tag] openshift-ansible-3.9.14-1 -\u003e openshift-ansible-3.9.14-1\n * [new tag] openshift-ansible-3.9.15-1 -\u003e openshift-ansible-3.9.15-1\n * [new tag] openshift-ansible-3.9.16-1 -\u003e openshift-ansible-3.9.16-1\n * [new tag] openshift-ansible-3.9.17-1 -\u003e openshift-ansible-3.9.17-1\n * [new tag] openshift-ansible-3.9.18-1 -\u003e openshift-ansible-3.9.18-1\n * [new tag] openshift-ansible-3.9.19-1 -\u003e openshift-ansible-3.9.19-1\n * [new tag] openshift-ansible-3.9.5-1 -\u003e openshift-ansible-3.9.5-1\n * [new tag] openshift-ansible-3.9.6-1 -\u003e openshift-ansible-3.9.6-1\n * [new tag] openshift-ansible-3.9.7-1 -\u003e openshift-ansible-3.9.7-1\n * [new tag] openshift-ansible-3.9.8-1 -\u003e openshift-ansible-3.9.8-1\n * [new tag] openshift-ansible-3.9.9-1 -\u003e openshift-ansible-3.9.9-1\n","time":"2018-04-06T20:59:10Z"} {"command":"git fetch https://github.com/openshift/openshift-ansible.git release-3.7","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"From 
https://github.com/openshift/openshift-ansible\n * branch release-3.7 -\u003e FETCH_HEAD\n","time":"2018-04-06T20:59:10Z"} {"command":"git checkout FETCH_HEAD","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Note: checking out 'FETCH_HEAD'.\n\nYou are in 'detached HEAD' state. You can look around, make experimental\nchanges and commit them, and you can discard any commits you make in this\nstate without impacting any branches by performing another checkout.\n\nIf you want to create a new branch to retain commits you create, you may\ndo so (now or later) by using -b with the checkout command again. Example:\n\n git checkout -b \u003cnew-branch-name\u003e\n\nHEAD is now at 8f191fc5... Merge pull request #1289 from mwoodson/master\n","time":"2018-04-06T20:59:11Z"} {"command":"git branch --force master FETCH_HEAD","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:11Z"} {"command":"git checkout master","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Switched to branch 'master'\nYour branch is ahead of 'origin/master' by 203 commits.\n (use \"git push\" to publish your local commits)\n","time":"2018-04-06T20:59:11Z"} {"command":"git fetch https://github.com/openshift/origin.git --tags --prune","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"From https://github.com/openshift/origin\n * branch HEAD -\u003e FETCH_HEAD\n * [new tag] v3.7.2 -\u003e v3.7.2\n * [new tag] v3.8.0 -\u003e v3.8.0\n * [new tag] v3.9.0 -\u003e v3.9.0\n","time":"2018-04-06T20:59:14Z"} {"command":"git fetch https://github.com/openshift/origin.git release-3.7","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"From https://github.com/openshift/origin\n * branch release-3.7 -\u003e FETCH_HEAD\n","time":"2018-04-06T20:59:14Z"} {"command":"git checkout FETCH_HEAD","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Note: checking out 'FETCH_HEAD'.\n\nYou are in 'detached HEAD' state. You can look around, make experimental\nchanges and commit them, and you can discard any commits you make in this\nstate without impacting any branches by performing another checkout.\n\nIf you want to create a new branch to retain commits you create, you may\ndo so (now or later) by using -b with the checkout command again. Example:\n\n git checkout -b \u003cnew-branch-name\u003e\n\nHEAD is now at 24d6f31... Merge pull request #1041 from openshift-cherrypick-robot/cherry-pick-1035-to-release-3.7\n","time":"2018-04-06T20:59:15Z"} {"command":"git branch --force release-3.7 FETCH_HEAD","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:15Z"} {"command":"git checkout release-3.7","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Switched to branch 'release-3.7'\n","time":"2018-04-06T20:59:15Z"} {"command":"git checkout e5443134210b0d49d376c183ca3472f66d71b7e9","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Note: checking out 'e5443134210b0d49d376c183ca3472f66d71b7e9'.\n\nYou are in 'detached HEAD' state. 
You can look around, make experimental\nchanges and commit them, and you can discard any commits you make in this\nstate without impacting any branches by performing another checkout.\n\nIf you want to create a new branch to retain commits you create, you may\ndo so (now or later) by using -b with the checkout command again. Example:\n\n git checkout -b \u003cnew-branch-name\u003e\n\nHEAD is now at e54431342... Merge pull request #7609 from nak3/max-time-3.7\n","time":"2018-04-06T20:59:16Z"} {"command":"git branch --force release-3.7 e5443134210b0d49d376c183ca3472f66d71b7e9","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T20:59:16Z"} {"command":"git checkout release-3.7","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Switched to branch 'release-3.7'\n","time":"2018-04-06T20:59:16Z"} {"command":"git fetch https://github.com/openshift/openshift-ansible.git pull/7844/head","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"From https://github.com/openshift/openshift-ansible\n * branch refs/pull/7844/head -\u003e FETCH_HEAD\n","time":"2018-04-06T20:59:17Z"} {"command":"git merge 8fe25956544e69773f179429374bfb7b527dd1f6","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Updating e54431342..8fe259565\nFast-forward\n roles/openshift_node_upgrade/tasks/rpm_upgrade.yml | 2 +-\n 1 file changed, 1 insertion(+), 1 deletion(-)\n","time":"2018-04-06T20:59:17Z"} {"command":"git checkout FETCH_HEAD","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Note: checking out 'FETCH_HEAD'.\n\nYou are in 'detached HEAD' state. You can look around, make experimental\nchanges and commit them, and you can discard any commits you make in this\nstate without impacting any branches by performing another checkout.\n\nIf you want to create a new branch to retain commits you create, you may\ndo so (now or later) by using -b with the checkout command again. Example:\n\n git checkout -b \u003cnew-branch-name\u003e\n\nHEAD is now at 5eda3fafdb... 
Merge pull request #19042 from tnozicka/fix-golang-version-check-3.7\n","time":"2018-04-06T21:03:10Z"} {"command":"git branch --force release-3.7 FETCH_HEAD","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"","time":"2018-04-06T21:03:10Z"} {"command":"git checkout release-3.7","component":"clonerefs","error":null,"level":"info","msg":"Ran clone command","output":"Switched to branch 'release-3.7'\n","time":"2018-04-06T21:03:11Z"} {"component":"clonerefs","level":"info","msg":"Finished cloning refs","time":"2018-04-06T21:03:11Z"} + docker run -e 'JOB_SPEC={"type":"presubmit","job":"test_pull_request_openshift_ansible_logging_37","buildid":"97","refs":{"org":"openshift","repo":"openshift-ansible","base_ref":"release-3.7","base_sha":"e5443134210b0d49d376c183ca3472f66d71b7e9","pulls":[{"number":7844,"author":"mtnbikenc","sha":"8fe25956544e69773f179429374bfb7b527dd1f6"}]}}' -v /data:/data:z registry.svc.ci.openshift.org/ci/initupload:latest --clone-log=/data/clone.json --dry-run=false --gcs-bucket=origin-ci-test --gcs-credentials-file=/data/credentials.json --path-strategy=single --default-org=openshift --default-repo=origin {"component":"clonerefs","dest":"pr-logs/pull/openshift_openshift-ansible/7844/test_pull_request_openshift_ansible_logging_37/latest-build.txt","level":"info","msg":"Queued for upload","time":"2018-04-06T21:03:16Z"} {"component":"clonerefs","dest":"pr-logs/pull/openshift_openshift-ansible/7844/test_pull_request_openshift_ansible_logging_37/97/started.json","level":"info","msg":"Queued for upload","time":"2018-04-06T21:03:16Z"} {"component":"clonerefs","dest":"pr-logs/pull/openshift_openshift-ansible/7844/test_pull_request_openshift_ansible_logging_37/97/clone-log.txt","level":"info","msg":"Queued for upload","time":"2018-04-06T21:03:16Z"} {"component":"clonerefs","dest":"pr-logs/pull/openshift_openshift-ansible/7844/test_pull_request_openshift_ansible_logging_37/97/clone-records.json","level":"info","msg":"Queued for upload","time":"2018-04-06T21:03:16Z"} {"component":"clonerefs","dest":"pr-logs/pull/openshift_openshift-ansible/7844/test_pull_request_openshift_ansible_logging_37/97/job-spec.json","level":"info","msg":"Queued for upload","time":"2018-04-06T21:03:16Z"} {"component":"clonerefs","dest":"pr-logs/directory/test_pull_request_openshift_ansible_logging_37/97.txt","level":"info","msg":"Queued for upload","time":"2018-04-06T21:03:16Z"} {"component":"clonerefs","dest":"pr-logs/directory/test_pull_request_openshift_ansible_logging_37/latest-build.txt","level":"info","msg":"Queued for upload","time":"2018-04-06T21:03:16Z"} {"component":"clonerefs","dest":"pr-logs/pull/openshift_openshift-ansible/7844/test_pull_request_openshift_ansible_logging_37/latest-build.txt","level":"info","msg":"Finished upload","time":"2018-04-06T21:03:17Z"} {"component":"clonerefs","dest":"pr-logs/pull/openshift_openshift-ansible/7844/test_pull_request_openshift_ansible_logging_37/97/job-spec.json","level":"info","msg":"Finished upload","time":"2018-04-06T21:03:17Z"} {"component":"clonerefs","dest":"pr-logs/directory/test_pull_request_openshift_ansible_logging_37/97.txt","level":"info","msg":"Finished upload","time":"2018-04-06T21:03:17Z"} {"component":"clonerefs","dest":"pr-logs/pull/openshift_openshift-ansible/7844/test_pull_request_openshift_ansible_logging_37/97/started.json","level":"info","msg":"Finished upload","time":"2018-04-06T21:03:17Z"} 
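The clonerefs records above pin openshift-ansible to the base SHA e5443134210b0d49d376c183ca3472f66d71b7e9 on release-3.7 and then fast-forward it to the head of PR 7844 (8fe25956544e69773f179429374bfb7b527dd1f6), matching the JOB_SPEC passed to initupload above. A minimal sketch of the equivalent sequence on a local clone (in the job this runs in the shared repository under /data/src/github.com/openshift/openshift-ansible; the fresh-clone form here is only illustrative):

    git clone https://github.com/openshift/openshift-ansible.git && cd openshift-ansible
    git fetch https://github.com/openshift/openshift-ansible.git release-3.7
    # pin the branch to the PR's base SHA from the JOB_SPEC
    git checkout e5443134210b0d49d376c183ca3472f66d71b7e9
    git branch --force release-3.7 e5443134210b0d49d376c183ca3472f66d71b7e9
    git checkout release-3.7
    # bring in the PR head and merge it (a fast-forward in this run)
    git fetch https://github.com/openshift/openshift-ansible.git pull/7844/head
    git merge 8fe25956544e69773f179429374bfb7b527dd1f6

The other repositories (origin-aggregated-logging, aos-cd-jobs, origin) are prepared with the same fetch/checkout/branch pattern, without a PR merge.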
{"component":"clonerefs","dest":"pr-logs/pull/openshift_openshift-ansible/7844/test_pull_request_openshift_ansible_logging_37/97/clone-log.txt","level":"info","msg":"Finished upload","time":"2018-04-06T21:03:17Z"} {"component":"clonerefs","dest":"pr-logs/pull/openshift_openshift-ansible/7844/test_pull_request_openshift_ansible_logging_37/97/clone-records.json","level":"info","msg":"Finished upload","time":"2018-04-06T21:03:17Z"} {"component":"clonerefs","dest":"pr-logs/directory/test_pull_request_openshift_ansible_logging_37/latest-build.txt","level":"info","msg":"Finished upload","time":"2018-04-06T21:03:17Z"} {"component":"clonerefs","level":"info","msg":"Finished upload to GCS","time":"2018-04-06T21:03:17Z"} + sudo chmod -R a+rwX /data + sudo chown -R origin:origin-git /data + set +o xtrace ########## FINISHED STAGE: SUCCESS: SYNC REPOSITORIES [00h 05m 56s] ########## [workspace@2] $ /bin/bash /tmp/jenkins4120222523886552581.sh ########## STARTING STAGE: RECORD EXTRA EVARS ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.2bCCCTxFnH + cat + chmod +x /tmp/tmp.2bCCCTxFnH + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.2bCCCTxFnH openshiftdevel:/tmp/tmp.2bCCCTxFnH + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 14400 /tmp/tmp.2bCCCTxFnH"' + cd /home/origin + sudo chmod o+rw /etc/environment + echo 'EXTRA_EVARS="-e skip_sanity_checks=true -e openshift_disable_check=* -e openshift_install_examples=false"' + set +o xtrace ########## FINISHED STAGE: SUCCESS: RECORD EXTRA EVARS [00h 00m 01s] ########## [workspace@2] $ /bin/bash /tmp/jenkins7001147691616628861.sh ########## STARTING STAGE: FORWARD PARAMETERS TO THE REMOTE HOST ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo chmod o+rw /etc/environment + ssh -F 
./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''JOB_NAME=test_pull_request_openshift_ansible_logging_37'\'' >> /etc/environment' + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''BUILD_NUMBER=97'\'' >> /etc/environment' + set +o xtrace ########## FINISHED STAGE: SUCCESS: FORWARD PARAMETERS TO THE REMOTE HOST [00h 00m 00s] ########## [workspace@2] $ /bin/bash /tmp/jenkins4278256963959391730.sh ########## STARTING STAGE: USE A RAMDISK FOR ETCD ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.nGvsHePU9y + cat + chmod +x /tmp/tmp.nGvsHePU9y + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.nGvsHePU9y openshiftdevel:/tmp/tmp.nGvsHePU9y + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 300 /tmp/tmp.nGvsHePU9y"' + cd /home/origin + sudo su root + set +o xtrace ########## FINISHED STAGE: SUCCESS: USE A RAMDISK FOR ETCD [00h 00m 01s] ########## [workspace@2] $ /bin/bash /tmp/jenkins813332922381475882.sh ########## STARTING STAGE: TURN OFF UNNECESSARY CENTOS PAAS SIG REPOS ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.5wtoznMa8V + cat + chmod +x /tmp/tmp.5wtoznMa8V + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.5wtoznMa8V openshiftdevel:/tmp/tmp.5wtoznMa8V + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 300 /tmp/tmp.5wtoznMa8V"' + cd /home/origin + sudo yum-config-manager --disable 'centos-paas-sig-openshift-origin*-rpms' Loaded plugins: amazon-id, rhui-lb ================ repo: centos-paas-sig-openshift-origin13-rpms ================= [centos-paas-sig-openshift-origin13-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://buildlogs.centos.org/centos/7/paas/x86_64/openshift-origin13/ cache = 0 
cachedir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin13-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = 0 enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin13-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin13-rpms/gpgdir gpgkey = hdrdir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin13-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = CentOS PaaS SIG Origin 1.3 Repository old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin13-rpms pkgdir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin13-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = centos-paas-sig-openshift-origin13-rpms ui_repoid_vars = releasever, basearch username = ================ repo: centos-paas-sig-openshift-origin14-rpms ================= [centos-paas-sig-openshift-origin14-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://buildlogs.centos.org/centos/7/paas/x86_64/openshift-origin14/ cache = 0 cachedir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin14-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = 0 enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin14-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin14-rpms/gpgdir gpgkey = hdrdir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin14-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = CentOS PaaS SIG Origin 1.4 Repository old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin14-rpms pkgdir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin14-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = centos-paas-sig-openshift-origin14-rpms ui_repoid_vars = releasever, basearch username = ================ repo: centos-paas-sig-openshift-origin15-rpms ================= [centos-paas-sig-openshift-origin15-rpms] async = True bandwidth = 0 base_persistdir = 
/var/lib/yum/repos/x86_64/7Server baseurl = https://buildlogs.centos.org/centos/7/paas/x86_64/openshift-origin15/ cache = 0 cachedir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin15-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = 0 enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin15-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin15-rpms/gpgdir gpgkey = hdrdir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin15-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = CentOS PaaS SIG Origin 1.5 Repository old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin15-rpms pkgdir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin15-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = centos-paas-sig-openshift-origin15-rpms ui_repoid_vars = releasever, basearch username = ================ repo: centos-paas-sig-openshift-origin36-rpms ================= [centos-paas-sig-openshift-origin36-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://buildlogs.centos.org/centos/7/paas/x86_64/openshift-origin36/ cache = 0 cachedir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin36-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = 0 enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin36-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin36-rpms/gpgdir gpgkey = hdrdir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin36-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = CentOS PaaS SIG Origin 3.6 Repository old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin36-rpms pkgdir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin36-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = centos-paas-sig-openshift-origin36-rpms ui_repoid_vars = releasever, basearch username = ================ repo: centos-paas-sig-openshift-origin37-rpms 
================= [centos-paas-sig-openshift-origin37-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://buildlogs.centos.org/centos/7/paas/x86_64/openshift-origin37/ cache = 0 cachedir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin37-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = 0 enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin37-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin37-rpms/gpgdir gpgkey = hdrdir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin37-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = CentOS PaaS SIG Origin 3.7 Repository old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/centos-paas-sig-openshift-origin37-rpms pkgdir = /var/cache/yum/x86_64/7Server/centos-paas-sig-openshift-origin37-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = centos-paas-sig-openshift-origin37-rpms ui_repoid_vars = releasever, basearch username = + [[ test_pull_request_openshift_ansible_logging_37 == *update* ]] + set +o xtrace ########## FINISHED STAGE: SUCCESS: TURN OFF UNNECESSARY CENTOS PAAS SIG REPOS [00h 00m 03s] ########## [workspace@2] $ /bin/bash /tmp/jenkins7555181363407155172.sh ########## STARTING STAGE: ENABLE DOCKER TESTED REPO ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.xbmglCFkpQ + cat + chmod +x /tmp/tmp.xbmglCFkpQ + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.xbmglCFkpQ openshiftdevel:/tmp/tmp.xbmglCFkpQ + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 300 /tmp/tmp.xbmglCFkpQ"' + cd /home/origin + [[ release-3.7 == \m\a\s\t\e\r ]] + [[ release-3.7 == \r\e\l\e\a\s\e\-\3\.\9 ]] + [[ release-3.7 == \r\e\l\e\a\s\e\-\3\.\1\0 ]] + set +o xtrace ########## FINISHED STAGE: SUCCESS: ENABLE DOCKER TESTED REPO [00h 00m 00s] ########## [workspace@2] $ /bin/bash 
/tmp/jenkins2819029301619914715.sh ########## STARTING STAGE: BUILD AN ORIGIN RELEASE ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.Bc420ywj93 + cat + chmod +x /tmp/tmp.Bc420ywj93 + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.Bc420ywj93 openshiftdevel:/tmp/tmp.Bc420ywj93 + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 7200 /tmp/tmp.Bc420ywj93"' + cd /data/src/github.com/openshift/origin ++ git rev-parse --abbrev-ref --symbolic-full-name HEAD + ORIGIN_TARGET_BRANCH=release-3.7 + export OS_BUILD_IMAGE_ARGS= + OS_BUILD_IMAGE_ARGS= + export OS_ONLY_BUILD_PLATFORMS=linux/amd64 + OS_ONLY_BUILD_PLATFORMS=linux/amd64 + export OS_BUILD_ENV_PRESERVE=_output/local + OS_BUILD_ENV_PRESERVE=_output/local + hack/build-base-images.sh [openshift/origin-source] --> Image centos:7 was not found, pulling ... [openshift/origin-source] --> Pulled 1/2 layers, 50% complete [openshift/origin-source] --> FROM centos:7 as 0 [openshift/origin-source] --> COPY *.repo /etc/yum.repos.d/ [openshift/origin-source] --> Committing changes to openshift/origin-source:5eda3fa ... [openshift/origin-source] --> Tagged as openshift/origin-source:latest [openshift/origin-source] --> Done [openshift/origin-base] --> FROM openshift/origin-source as 0 [openshift/origin-base] --> RUN INSTALL_PKGS="bsdtar ceph-common device-mapper device-mapper-persistent-data e2fsprogs ethtool findutils git hostname iptables lsof nmap-ncat socat sysvinit-tools tar tree util-linux wget which xfsprogs" && yum install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all && mkdir -p /var/lib/origin [openshift/origin-base] Loaded plugins: fastestmirror, ovl [openshift/origin-base] http://mirror.lug.udel.edu/pub/centos/7.4.1708/extras/x86_64/repodata/repomd.xml: [Errno 12] Timeout on http://mirror.lug.udel.edu/pub/centos/7.4.1708/extras/x86_64/repodata/repomd.xml: (28, 'Operation too slow. Less than 1000 bytes/sec transferred the last 30 seconds') [openshift/origin-base] Trying other mirror. 
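The yum output surrounding this point is the single RUN layer of the openshift/origin-base image being built by hack/build-base-images.sh. For reference, a condensed sketch of what the stage exports before invoking the script, taken from the xtrace at the start of this stage (the /data/src path is the job's layout; the comments are an informal reading of the exported values, not the script's documentation):

    cd /data/src/github.com/openshift/origin
    export OS_BUILD_IMAGE_ARGS=                  # no extra arguments for the image builds
    export OS_ONLY_BUILD_PLATFORMS=linux/amd64   # restrict the build to linux/amd64
    export OS_BUILD_ENV_PRESERVE=_output/local   # path preserved from the containerized build steps
    hack/build-base-images.sh                    # builds openshift/origin-source, then openshift/origin-base

Per the build log above, origin-source layers the repo definitions onto centos:7 (COPY *.repo /etc/yum.repos.d/), and origin-base installs the INSTALL_PKGS set in a single yum transaction, which is the dependency resolution shown here.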
[openshift/origin-base] Determining fastest mirrors [openshift/origin-base] * base: mirror.ash.fastserv.com [openshift/origin-base] * extras: repos-va.psychz.net [openshift/origin-base] * updates: mirrors.lga7.us.voxel.net [openshift/origin-base] Package 7:device-mapper-1.02.140-8.el7.x86_64 already installed and latest version [openshift/origin-base] Package 1:findutils-4.5.11-5.el7.x86_64 already installed and latest version [openshift/origin-base] Package hostname-3.13-3.el7.x86_64 already installed and latest version [openshift/origin-base] Package 2:tar-1.26-32.el7.x86_64 already installed and latest version [openshift/origin-base] Package util-linux-2.23.2-43.el7_4.2.x86_64 already installed and latest version [openshift/origin-base] Resolving Dependencies [openshift/origin-base] --> Running transaction check [openshift/origin-base] ---> Package bsdtar.x86_64 0:3.1.2-10.el7_2 will be installed [openshift/origin-base] --> Processing Dependency: libarchive = 3.1.2-10.el7_2 for package: bsdtar-3.1.2-10.el7_2.x86_64 [openshift/origin-base] --> Processing Dependency: liblzo2.so.2()(64bit) for package: bsdtar-3.1.2-10.el7_2.x86_64 [openshift/origin-base] --> Processing Dependency: libarchive.so.13()(64bit) for package: bsdtar-3.1.2-10.el7_2.x86_64 [openshift/origin-base] ---> Package ceph-common.x86_64 1:0.94.5-2.el7 will be installed [openshift/origin-base] --> Processing Dependency: python-rbd = 1:0.94.5-2.el7 for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: python-rados = 1:0.94.5-2.el7 for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: librbd1 = 1:0.94.5-2.el7 for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: librados2 = 1:0.94.5-2.el7 for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: redhat-lsb-core for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: python-requests for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: parted for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: initscripts for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: hdparm for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: gdisk for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: cryptsetup for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: librbd.so.1()(64bit) for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: librados.so.2()(64bit) for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libboost_thread-mt.so.1.53.0()(64bit) for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libboost_system-mt.so.1.53.0()(64bit) for package: 1:ceph-common-0.94.5-2.el7.x86_64 [openshift/origin-base] ---> Package device-mapper-persistent-data.x86_64 0:0.7.0-0.1.rc6.el7_4.1 will be installed [openshift/origin-base] --> Processing Dependency: libaio.so.1(LIBAIO_0.4)(64bit) for package: device-mapper-persistent-data-0.7.0-0.1.rc6.el7_4.1.x86_64 [openshift/origin-base] --> Processing Dependency: libaio.so.1(LIBAIO_0.1)(64bit) for package: 
device-mapper-persistent-data-0.7.0-0.1.rc6.el7_4.1.x86_64 [openshift/origin-base] --> Processing Dependency: libaio.so.1()(64bit) for package: device-mapper-persistent-data-0.7.0-0.1.rc6.el7_4.1.x86_64 [openshift/origin-base] ---> Package e2fsprogs.x86_64 0:1.42.9-10.el7 will be installed [openshift/origin-base] --> Processing Dependency: libss = 1.42.9-10.el7 for package: e2fsprogs-1.42.9-10.el7.x86_64 [openshift/origin-base] --> Processing Dependency: e2fsprogs-libs(x86-64) = 1.42.9-10.el7 for package: e2fsprogs-1.42.9-10.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libss.so.2()(64bit) for package: e2fsprogs-1.42.9-10.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libext2fs.so.2()(64bit) for package: e2fsprogs-1.42.9-10.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libe2p.so.2()(64bit) for package: e2fsprogs-1.42.9-10.el7.x86_64 [openshift/origin-base] ---> Package ethtool.x86_64 2:4.8-1.el7 will be installed [openshift/origin-base] ---> Package git.x86_64 0:1.8.3.1-12.el7_4 will be installed [openshift/origin-base] --> Processing Dependency: perl-Git = 1.8.3.1-12.el7_4 for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl >= 5.008 for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: rsync for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(warnings) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(vars) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(strict) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(lib) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Term::ReadKey) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Git) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Getopt::Long) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(File::stat) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(File::Temp) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(File::Spec) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(File::Path) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(File::Find) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(File::Copy) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(File::Basename) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Exporter) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Error) for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: openssh-clients for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: less for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/perl for package: git-1.8.3.1-12.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: libgnome-keyring.so.0()(64bit) for package: git-1.8.3.1-12.el7_4.x86_64 
[openshift/origin-base] ---> Package iptables.x86_64 0:1.4.21-18.3.el7_4 will be installed [openshift/origin-base] --> Processing Dependency: libnfnetlink.so.0()(64bit) for package: iptables-1.4.21-18.3.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: libnetfilter_conntrack.so.3()(64bit) for package: iptables-1.4.21-18.3.el7_4.x86_64 [openshift/origin-base] ---> Package lsof.x86_64 0:4.87-4.el7 will be installed [openshift/origin-base] ---> Package nmap-ncat.x86_64 2:6.40-7.el7 will be installed [openshift/origin-base] --> Processing Dependency: libpcap.so.1()(64bit) for package: 2:nmap-ncat-6.40-7.el7.x86_64 [openshift/origin-base] ---> Package socat.x86_64 0:1.7.3.2-2.el7 will be installed [openshift/origin-base] --> Processing Dependency: libwrap.so.0()(64bit) for package: socat-1.7.3.2-2.el7.x86_64 [openshift/origin-base] ---> Package sysvinit-tools.x86_64 0:2.88-14.dsf.el7 will be installed [openshift/origin-base] ---> Package tree.x86_64 0:1.6.0-10.el7 will be installed [openshift/origin-base] ---> Package wget.x86_64 0:1.14-15.el7_4.1 will be installed [openshift/origin-base] ---> Package which.x86_64 0:2.20-7.el7 will be installed [openshift/origin-base] ---> Package xfsprogs.x86_64 0:4.5.0-12.el7 will be installed [openshift/origin-base] --> Running transaction check [openshift/origin-base] ---> Package boost-system.x86_64 0:1.53.0-27.el7 will be installed [openshift/origin-base] ---> Package boost-thread.x86_64 0:1.53.0-27.el7 will be installed [openshift/origin-base] ---> Package cryptsetup.x86_64 0:1.7.4-3.el7_4.1 will be installed [openshift/origin-base] ---> Package e2fsprogs-libs.x86_64 0:1.42.9-10.el7 will be installed [openshift/origin-base] ---> Package gdisk.x86_64 0:0.8.6-5.el7 will be installed [openshift/origin-base] --> Processing Dependency: libicuuc.so.50()(64bit) for package: gdisk-0.8.6-5.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libicuio.so.50()(64bit) for package: gdisk-0.8.6-5.el7.x86_64 [openshift/origin-base] ---> Package hdparm.x86_64 0:9.43-5.el7 will be installed [openshift/origin-base] ---> Package initscripts.x86_64 0:9.49.39-1.el7_4.1 will be installed [openshift/origin-base] --> Processing Dependency: iproute for package: initscripts-9.49.39-1.el7_4.1.x86_64 [openshift/origin-base] ---> Package less.x86_64 0:458-9.el7 will be installed [openshift/origin-base] --> Processing Dependency: groff-base for package: less-458-9.el7.x86_64 [openshift/origin-base] ---> Package libaio.x86_64 0:0.3.109-13.el7 will be installed [openshift/origin-base] ---> Package libarchive.x86_64 0:3.1.2-10.el7_2 will be installed [openshift/origin-base] ---> Package libgnome-keyring.x86_64 0:3.12.0-1.el7 will be installed [openshift/origin-base] ---> Package libnetfilter_conntrack.x86_64 0:1.0.6-1.el7_3 will be installed [openshift/origin-base] --> Processing Dependency: libmnl.so.0(LIBMNL_1.1)(64bit) for package: libnetfilter_conntrack-1.0.6-1.el7_3.x86_64 [openshift/origin-base] --> Processing Dependency: libmnl.so.0(LIBMNL_1.0)(64bit) for package: libnetfilter_conntrack-1.0.6-1.el7_3.x86_64 [openshift/origin-base] --> Processing Dependency: libmnl.so.0()(64bit) for package: libnetfilter_conntrack-1.0.6-1.el7_3.x86_64 [openshift/origin-base] ---> Package libnfnetlink.x86_64 0:1.0.1-4.el7 will be installed [openshift/origin-base] ---> Package libpcap.x86_64 14:1.5.3-9.el7 will be installed [openshift/origin-base] ---> Package librados2.x86_64 1:0.94.5-2.el7 will be installed [openshift/origin-base] ---> Package librbd1.x86_64 1:0.94.5-2.el7 
will be installed [openshift/origin-base] ---> Package libss.x86_64 0:1.42.9-10.el7 will be installed [openshift/origin-base] ---> Package lzo.x86_64 0:2.06-8.el7 will be installed [openshift/origin-base] ---> Package openssh-clients.x86_64 0:7.4p1-13.el7_4 will be installed [openshift/origin-base] --> Processing Dependency: openssh = 7.4p1-13.el7_4 for package: openssh-clients-7.4p1-13.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: fipscheck-lib(x86-64) >= 1.3.0 for package: openssh-clients-7.4p1-13.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: libfipscheck.so.1()(64bit) for package: openssh-clients-7.4p1-13.el7_4.x86_64 [openshift/origin-base] --> Processing Dependency: libedit.so.0()(64bit) for package: openssh-clients-7.4p1-13.el7_4.x86_64 [openshift/origin-base] ---> Package parted.x86_64 0:3.1-28.el7 will be installed [openshift/origin-base] ---> Package perl.x86_64 4:5.16.3-292.el7 will be installed [openshift/origin-base] --> Processing Dependency: perl-libs = 4:5.16.3-292.el7 for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Socket) >= 1.3 for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Scalar::Util) >= 1.10 for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl-macros for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl-libs for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(threads::shared) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(threads) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(constant) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Time::Local) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Time::HiRes) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Storable) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Socket) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Scalar::Util) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Pod::Simple::XHTML) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Pod::Simple::Search) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Filter::Util::Call) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: perl(Carp) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libperl.so()(64bit) for package: 4:perl-5.16.3-292.el7.x86_64 [openshift/origin-base] ---> Package perl-Error.noarch 1:0.17020-2.el7 will be installed [openshift/origin-base] ---> Package perl-Exporter.noarch 0:5.68-3.el7 will be installed [openshift/origin-base] ---> Package perl-File-Path.noarch 0:2.09-2.el7 will be installed [openshift/origin-base] ---> Package perl-File-Temp.noarch 0:0.23.01-3.el7 will be installed [openshift/origin-base] ---> Package perl-Getopt-Long.noarch 0:2.40-2.el7 will be installed [openshift/origin-base] --> Processing Dependency: perl(Pod::Usage) >= 1.14 for package: perl-Getopt-Long-2.40-2.el7.noarch 
[openshift/origin-base] --> Processing Dependency: perl(Text::ParseWords) for package: perl-Getopt-Long-2.40-2.el7.noarch [openshift/origin-base] ---> Package perl-Git.noarch 0:1.8.3.1-12.el7_4 will be installed [openshift/origin-base] ---> Package perl-PathTools.x86_64 0:3.40-5.el7 will be installed [openshift/origin-base] ---> Package perl-TermReadKey.x86_64 0:2.30-20.el7 will be installed [openshift/origin-base] ---> Package python-rados.x86_64 1:0.94.5-2.el7 will be installed [openshift/origin-base] ---> Package python-rbd.x86_64 1:0.94.5-2.el7 will be installed [openshift/origin-base] ---> Package python-requests.noarch 0:2.6.0-1.el7_1 will be installed [openshift/origin-base] --> Processing Dependency: python-urllib3 >= 1.10.2-1 for package: python-requests-2.6.0-1.el7_1.noarch [openshift/origin-base] ---> Package redhat-lsb-core.x86_64 0:4.1-27.el7.centos.1 will be installed [openshift/origin-base] --> Processing Dependency: redhat-lsb-submod-security(x86-64) = 4.1-27.el7.centos.1 for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: spax for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/sbin/sendmail for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/sbin/fuser for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/time for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/patch for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/msgfmt for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/man for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/make for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/m4 for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/lpr for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/lp for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/killall for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/gettext for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/file for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/crontab for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/bc for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/batch for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /usr/bin/at for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /bin/mailx for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] --> Processing Dependency: /bin/ed for package: redhat-lsb-core-4.1-27.el7.centos.1.x86_64 [openshift/origin-base] ---> Package rsync.x86_64 0:3.0.9-18.el7 will be installed 
[openshift/origin-base] ---> Package tcp_wrappers-libs.x86_64 0:7.6-77.el7 will be installed [openshift/origin-base] --> Running transaction check [openshift/origin-base] ---> Package at.x86_64 0:3.1.13-22.el7_4.2 will be installed [openshift/origin-base] ---> Package bc.x86_64 0:1.06.95-13.el7 will be installed [openshift/origin-base] ---> Package cronie.x86_64 0:1.4.11-17.el7 will be installed [openshift/origin-base] --> Processing Dependency: dailyjobs for package: cronie-1.4.11-17.el7.x86_64 [openshift/origin-base] ---> Package cups-client.x86_64 1:1.6.3-29.el7 will be installed [openshift/origin-base] --> Processing Dependency: cups-libs(x86-64) = 1:1.6.3-29.el7 for package: 1:cups-client-1.6.3-29.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libcups.so.2()(64bit) for package: 1:cups-client-1.6.3-29.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libavahi-common.so.3()(64bit) for package: 1:cups-client-1.6.3-29.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libavahi-client.so.3()(64bit) for package: 1:cups-client-1.6.3-29.el7.x86_64 [openshift/origin-base] ---> Package ed.x86_64 0:1.9-4.el7 will be installed [openshift/origin-base] ---> Package file.x86_64 0:5.11-33.el7 will be installed [openshift/origin-base] ---> Package fipscheck-lib.x86_64 0:1.4.1-6.el7 will be installed [openshift/origin-base] --> Processing Dependency: /usr/bin/fipscheck for package: fipscheck-lib-1.4.1-6.el7.x86_64 [openshift/origin-base] ---> Package gettext.x86_64 0:0.19.8.1-2.el7 will be installed [openshift/origin-base] --> Processing Dependency: gettext-libs(x86-64) = 0.19.8.1-2.el7 for package: gettext-0.19.8.1-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libgomp.so.1(GOMP_1.0)(64bit) for package: gettext-0.19.8.1-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libunistring.so.0()(64bit) for package: gettext-0.19.8.1-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libgomp.so.1()(64bit) for package: gettext-0.19.8.1-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libgettextsrc-0.19.8.1.so()(64bit) for package: gettext-0.19.8.1-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libgettextlib-0.19.8.1.so()(64bit) for package: gettext-0.19.8.1-2.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libcroco-0.6.so.3()(64bit) for package: gettext-0.19.8.1-2.el7.x86_64 [openshift/origin-base] ---> Package groff-base.x86_64 0:1.22.2-8.el7 will be installed [openshift/origin-base] ---> Package iproute.x86_64 0:3.10.0-87.el7 will be installed [openshift/origin-base] ---> Package libedit.x86_64 0:3.0-12.20121213cvs.el7 will be installed [openshift/origin-base] ---> Package libicu.x86_64 0:50.1.2-15.el7 will be installed [openshift/origin-base] ---> Package libmnl.x86_64 0:1.0.3-7.el7 will be installed [openshift/origin-base] ---> Package m4.x86_64 0:1.4.16-10.el7 will be installed [openshift/origin-base] ---> Package mailx.x86_64 0:12.5-16.el7 will be installed [openshift/origin-base] ---> Package make.x86_64 1:3.82-23.el7 will be installed [openshift/origin-base] ---> Package man-db.x86_64 0:2.6.3-9.el7 will be installed [openshift/origin-base] --> Processing Dependency: libpipeline.so.1()(64bit) for package: man-db-2.6.3-9.el7.x86_64 [openshift/origin-base] ---> Package openssh.x86_64 0:7.4p1-13.el7_4 will be installed [openshift/origin-base] ---> Package patch.x86_64 0:2.7.1-8.el7 will be installed [openshift/origin-base] ---> Package perl-Carp.noarch 0:1.26-244.el7 will be 
installed [openshift/origin-base] ---> Package perl-Filter.x86_64 0:1.49-3.el7 will be installed [openshift/origin-base] ---> Package perl-Pod-Simple.noarch 1:3.28-4.el7 will be installed [openshift/origin-base] --> Processing Dependency: perl(Pod::Escapes) >= 1.04 for package: 1:perl-Pod-Simple-3.28-4.el7.noarch [openshift/origin-base] --> Processing Dependency: perl(Encode) for package: 1:perl-Pod-Simple-3.28-4.el7.noarch [openshift/origin-base] ---> Package perl-Pod-Usage.noarch 0:1.63-3.el7 will be installed [openshift/origin-base] --> Processing Dependency: perl(Pod::Text) >= 3.15 for package: perl-Pod-Usage-1.63-3.el7.noarch [openshift/origin-base] --> Processing Dependency: perl-Pod-Perldoc for package: perl-Pod-Usage-1.63-3.el7.noarch [openshift/origin-base] ---> Package perl-Scalar-List-Utils.x86_64 0:1.27-248.el7 will be installed [openshift/origin-base] ---> Package perl-Socket.x86_64 0:2.010-4.el7 will be installed [openshift/origin-base] ---> Package perl-Storable.x86_64 0:2.45-3.el7 will be installed [openshift/origin-base] ---> Package perl-Text-ParseWords.noarch 0:3.29-4.el7 will be installed [openshift/origin-base] ---> Package perl-Time-HiRes.x86_64 4:1.9725-3.el7 will be installed [openshift/origin-base] ---> Package perl-Time-Local.noarch 0:1.2300-2.el7 will be installed [openshift/origin-base] ---> Package perl-constant.noarch 0:1.27-2.el7 will be installed [openshift/origin-base] ---> Package perl-libs.x86_64 4:5.16.3-292.el7 will be installed [openshift/origin-base] ---> Package perl-macros.x86_64 4:5.16.3-292.el7 will be installed [openshift/origin-base] ---> Package perl-threads.x86_64 0:1.87-4.el7 will be installed [openshift/origin-base] ---> Package perl-threads-shared.x86_64 0:1.43-6.el7 will be installed [openshift/origin-base] ---> Package postfix.x86_64 2:2.10.1-6.el7 will be installed [openshift/origin-base] --> Processing Dependency: systemd-sysv for package: 2:postfix-2.10.1-6.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libmysqlclient.so.18(libmysqlclient_18)(64bit) for package: 2:postfix-2.10.1-6.el7.x86_64 [openshift/origin-base] --> Processing Dependency: libmysqlclient.so.18()(64bit) for package: 2:postfix-2.10.1-6.el7.x86_64 [openshift/origin-base] ---> Package psmisc.x86_64 0:22.20-15.el7 will be installed [openshift/origin-base] ---> Package python-urllib3.noarch 0:1.10.2-3.el7 will be installed [openshift/origin-base] --> Processing Dependency: python-six for package: python-urllib3-1.10.2-3.el7.noarch [openshift/origin-base] --> Processing Dependency: python-backports-ssl_match_hostname for package: python-urllib3-1.10.2-3.el7.noarch [openshift/origin-base] ---> Package redhat-lsb-submod-security.x86_64 0:4.1-27.el7.centos.1 will be installed [openshift/origin-base] ---> Package spax.x86_64 0:1.5.2-13.el7 will be installed [openshift/origin-base] ---> Package time.x86_64 0:1.7-45.el7 will be installed [openshift/origin-base] --> Running transaction check [openshift/origin-base] ---> Package avahi-libs.x86_64 0:0.6.31-17.el7 will be installed [openshift/origin-base] ---> Package cronie-anacron.x86_64 0:1.4.11-17.el7 will be installed [openshift/origin-base] --> Processing Dependency: crontabs for package: cronie-anacron-1.4.11-17.el7.x86_64 [openshift/origin-base] ---> Package cups-libs.x86_64 1:1.6.3-29.el7 will be installed [openshift/origin-base] ---> Package fipscheck.x86_64 0:1.4.1-6.el7 will be installed [openshift/origin-base] ---> Package gettext-libs.x86_64 0:0.19.8.1-2.el7 will be installed [openshift/origin-base] ---> 
Package libcroco.x86_64 0:0.6.11-1.el7 will be installed [openshift/origin-base] ---> Package libgomp.x86_64 0:4.8.5-16.el7_4.2 will be installed [openshift/origin-base] ---> Package libpipeline.x86_64 0:1.2.3-3.el7 will be installed [openshift/origin-base] ---> Package libunistring.x86_64 0:0.9.3-9.el7 will be installed [openshift/origin-base] ---> Package mariadb-libs.x86_64 1:5.5.56-2.el7 will be installed [openshift/origin-base] ---> Package perl-Encode.x86_64 0:2.51-7.el7 will be installed [openshift/origin-base] ---> Package perl-Pod-Escapes.noarch 1:1.04-292.el7 will be installed [openshift/origin-base] ---> Package perl-Pod-Perldoc.noarch 0:3.20-4.el7 will be installed [openshift/origin-base] --> Processing Dependency: perl(parent) for package: perl-Pod-Perldoc-3.20-4.el7.noarch [openshift/origin-base] --> Processing Dependency: perl(HTTP::Tiny) for package: perl-Pod-Perldoc-3.20-4.el7.noarch [openshift/origin-base] ---> Package perl-podlators.noarch 0:2.5.1-3.el7 will be installed [openshift/origin-base] ---> Package python-backports-ssl_match_hostname.noarch 0:3.4.0.2-4.el7 will be installed [openshift/origin-base] --> Processing Dependency: python-backports for package: python-backports-ssl_match_hostname-3.4.0.2-4.el7.noarch [openshift/origin-base] ---> Package python-six.noarch 0:1.9.0-2.el7 will be installed [openshift/origin-base] ---> Package systemd-sysv.x86_64 0:219-42.el7_4.10 will be installed [openshift/origin-base] --> Processing Dependency: systemd = 219-42.el7_4.10 for package: systemd-sysv-219-42.el7_4.10.x86_64 [openshift/origin-base] --> Running transaction check [openshift/origin-base] ---> Package crontabs.noarch 0:1.11-6.20121102git.el7 will be installed [openshift/origin-base] ---> Package perl-HTTP-Tiny.noarch 0:0.033-3.el7 will be installed [openshift/origin-base] ---> Package perl-parent.noarch 1:0.225-244.el7 will be installed [openshift/origin-base] ---> Package python-backports.x86_64 0:1.0-8.el7 will be installed [openshift/origin-base] ---> Package systemd.x86_64 0:219-42.el7_4.7 will be updated [openshift/origin-base] ---> Package systemd.x86_64 0:219-42.el7_4.10 will be an update [openshift/origin-base] --> Processing Dependency: systemd-libs = 219-42.el7_4.10 for package: systemd-219-42.el7_4.10.x86_64 [openshift/origin-base] --> Running transaction check [openshift/origin-base] ---> Package systemd-libs.x86_64 0:219-42.el7_4.7 will be updated [openshift/origin-base] ---> Package systemd-libs.x86_64 0:219-42.el7_4.10 will be an update [openshift/origin-base] --> Finished Dependency Resolution [openshift/origin-base] Dependencies Resolved [openshift/origin-base] ================================================================================ [openshift/origin-base] Package Arch Version Repository [openshift/origin-base] Size [openshift/origin-base] ================================================================================ [openshift/origin-base] Installing: [openshift/origin-base] bsdtar x86_64 3.1.2-10.el7_2 base 56 k [openshift/origin-base] ceph-common x86_64 1:0.94.5-2.el7 base 6.2 M [openshift/origin-base] device-mapper-persistent-data x86_64 0.7.0-0.1.rc6.el7_4.1 updates 400 k [openshift/origin-base] e2fsprogs x86_64 1.42.9-10.el7 base 698 k [openshift/origin-base] ethtool x86_64 2:4.8-1.el7 base 123 k [openshift/origin-base] git x86_64 1.8.3.1-12.el7_4 updates 4.4 M [openshift/origin-base] iptables x86_64 1.4.21-18.3.el7_4 updates 428 k [openshift/origin-base] lsof x86_64 4.87-4.el7 base 331 k [openshift/origin-base] nmap-ncat x86_64 
2:6.40-7.el7 base 201 k [openshift/origin-base] socat x86_64 1.7.3.2-2.el7 base 290 k [openshift/origin-base] sysvinit-tools x86_64 2.88-14.dsf.el7 base 63 k [openshift/origin-base] tree x86_64 1.6.0-10.el7 base 46 k [openshift/origin-base] wget x86_64 1.14-15.el7_4.1 updates 547 k [openshift/origin-base] which x86_64 2.20-7.el7 base 41 k [openshift/origin-base] xfsprogs x86_64 4.5.0-12.el7 base 895 k [openshift/origin-base] Installing for dependencies: [openshift/origin-base] at x86_64 3.1.13-22.el7_4.2 updates 51 k [openshift/origin-base] avahi-libs x86_64 0.6.31-17.el7 base 61 k [openshift/origin-base] bc x86_64 1.06.95-13.el7 base 115 k [openshift/origin-base] boost-system x86_64 1.53.0-27.el7 base 40 k [openshift/origin-base] boost-thread x86_64 1.53.0-27.el7 base 57 k [openshift/origin-base] cronie x86_64 1.4.11-17.el7 base 91 k [openshift/origin-base] cronie-anacron x86_64 1.4.11-17.el7 base 35 k [openshift/origin-base] crontabs noarch 1.11-6.20121102git.el7 base 13 k [openshift/origin-base] cryptsetup x86_64 1.7.4-3.el7_4.1 updates 128 k [openshift/origin-base] cups-client x86_64 1:1.6.3-29.el7 base 150 k [openshift/origin-base] cups-libs x86_64 1:1.6.3-29.el7 base 356 k [openshift/origin-base] e2fsprogs-libs x86_64 1.42.9-10.el7 base 166 k [openshift/origin-base] ed x86_64 1.9-4.el7 base 72 k [openshift/origin-base] file x86_64 5.11-33.el7 base 57 k [openshift/origin-base] fipscheck x86_64 1.4.1-6.el7 base 21 k [openshift/origin-base] fipscheck-lib x86_64 1.4.1-6.el7 base 11 k [openshift/origin-base] gdisk x86_64 0.8.6-5.el7 base 187 k [openshift/origin-base] gettext x86_64 0.19.8.1-2.el7 base 1.0 M [openshift/origin-base] gettext-libs x86_64 0.19.8.1-2.el7 base 501 k [openshift/origin-base] groff-base x86_64 1.22.2-8.el7 base 942 k [openshift/origin-base] hdparm x86_64 9.43-5.el7 base 83 k [openshift/origin-base] initscripts x86_64 9.49.39-1.el7_4.1 updates 435 k [openshift/origin-base] iproute x86_64 3.10.0-87.el7 base 651 k [openshift/origin-base] less x86_64 458-9.el7 base 120 k [openshift/origin-base] libaio x86_64 0.3.109-13.el7 base 24 k [openshift/origin-base] libarchive x86_64 3.1.2-10.el7_2 base 318 k [openshift/origin-base] libcroco x86_64 0.6.11-1.el7 base 105 k [openshift/origin-base] libedit x86_64 3.0-12.20121213cvs.el7 base 92 k [openshift/origin-base] libgnome-keyring x86_64 3.12.0-1.el7 base 109 k [openshift/origin-base] libgomp x86_64 4.8.5-16.el7_4.2 updates 154 k [openshift/origin-base] libicu x86_64 50.1.2-15.el7 base 6.9 M [openshift/origin-base] libmnl x86_64 1.0.3-7.el7 base 23 k [openshift/origin-base] libnetfilter_conntrack x86_64 1.0.6-1.el7_3 base 55 k [openshift/origin-base] libnfnetlink x86_64 1.0.1-4.el7 base 26 k [openshift/origin-base] libpcap x86_64 14:1.5.3-9.el7 base 138 k [openshift/origin-base] libpipeline x86_64 1.2.3-3.el7 base 53 k [openshift/origin-base] librados2 x86_64 1:0.94.5-2.el7 base 1.7 M [openshift/origin-base] librbd1 x86_64 1:0.94.5-2.el7 base 1.8 M [openshift/origin-base] libss x86_64 1.42.9-10.el7 base 45 k [openshift/origin-base] libunistring x86_64 0.9.3-9.el7 base 293 k [openshift/origin-base] lzo x86_64 2.06-8.el7 base 59 k [openshift/origin-base] m4 x86_64 1.4.16-10.el7 base 256 k [openshift/origin-base] mailx x86_64 12.5-16.el7 base 244 k [openshift/origin-base] make x86_64 1:3.82-23.el7 base 420 k [openshift/origin-base] man-db x86_64 2.6.3-9.el7 base 559 k [openshift/origin-base] mariadb-libs x86_64 1:5.5.56-2.el7 base 757 k [openshift/origin-base] openssh x86_64 7.4p1-13.el7_4 updates 509 k [openshift/origin-base] 
openssh-clients x86_64 7.4p1-13.el7_4 updates 654 k [openshift/origin-base] parted x86_64 3.1-28.el7 base 607 k [openshift/origin-base] patch x86_64 2.7.1-8.el7 base 110 k [openshift/origin-base] perl x86_64 4:5.16.3-292.el7 base 8.0 M [openshift/origin-base] perl-Carp noarch 1.26-244.el7 base 19 k [openshift/origin-base] perl-Encode x86_64 2.51-7.el7 base 1.5 M [openshift/origin-base] perl-Error noarch 1:0.17020-2.el7 base 32 k [openshift/origin-base] perl-Exporter noarch 5.68-3.el7 base 28 k [openshift/origin-base] perl-File-Path noarch 2.09-2.el7 base 26 k [openshift/origin-base] perl-File-Temp noarch 0.23.01-3.el7 base 56 k [openshift/origin-base] perl-Filter x86_64 1.49-3.el7 base 76 k [openshift/origin-base] perl-Getopt-Long noarch 2.40-2.el7 base 56 k [openshift/origin-base] perl-Git noarch 1.8.3.1-12.el7_4 updates 53 k [openshift/origin-base] perl-HTTP-Tiny noarch 0.033-3.el7 base 38 k [openshift/origin-base] perl-PathTools x86_64 3.40-5.el7 base 82 k [openshift/origin-base] perl-Pod-Escapes noarch 1:1.04-292.el7 base 51 k [openshift/origin-base] perl-Pod-Perldoc noarch 3.20-4.el7 base 87 k [openshift/origin-base] perl-Pod-Simple noarch 1:3.28-4.el7 base 216 k [openshift/origin-base] perl-Pod-Usage noarch 1.63-3.el7 base 27 k [openshift/origin-base] perl-Scalar-List-Utils x86_64 1.27-248.el7 base 36 k [openshift/origin-base] perl-Socket x86_64 2.010-4.el7 base 49 k [openshift/origin-base] perl-Storable x86_64 2.45-3.el7 base 77 k [openshift/origin-base] perl-TermReadKey x86_64 2.30-20.el7 base 31 k [openshift/origin-base] perl-Text-ParseWords noarch 3.29-4.el7 base 14 k [openshift/origin-base] perl-Time-HiRes x86_64 4:1.9725-3.el7 base 45 k [openshift/origin-base] perl-Time-Local noarch 1.2300-2.el7 base 24 k [openshift/origin-base] perl-constant noarch 1.27-2.el7 base 19 k [openshift/origin-base] perl-libs x86_64 4:5.16.3-292.el7 base 688 k [openshift/origin-base] perl-macros x86_64 4:5.16.3-292.el7 base 43 k [openshift/origin-base] perl-parent noarch 1:0.225-244.el7 base 12 k [openshift/origin-base] perl-podlators noarch 2.5.1-3.el7 base 112 k [openshift/origin-base] perl-threads x86_64 1.87-4.el7 base 49 k [openshift/origin-base] perl-threads-shared x86_64 1.43-6.el7 base 39 k [openshift/origin-base] postfix x86_64 2:2.10.1-6.el7 base 2.4 M [openshift/origin-base] psmisc x86_64 22.20-15.el7 base 141 k [openshift/origin-base] python-backports x86_64 1.0-8.el7 base 5.8 k [openshift/origin-base] python-backports-ssl_match_hostname [openshift/origin-base] noarch 3.4.0.2-4.el7 base 12 k [openshift/origin-base] python-rados x86_64 1:0.94.5-2.el7 base 39 k [openshift/origin-base] python-rbd x86_64 1:0.94.5-2.el7 base 29 k [openshift/origin-base] python-requests noarch 2.6.0-1.el7_1 base 94 k [openshift/origin-base] python-six noarch 1.9.0-2.el7 base 29 k [openshift/origin-base] python-urllib3 noarch 1.10.2-3.el7 base 101 k [openshift/origin-base] redhat-lsb-core x86_64 4.1-27.el7.centos.1 base 38 k [openshift/origin-base] redhat-lsb-submod-security x86_64 4.1-27.el7.centos.1 base 15 k [openshift/origin-base] rsync x86_64 3.0.9-18.el7 base 360 k [openshift/origin-base] spax x86_64 1.5.2-13.el7 base 260 k [openshift/origin-base] systemd-sysv x86_64 219-42.el7_4.10 updates 72 k [openshift/origin-base] tcp_wrappers-libs x86_64 7.6-77.el7 base 66 k [openshift/origin-base] time x86_64 1.7-45.el7 base 30 k [openshift/origin-base] Updating for dependencies: [openshift/origin-base] systemd x86_64 219-42.el7_4.10 updates 5.2 M [openshift/origin-base] systemd-libs x86_64 219-42.el7_4.10 updates 
378 k [openshift/origin-base] Transaction Summary [openshift/origin-base] ================================================================================ [openshift/origin-base] Install 15 Packages (+96 Dependent packages) [openshift/origin-base] Upgrade ( 2 Dependent packages) [openshift/origin-base] Total download size: 56 M [openshift/origin-base] Downloading packages: [openshift/origin-base] Delta RPMs disabled because /usr/bin/applydeltarpm not installed. [openshift/origin-base] warning: /var/cache/yum/x86_64/7/base/packages/avahi-libs-0.6.31-17.el7.x86_64.rpm: Header V3 RSA/SHA256 Signature, key ID f4a80eb5: NOKEY [openshift/origin-base] Public key for avahi-libs-0.6.31-17.el7.x86_64.rpm is not installed [openshift/origin-base] Public key for at-3.1.13-22.el7_4.2.x86_64.rpm is not installed [openshift/origin-base] -------------------------------------------------------------------------------- [openshift/origin-base] Total 23 MB/s | 56 MB 00:02 [openshift/origin-base] Retrieving key from file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 [openshift/origin-base] Importing GPG key 0xF4A80EB5: [openshift/origin-base] Userid : "CentOS-7 Key (CentOS 7 Official Signing Key) <security@centos.org>" [openshift/origin-base] Fingerprint: 6341 ab27 53d7 8a78 a7c2 7bb1 24c6 a8a7 f4a8 0eb5 [openshift/origin-base] Package : centos-release-7-4.1708.el7.centos.x86_64 (@CentOS) [openshift/origin-base] From : /etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7 [openshift/origin-base] Running transaction check [openshift/origin-base] Running transaction test [openshift/origin-base] Transaction test succeeded [openshift/origin-base] Running transaction [openshift/origin-base] Installing : boost-system-1.53.0-27.el7.x86_64 1/115 [openshift/origin-base] Installing : boost-thread-1.53.0-27.el7.x86_64 2/115 [openshift/origin-base] Installing : 1:librados2-0.94.5-2.el7.x86_64 3/115 [openshift/origin-base] Installing : fipscheck-1.4.1-6.el7.x86_64 4/115 [openshift/origin-base] Installing : fipscheck-lib-1.4.1-6.el7.x86_64 5/115 [openshift/origin-base] Installing : groff-base-1.22.2-8.el7.x86_64 6/115 [openshift/origin-base] Installing : less-458-9.el7.x86_64 7/115 [openshift/origin-base] Installing : 1:librbd1-0.94.5-2.el7.x86_64 8/115 [openshift/origin-base] Installing : 1:python-rados-0.94.5-2.el7.x86_64 9/115 [openshift/origin-base] Installing : libgomp-4.8.5-16.el7_4.2.x86_64 10/115 [openshift/origin-base] Updating : systemd-libs-219-42.el7_4.10.x86_64 11/115 [openshift/origin-base] Updating : systemd-219-42.el7_4.10.x86_64 12/115 [openshift/origin-base] Installing : cronie-1.4.11-17.el7.x86_64 13/115 [openshift/origin-base] Installing : crontabs-1.11-6.20121102git.el7.noarch 14/115 [openshift/origin-base] Installing : cronie-anacron-1.4.11-17.el7.x86_64 15/115 [openshift/origin-base] Installing : avahi-libs-0.6.31-17.el7.x86_64 16/115 [openshift/origin-base] Installing : lzo-2.06-8.el7.x86_64 17/115 [openshift/origin-base] Installing : libunistring-0.9.3-9.el7.x86_64 18/115 [openshift/origin-base] Installing : libmnl-1.0.3-7.el7.x86_64 19/115 [openshift/origin-base] Installing : libcroco-0.6.11-1.el7.x86_64 20/115 [openshift/origin-base] Installing : libnfnetlink-1.0.1-4.el7.x86_64 21/115 [openshift/origin-base] Installing : sysvinit-tools-2.88-14.dsf.el7.x86_64 22/115 [openshift/origin-base] Installing : libnetfilter_conntrack-1.0.6-1.el7_3.x86_64 23/115 [openshift/origin-base] Installing : iptables-1.4.21-18.3.el7_4.x86_64 24/115 [openshift/origin-base] Installing : iproute-3.10.0-87.el7.x86_64 25/115 
[openshift/origin-base] Installing : initscripts-9.49.39-1.el7_4.1.x86_64 26/115 [openshift/origin-base] Installing : gettext-libs-0.19.8.1-2.el7.x86_64 27/115 [openshift/origin-base] Installing : gettext-0.19.8.1-2.el7.x86_64 28/115 [openshift/origin-base] install-info: No such file or directory for /usr/share/info/gettext.info.gz [openshift/origin-base] Installing : libarchive-3.1.2-10.el7_2.x86_64 29/115 [openshift/origin-base] Installing : 1:cups-libs-1.6.3-29.el7.x86_64 30/115 [openshift/origin-base] Installing : 1:cups-client-1.6.3-29.el7.x86_64 31/115 [openshift/origin-base] Installing : rsync-3.0.9-18.el7.x86_64 32/115 [openshift/origin-base] Installing : systemd-sysv-219-42.el7_4.10.x86_64 33/115 [openshift/origin-base] Installing : at-3.1.13-22.el7_4.2.x86_64 34/115 [openshift/origin-base] Installing : 1:python-rbd-0.94.5-2.el7.x86_64 35/115 [openshift/origin-base] Installing : 1:perl-parent-0.225-244.el7.noarch 36/115 [openshift/origin-base] Installing : perl-HTTP-Tiny-0.033-3.el7.noarch 37/115 [openshift/origin-base] Installing : perl-podlators-2.5.1-3.el7.noarch 38/115 [openshift/origin-base] Installing : perl-Pod-Perldoc-3.20-4.el7.noarch 39/115 [openshift/origin-base] Installing : 1:perl-Pod-Escapes-1.04-292.el7.noarch 40/115 [openshift/origin-base] Installing : perl-Text-ParseWords-3.29-4.el7.noarch 41/115 [openshift/origin-base] Installing : perl-Encode-2.51-7.el7.x86_64 42/115 [openshift/origin-base] Installing : perl-Pod-Usage-1.63-3.el7.noarch 43/115 [openshift/origin-base] Installing : 4:perl-libs-5.16.3-292.el7.x86_64 44/115 [openshift/origin-base] Installing : 4:perl-macros-5.16.3-292.el7.x86_64 45/115 [openshift/origin-base] Installing : perl-Socket-2.010-4.el7.x86_64 46/115 [openshift/origin-base] Installing : 4:perl-Time-HiRes-1.9725-3.el7.x86_64 47/115 [openshift/origin-base] Installing : perl-threads-1.87-4.el7.x86_64 48/115 [openshift/origin-base] Installing : perl-Storable-2.45-3.el7.x86_64 49/115 [openshift/origin-base] Installing : perl-Carp-1.26-244.el7.noarch 50/115 [openshift/origin-base] Installing : perl-Filter-1.49-3.el7.x86_64 51/115 [openshift/origin-base] Installing : perl-Exporter-5.68-3.el7.noarch 52/115 [openshift/origin-base] Installing : perl-constant-1.27-2.el7.noarch 53/115 [openshift/origin-base] Installing : perl-Time-Local-1.2300-2.el7.noarch 54/115 [openshift/origin-base] Installing : perl-threads-shared-1.43-6.el7.x86_64 55/115 [openshift/origin-base] Installing : perl-File-Temp-0.23.01-3.el7.noarch 56/115 [openshift/origin-base] Installing : perl-File-Path-2.09-2.el7.noarch 57/115 [openshift/origin-base] Installing : perl-PathTools-3.40-5.el7.x86_64 58/115 [openshift/origin-base] Installing : perl-Scalar-List-Utils-1.27-248.el7.x86_64 59/115 [openshift/origin-base] Installing : 1:perl-Pod-Simple-3.28-4.el7.noarch 60/115 [openshift/origin-base] Installing : perl-Getopt-Long-2.40-2.el7.noarch 61/115 [openshift/origin-base] Installing : 4:perl-5.16.3-292.el7.x86_64 62/115 [openshift/origin-base] Installing : 1:perl-Error-0.17020-2.el7.noarch 63/115 [openshift/origin-base] Installing : perl-TermReadKey-2.30-20.el7.x86_64 64/115 [openshift/origin-base] Installing : openssh-7.4p1-13.el7_4.x86_64 65/115 [openshift/origin-base] Installing : 1:mariadb-libs-5.5.56-2.el7.x86_64 66/115 [openshift/origin-base] Installing : 2:postfix-2.10.1-6.el7.x86_64 67/115 [openshift/origin-base] Installing : spax-1.5.2-13.el7.x86_64 68/115 [openshift/origin-base] Installing : mailx-12.5-16.el7.x86_64 69/115 [openshift/origin-base] Installing : 
python-six-1.9.0-2.el7.noarch 70/115 [openshift/origin-base] Installing : parted-3.1-28.el7.x86_64 71/115 [openshift/origin-base] Installing : hdparm-9.43-5.el7.x86_64 72/115 [openshift/origin-base] Installing : libss-1.42.9-10.el7.x86_64 73/115 [openshift/origin-base] Installing : xfsprogs-4.5.0-12.el7.x86_64 74/115 [openshift/origin-base] Installing : time-1.7-45.el7.x86_64 75/115 [openshift/origin-base] Installing : libpipeline-1.2.3-3.el7.x86_64 76/115 [openshift/origin-base] Installing : man-db-2.6.3-9.el7.x86_64 77/115 [openshift/origin-base] Installing : libaio-0.3.109-13.el7.x86_64 78/115 [openshift/origin-base] Installing : bc-1.06.95-13.el7.x86_64 79/115 [openshift/origin-base] Installing : m4-1.4.16-10.el7.x86_64 80/115 [openshift/origin-base] Installing : 14:libpcap-1.5.3-9.el7.x86_64 81/115 [openshift/origin-base] Installing : tcp_wrappers-libs-7.6-77.el7.x86_64 82/115 [openshift/origin-base] Installing : file-5.11-33.el7.x86_64 83/115 [openshift/origin-base] Installing : ed-1.9-4.el7.x86_64 84/115 [openshift/origin-base] install-info: No such file or directory for /usr/share/info/ed.info.gz [openshift/origin-base] Installing : e2fsprogs-libs-1.42.9-10.el7.x86_64 85/115 [openshift/origin-base] Installing : psmisc-22.20-15.el7.x86_64 86/115 [openshift/origin-base] Installing : redhat-lsb-submod-security-4.1-27.el7.centos.1.x86_64 87/115 [openshift/origin-base] Installing : cryptsetup-1.7.4-3.el7_4.1.x86_64 88/115 [openshift/origin-base] Installing : libedit-3.0-12.20121213cvs.el7.x86_64 89/115 [openshift/origin-base] Installing : openssh-clients-7.4p1-13.el7_4.x86_64 90/115 [openshift/origin-base] Installing : libicu-50.1.2-15.el7.x86_64 91/115 [openshift/origin-base] Installing : gdisk-0.8.6-5.el7.x86_64 92/115 [openshift/origin-base] Installing : 1:make-3.82-23.el7.x86_64 93/115 [openshift/origin-base] Installing : python-backports-1.0-8.el7.x86_64 94/115 [openshift/origin-base] Installing : python-backports-ssl_match_hostname-3.4.0.2-4.el7.noar 95/115 [openshift/origin-base] Installing : python-urllib3-1.10.2-3.el7.noarch 96/115 [openshift/origin-base] Installing : python-requests-2.6.0-1.el7_1.noarch 97/115 [openshift/origin-base] Installing : patch-2.7.1-8.el7.x86_64 98/115 [openshift/origin-base] Installing : redhat-lsb-core-4.1-27.el7.centos.1.x86_64 99/115 [openshift/origin-base] Installing : libgnome-keyring-3.12.0-1.el7.x86_64 100/115 [openshift/origin-base] Installing : perl-Git-1.8.3.1-12.el7_4.noarch 101/115 [openshift/origin-base] Installing : git-1.8.3.1-12.el7_4.x86_64 102/115 [openshift/origin-base] Installing : 1:ceph-common-0.94.5-2.el7.x86_64 103/115 [openshift/origin-base] Installing : e2fsprogs-1.42.9-10.el7.x86_64 104/115 [openshift/origin-base] Installing : socat-1.7.3.2-2.el7.x86_64 105/115 [openshift/origin-base] Installing : 2:nmap-ncat-6.40-7.el7.x86_64 106/115 [openshift/origin-base] Installing : device-mapper-persistent-data-0.7.0-0.1.rc6.el7_4.1.x8 107/115 [openshift/origin-base] Installing : bsdtar-3.1.2-10.el7_2.x86_64 108/115 [openshift/origin-base] Installing : wget-1.14-15.el7_4.1.x86_64 109/115 [openshift/origin-base] install-info: No such file or directory for /usr/share/info/wget.info.gz [openshift/origin-base] Installing : tree-1.6.0-10.el7.x86_64 110/115 [openshift/origin-base] Installing : lsof-4.87-4.el7.x86_64 111/115 [openshift/origin-base] Installing : which-2.20-7.el7.x86_64 112/115 [openshift/origin-base] install-info: No such file or directory for /usr/share/info/which.info.gz [openshift/origin-base] Installing : 
2:ethtool-4.8-1.el7.x86_64 113/115 [openshift/origin-base] Cleanup : systemd-219-42.el7_4.7.x86_64 114/115 [openshift/origin-base] Cleanup : systemd-libs-219-42.el7_4.7.x86_64 115/115 [openshift/origin-base] Verifying : fipscheck-lib-1.4.1-6.el7.x86_64 1/115 [openshift/origin-base] Verifying : perl-HTTP-Tiny-0.033-3.el7.noarch 2/115 [openshift/origin-base] Verifying : python-backports-ssl_match_hostname-3.4.0.2-4.el7.noar 3/115 [openshift/origin-base] Verifying : libgnome-keyring-3.12.0-1.el7.x86_64 4/115 [openshift/origin-base] Verifying : 1:cups-libs-1.6.3-29.el7.x86_64 5/115 [openshift/origin-base] Verifying : sysvinit-tools-2.88-14.dsf.el7.x86_64 6/115 [openshift/origin-base] Verifying : rsync-3.0.9-18.el7.x86_64 7/115 [openshift/origin-base] Verifying : 4:perl-5.16.3-292.el7.x86_64 8/115 [openshift/origin-base] Verifying : 2:ethtool-4.8-1.el7.x86_64 9/115 [openshift/origin-base] Verifying : perl-TermReadKey-2.30-20.el7.x86_64 10/115 [openshift/origin-base] Verifying : which-2.20-7.el7.x86_64 11/115 [openshift/origin-base] Verifying : groff-base-1.22.2-8.el7.x86_64 12/115 [openshift/origin-base] Verifying : perl-File-Temp-0.23.01-3.el7.noarch 13/115 [openshift/origin-base] Verifying : boost-thread-1.53.0-27.el7.x86_64 14/115 [openshift/origin-base] Verifying : patch-2.7.1-8.el7.x86_64 15/115 [openshift/origin-base] Verifying : perl-Socket-2.010-4.el7.x86_64 16/115 [openshift/origin-base] Verifying : crontabs-1.11-6.20121102git.el7.noarch 17/115 [openshift/origin-base] Verifying : fipscheck-1.4.1-6.el7.x86_64 18/115 [openshift/origin-base] Verifying : python-backports-1.0-8.el7.x86_64 19/115 [openshift/origin-base] Verifying : 1:make-3.82-23.el7.x86_64 20/115 [openshift/origin-base] Verifying : 1:perl-Pod-Escapes-1.04-292.el7.noarch 21/115 [openshift/origin-base] Verifying : libnfnetlink-1.0.1-4.el7.x86_64 22/115 [openshift/origin-base] Verifying : perl-File-Path-2.09-2.el7.noarch 23/115 [openshift/origin-base] Verifying : libicu-50.1.2-15.el7.x86_64 24/115 [openshift/origin-base] Verifying : libedit-3.0-12.20121213cvs.el7.x86_64 25/115 [openshift/origin-base] Verifying : lsof-4.87-4.el7.x86_64 26/115 [openshift/origin-base] Verifying : cryptsetup-1.7.4-3.el7_4.1.x86_64 27/115 [openshift/origin-base] Verifying : perl-Text-ParseWords-3.29-4.el7.noarch 28/115 [openshift/origin-base] Verifying : iptables-1.4.21-18.3.el7_4.x86_64 29/115 [openshift/origin-base] Verifying : libcroco-0.6.11-1.el7.x86_64 30/115 [openshift/origin-base] Verifying : socat-1.7.3.2-2.el7.x86_64 31/115 [openshift/origin-base] Verifying : boost-system-1.53.0-27.el7.x86_64 32/115 [openshift/origin-base] Verifying : redhat-lsb-submod-security-4.1-27.el7.centos.1.x86_64 33/115 [openshift/origin-base] Verifying : 4:perl-Time-HiRes-1.9725-3.el7.x86_64 34/115 [openshift/origin-base] Verifying : git-1.8.3.1-12.el7_4.x86_64 35/115 [openshift/origin-base] Verifying : 2:nmap-ncat-6.40-7.el7.x86_64 36/115 [openshift/origin-base] Verifying : python-urllib3-1.10.2-3.el7.noarch 37/115 [openshift/origin-base] Verifying : libarchive-3.1.2-10.el7_2.x86_64 38/115 [openshift/origin-base] Verifying : openssh-7.4p1-13.el7_4.x86_64 39/115 [openshift/origin-base] Verifying : tree-1.6.0-10.el7.x86_64 40/115 [openshift/origin-base] Verifying : 4:perl-libs-5.16.3-292.el7.x86_64 41/115 [openshift/origin-base] Verifying : psmisc-22.20-15.el7.x86_64 42/115 [openshift/origin-base] Verifying : 1:ceph-common-0.94.5-2.el7.x86_64 43/115 [openshift/origin-base] Verifying : 1:librados2-0.94.5-2.el7.x86_64 44/115 [openshift/origin-base] Verifying : 
systemd-219-42.el7_4.10.x86_64 45/115 [openshift/origin-base] Verifying : device-mapper-persistent-data-0.7.0-0.1.rc6.el7_4.1.x8 46/115 [openshift/origin-base] Verifying : bsdtar-3.1.2-10.el7_2.x86_64 47/115 [openshift/origin-base] Verifying : e2fsprogs-libs-1.42.9-10.el7.x86_64 48/115 [openshift/origin-base] Verifying : perl-Pod-Usage-1.63-3.el7.noarch 49/115 [openshift/origin-base] Verifying : perl-Encode-2.51-7.el7.x86_64 50/115 [openshift/origin-base] Verifying : perl-threads-1.87-4.el7.x86_64 51/115 [openshift/origin-base] Verifying : ed-1.9-4.el7.x86_64 52/115 [openshift/origin-base] Verifying : redhat-lsb-core-4.1-27.el7.centos.1.x86_64 53/115 [openshift/origin-base] Verifying : file-5.11-33.el7.x86_64 54/115 [openshift/origin-base] Verifying : libmnl-1.0.3-7.el7.x86_64 55/115 [openshift/origin-base] Verifying : perl-threads-shared-1.43-6.el7.x86_64 56/115 [openshift/origin-base] Verifying : perl-Storable-2.45-3.el7.x86_64 57/115 [openshift/origin-base] Verifying : gettext-0.19.8.1-2.el7.x86_64 58/115 [openshift/origin-base] Verifying : libunistring-0.9.3-9.el7.x86_64 59/115 [openshift/origin-base] Verifying : tcp_wrappers-libs-7.6-77.el7.x86_64 60/115 [openshift/origin-base] Verifying : 14:libpcap-1.5.3-9.el7.x86_64 61/115 [openshift/origin-base] Verifying : 4:perl-macros-5.16.3-292.el7.x86_64 62/115 [openshift/origin-base] Verifying : lzo-2.06-8.el7.x86_64 63/115 [openshift/origin-base] Verifying : m4-1.4.16-10.el7.x86_64 64/115 [openshift/origin-base] Verifying : gettext-libs-0.19.8.1-2.el7.x86_64 65/115 [openshift/origin-base] Verifying : bc-1.06.95-13.el7.x86_64 66/115 [openshift/origin-base] Verifying : 1:perl-parent-0.225-244.el7.noarch 67/115 [openshift/origin-base] Verifying : 1:librbd1-0.94.5-2.el7.x86_64 68/115 [openshift/origin-base] Verifying : gdisk-0.8.6-5.el7.x86_64 69/115 [openshift/origin-base] Verifying : 1:python-rbd-0.94.5-2.el7.x86_64 70/115 [openshift/origin-base] Verifying : libaio-0.3.109-13.el7.x86_64 71/115 [openshift/origin-base] Verifying : perl-Carp-1.26-244.el7.noarch 72/115 [openshift/origin-base] Verifying : perl-Git-1.8.3.1-12.el7_4.noarch 73/115 [openshift/origin-base] Verifying : libpipeline-1.2.3-3.el7.x86_64 74/115 [openshift/origin-base] Verifying : cronie-anacron-1.4.11-17.el7.x86_64 75/115 [openshift/origin-base] Verifying : perl-podlators-2.5.1-3.el7.noarch 76/115 [openshift/origin-base] Verifying : time-1.7-45.el7.x86_64 77/115 [openshift/origin-base] Verifying : perl-Filter-1.49-3.el7.x86_64 78/115 [openshift/origin-base] Verifying : xfsprogs-4.5.0-12.el7.x86_64 79/115 [openshift/origin-base] Verifying : less-458-9.el7.x86_64 80/115 [openshift/origin-base] Verifying : libss-1.42.9-10.el7.x86_64 81/115 [openshift/origin-base] Verifying : perl-Exporter-5.68-3.el7.noarch 82/115 [openshift/origin-base] Verifying : perl-constant-1.27-2.el7.noarch 83/115 [openshift/origin-base] Verifying : perl-PathTools-3.40-5.el7.x86_64 84/115 [openshift/origin-base] Verifying : libnetfilter_conntrack-1.0.6-1.el7_3.x86_64 85/115 [openshift/origin-base] Verifying : 2:postfix-2.10.1-6.el7.x86_64 86/115 [openshift/origin-base] Verifying : systemd-sysv-219-42.el7_4.10.x86_64 87/115 [openshift/origin-base] Verifying : hdparm-9.43-5.el7.x86_64 88/115 [openshift/origin-base] Verifying : 1:python-rados-0.94.5-2.el7.x86_64 89/115 [openshift/origin-base] Verifying : iproute-3.10.0-87.el7.x86_64 90/115 [openshift/origin-base] Verifying : avahi-libs-0.6.31-17.el7.x86_64 91/115 [openshift/origin-base] Verifying : 1:perl-Pod-Simple-3.28-4.el7.noarch 92/115 
[openshift/origin-base] Verifying : 1:cups-client-1.6.3-29.el7.x86_64 93/115 [openshift/origin-base] Verifying : perl-Time-Local-1.2300-2.el7.noarch 94/115 [openshift/origin-base] Verifying : man-db-2.6.3-9.el7.x86_64 95/115 [openshift/origin-base] Verifying : openssh-clients-7.4p1-13.el7_4.x86_64 96/115 [openshift/origin-base] Verifying : parted-3.1-28.el7.x86_64 97/115 [openshift/origin-base] Verifying : perl-Pod-Perldoc-3.20-4.el7.noarch 98/115 [openshift/origin-base] Verifying : python-six-1.9.0-2.el7.noarch 99/115 [openshift/origin-base] Verifying : systemd-libs-219-42.el7_4.10.x86_64 100/115 [openshift/origin-base] Verifying : at-3.1.13-22.el7_4.2.x86_64 101/115 [openshift/origin-base] Verifying : wget-1.14-15.el7_4.1.x86_64 102/115 [openshift/origin-base] Verifying : 1:perl-Error-0.17020-2.el7.noarch 103/115 [openshift/origin-base] Verifying : perl-Scalar-List-Utils-1.27-248.el7.x86_64 104/115 [openshift/origin-base] Verifying : libgomp-4.8.5-16.el7_4.2.x86_64 105/115 [openshift/origin-base] Verifying : cronie-1.4.11-17.el7.x86_64 106/115 [openshift/origin-base] Verifying : mailx-12.5-16.el7.x86_64 107/115 [openshift/origin-base] Verifying : perl-Getopt-Long-2.40-2.el7.noarch 108/115 [openshift/origin-base] Verifying : python-requests-2.6.0-1.el7_1.noarch 109/115 [openshift/origin-base] Verifying : initscripts-9.49.39-1.el7_4.1.x86_64 110/115 [openshift/origin-base] Verifying : e2fsprogs-1.42.9-10.el7.x86_64 111/115 [openshift/origin-base] Verifying : spax-1.5.2-13.el7.x86_64 112/115 [openshift/origin-base] Verifying : 1:mariadb-libs-5.5.56-2.el7.x86_64 113/115 [openshift/origin-base] Verifying : systemd-libs-219-42.el7_4.7.x86_64 114/115 [openshift/origin-base] Verifying : systemd-219-42.el7_4.7.x86_64 115/115 [openshift/origin-base] Installed: [openshift/origin-base] bsdtar.x86_64 0:3.1.2-10.el7_2 [openshift/origin-base] ceph-common.x86_64 1:0.94.5-2.el7 [openshift/origin-base] device-mapper-persistent-data.x86_64 0:0.7.0-0.1.rc6.el7_4.1 [openshift/origin-base] e2fsprogs.x86_64 0:1.42.9-10.el7 [openshift/origin-base] ethtool.x86_64 2:4.8-1.el7 [openshift/origin-base] git.x86_64 0:1.8.3.1-12.el7_4 [openshift/origin-base] iptables.x86_64 0:1.4.21-18.3.el7_4 [openshift/origin-base] lsof.x86_64 0:4.87-4.el7 [openshift/origin-base] nmap-ncat.x86_64 2:6.40-7.el7 [openshift/origin-base] socat.x86_64 0:1.7.3.2-2.el7 [openshift/origin-base] sysvinit-tools.x86_64 0:2.88-14.dsf.el7 [openshift/origin-base] tree.x86_64 0:1.6.0-10.el7 [openshift/origin-base] wget.x86_64 0:1.14-15.el7_4.1 [openshift/origin-base] which.x86_64 0:2.20-7.el7 [openshift/origin-base] xfsprogs.x86_64 0:4.5.0-12.el7 [openshift/origin-base] Dependency Installed: [openshift/origin-base] at.x86_64 0:3.1.13-22.el7_4.2 [openshift/origin-base] avahi-libs.x86_64 0:0.6.31-17.el7 [openshift/origin-base] bc.x86_64 0:1.06.95-13.el7 [openshift/origin-base] boost-system.x86_64 0:1.53.0-27.el7 [openshift/origin-base] boost-thread.x86_64 0:1.53.0-27.el7 [openshift/origin-base] cronie.x86_64 0:1.4.11-17.el7 [openshift/origin-base] cronie-anacron.x86_64 0:1.4.11-17.el7 [openshift/origin-base] crontabs.noarch 0:1.11-6.20121102git.el7 [openshift/origin-base] cryptsetup.x86_64 0:1.7.4-3.el7_4.1 [openshift/origin-base] cups-client.x86_64 1:1.6.3-29.el7 [openshift/origin-base] cups-libs.x86_64 1:1.6.3-29.el7 [openshift/origin-base] e2fsprogs-libs.x86_64 0:1.42.9-10.el7 [openshift/origin-base] ed.x86_64 0:1.9-4.el7 [openshift/origin-base] file.x86_64 0:5.11-33.el7 [openshift/origin-base] fipscheck.x86_64 0:1.4.1-6.el7 [openshift/origin-base] 
fipscheck-lib.x86_64 0:1.4.1-6.el7 [openshift/origin-base] gdisk.x86_64 0:0.8.6-5.el7 [openshift/origin-base] gettext.x86_64 0:0.19.8.1-2.el7 [openshift/origin-base] gettext-libs.x86_64 0:0.19.8.1-2.el7 [openshift/origin-base] groff-base.x86_64 0:1.22.2-8.el7 [openshift/origin-base] hdparm.x86_64 0:9.43-5.el7 [openshift/origin-base] initscripts.x86_64 0:9.49.39-1.el7_4.1 [openshift/origin-base] iproute.x86_64 0:3.10.0-87.el7 [openshift/origin-base] less.x86_64 0:458-9.el7 [openshift/origin-base] libaio.x86_64 0:0.3.109-13.el7 [openshift/origin-base] libarchive.x86_64 0:3.1.2-10.el7_2 [openshift/origin-base] libcroco.x86_64 0:0.6.11-1.el7 [openshift/origin-base] libedit.x86_64 0:3.0-12.20121213cvs.el7 [openshift/origin-base] libgnome-keyring.x86_64 0:3.12.0-1.el7 [openshift/origin-base] libgomp.x86_64 0:4.8.5-16.el7_4.2 [openshift/origin-base] libicu.x86_64 0:50.1.2-15.el7 [openshift/origin-base] libmnl.x86_64 0:1.0.3-7.el7 [openshift/origin-base] libnetfilter_conntrack.x86_64 0:1.0.6-1.el7_3 [openshift/origin-base] libnfnetlink.x86_64 0:1.0.1-4.el7 [openshift/origin-base] libpcap.x86_64 14:1.5.3-9.el7 [openshift/origin-base] libpipeline.x86_64 0:1.2.3-3.el7 [openshift/origin-base] librados2.x86_64 1:0.94.5-2.el7 [openshift/origin-base] librbd1.x86_64 1:0.94.5-2.el7 [openshift/origin-base] libss.x86_64 0:1.42.9-10.el7 [openshift/origin-base] libunistring.x86_64 0:0.9.3-9.el7 [openshift/origin-base] lzo.x86_64 0:2.06-8.el7 [openshift/origin-base] m4.x86_64 0:1.4.16-10.el7 [openshift/origin-base] mailx.x86_64 0:12.5-16.el7 [openshift/origin-base] make.x86_64 1:3.82-23.el7 [openshift/origin-base] man-db.x86_64 0:2.6.3-9.el7 [openshift/origin-base] mariadb-libs.x86_64 1:5.5.56-2.el7 [openshift/origin-base] openssh.x86_64 0:7.4p1-13.el7_4 [openshift/origin-base] openssh-clients.x86_64 0:7.4p1-13.el7_4 [openshift/origin-base] parted.x86_64 0:3.1-28.el7 [openshift/origin-base] patch.x86_64 0:2.7.1-8.el7 [openshift/origin-base] perl.x86_64 4:5.16.3-292.el7 [openshift/origin-base] perl-Carp.noarch 0:1.26-244.el7 [openshift/origin-base] perl-Encode.x86_64 0:2.51-7.el7 [openshift/origin-base] perl-Error.noarch 1:0.17020-2.el7 [openshift/origin-base] perl-Exporter.noarch 0:5.68-3.el7 [openshift/origin-base] perl-File-Path.noarch 0:2.09-2.el7 [openshift/origin-base] perl-File-Temp.noarch 0:0.23.01-3.el7 [openshift/origin-base] perl-Filter.x86_64 0:1.49-3.el7 [openshift/origin-base] perl-Getopt-Long.noarch 0:2.40-2.el7 [openshift/origin-base] perl-Git.noarch 0:1.8.3.1-12.el7_4 [openshift/origin-base] perl-HTTP-Tiny.noarch 0:0.033-3.el7 [openshift/origin-base] perl-PathTools.x86_64 0:3.40-5.el7 [openshift/origin-base] perl-Pod-Escapes.noarch 1:1.04-292.el7 [openshift/origin-base] perl-Pod-Perldoc.noarch 0:3.20-4.el7 [openshift/origin-base] perl-Pod-Simple.noarch 1:3.28-4.el7 [openshift/origin-base] perl-Pod-Usage.noarch 0:1.63-3.el7 [openshift/origin-base] perl-Scalar-List-Utils.x86_64 0:1.27-248.el7 [openshift/origin-base] perl-Socket.x86_64 0:2.010-4.el7 [openshift/origin-base] perl-Storable.x86_64 0:2.45-3.el7 [openshift/origin-base] perl-TermReadKey.x86_64 0:2.30-20.el7 [openshift/origin-base] perl-Text-ParseWords.noarch 0:3.29-4.el7 [openshift/origin-base] perl-Time-HiRes.x86_64 4:1.9725-3.el7 [openshift/origin-base] perl-Time-Local.noarch 0:1.2300-2.el7 [openshift/origin-base] perl-constant.noarch 0:1.27-2.el7 [openshift/origin-base] perl-libs.x86_64 4:5.16.3-292.el7 [openshift/origin-base] perl-macros.x86_64 4:5.16.3-292.el7 [openshift/origin-base] perl-parent.noarch 1:0.225-244.el7 
[openshift/origin-base] perl-podlators.noarch 0:2.5.1-3.el7 [openshift/origin-base] perl-threads.x86_64 0:1.87-4.el7 [openshift/origin-base] perl-threads-shared.x86_64 0:1.43-6.el7 [openshift/origin-base] postfix.x86_64 2:2.10.1-6.el7 [openshift/origin-base] psmisc.x86_64 0:22.20-15.el7 [openshift/origin-base] python-backports.x86_64 0:1.0-8.el7 [openshift/origin-base] python-backports-ssl_match_hostname.noarch 0:3.4.0.2-4.el7 [openshift/origin-base] python-rados.x86_64 1:0.94.5-2.el7 [openshift/origin-base] python-rbd.x86_64 1:0.94.5-2.el7 [openshift/origin-base] python-requests.noarch 0:2.6.0-1.el7_1 [openshift/origin-base] python-six.noarch 0:1.9.0-2.el7 [openshift/origin-base] python-urllib3.noarch 0:1.10.2-3.el7 [openshift/origin-base] redhat-lsb-core.x86_64 0:4.1-27.el7.centos.1 [openshift/origin-base] redhat-lsb-submod-security.x86_64 0:4.1-27.el7.centos.1 [openshift/origin-base] rsync.x86_64 0:3.0.9-18.el7 [openshift/origin-base] spax.x86_64 0:1.5.2-13.el7 [openshift/origin-base] systemd-sysv.x86_64 0:219-42.el7_4.10 [openshift/origin-base] tcp_wrappers-libs.x86_64 0:7.6-77.el7 [openshift/origin-base] time.x86_64 0:1.7-45.el7 [openshift/origin-base] Dependency Updated: [openshift/origin-base] systemd.x86_64 0:219-42.el7_4.10 systemd-libs.x86_64 0:219-42.el7_4.10 [openshift/origin-base] Complete! [openshift/origin-base] Loaded plugins: fastestmirror, ovl [openshift/origin-base] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-base] Cleaning up everything [openshift/origin-base] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-base] Cleaning up list of fastest mirrors [openshift/origin-base] --> LABEL io.k8s.display-name="OpenShift Origin CentOS 7 Base" io.k8s.description="This is the base image from which all OpenShift Origin images inherit." io.openshift.tags="openshift,base" [openshift/origin-base] --> Committing changes to openshift/origin-base:5eda3fa ... [openshift/origin-base] --> Tagged as openshift/origin-base:latest [openshift/origin-base] --> Done hack/build-base-images.sh took 216 seconds + [[ release-3.7 == release-1.[4-5] ]] + OS_BUILD_ENV_PULL_IMAGE=true + hack/env make release BUILD_TESTS=1 [INFO] [21:07:03+0000] Pulling the openshift/origin-release:golang-1.8 image to update it... Trying to pull repository registry.access.redhat.com/openshift/origin-release ... Trying to pull repository docker.io/openshift/origin-release ... golang-1.8: Pulling from docker.io/openshift/origin-release Digest: sha256:a3e3b0df1fd6056082b7017ff29123836933a1360ca6d0b461e16f63a0280d73 Status: Image is up to date for docker.io/openshift/origin-release:golang-1.8 OS_ONLY_BUILD_PLATFORMS='linux/amd64' hack/build-rpm-release.sh [INFO] [21:08:21+0000] Building Origin release RPMs with tito... 
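That completes the openshift/origin-base stage: the yum transaction above installed 15 packages plus 96 dependencies, updated 2 (systemd, systemd-libs), and the image was committed and tagged as openshift/origin-base:latest. The release build that follows runs inside the openshift/origin-release:golang-1.8 container pulled just above; hack/env acts as a containerized wrapper around the make invocation. A rough hand-written equivalent, where the volume and working-directory flags are assumptions about what the wrapper does rather than its actual implementation:

    # Run the release build inside the pulled toolchain image, so the host
    # needs Docker but no Go toolchain. Flags here are illustrative only.
    docker run --rm \
      -v "$(pwd)":/go/src/github.com/openshift/origin \
      -w /go/src/github.com/openshift/origin \
      openshift/origin-release:golang-1.8 \
      make release BUILD_TESTS=1

The /go/src/github.com/openshift/origin path matches the directory the later make output reports entering.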
Creating output directory: /tmp/tito OS_GIT_MINOR::7+ OS_GIT_MAJOR::3 OS_GIT_VERSION::v3.7.2+5eda3fa-5 OS_GIT_TREE_STATE::clean OS_GIT_CATALOG_VERSION::v0.1.2 OS_GIT_COMMIT::5eda3fa Tagging new version of origin: 0.0.1 -> 3.7.2-1.5.5eda3fa Created tag: v3.7.2-1.5.5eda3fa View: git show HEAD Undo: tito tag -u Push: git push origin && git push origin v3.7.2-1.5.5eda3fa Building package [v3.7.2-1.5.5eda3fa] Wrote: /tmp/openshift/build-rpm-release/tito/origin-3.7.2.tar.gz Wrote: /tmp/openshift/build-rpm-release/tito/origin-3.7.2-1.5.5eda3fa.src.rpm Building package [v3.7.2-1.5.5eda3fa] Wrote: /tmp/openshift/build-rpm-release/tito/origin-3.7.2.tar.gz Successfully built: /tmp/openshift/build-rpm-release/tito/origin-3.7.2-1.5.5eda3fa.src.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-master-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-tests-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-node-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/tuned-profiles-origin-node-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-clients-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-dockerregistry-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-pod-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-sdn-ovs-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-federation-services-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-service-catalog-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-template-service-broker-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/x86_64/origin-cluster-capacity-3.7.2-1.5.5eda3fa.x86_64.rpm /tmp/openshift/build-rpm-release/tito/noarch/origin-excluder-3.7.2-1.5.5eda3fa.noarch.rpm /tmp/openshift/build-rpm-release/tito/noarch/origin-docker-excluder-3.7.2-1.5.5eda3fa.noarch.rpm WARNING: Leaving rpmbuild files in: /tmp/openshift/build-rpm-release/tito/rpmbuild-originRYQ_zw Undoing tag: v3.7.2-1.5.5eda3fa Deleted tag 'v3.7.2-1.5.5eda3fa' (was 9dd8c67) [INFO] [21:24:16+0000] Unpacking tito artifacts for reuse... 
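The tito stage above tags a throwaway release (0.0.1 -> 3.7.2-1.5.5eda3fa), builds the SRPM and the per-component binary RPMs under /tmp/openshift/build-rpm-release/tito, and then undoes the tag, exactly as the "Undo: tito tag -u" hint suggests. A hand-run approximation of that cycle; the build script's exact invocation may differ:

    # Tag, build, then drop the temporary tag again.
    tito tag             # cuts a version tag such as v3.7.2-1.5.<sha> with a changelog bump
    tito build --rpm     # produces the .src.rpm plus the x86_64/noarch binary RPMs
    tito tag -u          # undoes the temporary tag, as the log does afterwards

The resulting RPMs (origin, origin-node, origin-clients, origin-tests, and so on) are what the component image builds below install from the local repository.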
make[1]: Entering directory `/go/src/github.com/openshift/origin' rm -rf _output make[1]: Leaving directory `/go/src/github.com/openshift/origin' Spawning worker 0 with 4 pkgs Spawning worker 1 with 4 pkgs Spawning worker 2 with 4 pkgs Spawning worker 3 with 4 pkgs Workers Finished Saving Primary metadata Saving file lists metadata Saving other metadata Generating sqlite DBs Sqlite DBs complete [INFO] [21:24:34+0000] Repository file for `yum` or `dnf` placed at /go/src/github.com/openshift/origin/_output/local/releases/rpms/origin-local-release.repo [INFO] [21:24:34+0000] Install it with: [INFO] [21:24:34+0000] $ mv '/go/src/github.com/openshift/origin/_output/local/releases/rpms/origin-local-release.repo' '/etc/yum.repos.d [INFO] [21:24:34+0000] hack/build-rpm-release.sh exited with code 0 after 00h 16m 40s hack/build-images.sh [openshift/origin-pod] --> FROM openshift/origin-source [openshift/origin-pod] --> RUN INSTALL_PKGS="origin-pod" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all [openshift/origin-pod] Loaded plugins: fastestmirror, ovl [openshift/origin-pod] Determining fastest mirrors [openshift/origin-pod] * base: mirror.vtti.vt.edu [openshift/origin-pod] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-pod] * updates: mirrors.tripadvisor.com [openshift/origin-pod] Resolving Dependencies [openshift/origin-pod] --> Running transaction check [openshift/origin-pod] ---> Package origin-pod.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/origin-pod] --> Finished Dependency Resolution [openshift/origin-pod] Dependencies Resolved [openshift/origin-pod] ================================================================================ [openshift/origin-pod] Package Arch Version Repository Size [openshift/origin-pod] ================================================================================ [openshift/origin-pod] Installing: [openshift/origin-pod] origin-pod x86_64 3.7.2-1.5.5eda3fa origin-local-release 353 k [openshift/origin-pod] Transaction Summary [openshift/origin-pod] ================================================================================ [openshift/origin-pod] Install 1 Package [openshift/origin-pod] Total download size: 353 k [openshift/origin-pod] Installed size: 1.1 M [openshift/origin-pod] Downloading packages: [openshift/origin-pod] Running transaction check [openshift/origin-pod] Running transaction test [openshift/origin-pod] Transaction test succeeded [openshift/origin-pod] Running transaction [openshift/origin-pod] Installing : origin-pod-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-pod] Verifying : origin-pod-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-pod] Installed: [openshift/origin-pod] origin-pod.x86_64 0:3.7.2-1.5.5eda3fa [openshift/origin-pod] Complete! [openshift/origin-pod] Loaded plugins: fastestmirror, ovl [openshift/origin-pod] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-pod] Cleaning up everything [openshift/origin-pod] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-pod] Cleaning up list of fastest mirrors [openshift/origin-pod] --> LABEL io.k8s.display-name="OpenShift Origin Pod Infrastructure" io.k8s.description="This is a component of OpenShift Origin and holds on to the shared Linux namespaces within a Pod." 
io.openshift.tags="openshift,pod" [openshift/origin-pod] --> USER 1001 [openshift/origin-pod] --> ENTRYPOINT ["/usr/bin/pod"] [openshift/origin-pod] --> Committing changes to openshift/origin-pod:5eda3fa ... [openshift/origin-pod] --> Tagged as openshift/origin-pod:latest [openshift/origin-pod] --> Done [openshift/origin-service-catalog] --> FROM openshift/origin-source [openshift/origin-service-catalog] --> RUN INSTALL_PKGS="origin-service-catalog" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all [openshift/origin-service-catalog] Loaded plugins: fastestmirror, ovl [openshift/origin-service-catalog] Determining fastest mirrors [openshift/origin-service-catalog] * base: mirror.vtti.vt.edu [openshift/origin-service-catalog] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-service-catalog] * updates: mirrors.tripadvisor.com [openshift/origin-service-catalog] Resolving Dependencies [openshift/origin-service-catalog] --> Running transaction check [openshift/origin-service-catalog] ---> Package origin-service-catalog.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/origin-service-catalog] --> Finished Dependency Resolution [openshift/origin-service-catalog] Dependencies Resolved [openshift/origin-service-catalog] ================================================================================ [openshift/origin-service-catalog] Package Arch Version Repository Size [openshift/origin-service-catalog] ================================================================================ [openshift/origin-service-catalog] Installing: [openshift/origin-service-catalog] origin-service-catalog x86_64 3.7.2-1.5.5eda3fa origin-local-release 9.9 M [openshift/origin-service-catalog] Transaction Summary [openshift/origin-service-catalog] ================================================================================ [openshift/origin-service-catalog] Install 1 Package [openshift/origin-service-catalog] Total download size: 9.9 M [openshift/origin-service-catalog] Installed size: 58 M [openshift/origin-service-catalog] Downloading packages: [openshift/origin-service-catalog] Running transaction check [openshift/origin-service-catalog] Running transaction test [openshift/origin-service-catalog] Transaction test succeeded [openshift/origin-service-catalog] Running transaction [openshift/origin-service-catalog] Installing : origin-service-catalog-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-service-catalog] Verifying : origin-service-catalog-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-service-catalog] Installed: [openshift/origin-service-catalog] origin-service-catalog.x86_64 0:3.7.2-1.5.5eda3fa [openshift/origin-service-catalog] Complete! [openshift/origin-service-catalog] Loaded plugins: fastestmirror, ovl [openshift/origin-service-catalog] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-service-catalog] Cleaning up everything [openshift/origin-service-catalog] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-service-catalog] Cleaning up list of fastest mirrors [openshift/origin-service-catalog] --> CMD [ "/usr/bin/service-catalog" ] [openshift/origin-service-catalog] --> Committing changes to openshift/origin-service-catalog:5eda3fa ... 
[openshift/origin-service-catalog] --> Tagged as openshift/origin-service-catalog:latest [openshift/origin-service-catalog] --> Done [openshift/origin-template-service-broker] --> FROM openshift/origin-source [openshift/origin-template-service-broker] --> RUN INSTALL_PKGS="origin-template-service-broker" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all [openshift/origin-template-service-broker] Loaded plugins: fastestmirror, ovl [openshift/origin-template-service-broker] Determining fastest mirrors [openshift/origin-template-service-broker] * base: mirror.vtti.vt.edu [openshift/origin-template-service-broker] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-template-service-broker] * updates: mirrors.tripadvisor.com [openshift/origin-template-service-broker] Resolving Dependencies [openshift/origin-template-service-broker] --> Running transaction check [openshift/origin-template-service-broker] ---> Package origin-template-service-broker.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/origin-template-service-broker] --> Finished Dependency Resolution [openshift/origin-template-service-broker] Dependencies Resolved [openshift/origin-template-service-broker] ================================================================================ [openshift/origin-template-service-broker] Package Arch Version Repository Size [openshift/origin-template-service-broker] ================================================================================ [openshift/origin-template-service-broker] Installing: [openshift/origin-template-service-broker] origin-template-service-broker [openshift/origin-template-service-broker] x86_64 3.7.2-1.5.5eda3fa origin-local-release 11 M [openshift/origin-template-service-broker] Transaction Summary [openshift/origin-template-service-broker] ================================================================================ [openshift/origin-template-service-broker] Install 1 Package [openshift/origin-template-service-broker] Total download size: 11 M [openshift/origin-template-service-broker] Installed size: 69 M [openshift/origin-template-service-broker] Downloading packages: [openshift/origin-template-service-broker] Running transaction check [openshift/origin-template-service-broker] Running transaction test [openshift/origin-template-service-broker] Transaction test succeeded [openshift/origin-template-service-broker] Running transaction [openshift/origin-template-service-broker] Installing : origin-template-service-broker-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-template-service-broker] Verifying : origin-template-service-broker-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-template-service-broker] Installed: [openshift/origin-template-service-broker] origin-template-service-broker.x86_64 0:3.7.2-1.5.5eda3fa [openshift/origin-template-service-broker] Complete! 
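The createrepo pass earlier (the worker, metadata, and sqlite lines) turned the freshly built RPMs into a local yum repository, and origin-local-release.repo was placed in the source tree with instructions to move it under /etc/yum.repos.d. Every component image then installs with an explicit --enablerepo=origin-local-release, which suggests the repository is disabled by default. The file below is illustrative only; the real one is generated by the build scripts and its field values are assumptions here:

    # Sketch of a local-release repo definition plus the install pattern
    # the RUN lines above use. Values are assumed, not copied from the build.
    cat > /etc/yum.repos.d/origin-local-release.repo <<'EOF'
    [origin-local-release]
    name=OpenShift Origin local release RPMs
    baseurl=file:///go/src/github.com/openshift/origin/_output/local/releases/rpms
    gpgcheck=0
    enabled=0
    EOF
    yum --enablerepo=origin-local-release install -y origin-pod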
[openshift/origin-template-service-broker] Loaded plugins: fastestmirror, ovl [openshift/origin-template-service-broker] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-template-service-broker] Cleaning up everything [openshift/origin-template-service-broker] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-template-service-broker] Cleaning up list of fastest mirrors [openshift/origin-template-service-broker] --> CMD [ "/usr/bin/template-service-broker" ] [openshift/origin-template-service-broker] --> Committing changes to openshift/origin-template-service-broker:5eda3fa ... [openshift/origin-template-service-broker] --> Tagged as openshift/origin-template-service-broker:latest [openshift/origin-template-service-broker] --> Done [openshift/origin-cluster-capacity] --> FROM openshift/origin-source [openshift/origin-cluster-capacity] --> RUN INSTALL_PKGS="origin-cluster-capacity" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all [openshift/origin-cluster-capacity] Loaded plugins: fastestmirror, ovl [openshift/origin-cluster-capacity] Determining fastest mirrors [openshift/origin-cluster-capacity] * base: mirror.vtti.vt.edu [openshift/origin-cluster-capacity] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-cluster-capacity] * updates: mirrors.tripadvisor.com [openshift/origin-cluster-capacity] Resolving Dependencies [openshift/origin-cluster-capacity] --> Running transaction check [openshift/origin-cluster-capacity] ---> Package origin-cluster-capacity.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/origin-cluster-capacity] --> Finished Dependency Resolution [openshift/origin-cluster-capacity] Dependencies Resolved [openshift/origin-cluster-capacity] ================================================================================ [openshift/origin-cluster-capacity] Package Arch Version Repository Size [openshift/origin-cluster-capacity] ================================================================================ [openshift/origin-cluster-capacity] Installing: [openshift/origin-cluster-capacity] origin-cluster-capacity x86_64 3.7.2-1.5.5eda3fa origin-local-release 12 M [openshift/origin-cluster-capacity] Transaction Summary [openshift/origin-cluster-capacity] ================================================================================ [openshift/origin-cluster-capacity] Install 1 Package [openshift/origin-cluster-capacity] Total download size: 12 M [openshift/origin-cluster-capacity] Installed size: 90 M [openshift/origin-cluster-capacity] Downloading packages: [openshift/origin-cluster-capacity] Running transaction check [openshift/origin-cluster-capacity] Running transaction test [openshift/origin-cluster-capacity] Transaction test succeeded [openshift/origin-cluster-capacity] Running transaction [openshift/origin-cluster-capacity] Installing : origin-cluster-capacity-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-cluster-capacity] Verifying : origin-cluster-capacity-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-cluster-capacity] Installed: [openshift/origin-cluster-capacity] origin-cluster-capacity.x86_64 0:3.7.2-1.5.5eda3fa [openshift/origin-cluster-capacity] Complete! 
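Each of these component images uses the same single RUN step, visible verbatim in the log: install the package set from the local release repo, verify it, and clean up so the image layer stays small. Restated as shell with comments added; only the INSTALL_PKGS value changes from image to image:

    INSTALL_PKGS="origin-cluster-capacity"                             # varies per image
    yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS}
    rpm -V ${INSTALL_PKGS}   # verify installed files against the RPM database; a mismatch breaks the && chain
    yum clean all            # drop cached packages and metadata (the log also suggests rm -rf /var/cache/yum)

The utility images further down (egress-router, egress-http-proxy) apply the same install/verify/clean chain, but against the stock CentOS repositories rather than the local release repo.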
[openshift/origin-cluster-capacity] Loaded plugins: fastestmirror, ovl [openshift/origin-cluster-capacity] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-cluster-capacity] Cleaning up everything [openshift/origin-cluster-capacity] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-cluster-capacity] Cleaning up list of fastest mirrors [openshift/origin-cluster-capacity] --> LABEL io.k8s.display-name="OpenShift Origin Cluster Capacity" io.k8s.description="This is a component of OpenShift Origin and runs cluster capacity analysis tool." [openshift/origin-cluster-capacity] --> CMD ["/usr/bin/cluster-capacity --help"] [openshift/origin-cluster-capacity] --> Committing changes to openshift/origin-cluster-capacity:5eda3fa ... [openshift/origin-cluster-capacity] --> Tagged as openshift/origin-cluster-capacity:latest [openshift/origin-cluster-capacity] --> Done [openshift/origin-egress-router] --> FROM openshift/origin-base [openshift/origin-egress-router] --> RUN INSTALL_PKGS="iproute iputils" && yum install -y $INSTALL_PKGS && rpm -V $INSTALL_PKGS && yum clean all [openshift/origin-egress-router] Loaded plugins: fastestmirror, ovl [openshift/origin-egress-router] Determining fastest mirrors [openshift/origin-egress-router] * base: mirror.vtti.vt.edu [openshift/origin-egress-router] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-egress-router] * updates: mirrors.tripadvisor.com [openshift/origin-egress-router] Package iproute-3.10.0-87.el7.x86_64 already installed and latest version [openshift/origin-egress-router] Package iputils-20160308-10.el7.x86_64 already installed and latest version [openshift/origin-egress-router] Nothing to do [openshift/origin-egress-router] Loaded plugins: fastestmirror, ovl [openshift/origin-egress-router] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-egress-router] Cleaning up everything [openshift/origin-egress-router] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-egress-router] Cleaning up list of fastest mirrors [openshift/origin-egress-router] --> ADD egress-router.sh /bin/egress-router.sh [openshift/origin-egress-router] --> LABEL io.k8s.display-name="OpenShift Origin Egress Router" io.k8s.description="This is a component of OpenShift Origin and contains an egress router." io.openshift.tags="openshift,router,egress" [openshift/origin-egress-router] --> ENTRYPOINT /bin/egress-router.sh [openshift/origin-egress-router] --> Committing changes to openshift/origin-egress-router:5eda3fa ... 
[openshift/origin-egress-router] --> Tagged as openshift/origin-egress-router:latest [openshift/origin-egress-router] --> Done [openshift/origin-docker-registry] --> FROM openshift/origin-base [openshift/origin-docker-registry] --> RUN INSTALL_PKGS="origin-dockerregistry" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all [openshift/origin-docker-registry] Loaded plugins: fastestmirror, ovl [openshift/origin-docker-registry] Determining fastest mirrors [openshift/origin-docker-registry] * base: mirror.vtti.vt.edu [openshift/origin-docker-registry] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-docker-registry] * updates: mirrors.tripadvisor.com [openshift/origin-docker-registry] Resolving Dependencies [openshift/origin-docker-registry] --> Running transaction check [openshift/origin-docker-registry] ---> Package origin-dockerregistry.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/origin-docker-registry] --> Finished Dependency Resolution [openshift/origin-docker-registry] Dependencies Resolved [openshift/origin-docker-registry] ================================================================================ [openshift/origin-docker-registry] Package Arch Version Repository Size [openshift/origin-docker-registry] ================================================================================ [openshift/origin-docker-registry] Installing: [openshift/origin-docker-registry] origin-dockerregistry x86_64 3.7.2-1.5.5eda3fa origin-local-release 13 M [openshift/origin-docker-registry] Transaction Summary [openshift/origin-docker-registry] ================================================================================ [openshift/origin-docker-registry] Install 1 Package [openshift/origin-docker-registry] Total download size: 13 M [openshift/origin-docker-registry] Installed size: 77 M [openshift/origin-docker-registry] Downloading packages: [openshift/origin-docker-registry] Running transaction check [openshift/origin-docker-registry] Running transaction test [openshift/origin-docker-registry] Transaction test succeeded [openshift/origin-docker-registry] Running transaction [openshift/origin-docker-registry] Installing : origin-dockerregistry-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-docker-registry] Verifying : origin-dockerregistry-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-docker-registry] Installed: [openshift/origin-docker-registry] origin-dockerregistry.x86_64 0:3.7.2-1.5.5eda3fa [openshift/origin-docker-registry] Complete! [openshift/origin-docker-registry] Loaded plugins: fastestmirror, ovl [openshift/origin-docker-registry] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-docker-registry] Cleaning up everything [openshift/origin-docker-registry] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-docker-registry] Cleaning up list of fastest mirrors [openshift/origin-docker-registry] --> COPY config.yml ${REGISTRY_CONFIGURATION_PATH} [openshift/origin-docker-registry] --> LABEL io.k8s.display-name="OpenShift Container Platform Image Registry" io.k8s.description="This is a component of OpenShift Container Platform and exposes a Docker registry that is integrated with the cluster for authentication and management." 
io.openshift.tags="openshift,docker,registry" [openshift/origin-docker-registry] --> USER 1001 [openshift/origin-docker-registry] --> EXPOSE 5000 [openshift/origin-docker-registry] --> VOLUME /registry [openshift/origin-docker-registry] --> ENV REGISTRY_CONFIGURATION_PATH=/config.yml [openshift/origin-docker-registry] --> CMD /usr/bin/dockerregistry ${REGISTRY_CONFIGURATION_PATH} [openshift/origin-docker-registry] --> Committing changes to openshift/origin-docker-registry:5eda3fa ... [openshift/origin-docker-registry] --> Tagged as openshift/origin-docker-registry:latest [openshift/origin-docker-registry] --> Done [openshift/origin-federation] --> FROM openshift/origin-base [openshift/origin-federation] --> RUN INSTALL_PKGS="origin-federation-services" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all && ln -s /usr/bin/hyperkube /hyperkube [openshift/origin-federation] Loaded plugins: fastestmirror, ovl [openshift/origin-federation] Determining fastest mirrors [openshift/origin-federation] * base: mirror.vtti.vt.edu [openshift/origin-federation] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-federation] * updates: mirrors.tripadvisor.com [openshift/origin-federation] Resolving Dependencies [openshift/origin-federation] --> Running transaction check [openshift/origin-federation] ---> Package origin-federation-services.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/origin-federation] --> Finished Dependency Resolution [openshift/origin-federation] Dependencies Resolved [openshift/origin-federation] ================================================================================ [openshift/origin-federation] Package Arch Version Repository Size [openshift/origin-federation] ================================================================================ [openshift/origin-federation] Installing: [openshift/origin-federation] origin-federation-services x86_64 3.7.2-1.5.5eda3fa origin-local-release 29 M [openshift/origin-federation] Transaction Summary [openshift/origin-federation] ================================================================================ [openshift/origin-federation] Install 1 Package [openshift/origin-federation] Total download size: 29 M [openshift/origin-federation] Installed size: 218 M [openshift/origin-federation] Downloading packages: [openshift/origin-federation] Running transaction check [openshift/origin-federation] Running transaction test [openshift/origin-federation] Transaction test succeeded [openshift/origin-federation] Running transaction [openshift/origin-federation] Installing : origin-federation-services-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-federation] Verifying : origin-federation-services-3.7.2-1.5.5eda3fa.x86_64 1/1 [openshift/origin-federation] Installed: [openshift/origin-federation] origin-federation-services.x86_64 0:3.7.2-1.5.5eda3fa [openshift/origin-federation] Complete! 
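The registry image a few lines up is configured entirely through its committed metadata: config.yml is copied to the path named by REGISTRY_CONFIGURATION_PATH, port 5000 is exposed, /registry is declared a volume, and the CMD runs /usr/bin/dockerregistry against that config. Purely as a local smoke-test sketch, with the host directory and flags as assumptions (in a cluster the installer deploys this image rather than anyone running it by hand):

    # Publish the exposed registry port and back the declared volume
    # with a throwaway host directory.
    docker run --rm -p 5000:5000 \
      -v /tmp/registry:/registry \
      openshift/origin-docker-registry:latest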
[openshift/origin-federation] Loaded plugins: fastestmirror, ovl [openshift/origin-federation] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-federation] Cleaning up everything [openshift/origin-federation] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-federation] Cleaning up list of fastest mirrors [openshift/origin-federation] --> LABEL io.k8s.display-name="OpenShift Origin Federation" io.k8s.description="This is a component of OpenShift Origin and contains the software for running federation servers." [openshift/origin-federation] --> Committing changes to openshift/origin-federation:5eda3fa ... [openshift/origin-federation] --> Tagged as openshift/origin-federation:latest [openshift/origin-federation] --> Done [openshift/origin-egress-http-proxy] --> FROM openshift/origin-base [openshift/origin-egress-http-proxy] --> RUN INSTALL_PKGS="squid" && yum install -y $INSTALL_PKGS && rpm -V $INSTALL_PKGS && yum clean all && rmdir /var/log/squid /var/spool/squid && rm -f /etc/squid/squid.conf [openshift/origin-egress-http-proxy] Loaded plugins: fastestmirror, ovl [openshift/origin-egress-http-proxy] Determining fastest mirrors [openshift/origin-egress-http-proxy] * base: mirror.vtti.vt.edu [openshift/origin-egress-http-proxy] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-egress-http-proxy] * updates: mirrors.tripadvisor.com [openshift/origin-egress-http-proxy] Resolving Dependencies [openshift/origin-egress-http-proxy] --> Running transaction check [openshift/origin-egress-http-proxy] ---> Package squid.x86_64 7:3.5.20-10.el7 will be installed [openshift/origin-egress-http-proxy] --> Processing Dependency: squid-migration-script for package: 7:squid-3.5.20-10.el7.x86_64 [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(Digest::MD5) for package: 7:squid-3.5.20-10.el7.x86_64 [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(Data::Dumper) for package: 7:squid-3.5.20-10.el7.x86_64 [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(DBI) for package: 7:squid-3.5.20-10.el7.x86_64 [openshift/origin-egress-http-proxy] --> Processing Dependency: libltdl.so.7()(64bit) for package: 7:squid-3.5.20-10.el7.x86_64 [openshift/origin-egress-http-proxy] --> Processing Dependency: libecap.so.3()(64bit) for package: 7:squid-3.5.20-10.el7.x86_64 [openshift/origin-egress-http-proxy] --> Running transaction check [openshift/origin-egress-http-proxy] ---> Package libecap.x86_64 0:1.0.0-1.el7 will be installed [openshift/origin-egress-http-proxy] ---> Package libtool-ltdl.x86_64 0:2.4.2-22.el7_3 will be installed [openshift/origin-egress-http-proxy] ---> Package perl-DBI.x86_64 0:1.627-4.el7 will be installed [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(RPC::PlServer) >= 0.2001 for package: perl-DBI-1.627-4.el7.x86_64 [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(RPC::PlClient) >= 0.2000 for package: perl-DBI-1.627-4.el7.x86_64 [openshift/origin-egress-http-proxy] ---> Package perl-Data-Dumper.x86_64 0:2.145-3.el7 will be installed [openshift/origin-egress-http-proxy] ---> Package perl-Digest-MD5.x86_64 0:2.52-3.el7 will be installed [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(Digest::base) >= 1.00 for package: perl-Digest-MD5-2.52-3.el7.x86_64 [openshift/origin-egress-http-proxy] ---> Package squid-migration-script.x86_64 7:3.5.20-10.el7 will be 
installed [openshift/origin-egress-http-proxy] --> Running transaction check [openshift/origin-egress-http-proxy] ---> Package perl-Digest.noarch 0:1.17-245.el7 will be installed [openshift/origin-egress-http-proxy] ---> Package perl-PlRPC.noarch 0:0.2020-14.el7 will be installed [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(Net::Daemon) >= 0.13 for package: perl-PlRPC-0.2020-14.el7.noarch [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(Net::Daemon::Test) for package: perl-PlRPC-0.2020-14.el7.noarch [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(Net::Daemon::Log) for package: perl-PlRPC-0.2020-14.el7.noarch [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(Compress::Zlib) for package: perl-PlRPC-0.2020-14.el7.noarch [openshift/origin-egress-http-proxy] --> Running transaction check [openshift/origin-egress-http-proxy] ---> Package perl-IO-Compress.noarch 0:2.061-2.el7 will be installed [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(Compress::Raw::Zlib) >= 2.061 for package: perl-IO-Compress-2.061-2.el7.noarch [openshift/origin-egress-http-proxy] --> Processing Dependency: perl(Compress::Raw::Bzip2) >= 2.061 for package: perl-IO-Compress-2.061-2.el7.noarch [openshift/origin-egress-http-proxy] ---> Package perl-Net-Daemon.noarch 0:0.48-5.el7 will be installed [openshift/origin-egress-http-proxy] --> Running transaction check [openshift/origin-egress-http-proxy] ---> Package perl-Compress-Raw-Bzip2.x86_64 0:2.061-3.el7 will be installed [openshift/origin-egress-http-proxy] ---> Package perl-Compress-Raw-Zlib.x86_64 1:2.061-4.el7 will be installed [openshift/origin-egress-http-proxy] --> Finished Dependency Resolution [openshift/origin-egress-http-proxy] Dependencies Resolved [openshift/origin-egress-http-proxy] ================================================================================ [openshift/origin-egress-http-proxy] Package Arch Version Repository [openshift/origin-egress-http-proxy] Size [openshift/origin-egress-http-proxy] ================================================================================ [openshift/origin-egress-http-proxy] Installing: [openshift/origin-egress-http-proxy] squid x86_64 7:3.5.20-10.el7 base 3.1 M [openshift/origin-egress-http-proxy] Installing for dependencies: [openshift/origin-egress-http-proxy] libecap x86_64 1.0.0-1.el7 base 21 k [openshift/origin-egress-http-proxy] libtool-ltdl x86_64 2.4.2-22.el7_3 base 49 k [openshift/origin-egress-http-proxy] perl-Compress-Raw-Bzip2 x86_64 2.061-3.el7 base 32 k [openshift/origin-egress-http-proxy] perl-Compress-Raw-Zlib x86_64 1:2.061-4.el7 base 57 k [openshift/origin-egress-http-proxy] perl-DBI x86_64 1.627-4.el7 base 802 k [openshift/origin-egress-http-proxy] perl-Data-Dumper x86_64 2.145-3.el7 base 47 k [openshift/origin-egress-http-proxy] perl-Digest noarch 1.17-245.el7 base 23 k [openshift/origin-egress-http-proxy] perl-Digest-MD5 x86_64 2.52-3.el7 base 30 k [openshift/origin-egress-http-proxy] perl-IO-Compress noarch 2.061-2.el7 base 260 k [openshift/origin-egress-http-proxy] perl-Net-Daemon noarch 0.48-5.el7 base 51 k [openshift/origin-egress-http-proxy] perl-PlRPC noarch 0.2020-14.el7 base 36 k [openshift/origin-egress-http-proxy] squid-migration-script x86_64 7:3.5.20-10.el7 base 48 k [openshift/origin-egress-http-proxy] Transaction Summary [openshift/origin-egress-http-proxy] ================================================================================ [openshift/origin-egress-http-proxy] 
Install 1 Package (+12 Dependent packages) [openshift/origin-egress-http-proxy] Total download size: 4.5 M [openshift/origin-egress-http-proxy] Installed size: 14 M [openshift/origin-egress-http-proxy] Downloading packages: [openshift/origin-egress-http-proxy] -------------------------------------------------------------------------------- [openshift/origin-egress-http-proxy] Total 8.9 MB/s | 4.5 MB 00:00 [openshift/origin-egress-http-proxy] Running transaction check [openshift/origin-egress-http-proxy] Running transaction test [openshift/origin-egress-http-proxy] Transaction test succeeded [openshift/origin-egress-http-proxy] Running transaction [openshift/origin-egress-http-proxy] Installing : perl-Data-Dumper-2.145-3.el7.x86_64 1/13 [openshift/origin-egress-http-proxy] Installing : perl-Compress-Raw-Bzip2-2.061-3.el7.x86_64 2/13 [openshift/origin-egress-http-proxy] Installing : perl-Digest-1.17-245.el7.noarch 3/13 [openshift/origin-egress-http-proxy] Installing : perl-Digest-MD5-2.52-3.el7.x86_64 4/13 [openshift/origin-egress-http-proxy] Installing : 1:perl-Compress-Raw-Zlib-2.061-4.el7.x86_64 5/13 [openshift/origin-egress-http-proxy] Installing : perl-IO-Compress-2.061-2.el7.noarch 6/13 [openshift/origin-egress-http-proxy] Installing : libtool-ltdl-2.4.2-22.el7_3.x86_64 7/13 [openshift/origin-egress-http-proxy] Installing : 7:squid-migration-script-3.5.20-10.el7.x86_64 8/13 [openshift/origin-egress-http-proxy] Installing : libecap-1.0.0-1.el7.x86_64 9/13 [openshift/origin-egress-http-proxy] Installing : perl-Net-Daemon-0.48-5.el7.noarch 10/13 [openshift/origin-egress-http-proxy] Installing : perl-PlRPC-0.2020-14.el7.noarch 11/13 [openshift/origin-egress-http-proxy] Installing : perl-DBI-1.627-4.el7.x86_64 12/13 [openshift/origin-egress-http-proxy] Installing : 7:squid-3.5.20-10.el7.x86_64 13/13 [openshift/origin-egress-http-proxy] Verifying : perl-Net-Daemon-0.48-5.el7.noarch 1/13 [openshift/origin-egress-http-proxy] Verifying : perl-Data-Dumper-2.145-3.el7.x86_64 2/13 [openshift/origin-egress-http-proxy] Verifying : libecap-1.0.0-1.el7.x86_64 3/13 [openshift/origin-egress-http-proxy] Verifying : perl-Digest-MD5-2.52-3.el7.x86_64 4/13 [openshift/origin-egress-http-proxy] Verifying : 7:squid-migration-script-3.5.20-10.el7.x86_64 5/13 [openshift/origin-egress-http-proxy] Verifying : perl-IO-Compress-2.061-2.el7.noarch 6/13 [openshift/origin-egress-http-proxy] Verifying : libtool-ltdl-2.4.2-22.el7_3.x86_64 7/13 [openshift/origin-egress-http-proxy] Verifying : 1:perl-Compress-Raw-Zlib-2.061-4.el7.x86_64 8/13 [openshift/origin-egress-http-proxy] Verifying : perl-Digest-1.17-245.el7.noarch 9/13 [openshift/origin-egress-http-proxy] Verifying : perl-DBI-1.627-4.el7.x86_64 10/13 [openshift/origin-egress-http-proxy] Verifying : perl-Compress-Raw-Bzip2-2.061-3.el7.x86_64 11/13 [openshift/origin-egress-http-proxy] Verifying : perl-PlRPC-0.2020-14.el7.noarch 12/13 [openshift/origin-egress-http-proxy] Verifying : 7:squid-3.5.20-10.el7.x86_64 13/13 [openshift/origin-egress-http-proxy] Installed: [openshift/origin-egress-http-proxy] squid.x86_64 7:3.5.20-10.el7 [openshift/origin-egress-http-proxy] Dependency Installed: [openshift/origin-egress-http-proxy] libecap.x86_64 0:1.0.0-1.el7 [openshift/origin-egress-http-proxy] libtool-ltdl.x86_64 0:2.4.2-22.el7_3 [openshift/origin-egress-http-proxy] perl-Compress-Raw-Bzip2.x86_64 0:2.061-3.el7 [openshift/origin-egress-http-proxy] perl-Compress-Raw-Zlib.x86_64 1:2.061-4.el7 [openshift/origin-egress-http-proxy] perl-DBI.x86_64 0:1.627-4.el7 
[openshift/origin-egress-http-proxy] perl-Data-Dumper.x86_64 0:2.145-3.el7 [openshift/origin-egress-http-proxy] perl-Digest.noarch 0:1.17-245.el7 [openshift/origin-egress-http-proxy] perl-Digest-MD5.x86_64 0:2.52-3.el7 [openshift/origin-egress-http-proxy] perl-IO-Compress.noarch 0:2.061-2.el7 [openshift/origin-egress-http-proxy] perl-Net-Daemon.noarch 0:0.48-5.el7 [openshift/origin-egress-http-proxy] perl-PlRPC.noarch 0:0.2020-14.el7 [openshift/origin-egress-http-proxy] squid-migration-script.x86_64 7:3.5.20-10.el7 [openshift/origin-egress-http-proxy] Complete! [openshift/origin-egress-http-proxy] Loaded plugins: fastestmirror, ovl [openshift/origin-egress-http-proxy] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-egress-http-proxy] Cleaning up everything [openshift/origin-egress-http-proxy] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-egress-http-proxy] Cleaning up list of fastest mirrors [openshift/origin-egress-http-proxy] --> ADD egress-http-proxy.sh /bin/egress-http-proxy.sh [openshift/origin-egress-http-proxy] --> ENTRYPOINT /bin/egress-http-proxy.sh [openshift/origin-egress-http-proxy] --> Committing changes to openshift/origin-egress-http-proxy:5eda3fa ... [openshift/origin-egress-http-proxy] --> Tagged as openshift/origin-egress-http-proxy:latest [openshift/origin-egress-http-proxy] --> Done [openshift/origin] --> FROM openshift/origin-base [openshift/origin] --> COPY system-container/system-container-wrapper.sh /usr/local/bin/ [openshift/origin] --> COPY system-container/config.json.template system-container/manifest.json system-container/service.template system-container/tmpfiles.template /exports/ [openshift/origin] --> RUN INSTALL_PKGS="origin" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all && setcap 'cap_net_bind_service=ep' /usr/bin/openshift [openshift/origin] Loaded plugins: fastestmirror, ovl [openshift/origin] Determining fastest mirrors [openshift/origin] * base: mirror.vtti.vt.edu [openshift/origin] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin] * updates: mirrors.tripadvisor.com [openshift/origin] Resolving Dependencies [openshift/origin] --> Running transaction check [openshift/origin] ---> Package origin.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/origin] --> Processing Dependency: origin-clients = 3.7.2-1.5.5eda3fa for package: origin-3.7.2-1.5.5eda3fa.x86_64 [openshift/origin] --> Running transaction check [openshift/origin] ---> Package origin-clients.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/origin] --> Processing Dependency: bash-completion for package: origin-clients-3.7.2-1.5.5eda3fa.x86_64 [openshift/origin] --> Running transaction check [openshift/origin] ---> Package bash-completion.noarch 1:2.1-6.el7 will be installed [openshift/origin] --> Finished Dependency Resolution [openshift/origin] Dependencies Resolved [openshift/origin] ================================================================================ [openshift/origin] Package Arch Version Repository Size [openshift/origin] ================================================================================ [openshift/origin] Installing: [openshift/origin] origin x86_64 3.7.2-1.5.5eda3fa origin-local-release 47 M [openshift/origin] Installing for dependencies: [openshift/origin] bash-completion noarch 1:2.1-6.el7 base 85 k [openshift/origin] origin-clients x86_64 
3.7.2-1.5.5eda3fa origin-local-release 42 M [openshift/origin] Transaction Summary [openshift/origin] ================================================================================ [openshift/origin] Install 1 Package (+2 Dependent packages) [openshift/origin] Total download size: 89 M [openshift/origin] Installed size: 650 M [openshift/origin] Downloading packages: [openshift/origin] -------------------------------------------------------------------------------- [openshift/origin] Total 136 MB/s | 89 MB 00:00 [openshift/origin] Running transaction check [openshift/origin] Running transaction test [openshift/origin] Transaction test succeeded [openshift/origin] Running transaction [openshift/origin] Installing : 1:bash-completion-2.1-6.el7.noarch 1/3 [openshift/origin] Installing : origin-clients-3.7.2-1.5.5eda3fa.x86_64 2/3 [openshift/origin] Installing : origin-3.7.2-1.5.5eda3fa.x86_64 3/3 [openshift/origin] Verifying : origin-clients-3.7.2-1.5.5eda3fa.x86_64 1/3 [openshift/origin] Verifying : origin-3.7.2-1.5.5eda3fa.x86_64 2/3 [openshift/origin] Verifying : 1:bash-completion-2.1-6.el7.noarch 3/3 [openshift/origin] Installed: [openshift/origin] origin.x86_64 0:3.7.2-1.5.5eda3fa [openshift/origin] Dependency Installed: [openshift/origin] bash-completion.noarch 1:2.1-6.el7 origin-clients.x86_64 0:3.7.2-1.5.5eda3fa [openshift/origin] Complete! [openshift/origin] Loaded plugins: fastestmirror, ovl [openshift/origin] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin] Cleaning up everything [openshift/origin] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin] Cleaning up list of fastest mirrors [openshift/origin] --> LABEL io.k8s.display-name="OpenShift Origin Application Platform" io.k8s.description="OpenShift Origin is a platform for developing, building, and deploying containerized applications." io.openshift.tags="openshift,core" [openshift/origin] --> ENV HOME=/root OPENSHIFT_CONTAINERIZED=true KUBECONFIG=/var/lib/origin/openshift.local.config/master/admin.kubeconfig [openshift/origin] --> WORKDIR /var/lib/origin [openshift/origin] --> EXPOSE 8443 53 [openshift/origin] --> ENTRYPOINT ["/usr/bin/openshift"] [openshift/origin] --> Committing changes to openshift/origin:5eda3fa ... [openshift/origin] --> Tagged as openshift/origin:latest [openshift/origin] --> Done [openshift/origin-sti-builder] --> FROM openshift/origin [openshift/origin-sti-builder] --> LABEL io.k8s.display-name="OpenShift Origin S2I Builder" io.k8s.description="This is a component of OpenShift Origin and is responsible for executing source-to-image (s2i) image builds." io.openshift.tags="openshift,sti,builder" [openshift/origin-sti-builder] --> ENTRYPOINT ["/usr/bin/openshift-sti-build"] [openshift/origin-sti-builder] --> Committing changes to openshift/origin-sti-builder:5eda3fa ... [openshift/origin-sti-builder] --> Tagged as openshift/origin-sti-builder:latest [openshift/origin-sti-builder] --> Done [openshift/origin-f5-router] --> FROM openshift/origin [openshift/origin-f5-router] --> LABEL io.k8s.display-name="OpenShift Origin F5 Router" io.k8s.description="This is a component of OpenShift Origin and programs a BigIP F5 router to expose services within the cluster." io.openshift.tags="openshift,router,f5" [openshift/origin-f5-router] --> ENTRYPOINT ["/usr/bin/openshift-f5-router"] [openshift/origin-f5-router] --> Committing changes to openshift/origin-f5-router:5eda3fa ... 
[openshift/origin-f5-router] --> Tagged as openshift/origin-f5-router:latest [openshift/origin-f5-router] --> Done [openshift/origin-deployer] --> FROM openshift/origin [openshift/origin-deployer] --> LABEL io.k8s.display-name="OpenShift Origin Deployer" io.k8s.description="This is a component of OpenShift Origin and executes the user deployment process to roll out new containers. It may be used as a base image for building your own custom deployer image." io.openshift.tags="openshift,deployer" [openshift/origin-deployer] --> USER 1001 [openshift/origin-deployer] --> ENTRYPOINT ["/usr/bin/openshift-deploy"] [openshift/origin-deployer] --> Committing changes to openshift/origin-deployer:5eda3fa ... [openshift/origin-deployer] --> Tagged as openshift/origin-deployer:latest [openshift/origin-deployer] --> Done [openshift/origin-recycler] --> FROM openshift/origin [openshift/origin-recycler] --> LABEL io.k8s.display-name="OpenShift Origin Volume Recycler" io.k8s.description="This is a component of OpenShift Origin and is used to prepare persistent volumes for reuse after they are deleted." io.openshift.tags="openshift,recycler" [openshift/origin-recycler] --> ENTRYPOINT ["/usr/bin/openshift-recycle"] [openshift/origin-recycler] --> Committing changes to openshift/origin-recycler:5eda3fa ... [openshift/origin-recycler] --> Tagged as openshift/origin-recycler:latest [openshift/origin-recycler] --> Done [openshift/origin-docker-builder] --> FROM openshift/origin [openshift/origin-docker-builder] --> LABEL io.k8s.display-name="OpenShift Origin Docker Builder" io.k8s.description="This is a component of OpenShift Origin and is responsible for executing Docker image builds." io.openshift.tags="openshift,builder" [openshift/origin-docker-builder] --> ENTRYPOINT ["/usr/bin/openshift-docker-build"] [openshift/origin-docker-builder] --> Committing changes to openshift/origin-docker-builder:5eda3fa ... [openshift/origin-docker-builder] --> Tagged as openshift/origin-docker-builder:latest [openshift/origin-docker-builder] --> Done [openshift/origin-gitserver] --> FROM openshift/origin [openshift/origin-gitserver] --> COPY bin/gitserver /usr/bin/gitserver [openshift/origin-gitserver] --> COPY hooks/ /var/lib/git-hooks/ [openshift/origin-gitserver] --> COPY gitconfig /var/lib/gitconfig/.gitconfig [openshift/origin-gitserver] --> RUN mkdir -p /var/lib/git && mkdir -p /var/lib/gitconfig && chmod 777 /var/lib/gitconfig && ln -s /usr/bin/gitserver /usr/bin/gitrepo-buildconfigs [openshift/origin-gitserver] --> VOLUME /var/lib/git [openshift/origin-gitserver] --> ENV HOME=/var/lib/gitconfig [openshift/origin-gitserver] --> ENTRYPOINT ["/usr/bin/gitserver"] [openshift/origin-gitserver] --> Committing changes to openshift/origin-gitserver:5eda3fa ... 
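The images in the block above (sti-builder, f5-router, deployer, recycler, docker-builder) add only metadata and an entrypoint on top of openshift/origin. Reconstructed purely from the directives logged here, the openshift/origin-deployer build reduces to roughly the sketch below; the file name is illustrative, the labels are abbreviated, and the job's own image tooling rather than a plain docker build performs the real work:
cat > Dockerfile.origin-deployer <<'EOF'
FROM openshift/origin
LABEL io.k8s.display-name="OpenShift Origin Deployer" io.openshift.tags="openshift,deployer"
USER 1001
ENTRYPOINT ["/usr/bin/openshift-deploy"]
EOF
# build and tag it the same way the log tags its result
docker build -t openshift/origin-deployer:latest -f Dockerfile.origin-deployer .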
[openshift/origin-gitserver] --> Tagged as openshift/origin-gitserver:latest [openshift/origin-gitserver] --> Done [openshift/origin-gitserver] Removing examples/gitserver/bin/gitserver [openshift/origin-haproxy-router] --> FROM openshift/origin [openshift/origin-haproxy-router] --> RUN INSTALL_PKGS="haproxy" && yum install -y $INSTALL_PKGS && rpm -V $INSTALL_PKGS && yum clean all && mkdir -p /var/lib/haproxy/router/{certs,cacerts} && mkdir -p /var/lib/haproxy/{conf,run,bin,log} && touch /var/lib/haproxy/conf/{{os_http_be,os_edge_http_be,os_tcp_be,os_sni_passthrough,os_reencrypt,os_route_http_expose,os_route_http_redirect,cert_config,os_wildcard_domain}.map,haproxy.config} && setcap 'cap_net_bind_service=ep' /usr/sbin/haproxy && chown -R :0 /var/lib/haproxy && chmod -R g+w /var/lib/haproxy [openshift/origin-haproxy-router] Loaded plugins: fastestmirror, ovl [openshift/origin-haproxy-router] Determining fastest mirrors [openshift/origin-haproxy-router] * base: mirror.vtti.vt.edu [openshift/origin-haproxy-router] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-haproxy-router] * updates: mirrors.tripadvisor.com [openshift/origin-haproxy-router] Resolving Dependencies [openshift/origin-haproxy-router] --> Running transaction check [openshift/origin-haproxy-router] ---> Package haproxy.x86_64 0:1.5.18-6.el7 will be installed [openshift/origin-haproxy-router] --> Finished Dependency Resolution [openshift/origin-haproxy-router] Dependencies Resolved [openshift/origin-haproxy-router] ================================================================================ [openshift/origin-haproxy-router] Package Arch Version Repository Size [openshift/origin-haproxy-router] ================================================================================ [openshift/origin-haproxy-router] Installing: [openshift/origin-haproxy-router] haproxy x86_64 1.5.18-6.el7 base 834 k [openshift/origin-haproxy-router] Transaction Summary [openshift/origin-haproxy-router] ================================================================================ [openshift/origin-haproxy-router] Install 1 Package [openshift/origin-haproxy-router] Total download size: 834 k [openshift/origin-haproxy-router] Installed size: 2.6 M [openshift/origin-haproxy-router] Downloading packages: [openshift/origin-haproxy-router] Running transaction check [openshift/origin-haproxy-router] Running transaction test [openshift/origin-haproxy-router] Transaction test succeeded [openshift/origin-haproxy-router] Running transaction [openshift/origin-haproxy-router] Installing : haproxy-1.5.18-6.el7.x86_64 1/1 [openshift/origin-haproxy-router] Verifying : haproxy-1.5.18-6.el7.x86_64 1/1 [openshift/origin-haproxy-router] Installed: [openshift/origin-haproxy-router] haproxy.x86_64 0:1.5.18-6.el7 [openshift/origin-haproxy-router] Complete! [openshift/origin-haproxy-router] Loaded plugins: fastestmirror, ovl [openshift/origin-haproxy-router] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-haproxy-router] Cleaning up everything [openshift/origin-haproxy-router] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-haproxy-router] Cleaning up list of fastest mirrors [openshift/origin-haproxy-router] --> COPY . 
/var/lib/haproxy/ [openshift/origin-haproxy-router] --> LABEL io.k8s.display-name="OpenShift Origin HAProxy Router" io.k8s.description="This is a component of OpenShift Origin and contains an HAProxy instance that automatically exposes services within the cluster through routes, and offers TLS termination, reencryption, or SNI-passthrough on ports 80 and 443." io.openshift.tags="openshift,router,haproxy" [openshift/origin-haproxy-router] --> USER 1001 [openshift/origin-haproxy-router] --> EXPOSE 80 443 [openshift/origin-haproxy-router] --> WORKDIR /var/lib/haproxy/conf [openshift/origin-haproxy-router] --> ENV TEMPLATE_FILE=/var/lib/haproxy/conf/haproxy-config.template RELOAD_SCRIPT=/var/lib/haproxy/reload-haproxy [openshift/origin-haproxy-router] --> ENTRYPOINT ["/usr/bin/openshift-router"] [openshift/origin-haproxy-router] --> Committing changes to openshift/origin-haproxy-router:5eda3fa ... [openshift/origin-haproxy-router] --> Tagged as openshift/origin-haproxy-router:latest [openshift/origin-haproxy-router] --> Done [openshift/origin-keepalived-ipfailover] --> FROM openshift/origin [openshift/origin-keepalived-ipfailover] --> RUN INSTALL_PKGS="kmod keepalived iproute psmisc nmap-ncat net-tools" && yum install -y $INSTALL_PKGS && rpm -V $INSTALL_PKGS && yum clean all [openshift/origin-keepalived-ipfailover] Loaded plugins: fastestmirror, ovl [openshift/origin-keepalived-ipfailover] Determining fastest mirrors [openshift/origin-keepalived-ipfailover] * base: mirror.vtti.vt.edu [openshift/origin-keepalived-ipfailover] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-keepalived-ipfailover] * updates: mirrors.tripadvisor.com [openshift/origin-keepalived-ipfailover] Package kmod-20-15.el7_4.7.x86_64 already installed and latest version [openshift/origin-keepalived-ipfailover] Package iproute-3.10.0-87.el7.x86_64 already installed and latest version [openshift/origin-keepalived-ipfailover] Package psmisc-22.20-15.el7.x86_64 already installed and latest version [openshift/origin-keepalived-ipfailover] Package 2:nmap-ncat-6.40-7.el7.x86_64 already installed and latest version [openshift/origin-keepalived-ipfailover] Resolving Dependencies [openshift/origin-keepalived-ipfailover] --> Running transaction check [openshift/origin-keepalived-ipfailover] ---> Package keepalived.x86_64 0:1.3.5-1.el7 will be installed [openshift/origin-keepalived-ipfailover] --> Processing Dependency: libnl-genl-3.so.200(libnl_3)(64bit) for package: keepalived-1.3.5-1.el7.x86_64 [openshift/origin-keepalived-ipfailover] --> Processing Dependency: libnl-3.so.200(libnl_3_2_27)(64bit) for package: keepalived-1.3.5-1.el7.x86_64 [openshift/origin-keepalived-ipfailover] --> Processing Dependency: libnl-3.so.200(libnl_3)(64bit) for package: keepalived-1.3.5-1.el7.x86_64 [openshift/origin-keepalived-ipfailover] --> Processing Dependency: libnl-genl-3.so.200()(64bit) for package: keepalived-1.3.5-1.el7.x86_64 [openshift/origin-keepalived-ipfailover] --> Processing Dependency: libnl-3.so.200()(64bit) for package: keepalived-1.3.5-1.el7.x86_64 [openshift/origin-keepalived-ipfailover] --> Processing Dependency: libnetsnmpmibs.so.31()(64bit) for package: keepalived-1.3.5-1.el7.x86_64 [openshift/origin-keepalived-ipfailover] --> Processing Dependency: libnetsnmpagent.so.31()(64bit) for package: keepalived-1.3.5-1.el7.x86_64 [openshift/origin-keepalived-ipfailover] --> Processing Dependency: libnetsnmp.so.31()(64bit) for package: keepalived-1.3.5-1.el7.x86_64 [openshift/origin-keepalived-ipfailover] ---> Package 
net-tools.x86_64 0:2.0-0.22.20131004git.el7 will be installed [openshift/origin-keepalived-ipfailover] --> Running transaction check [openshift/origin-keepalived-ipfailover] ---> Package libnl3.x86_64 0:3.2.28-4.el7 will be installed [openshift/origin-keepalived-ipfailover] ---> Package net-snmp-agent-libs.x86_64 1:5.7.2-28.el7_4.1 will be installed [openshift/origin-keepalived-ipfailover] --> Processing Dependency: libsensors.so.4()(64bit) for package: 1:net-snmp-agent-libs-5.7.2-28.el7_4.1.x86_64 [openshift/origin-keepalived-ipfailover] ---> Package net-snmp-libs.x86_64 1:5.7.2-28.el7_4.1 will be installed [openshift/origin-keepalived-ipfailover] --> Running transaction check [openshift/origin-keepalived-ipfailover] ---> Package lm_sensors-libs.x86_64 0:3.4.0-4.20160601gitf9185e5.el7 will be installed [openshift/origin-keepalived-ipfailover] --> Finished Dependency Resolution [openshift/origin-keepalived-ipfailover] Dependencies Resolved [openshift/origin-keepalived-ipfailover] ================================================================================ [openshift/origin-keepalived-ipfailover] Package Arch Version Repository [openshift/origin-keepalived-ipfailover] Size [openshift/origin-keepalived-ipfailover] ================================================================================ [openshift/origin-keepalived-ipfailover] Installing: [openshift/origin-keepalived-ipfailover] keepalived x86_64 1.3.5-1.el7 base 327 k [openshift/origin-keepalived-ipfailover] net-tools x86_64 2.0-0.22.20131004git.el7 base 305 k [openshift/origin-keepalived-ipfailover] Installing for dependencies: [openshift/origin-keepalived-ipfailover] libnl3 x86_64 3.2.28-4.el7 base 278 k [openshift/origin-keepalived-ipfailover] lm_sensors-libs x86_64 3.4.0-4.20160601gitf9185e5.el7 base 41 k [openshift/origin-keepalived-ipfailover] net-snmp-agent-libs x86_64 1:5.7.2-28.el7_4.1 updates 704 k [openshift/origin-keepalived-ipfailover] net-snmp-libs x86_64 1:5.7.2-28.el7_4.1 updates 748 k [openshift/origin-keepalived-ipfailover] Transaction Summary [openshift/origin-keepalived-ipfailover] ================================================================================ [openshift/origin-keepalived-ipfailover] Install 2 Packages (+4 Dependent packages) [openshift/origin-keepalived-ipfailover] Total download size: 2.3 M [openshift/origin-keepalived-ipfailover] Installed size: 7.7 M [openshift/origin-keepalived-ipfailover] Downloading packages: [openshift/origin-keepalived-ipfailover] -------------------------------------------------------------------------------- [openshift/origin-keepalived-ipfailover] Total 3.5 MB/s | 2.3 MB 00:00 [openshift/origin-keepalived-ipfailover] Running transaction check [openshift/origin-keepalived-ipfailover] Running transaction test [openshift/origin-keepalived-ipfailover] Transaction test succeeded [openshift/origin-keepalived-ipfailover] Running transaction [openshift/origin-keepalived-ipfailover] Installing : 1:net-snmp-libs-5.7.2-28.el7_4.1.x86_64 1/6 [openshift/origin-keepalived-ipfailover] Installing : lm_sensors-libs-3.4.0-4.20160601gitf9185e5.el7.x86_64 2/6 [openshift/origin-keepalived-ipfailover] Installing : 1:net-snmp-agent-libs-5.7.2-28.el7_4.1.x86_64 3/6 [openshift/origin-keepalived-ipfailover] Installing : libnl3-3.2.28-4.el7.x86_64 4/6 [openshift/origin-keepalived-ipfailover] Installing : keepalived-1.3.5-1.el7.x86_64 5/6 [openshift/origin-keepalived-ipfailover] Installing : net-tools-2.0-0.22.20131004git.el7.x86_64 6/6 [openshift/origin-keepalived-ipfailover] Verifying : 
net-tools-2.0-0.22.20131004git.el7.x86_64 1/6 [openshift/origin-keepalived-ipfailover] Verifying : 1:net-snmp-libs-5.7.2-28.el7_4.1.x86_64 2/6 [openshift/origin-keepalived-ipfailover] Verifying : libnl3-3.2.28-4.el7.x86_64 3/6 [openshift/origin-keepalived-ipfailover] Verifying : 1:net-snmp-agent-libs-5.7.2-28.el7_4.1.x86_64 4/6 [openshift/origin-keepalived-ipfailover] Verifying : lm_sensors-libs-3.4.0-4.20160601gitf9185e5.el7.x86_64 5/6 [openshift/origin-keepalived-ipfailover] Verifying : keepalived-1.3.5-1.el7.x86_64 6/6 [openshift/origin-keepalived-ipfailover] Installed: [openshift/origin-keepalived-ipfailover] keepalived.x86_64 0:1.3.5-1.el7 net-tools.x86_64 0:2.0-0.22.20131004git.el7 [openshift/origin-keepalived-ipfailover] Dependency Installed: [openshift/origin-keepalived-ipfailover] libnl3.x86_64 0:3.2.28-4.el7 [openshift/origin-keepalived-ipfailover] lm_sensors-libs.x86_64 0:3.4.0-4.20160601gitf9185e5.el7 [openshift/origin-keepalived-ipfailover] net-snmp-agent-libs.x86_64 1:5.7.2-28.el7_4.1 [openshift/origin-keepalived-ipfailover] net-snmp-libs.x86_64 1:5.7.2-28.el7_4.1 [openshift/origin-keepalived-ipfailover] Complete! [openshift/origin-keepalived-ipfailover] Loaded plugins: fastestmirror, ovl [openshift/origin-keepalived-ipfailover] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-keepalived-ipfailover] Cleaning up everything [openshift/origin-keepalived-ipfailover] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-keepalived-ipfailover] Cleaning up list of fastest mirrors [openshift/origin-keepalived-ipfailover] --> COPY . /var/lib/ipfailover/keepalived/ [openshift/origin-keepalived-ipfailover] --> LABEL io.k8s.display-name="OpenShift Origin IP Failover" io.k8s.description="This is a component of OpenShift Origin and runs a clustered keepalived instance across multiple hosts to allow highly available IP addresses." io.openshift.tags="openshift,ha,ip,failover" [openshift/origin-keepalived-ipfailover] --> EXPOSE 1985 [openshift/origin-keepalived-ipfailover] --> WORKDIR /var/lib/ipfailover [openshift/origin-keepalived-ipfailover] --> ENTRYPOINT ["/var/lib/ipfailover/keepalived/monitor.sh"] [openshift/origin-keepalived-ipfailover] --> Committing changes to openshift/origin-keepalived-ipfailover:5eda3fa ... 
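The haproxy-router RUN step a few lines above ends with setcap (the origin layer does the same for /usr/bin/openshift): the router image later drops to USER 1001 yet must bind the privileged ports 80 and 443 it exposes, so the binary is granted just that one capability instead of running as root. A short sketch of the grant and how one could confirm it inside the image:
# grant only the capability needed to bind ports below 1024, instead of running as root
setcap 'cap_net_bind_service=ep' /usr/sbin/haproxy
# read the file capability back; the exact output format varies across libcap versions
getcap /usr/sbin/haproxy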
[openshift/origin-keepalived-ipfailover] --> Tagged as openshift/origin-keepalived-ipfailover:latest [openshift/origin-keepalived-ipfailover] --> Done [openshift/node] --> FROM openshift/origin [openshift/node] --> COPY scripts/* /usr/local/bin/ [openshift/node] --> COPY system-container/system-container-wrapper.sh /usr/local/bin/ [openshift/node] --> COPY system-container/manifest.json system-container/config.json.template system-container/service.template system-container/tmpfiles.template /exports/ [openshift/node] --> RUN INSTALL_PKGS="origin-sdn-ovs libmnl libnetfilter_conntrack conntrack-tools openvswitch libnfnetlink iptables iproute bridge-utils procps-ng ethtool socat openssl binutils xz kmod-libs kmod sysvinit-tools device-mapper-libs dbus iscsi-initiator-utils bind-utils" && yum --enablerepo=origin-local-release install -y $INSTALL_PKGS && rpm -V $INSTALL_PKGS && yum clean all && mkdir -p /usr/lib/systemd/system/origin-node.service.d /usr/lib/systemd/system/docker.service.d [openshift/node] Loaded plugins: fastestmirror, ovl [openshift/node] Determining fastest mirrors [openshift/node] * base: mirror.vtti.vt.edu [openshift/node] * extras: mirror.wdc1.us.leaseweb.net [openshift/node] * updates: mirror.teklinks.com [openshift/node] Package libmnl-1.0.3-7.el7.x86_64 already installed and latest version [openshift/node] Package libnetfilter_conntrack-1.0.6-1.el7_3.x86_64 already installed and latest version [openshift/node] Package libnfnetlink-1.0.1-4.el7.x86_64 already installed and latest version [openshift/node] Package iptables-1.4.21-18.3.el7_4.x86_64 already installed and latest version [openshift/node] Package iproute-3.10.0-87.el7.x86_64 already installed and latest version [openshift/node] Package procps-ng-3.3.10-16.el7.x86_64 already installed and latest version [openshift/node] Package 2:ethtool-4.8-1.el7.x86_64 already installed and latest version [openshift/node] Package socat-1.7.3.2-2.el7.x86_64 already installed and latest version [openshift/node] Package binutils-2.25.1-32.base.el7_4.2.x86_64 already installed and latest version [openshift/node] Package xz-5.2.2-1.el7.x86_64 already installed and latest version [openshift/node] Package kmod-libs-20-15.el7_4.7.x86_64 already installed and latest version [openshift/node] Package kmod-20-15.el7_4.7.x86_64 already installed and latest version [openshift/node] Package sysvinit-tools-2.88-14.dsf.el7.x86_64 already installed and latest version [openshift/node] Package 7:device-mapper-libs-1.02.140-8.el7.x86_64 already installed and latest version [openshift/node] Package 1:dbus-1.6.12-17.el7.x86_64 already installed and latest version [openshift/node] Resolving Dependencies [openshift/node] --> Running transaction check [openshift/node] ---> Package bind-utils.x86_64 32:9.9.4-51.el7_4.2 will be installed [openshift/node] --> Processing Dependency: bind-libs = 32:9.9.4-51.el7_4.2 for package: 32:bind-utils-9.9.4-51.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: liblwres.so.90()(64bit) for package: 32:bind-utils-9.9.4-51.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: libisccfg.so.90()(64bit) for package: 32:bind-utils-9.9.4-51.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: libisccc.so.90()(64bit) for package: 32:bind-utils-9.9.4-51.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: libisc.so.95()(64bit) for package: 32:bind-utils-9.9.4-51.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: libdns.so.100()(64bit) for package: 32:bind-utils-9.9.4-51.el7_4.2.x86_64 
[openshift/node] --> Processing Dependency: libbind9.so.90()(64bit) for package: 32:bind-utils-9.9.4-51.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: libGeoIP.so.1()(64bit) for package: 32:bind-utils-9.9.4-51.el7_4.2.x86_64 [openshift/node] ---> Package bridge-utils.x86_64 0:1.5-9.el7 will be installed [openshift/node] ---> Package conntrack-tools.x86_64 0:1.4.4-3.el7_3 will be installed [openshift/node] --> Processing Dependency: libnetfilter_cttimeout.so.1(LIBNETFILTER_CTTIMEOUT_1.1)(64bit) for package: conntrack-tools-1.4.4-3.el7_3.x86_64 [openshift/node] --> Processing Dependency: libnetfilter_cttimeout.so.1(LIBNETFILTER_CTTIMEOUT_1.0)(64bit) for package: conntrack-tools-1.4.4-3.el7_3.x86_64 [openshift/node] --> Processing Dependency: libnetfilter_cthelper.so.0(LIBNETFILTER_CTHELPER_1.0)(64bit) for package: conntrack-tools-1.4.4-3.el7_3.x86_64 [openshift/node] --> Processing Dependency: libnetfilter_queue.so.1()(64bit) for package: conntrack-tools-1.4.4-3.el7_3.x86_64 [openshift/node] --> Processing Dependency: libnetfilter_cttimeout.so.1()(64bit) for package: conntrack-tools-1.4.4-3.el7_3.x86_64 [openshift/node] --> Processing Dependency: libnetfilter_cthelper.so.0()(64bit) for package: conntrack-tools-1.4.4-3.el7_3.x86_64 [openshift/node] ---> Package iscsi-initiator-utils.x86_64 0:6.2.0.874-4.el7 will be installed [openshift/node] --> Processing Dependency: iscsi-initiator-utils-iscsiuio >= 6.2.0.874-4.el7 for package: iscsi-initiator-utils-6.2.0.874-4.el7.x86_64 [openshift/node] ---> Package openssl.x86_64 1:1.0.2k-8.el7 will be installed [openshift/node] ---> Package openvswitch.x86_64 0:2.7.0-1.el7 will be installed [openshift/node] ---> Package origin-sdn-ovs.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/node] --> Processing Dependency: origin-node = 3.7.2-1.5.5eda3fa for package: origin-sdn-ovs-3.7.2-1.5.5eda3fa.x86_64 [openshift/node] --> Running transaction check [openshift/node] ---> Package GeoIP.x86_64 0:1.5.0-11.el7 will be installed [openshift/node] ---> Package bind-libs.x86_64 32:9.9.4-51.el7_4.2 will be installed [openshift/node] ---> Package iscsi-initiator-utils-iscsiuio.x86_64 0:6.2.0.874-4.el7 will be installed [openshift/node] ---> Package libnetfilter_cthelper.x86_64 0:1.0.0-9.el7 will be installed [openshift/node] ---> Package libnetfilter_cttimeout.x86_64 0:1.0.0-6.el7 will be installed [openshift/node] ---> Package libnetfilter_queue.x86_64 0:1.0.2-2.el7_2 will be installed [openshift/node] ---> Package origin-node.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/node] --> Processing Dependency: tuned-profiles-origin-node = 3.7.2-1.5.5eda3fa for package: origin-node-3.7.2-1.5.5eda3fa.x86_64 [openshift/node] --> Processing Dependency: docker >= 1.12 for package: origin-node-3.7.2-1.5.5eda3fa.x86_64 [openshift/node] --> Processing Dependency: nfs-utils for package: origin-node-3.7.2-1.5.5eda3fa.x86_64 [openshift/node] --> Running transaction check [openshift/node] ---> Package docker.x86_64 2:1.13.1-53.git774336d.el7.centos will be installed [openshift/node] --> Processing Dependency: docker-common = 2:1.13.1-53.git774336d.el7.centos for package: 2:docker-1.13.1-53.git774336d.el7.centos.x86_64 [openshift/node] --> Processing Dependency: docker-client = 2:1.13.1-53.git774336d.el7.centos for package: 2:docker-1.13.1-53.git774336d.el7.centos.x86_64 [openshift/node] --> Processing Dependency: libseccomp.so.2()(64bit) for package: 2:docker-1.13.1-53.git774336d.el7.centos.x86_64 [openshift/node] ---> Package nfs-utils.x86_64 
1:1.3.0-0.48.el7_4.2 will be installed [openshift/node] --> Processing Dependency: libtirpc >= 0.2.4-0.7 for package: 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: gssproxy >= 0.7.0-3 for package: 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: rpcbind for package: 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: quota for package: 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: libnfsidmap for package: 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: libevent for package: 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: keyutils for package: 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: libtirpc.so.1()(64bit) for package: 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: libnfsidmap.so.0()(64bit) for package: 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 [openshift/node] --> Processing Dependency: libevent-2.0.so.5()(64bit) for package: 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 [openshift/node] ---> Package tuned-profiles-origin-node.x86_64 0:3.7.2-1.5.5eda3fa will be installed [openshift/node] --> Processing Dependency: tuned >= 2.3 for package: tuned-profiles-origin-node-3.7.2-1.5.5eda3fa.x86_64 [openshift/node] --> Running transaction check [openshift/node] ---> Package docker-client.x86_64 2:1.13.1-53.git774336d.el7.centos will be installed [openshift/node] ---> Package docker-common.x86_64 2:1.13.1-53.git774336d.el7.centos will be installed [openshift/node] --> Processing Dependency: skopeo-containers >= 1:0.1.26-2 for package: 2:docker-common-1.13.1-53.git774336d.el7.centos.x86_64 [openshift/node] --> Processing Dependency: oci-umount >= 2:2.0.0-1 for package: 2:docker-common-1.13.1-53.git774336d.el7.centos.x86_64 [openshift/node] --> Processing Dependency: oci-systemd-hook >= 1:0.1.4-9 for package: 2:docker-common-1.13.1-53.git774336d.el7.centos.x86_64 [openshift/node] --> Processing Dependency: oci-register-machine >= 1:0-5.13 for package: 2:docker-common-1.13.1-53.git774336d.el7.centos.x86_64 [openshift/node] --> Processing Dependency: lvm2 >= 2.02.112 for package: 2:docker-common-1.13.1-53.git774336d.el7.centos.x86_64 [openshift/node] --> Processing Dependency: container-storage-setup >= 0.7.0-1 for package: 2:docker-common-1.13.1-53.git774336d.el7.centos.x86_64 [openshift/node] --> Processing Dependency: container-selinux >= 2:2.21-2 for package: 2:docker-common-1.13.1-53.git774336d.el7.centos.x86_64 [openshift/node] ---> Package gssproxy.x86_64 0:0.7.0-4.el7 will be installed [openshift/node] --> Processing Dependency: libverto-module-base for package: gssproxy-0.7.0-4.el7.x86_64 [openshift/node] --> Processing Dependency: libref_array.so.1(REF_ARRAY_0.1.1)(64bit) for package: gssproxy-0.7.0-4.el7.x86_64 [openshift/node] --> Processing Dependency: libini_config.so.3(INI_CONFIG_1.2.0)(64bit) for package: gssproxy-0.7.0-4.el7.x86_64 [openshift/node] --> Processing Dependency: libini_config.so.3(INI_CONFIG_1.1.0)(64bit) for package: gssproxy-0.7.0-4.el7.x86_64 [openshift/node] --> Processing Dependency: libref_array.so.1()(64bit) for package: gssproxy-0.7.0-4.el7.x86_64 [openshift/node] --> Processing Dependency: libini_config.so.3()(64bit) for package: gssproxy-0.7.0-4.el7.x86_64 [openshift/node] --> Processing Dependency: libcollection.so.2()(64bit) for package: gssproxy-0.7.0-4.el7.x86_64 [openshift/node] --> Processing 
Dependency: libbasicobjects.so.0()(64bit) for package: gssproxy-0.7.0-4.el7.x86_64 [openshift/node] ---> Package keyutils.x86_64 0:1.5.8-3.el7 will be installed [openshift/node] ---> Package libevent.x86_64 0:2.0.21-4.el7 will be installed [openshift/node] ---> Package libnfsidmap.x86_64 0:0.25-17.el7 will be installed [openshift/node] ---> Package libseccomp.x86_64 0:2.3.1-3.el7 will be installed [openshift/node] ---> Package libtirpc.x86_64 0:0.2.4-0.10.el7 will be installed [openshift/node] ---> Package quota.x86_64 1:4.01-14.el7 will be installed [openshift/node] --> Processing Dependency: quota-nls = 1:4.01-14.el7 for package: 1:quota-4.01-14.el7.x86_64 [openshift/node] --> Processing Dependency: tcp_wrappers for package: 1:quota-4.01-14.el7.x86_64 [openshift/node] ---> Package rpcbind.x86_64 0:0.2.0-42.el7 will be installed [openshift/node] ---> Package tuned.noarch 0:2.8.0-5.el7_4.2 will be installed [openshift/node] --> Processing Dependency: virt-what for package: tuned-2.8.0-5.el7_4.2.noarch [openshift/node] --> Processing Dependency: virt-what for package: tuned-2.8.0-5.el7_4.2.noarch [openshift/node] --> Processing Dependency: python-schedutils for package: tuned-2.8.0-5.el7_4.2.noarch [openshift/node] --> Processing Dependency: python-pyudev for package: tuned-2.8.0-5.el7_4.2.noarch [openshift/node] --> Processing Dependency: python-perf for package: tuned-2.8.0-5.el7_4.2.noarch [openshift/node] --> Processing Dependency: python-linux-procfs for package: tuned-2.8.0-5.el7_4.2.noarch [openshift/node] --> Processing Dependency: python-decorator for package: tuned-2.8.0-5.el7_4.2.noarch [openshift/node] --> Processing Dependency: python-configobj for package: tuned-2.8.0-5.el7_4.2.noarch [openshift/node] --> Processing Dependency: polkit for package: tuned-2.8.0-5.el7_4.2.noarch [openshift/node] --> Running transaction check [openshift/node] ---> Package container-selinux.noarch 2:2.42-1.gitad8f0f7.el7 will be installed [openshift/node] --> Processing Dependency: selinux-policy-targeted >= 3.13.1-39 for package: 2:container-selinux-2.42-1.gitad8f0f7.el7.noarch [openshift/node] --> Processing Dependency: selinux-policy-base >= 3.13.1-39 for package: 2:container-selinux-2.42-1.gitad8f0f7.el7.noarch [openshift/node] --> Processing Dependency: selinux-policy >= 3.13.1-39 for package: 2:container-selinux-2.42-1.gitad8f0f7.el7.noarch [openshift/node] --> Processing Dependency: policycoreutils >= 2.5-11 for package: 2:container-selinux-2.42-1.gitad8f0f7.el7.noarch [openshift/node] --> Processing Dependency: policycoreutils-python for package: 2:container-selinux-2.42-1.gitad8f0f7.el7.noarch [openshift/node] --> Processing Dependency: libselinux-utils for package: 2:container-selinux-2.42-1.gitad8f0f7.el7.noarch [openshift/node] ---> Package container-storage-setup.noarch 0:0.8.0-3.git1d27ecf.el7 will be installed [openshift/node] ---> Package libbasicobjects.x86_64 0:0.1.1-27.el7 will be installed [openshift/node] ---> Package libcollection.x86_64 0:0.6.2-27.el7 will be installed [openshift/node] ---> Package libini_config.x86_64 0:1.3.0-27.el7 will be installed [openshift/node] --> Processing Dependency: libpath_utils.so.1(PATH_UTILS_0.2.1)(64bit) for package: libini_config-1.3.0-27.el7.x86_64 [openshift/node] --> Processing Dependency: libpath_utils.so.1()(64bit) for package: libini_config-1.3.0-27.el7.x86_64 [openshift/node] ---> Package libref_array.x86_64 0:0.1.5-27.el7 will be installed [openshift/node] ---> Package libverto-libevent.x86_64 0:0.2.5-4.el7 will be installed 
[openshift/node] ---> Package lvm2.x86_64 7:2.02.171-8.el7 will be installed [openshift/node] --> Processing Dependency: lvm2-libs = 7:2.02.171-8.el7 for package: 7:lvm2-2.02.171-8.el7.x86_64 [openshift/node] --> Processing Dependency: liblvm2app.so.2.2(Base)(64bit) for package: 7:lvm2-2.02.171-8.el7.x86_64 [openshift/node] --> Processing Dependency: libdevmapper-event.so.1.02(Base)(64bit) for package: 7:lvm2-2.02.171-8.el7.x86_64 [openshift/node] --> Processing Dependency: liblvm2app.so.2.2()(64bit) for package: 7:lvm2-2.02.171-8.el7.x86_64 [openshift/node] --> Processing Dependency: libdevmapper-event.so.1.02()(64bit) for package: 7:lvm2-2.02.171-8.el7.x86_64 [openshift/node] ---> Package oci-register-machine.x86_64 1:0-6.git2b44233.el7 will be installed [openshift/node] ---> Package oci-systemd-hook.x86_64 1:0.1.15-2.gitc04483d.el7 will be installed [openshift/node] --> Processing Dependency: libyajl.so.2()(64bit) for package: 1:oci-systemd-hook-0.1.15-2.gitc04483d.el7.x86_64 [openshift/node] ---> Package oci-umount.x86_64 2:2.3.3-3.gite3c9055.el7 will be installed [openshift/node] ---> Package polkit.x86_64 0:0.112-12.el7_3 will be installed [openshift/node] --> Processing Dependency: polkit-pkla-compat for package: polkit-0.112-12.el7_3.x86_64 [openshift/node] --> Processing Dependency: libmozjs-17.0.so(mozjs_17.0)(64bit) for package: polkit-0.112-12.el7_3.x86_64 [openshift/node] --> Processing Dependency: libmozjs-17.0.so()(64bit) for package: polkit-0.112-12.el7_3.x86_64 [openshift/node] ---> Package python-configobj.noarch 0:4.7.2-7.el7 will be installed [openshift/node] ---> Package python-decorator.noarch 0:3.4.0-3.el7 will be installed [openshift/node] ---> Package python-linux-procfs.noarch 0:0.4.9-3.el7 will be installed [openshift/node] ---> Package python-perf.x86_64 0:3.10.0-693.21.1.el7 will be installed [openshift/node] ---> Package python-pyudev.noarch 0:0.15-9.el7 will be installed [openshift/node] ---> Package python-schedutils.x86_64 0:0.4-6.el7 will be installed [openshift/node] ---> Package quota-nls.noarch 1:4.01-14.el7 will be installed [openshift/node] ---> Package skopeo-containers.x86_64 1:0.1.28-1.git0270e56.el7 will be installed [openshift/node] ---> Package tcp_wrappers.x86_64 0:7.6-77.el7 will be installed [openshift/node] ---> Package virt-what.x86_64 0:1.13-10.el7 will be installed [openshift/node] --> Processing Dependency: dmidecode for package: virt-what-1.13-10.el7.x86_64 [openshift/node] --> Running transaction check [openshift/node] ---> Package device-mapper-event-libs.x86_64 7:1.02.140-8.el7 will be installed [openshift/node] ---> Package dmidecode.x86_64 1:3.0-5.el7 will be installed [openshift/node] ---> Package libpath_utils.x86_64 0:0.2.1-27.el7 will be installed [openshift/node] ---> Package libselinux-utils.x86_64 0:2.5-11.el7 will be installed [openshift/node] ---> Package lvm2-libs.x86_64 7:2.02.171-8.el7 will be installed [openshift/node] --> Processing Dependency: device-mapper-event = 7:1.02.140-8.el7 for package: 7:lvm2-libs-2.02.171-8.el7.x86_64 [openshift/node] ---> Package mozjs17.x86_64 0:17.0.0-19.el7 will be installed [openshift/node] ---> Package policycoreutils.x86_64 0:2.5-17.1.el7 will be installed [openshift/node] ---> Package policycoreutils-python.x86_64 0:2.5-17.1.el7 will be installed [openshift/node] --> Processing Dependency: setools-libs >= 3.3.8-1 for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: libsemanage-python >= 2.5-5 for package: 
policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: audit-libs-python >= 2.1.3-4 for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: python-IPy for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: libselinux-python for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: libqpol.so.1(VERS_1.4)(64bit) for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: libqpol.so.1(VERS_1.2)(64bit) for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: libcgroup for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: libapol.so.4(VERS_4.0)(64bit) for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: checkpolicy for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: libqpol.so.1()(64bit) for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] --> Processing Dependency: libapol.so.4()(64bit) for package: policycoreutils-python-2.5-17.1.el7.x86_64 [openshift/node] ---> Package polkit-pkla-compat.x86_64 0:0.1-4.el7 will be installed [openshift/node] ---> Package selinux-policy.noarch 0:3.13.1-166.el7_4.9 will be installed [openshift/node] ---> Package selinux-policy-targeted.noarch 0:3.13.1-166.el7_4.9 will be installed [openshift/node] ---> Package yajl.x86_64 0:2.0.4-4.el7 will be installed [openshift/node] --> Running transaction check [openshift/node] ---> Package audit-libs-python.x86_64 0:2.7.6-3.el7 will be installed [openshift/node] ---> Package checkpolicy.x86_64 0:2.5-4.el7 will be installed [openshift/node] ---> Package device-mapper-event.x86_64 7:1.02.140-8.el7 will be installed [openshift/node] ---> Package libcgroup.x86_64 0:0.41-13.el7 will be installed [openshift/node] ---> Package libselinux-python.x86_64 0:2.5-11.el7 will be installed [openshift/node] ---> Package libsemanage-python.x86_64 0:2.5-8.el7 will be installed [openshift/node] ---> Package python-IPy.noarch 0:0.75-6.el7 will be installed [openshift/node] ---> Package setools-libs.x86_64 0:3.3.8-1.1.el7 will be installed [openshift/node] --> Finished Dependency Resolution [openshift/node] Dependencies Resolved [openshift/node] ================================================================================ [openshift/node] Package Arch Version Repository [openshift/node] Size [openshift/node] ================================================================================ [openshift/node] Installing: [openshift/node] bind-utils x86_64 32:9.9.4-51.el7_4.2 updates 203 k [openshift/node] bridge-utils x86_64 1.5-9.el7 base 32 k [openshift/node] conntrack-tools x86_64 1.4.4-3.el7_3 base 186 k [openshift/node] iscsi-initiator-utils x86_64 6.2.0.874-4.el7 base 420 k [openshift/node] openssl x86_64 1:1.0.2k-8.el7 base 492 k [openshift/node] openvswitch x86_64 2.7.0-1.el7 cbs-paas7-openshift-multiarch-el7-build [openshift/node] 2.5 M [openshift/node] origin-sdn-ovs x86_64 3.7.2-1.5.5eda3fa origin-local-release [openshift/node] 3.4 M [openshift/node] Installing for dependencies: [openshift/node] GeoIP x86_64 1.5.0-11.el7 base 1.1 M [openshift/node] audit-libs-python x86_64 2.7.6-3.el7 base 73 k [openshift/node] bind-libs x86_64 32:9.9.4-51.el7_4.2 updates 1.0 M [openshift/node] checkpolicy x86_64 2.5-4.el7 base 290 k [openshift/node] 
container-selinux noarch 2:2.42-1.gitad8f0f7.el7 extras 32 k [openshift/node] container-storage-setup noarch 0.8.0-3.git1d27ecf.el7 extras 33 k [openshift/node] device-mapper-event x86_64 7:1.02.140-8.el7 base 180 k [openshift/node] device-mapper-event-libs [openshift/node] x86_64 7:1.02.140-8.el7 base 179 k [openshift/node] dmidecode x86_64 1:3.0-5.el7 base 88 k [openshift/node] docker x86_64 2:1.13.1-53.git774336d.el7.centos extras 16 M [openshift/node] docker-client x86_64 2:1.13.1-53.git774336d.el7.centos extras 3.7 M [openshift/node] docker-common x86_64 2:1.13.1-53.git774336d.el7.centos extras 86 k [openshift/node] gssproxy x86_64 0.7.0-4.el7 base 105 k [openshift/node] iscsi-initiator-utils-iscsiuio [openshift/node] x86_64 6.2.0.874-4.el7 base 90 k [openshift/node] keyutils x86_64 1.5.8-3.el7 base 54 k [openshift/node] libbasicobjects x86_64 0.1.1-27.el7 base 25 k [openshift/node] libcgroup x86_64 0.41-13.el7 base 65 k [openshift/node] libcollection x86_64 0.6.2-27.el7 base 41 k [openshift/node] libevent x86_64 2.0.21-4.el7 base 214 k [openshift/node] libini_config x86_64 1.3.0-27.el7 base 63 k [openshift/node] libnetfilter_cthelper x86_64 1.0.0-9.el7 base 18 k [openshift/node] libnetfilter_cttimeout x86_64 1.0.0-6.el7 base 18 k [openshift/node] libnetfilter_queue x86_64 1.0.2-2.el7_2 base 23 k [openshift/node] libnfsidmap x86_64 0.25-17.el7 base 49 k [openshift/node] libpath_utils x86_64 0.2.1-27.el7 base 27 k [openshift/node] libref_array x86_64 0.1.5-27.el7 base 26 k [openshift/node] libseccomp x86_64 2.3.1-3.el7 base 56 k [openshift/node] libselinux-python x86_64 2.5-11.el7 base 234 k [openshift/node] libselinux-utils x86_64 2.5-11.el7 base 151 k [openshift/node] libsemanage-python x86_64 2.5-8.el7 base 104 k [openshift/node] libtirpc x86_64 0.2.4-0.10.el7 base 88 k [openshift/node] libverto-libevent x86_64 0.2.5-4.el7 base 8.9 k [openshift/node] lvm2 x86_64 7:2.02.171-8.el7 base 1.3 M [openshift/node] lvm2-libs x86_64 7:2.02.171-8.el7 base 1.0 M [openshift/node] mozjs17 x86_64 17.0.0-19.el7 base 1.4 M [openshift/node] nfs-utils x86_64 1:1.3.0-0.48.el7_4.2 updates 399 k [openshift/node] oci-register-machine x86_64 1:0-6.git2b44233.el7 extras 1.1 M [openshift/node] oci-systemd-hook x86_64 1:0.1.15-2.gitc04483d.el7 extras 33 k [openshift/node] oci-umount x86_64 2:2.3.3-3.gite3c9055.el7 extras 32 k [openshift/node] origin-node x86_64 3.7.2-1.5.5eda3fa origin-local-release [openshift/node] 6.8 k [openshift/node] policycoreutils x86_64 2.5-17.1.el7 base 858 k [openshift/node] policycoreutils-python x86_64 2.5-17.1.el7 base 446 k [openshift/node] polkit x86_64 0.112-12.el7_3 base 167 k [openshift/node] polkit-pkla-compat x86_64 0.1-4.el7 base 39 k [openshift/node] python-IPy noarch 0.75-6.el7 base 32 k [openshift/node] python-configobj noarch 4.7.2-7.el7 base 117 k [openshift/node] python-decorator noarch 3.4.0-3.el7 base 27 k [openshift/node] python-linux-procfs noarch 0.4.9-3.el7 base 33 k [openshift/node] python-perf x86_64 3.10.0-693.21.1.el7 updates 5.2 M [openshift/node] python-pyudev noarch 0.15-9.el7 base 55 k [openshift/node] python-schedutils x86_64 0.4-6.el7 base 21 k [openshift/node] quota x86_64 1:4.01-14.el7 base 179 k [openshift/node] quota-nls noarch 1:4.01-14.el7 base 90 k [openshift/node] rpcbind x86_64 0.2.0-42.el7 base 59 k [openshift/node] selinux-policy noarch 3.13.1-166.el7_4.9 updates 437 k [openshift/node] selinux-policy-targeted noarch 3.13.1-166.el7_4.9 updates 6.5 M [openshift/node] setools-libs x86_64 3.3.8-1.1.el7 base 612 k [openshift/node] 
skopeo-containers x86_64 1:0.1.28-1.git0270e56.el7 extras 13 k [openshift/node] tcp_wrappers x86_64 7.6-77.el7 base 78 k [openshift/node] tuned noarch 2.8.0-5.el7_4.2 updates 234 k [openshift/node] tuned-profiles-origin-node [openshift/node] x86_64 3.7.2-1.5.5eda3fa origin-local-release [openshift/node] 11 k [openshift/node] virt-what x86_64 1.13-10.el7 base 28 k [openshift/node] yajl x86_64 2.0.4-4.el7 base 39 k [openshift/node] Transaction Summary [openshift/node] ================================================================================ [openshift/node] Install 7 Packages (+63 Dependent packages) [openshift/node] Total download size: 52 M [openshift/node] Installed size: 151 M [openshift/node] Downloading packages: [openshift/node] -------------------------------------------------------------------------------- [openshift/node] Total 16 MB/s | 52 MB 00:03 [openshift/node] Running transaction check [openshift/node] Running transaction test [openshift/node] Transaction test succeeded [openshift/node] Running transaction [openshift/node] Installing : 7:device-mapper-event-libs-1.02.140-8.el7.x86_64 1/70 [openshift/node] Installing : GeoIP-1.5.0-11.el7.x86_64 2/70 [openshift/node] Installing : libcollection-0.6.2-27.el7.x86_64 3/70 [openshift/node] Installing : libevent-2.0.21-4.el7.x86_64 4/70 [openshift/node] Installing : libbasicobjects-0.1.1-27.el7.x86_64 5/70 [openshift/node] Installing : libref_array-0.1.5-27.el7.x86_64 6/70 [openshift/node] Installing : yajl-2.0.4-4.el7.x86_64 7/70 [openshift/node] Installing : libtirpc-0.2.4-0.10.el7.x86_64 8/70 [openshift/node] Installing : rpcbind-0.2.0-42.el7.x86_64 9/70 [openshift/node] Installing : libselinux-utils-2.5-11.el7.x86_64 10/70 [openshift/node] Installing : policycoreutils-2.5-17.1.el7.x86_64 11/70 [openshift/node] Installing : selinux-policy-3.13.1-166.el7_4.9.noarch 12/70 [openshift/node] Installing : selinux-policy-targeted-3.13.1-166.el7_4.9.noarch 13/70 [openshift/node] Installing : 1:oci-systemd-hook-0.1.15-2.gitc04483d.el7.x86_64 14/70 [openshift/node] Installing : 2:oci-umount-2.3.3-3.gite3c9055.el7.x86_64 15/70 [openshift/node] Installing : libverto-libevent-0.2.5-4.el7.x86_64 16/70 [openshift/node] Installing : 32:bind-libs-9.9.4-51.el7_4.2.x86_64 17/70 [openshift/node] Installing : 32:bind-utils-9.9.4-51.el7_4.2.x86_64 18/70 [openshift/node] Installing : 7:device-mapper-event-1.02.140-8.el7.x86_64 19/70 [openshift/node] Failed to get D-Bus connection: Operation not permitted [openshift/node] warning: %post(device-mapper-event-7:1.02.140-8.el7.x86_64) scriptlet failed, exit status 1 [openshift/node] Non-fatal POSTIN scriptlet failure in rpm package 7:device-mapper-event-1.02.140-8.el7.x86_64 [openshift/node] Installing : 7:lvm2-libs-2.02.171-8.el7.x86_64 20/70 [openshift/node] Installing : 7:lvm2-2.02.171-8.el7.x86_64 21/70 [openshift/node] Failed to get D-Bus connection: Operation not permitted [openshift/node] Failed to get D-Bus connection: Operation not permitted [openshift/node] Created symlink /etc/systemd/system/sysinit.target.wants/lvm2-lvmpolld.socket, pointing to /usr/lib/systemd/system/lvm2-lvmpolld.socket. 
[openshift/node] Failed to get D-Bus connection: Operation not permitted [openshift/node] warning: %post(lvm2-7:2.02.171-8.el7.x86_64) scriptlet failed, exit status 1 [openshift/node] Non-fatal POSTIN scriptlet failure in rpm package 7:lvm2-2.02.171-8.el7.x86_64 [openshift/node] Installing : container-storage-setup-0.8.0-3.git1d27ecf.el7.noarch 22/70 [openshift/node] Installing : iscsi-initiator-utils-iscsiuio-6.2.0.874-4.el7.x86_64 23/70 [openshift/node] Installing : iscsi-initiator-utils-6.2.0.874-4.el7.x86_64 24/70 [openshift/node] Installing : setools-libs-3.3.8-1.1.el7.x86_64 25/70 [openshift/node] Installing : python-configobj-4.7.2-7.el7.noarch 26/70 [openshift/node] Installing : libpath_utils-0.2.1-27.el7.x86_64 27/70 [openshift/node] Installing : libini_config-1.3.0-27.el7.x86_64 28/70 [openshift/node] Installing : gssproxy-0.7.0-4.el7.x86_64 29/70 [openshift/node] Installing : tcp_wrappers-7.6-77.el7.x86_64 30/70 [openshift/node] Installing : keyutils-1.5.8-3.el7.x86_64 31/70 [openshift/node] Installing : python-decorator-3.4.0-3.el7.noarch 32/70 [openshift/node] Installing : 1:dmidecode-3.0-5.el7.x86_64 33/70 [openshift/node] Installing : virt-what-1.13-10.el7.x86_64 34/70 [openshift/node] Installing : libselinux-python-2.5-11.el7.x86_64 35/70 [openshift/node] Installing : checkpolicy-2.5-4.el7.x86_64 36/70 [openshift/node] Installing : python-schedutils-0.4-6.el7.x86_64 37/70 [openshift/node] Installing : bridge-utils-1.5-9.el7.x86_64 38/70 [openshift/node] Installing : 1:oci-register-machine-0-6.git2b44233.el7.x86_64 39/70 [openshift/node] Installing : libnfsidmap-0.25-17.el7.x86_64 40/70 [openshift/node] Installing : libcgroup-0.41-13.el7.x86_64 41/70 [openshift/node] Installing : python-IPy-0.75-6.el7.noarch 42/70 [openshift/node] Installing : libnetfilter_queue-1.0.2-2.el7_2.x86_64 43/70 [openshift/node] Installing : audit-libs-python-2.7.6-3.el7.x86_64 44/70 [openshift/node] Installing : 1:openssl-1.0.2k-8.el7.x86_64 45/70 [openshift/node] Installing : openvswitch-2.7.0-1.el7.x86_64 46/70 [openshift/node] Installing : 1:quota-nls-4.01-14.el7.noarch 47/70 [openshift/node] Installing : 1:quota-4.01-14.el7.x86_64 48/70 [openshift/node] Installing : 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 49/70 [openshift/node] Installing : python-linux-procfs-0.4.9-3.el7.noarch 50/70 [openshift/node] Installing : libnetfilter_cthelper-1.0.0-9.el7.x86_64 51/70 [openshift/node] Installing : python-pyudev-0.15-9.el7.noarch 52/70 [openshift/node] Installing : libseccomp-2.3.1-3.el7.x86_64 53/70 [openshift/node] Installing : python-perf-3.10.0-693.21.1.el7.x86_64 54/70 [openshift/node] Installing : mozjs17-17.0.0-19.el7.x86_64 55/70 [openshift/node] Installing : polkit-0.112-12.el7_3.x86_64 56/70 [openshift/node] Installing : polkit-pkla-compat-0.1-4.el7.x86_64 57/70 [openshift/node] Installing : tuned-2.8.0-5.el7_4.2.noarch 58/70 [openshift/node] Installing : tuned-profiles-origin-node-3.7.2-1.5.5eda3fa.x86_64 59/70 [openshift/node] Cannot talk to Tuned daemon via DBus. Is Tuned daemon running? [openshift/node] Installing : libsemanage-python-2.5-8.el7.x86_64 60/70 [openshift/node] Installing : policycoreutils-python-2.5-17.1.el7.x86_64 61/70 [openshift/node] Installing : 2:container-selinux-2.42-1.gitad8f0f7.el7.noarch 62/70 [openshift/node] setsebool: SELinux is disabled. 
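Editor's note on the "Failed to get D-Bus connection: Operation not permitted" messages in this transaction: they come from RPM %post scriptlets (device-mapper-event, lvm2, later origin-node) that try to talk to systemd while the packages are being installed inside a container image build, where no systemd or D-Bus is running; yum records them as non-fatal scriptlet failures. A minimal sketch, not run by this job, for confirming which scriptlet is responsible, assuming a RHEL/CentOS 7 environment with the same packages available:
  # Show the %post scriptlet that calls systemctl during installation.
  rpm -q --scripts device-mapper-event
  # systemctl inside a plain container has no systemd/D-Bus to talk to,
  # which is the same failure mode seen in this transaction.
  docker run --rm centos:7 systemctl is-system-running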
[openshift/node] Installing : libnetfilter_cttimeout-1.0.0-6.el7.x86_64 63/70 [openshift/node] Installing : conntrack-tools-1.4.4-3.el7_3.x86_64 64/70 [openshift/node] Installing : 1:skopeo-containers-0.1.28-1.git0270e56.el7.x86_64 65/70 [openshift/node] Installing : 2:docker-common-1.13.1-53.git774336d.el7.centos.x86_64 66/70 [openshift/node] Installing : 2:docker-client-1.13.1-53.git774336d.el7.centos.x86_64 67/70 [openshift/node] Installing : 2:docker-1.13.1-53.git774336d.el7.centos.x86_64 68/70 [openshift/node] Installing : origin-node-3.7.2-1.5.5eda3fa.x86_64 69/70 [openshift/node] Failed to get D-Bus connection: Operation not permitted [openshift/node] Installing : origin-sdn-ovs-3.7.2-1.5.5eda3fa.x86_64 70/70 [openshift/node] Verifying : libselinux-utils-2.5-11.el7.x86_64 1/70 [openshift/node] Verifying : 1:quota-4.01-14.el7.x86_64 2/70 [openshift/node] Verifying : 1:skopeo-containers-0.1.28-1.git0270e56.el7.x86_64 3/70 [openshift/node] Verifying : libtirpc-0.2.4-0.10.el7.x86_64 4/70 [openshift/node] Verifying : libnetfilter_cttimeout-1.0.0-6.el7.x86_64 5/70 [openshift/node] Verifying : libsemanage-python-2.5-8.el7.x86_64 6/70 [openshift/node] Verifying : libini_config-1.3.0-27.el7.x86_64 7/70 [openshift/node] Verifying : yajl-2.0.4-4.el7.x86_64 8/70 [openshift/node] Verifying : openvswitch-2.7.0-1.el7.x86_64 9/70 [openshift/node] Verifying : mozjs17-17.0.0-19.el7.x86_64 10/70 [openshift/node] Verifying : python-perf-3.10.0-693.21.1.el7.x86_64 11/70 [openshift/node] Verifying : libseccomp-2.3.1-3.el7.x86_64 12/70 [openshift/node] Verifying : policycoreutils-python-2.5-17.1.el7.x86_64 13/70 [openshift/node] Verifying : python-pyudev-0.15-9.el7.noarch 14/70 [openshift/node] Verifying : libnetfilter_cthelper-1.0.0-9.el7.x86_64 15/70 [openshift/node] Verifying : python-linux-procfs-0.4.9-3.el7.noarch 16/70 [openshift/node] Verifying : conntrack-tools-1.4.4-3.el7_3.x86_64 17/70 [openshift/node] Verifying : 32:bind-libs-9.9.4-51.el7_4.2.x86_64 18/70 [openshift/node] Verifying : 7:device-mapper-event-1.02.140-8.el7.x86_64 19/70 [openshift/node] Verifying : 2:docker-client-1.13.1-53.git774336d.el7.centos.x86_64 20/70 [openshift/node] Verifying : 1:quota-nls-4.01-14.el7.noarch 21/70 [openshift/node] Verifying : selinux-policy-targeted-3.13.1-166.el7_4.9.noarch 22/70 [openshift/node] Verifying : 2:container-selinux-2.42-1.gitad8f0f7.el7.noarch 23/70 [openshift/node] Verifying : 7:device-mapper-event-libs-1.02.140-8.el7.x86_64 24/70 [openshift/node] Verifying : 1:openssl-1.0.2k-8.el7.x86_64 25/70 [openshift/node] Verifying : audit-libs-python-2.7.6-3.el7.x86_64 26/70 [openshift/node] Verifying : iscsi-initiator-utils-6.2.0.874-4.el7.x86_64 27/70 [openshift/node] Verifying : 1:nfs-utils-1.3.0-0.48.el7_4.2.x86_64 28/70 [openshift/node] Verifying : libnetfilter_queue-1.0.2-2.el7_2.x86_64 29/70 [openshift/node] Verifying : tuned-2.8.0-5.el7_4.2.noarch 30/70 [openshift/node] Verifying : python-IPy-0.75-6.el7.noarch 31/70 [openshift/node] Verifying : 2:docker-common-1.13.1-53.git774336d.el7.centos.x86_64 32/70 [openshift/node] Verifying : 2:docker-1.13.1-53.git774336d.el7.centos.x86_64 33/70 [openshift/node] Verifying : libcgroup-0.41-13.el7.x86_64 34/70 [openshift/node] Verifying : 32:bind-utils-9.9.4-51.el7_4.2.x86_64 35/70 [openshift/node] Verifying : gssproxy-0.7.0-4.el7.x86_64 36/70 [openshift/node] Verifying : policycoreutils-2.5-17.1.el7.x86_64 37/70 [openshift/node] Verifying : selinux-policy-3.13.1-166.el7_4.9.noarch 38/70 [openshift/node] Verifying : origin-sdn-ovs-3.7.2-1.5.5eda3fa.x86_64 
39/70 [openshift/node] Verifying : iscsi-initiator-utils-iscsiuio-6.2.0.874-4.el7.x86_64 40/70 [openshift/node] Verifying : libref_array-0.1.5-27.el7.x86_64 41/70 [openshift/node] Verifying : libnfsidmap-0.25-17.el7.x86_64 42/70 [openshift/node] Verifying : libbasicobjects-0.1.1-27.el7.x86_64 43/70 [openshift/node] Verifying : tuned-profiles-origin-node-3.7.2-1.5.5eda3fa.x86_64 44/70 [openshift/node] Verifying : libevent-2.0.21-4.el7.x86_64 45/70 [openshift/node] Verifying : 1:oci-register-machine-0-6.git2b44233.el7.x86_64 46/70 [openshift/node] Verifying : bridge-utils-1.5-9.el7.x86_64 47/70 [openshift/node] Verifying : libverto-libevent-0.2.5-4.el7.x86_64 48/70 [openshift/node] Verifying : 1:oci-systemd-hook-0.1.15-2.gitc04483d.el7.x86_64 49/70 [openshift/node] Verifying : python-schedutils-0.4-6.el7.x86_64 50/70 [openshift/node] Verifying : 7:lvm2-2.02.171-8.el7.x86_64 51/70 [openshift/node] Verifying : virt-what-1.13-10.el7.x86_64 52/70 [openshift/node] Verifying : checkpolicy-2.5-4.el7.x86_64 53/70 [openshift/node] Verifying : libselinux-python-2.5-11.el7.x86_64 54/70 [openshift/node] Verifying : 2:oci-umount-2.3.3-3.gite3c9055.el7.x86_64 55/70 [openshift/node] Verifying : 1:dmidecode-3.0-5.el7.x86_64 56/70 [openshift/node] Verifying : python-decorator-3.4.0-3.el7.noarch 57/70 [openshift/node] Verifying : 7:lvm2-libs-2.02.171-8.el7.x86_64 58/70 [openshift/node] Verifying : rpcbind-0.2.0-42.el7.x86_64 59/70 [openshift/node] Verifying : libcollection-0.6.2-27.el7.x86_64 60/70 [openshift/node] Verifying : GeoIP-1.5.0-11.el7.x86_64 61/70 [openshift/node] Verifying : keyutils-1.5.8-3.el7.x86_64 62/70 [openshift/node] Verifying : polkit-pkla-compat-0.1-4.el7.x86_64 63/70 [openshift/node] Verifying : origin-node-3.7.2-1.5.5eda3fa.x86_64 64/70 [openshift/node] Verifying : tcp_wrappers-7.6-77.el7.x86_64 65/70 [openshift/node] Verifying : libpath_utils-0.2.1-27.el7.x86_64 66/70 [openshift/node] Verifying : polkit-0.112-12.el7_3.x86_64 67/70 [openshift/node] Verifying : python-configobj-4.7.2-7.el7.noarch 68/70 [openshift/node] Verifying : container-storage-setup-0.8.0-3.git1d27ecf.el7.noarch 69/70 [openshift/node] Verifying : setools-libs-3.3.8-1.1.el7.x86_64 70/70 [openshift/node] Installed: [openshift/node] bind-utils.x86_64 32:9.9.4-51.el7_4.2 [openshift/node] bridge-utils.x86_64 0:1.5-9.el7 [openshift/node] conntrack-tools.x86_64 0:1.4.4-3.el7_3 [openshift/node] iscsi-initiator-utils.x86_64 0:6.2.0.874-4.el7 [openshift/node] openssl.x86_64 1:1.0.2k-8.el7 [openshift/node] openvswitch.x86_64 0:2.7.0-1.el7 [openshift/node] origin-sdn-ovs.x86_64 0:3.7.2-1.5.5eda3fa [openshift/node] Dependency Installed: [openshift/node] GeoIP.x86_64 0:1.5.0-11.el7 [openshift/node] audit-libs-python.x86_64 0:2.7.6-3.el7 [openshift/node] bind-libs.x86_64 32:9.9.4-51.el7_4.2 [openshift/node] checkpolicy.x86_64 0:2.5-4.el7 [openshift/node] container-selinux.noarch 2:2.42-1.gitad8f0f7.el7 [openshift/node] container-storage-setup.noarch 0:0.8.0-3.git1d27ecf.el7 [openshift/node] device-mapper-event.x86_64 7:1.02.140-8.el7 [openshift/node] device-mapper-event-libs.x86_64 7:1.02.140-8.el7 [openshift/node] dmidecode.x86_64 1:3.0-5.el7 [openshift/node] docker.x86_64 2:1.13.1-53.git774336d.el7.centos [openshift/node] docker-client.x86_64 2:1.13.1-53.git774336d.el7.centos [openshift/node] docker-common.x86_64 2:1.13.1-53.git774336d.el7.centos [openshift/node] gssproxy.x86_64 0:0.7.0-4.el7 [openshift/node] iscsi-initiator-utils-iscsiuio.x86_64 0:6.2.0.874-4.el7 [openshift/node] keyutils.x86_64 0:1.5.8-3.el7 [openshift/node] 
libbasicobjects.x86_64 0:0.1.1-27.el7 [openshift/node] libcgroup.x86_64 0:0.41-13.el7 [openshift/node] libcollection.x86_64 0:0.6.2-27.el7 [openshift/node] libevent.x86_64 0:2.0.21-4.el7 [openshift/node] libini_config.x86_64 0:1.3.0-27.el7 [openshift/node] libnetfilter_cthelper.x86_64 0:1.0.0-9.el7 [openshift/node] libnetfilter_cttimeout.x86_64 0:1.0.0-6.el7 [openshift/node] libnetfilter_queue.x86_64 0:1.0.2-2.el7_2 [openshift/node] libnfsidmap.x86_64 0:0.25-17.el7 [openshift/node] libpath_utils.x86_64 0:0.2.1-27.el7 [openshift/node] libref_array.x86_64 0:0.1.5-27.el7 [openshift/node] libseccomp.x86_64 0:2.3.1-3.el7 [openshift/node] libselinux-python.x86_64 0:2.5-11.el7 [openshift/node] libselinux-utils.x86_64 0:2.5-11.el7 [openshift/node] libsemanage-python.x86_64 0:2.5-8.el7 [openshift/node] libtirpc.x86_64 0:0.2.4-0.10.el7 [openshift/node] libverto-libevent.x86_64 0:0.2.5-4.el7 [openshift/node] lvm2.x86_64 7:2.02.171-8.el7 [openshift/node] lvm2-libs.x86_64 7:2.02.171-8.el7 [openshift/node] mozjs17.x86_64 0:17.0.0-19.el7 [openshift/node] nfs-utils.x86_64 1:1.3.0-0.48.el7_4.2 [openshift/node] oci-register-machine.x86_64 1:0-6.git2b44233.el7 [openshift/node] oci-systemd-hook.x86_64 1:0.1.15-2.gitc04483d.el7 [openshift/node] oci-umount.x86_64 2:2.3.3-3.gite3c9055.el7 [openshift/node] origin-node.x86_64 0:3.7.2-1.5.5eda3fa [openshift/node] policycoreutils.x86_64 0:2.5-17.1.el7 [openshift/node] policycoreutils-python.x86_64 0:2.5-17.1.el7 [openshift/node] polkit.x86_64 0:0.112-12.el7_3 [openshift/node] polkit-pkla-compat.x86_64 0:0.1-4.el7 [openshift/node] python-IPy.noarch 0:0.75-6.el7 [openshift/node] python-configobj.noarch 0:4.7.2-7.el7 [openshift/node] python-decorator.noarch 0:3.4.0-3.el7 [openshift/node] python-linux-procfs.noarch 0:0.4.9-3.el7 [openshift/node] python-perf.x86_64 0:3.10.0-693.21.1.el7 [openshift/node] python-pyudev.noarch 0:0.15-9.el7 [openshift/node] python-schedutils.x86_64 0:0.4-6.el7 [openshift/node] quota.x86_64 1:4.01-14.el7 [openshift/node] quota-nls.noarch 1:4.01-14.el7 [openshift/node] rpcbind.x86_64 0:0.2.0-42.el7 [openshift/node] selinux-policy.noarch 0:3.13.1-166.el7_4.9 [openshift/node] selinux-policy-targeted.noarch 0:3.13.1-166.el7_4.9 [openshift/node] setools-libs.x86_64 0:3.3.8-1.1.el7 [openshift/node] skopeo-containers.x86_64 1:0.1.28-1.git0270e56.el7 [openshift/node] tcp_wrappers.x86_64 0:7.6-77.el7 [openshift/node] tuned.noarch 0:2.8.0-5.el7_4.2 [openshift/node] tuned-profiles-origin-node.x86_64 0:3.7.2-1.5.5eda3fa [openshift/node] virt-what.x86_64 0:1.13-10.el7 [openshift/node] yajl.x86_64 0:2.0.4-4.el7 [openshift/node] Complete! [openshift/node] Loaded plugins: fastestmirror, ovl [openshift/node] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/node] Cleaning up everything [openshift/node] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/node] Cleaning up list of fastest mirrors [openshift/node] --> RUN if test -e /opt/cni/bin; then mkdir -p /exports/hostfs/opt/cni/bin/ && cp -r /opt/cni/bin/* /exports/hostfs/opt/cni/bin/; fi [openshift/node] --> LABEL io.k8s.display-name="OpenShift Origin Node" io.k8s.description="This is a component of OpenShift Origin and contains the software for individual nodes when using SDN." 
io.openshift.tags="openshift,node" [openshift/node] --> VOLUME /etc/origin/node [openshift/node] --> ENV KUBECONFIG=/etc/origin/node/node.kubeconfig [openshift/node] --> ENTRYPOINT [ "/usr/local/bin/origin-node-run.sh" ] [openshift/node] --> Committing changes to openshift/node:5eda3fa ... [openshift/node] --> Tagged as openshift/node:latest [openshift/node] --> Done [openshift/hello-openshift] --> FROM scratchig55mpr8cur3t1rx17upqcdz [openshift/hello-openshift] --> MAINTAINER Jessica Forrester <jforrest@redhat.com> [openshift/hello-openshift] --> COPY bin/hello-openshift /hello-openshift [openshift/hello-openshift] --> EXPOSE 8080 8888 [openshift/hello-openshift] --> ENTRYPOINT ["/hello-openshift"] [openshift/hello-openshift] --> Committing changes to openshift/hello-openshift:5eda3fa ... [openshift/hello-openshift] --> Tagged as openshift/hello-openshift:latest [openshift/hello-openshift] --> Done [openshift/hello-openshift] Removing examples/hello-openshift/bin/hello-openshift [openshift/openvswitch] --> FROM openshift/node [openshift/openvswitch] --> COPY scripts/* /usr/local/bin/ [openshift/openvswitch] --> RUN INSTALL_PKGS="openvswitch" && yum install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all [openshift/openvswitch] Loaded plugins: fastestmirror, ovl [openshift/openvswitch] Determining fastest mirrors [openshift/openvswitch] * base: mirror.vtti.vt.edu [openshift/openvswitch] * extras: mirror.wdc1.us.leaseweb.net [openshift/openvswitch] * updates: mirrors.tripadvisor.com [openshift/openvswitch] Package openvswitch-2.7.0-1.el7.x86_64 already installed and latest version [openshift/openvswitch] Nothing to do [openshift/openvswitch] Loaded plugins: fastestmirror, ovl [openshift/openvswitch] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/openvswitch] Cleaning up everything [openshift/openvswitch] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/openvswitch] Cleaning up list of fastest mirrors [openshift/openvswitch] --> LABEL io.openshift.tags="openshift,openvswitch" io.k8s.display-name="OpenShift Origin OpenVSwitch Daemon" io.k8s.description="This is a component of OpenShift Origin and runs an OpenVSwitch daemon process." [openshift/openvswitch] --> VOLUME /etc/openswitch [openshift/openvswitch] --> ENV HOME /root [openshift/openvswitch] --> COPY system-container/system-container-wrapper.sh /usr/local/bin/ [openshift/openvswitch] --> COPY system-container/config.json.template system-container/service.template system-container/tmpfiles.template system-container/manifest.json /exports/ [openshift/openvswitch] --> ENTRYPOINT ["/usr/local/bin/ovs-run.sh"] [openshift/openvswitch] --> Committing changes to openshift/openvswitch:5eda3fa ... 
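Each image layer built above follows the same yum pattern in its RUN step: install an explicit package list, verify the installed files with rpm -V, then clean all cached metadata so the committed layer stays small. A sketch of that pattern as plain shell, using the openvswitch example from this log (INSTALL_PKGS is the variable name the RUN step uses; the rest is standard yum/rpm):
  set -e                           # any failure must fail the image build step
  INSTALL_PKGS="openvswitch"
  yum install -y ${INSTALL_PKGS}
  rpm -V ${INSTALL_PKGS}           # non-zero exit if installed files do not match the RPM database
  yum clean all                    # drop yum caches before the layer is committed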
[openshift/openvswitch] --> Tagged as openshift/openvswitch:latest [openshift/openvswitch] --> Done [INFO] [21:29:47+0000] hack/build-images.sh exited with code 0 after 00h 05m 13s + sed -i 's|go/src|data/src|' _output/local/releases/rpms/origin-local-release.repo + sudo cp _output/local/releases/rpms/origin-local-release.repo /etc/yum.repos.d/ + sudo systemctl restart docker.service + set +o xtrace ########## FINISHED STAGE: SUCCESS: BUILD AN ORIGIN RELEASE [00h 27m 24s] ########## [workspace@2] $ /bin/bash /tmp/jenkins5638433170098230054.sh ########## STARTING STAGE: DETERMINE THE RELEASE COMMIT FOR ORIGIN IMAGES AND VERSION FOR RPMS ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.1kUdaYoF3s + cat + chmod +x /tmp/tmp.1kUdaYoF3s + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.1kUdaYoF3s openshiftdevel:/tmp/tmp.1kUdaYoF3s + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 600 /tmp/tmp.1kUdaYoF3s"' + cd /data/src/github.com/openshift/origin + jobs_repo=/data/src/github.com/openshift/aos-cd-jobs/ + git log -1 --pretty=%h + source hack/lib/init.sh ++ set -o errexit ++ set -o nounset ++ set -o pipefail +++ date +%s ++ OS_SCRIPT_START_TIME=1523050250 ++ export OS_SCRIPT_START_TIME ++ readonly -f os::util::absolute_path +++ dirname hack/lib/init.sh ++ init_source=hack/lib/../.. +++ os::util::absolute_path hack/lib/../.. +++ local relative_path=hack/lib/../.. +++ local absolute_path +++ pushd hack/lib/../.. 
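The long run of `readonly -f` output in the trace that follows comes from sourcing hack/lib/init.sh: it resolves OS_ROOT, changes into it, then finds and sources every shell library under hack/lib except init.sh itself, and each library marks its functions read-only. A condensed sketch of that loop, reconstructed from the surrounding trace (paths as in this job; the real script does more setup around it):
  OS_ROOT=/data/src/github.com/openshift/origin
  cd "${OS_ROOT}"
  for library_file in $( find "${OS_ROOT}/hack/lib" -type f -name '*.sh' -not -path '*/hack/lib/init.sh' ); do
    source "${library_file}"       # each library defines helpers and declares them readonly -f
  done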
++++ pwd +++ relative_path=/data/src/github.com/openshift/origin +++ [[ -h /data/src/github.com/openshift/origin ]] +++ absolute_path=/data/src/github.com/openshift/origin +++ popd +++ echo /data/src/github.com/openshift/origin ++ OS_ROOT=/data/src/github.com/openshift/origin ++ export OS_ROOT ++ cd /data/src/github.com/openshift/origin +++ find /data/src/github.com/openshift/origin/hack/lib -type f -name '*.sh' -not -path '*/hack/lib/init.sh' ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/build/archive.sh +++ readonly -f os::build::archive::name +++ readonly -f os::build::archive::zip +++ readonly -f os::build::archive::tar +++ readonly -f os::build::archive::internal::is_hardlink_supported +++ readonly -f os::build::archive::extract_tar +++ readonly -f os::build::archive::detect_local_release_tars ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/build/images.sh +++ readonly -f os::build::image +++ readonly -f os::build::image::internal::generic +++ readonly -f os::build::image::internal::imagebuilder +++ readonly -f os::build::image::internal::docker ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/build/release.sh ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/build/binaries.sh +++ readonly -f os::build::binaries_from_targets +++ readonly -f os::build::host_platform +++ readonly -f os::build::host_platform_friendly +++ readonly -f os::build::platform_arch +++ readonly -f os::build::setup_env +++ readonly -f os::build::build_static_binaries +++ readonly -f os::build::build_binaries +++ readonly -f os::build::generate_windows_versioninfo +++ readonly -f os::build::export_targets +++ readonly -f os::build::place_bins +++ readonly -f os::build::release_sha +++ readonly -f os::build::make_openshift_binary_symlinks +++ readonly -f os::build::ldflag +++ readonly -f os::build::ldflags +++ readonly -f os::build::require_clean_tree +++ readonly -f os::build::commit_range ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/build/constants.sh +++ readonly OS_BUILD_ENV_GOLANG=1.8 +++ OS_BUILD_ENV_GOLANG=1.8 +++ readonly OS_BUILD_ENV_IMAGE=openshift/origin-release:golang-1.8 +++ OS_BUILD_ENV_IMAGE=openshift/origin-release:golang-1.8 +++ readonly OS_OUTPUT_BASEPATH=_output +++ OS_OUTPUT_BASEPATH=_output +++ readonly OS_BASE_OUTPUT=/data/src/github.com/openshift/origin/_output +++ OS_BASE_OUTPUT=/data/src/github.com/openshift/origin/_output +++ readonly OS_OUTPUT_SCRIPTPATH=/data/src/github.com/openshift/origin/_output/scripts +++ OS_OUTPUT_SCRIPTPATH=/data/src/github.com/openshift/origin/_output/scripts +++ readonly OS_OUTPUT_SUBPATH=_output/local +++ OS_OUTPUT_SUBPATH=_output/local +++ readonly OS_OUTPUT=/data/src/github.com/openshift/origin/_output/local +++ OS_OUTPUT=/data/src/github.com/openshift/origin/_output/local +++ readonly OS_OUTPUT_RELEASEPATH=/data/src/github.com/openshift/origin/_output/local/releases +++ 
OS_OUTPUT_RELEASEPATH=/data/src/github.com/openshift/origin/_output/local/releases +++ readonly OS_OUTPUT_RPMPATH=/data/src/github.com/openshift/origin/_output/local/releases/rpms +++ OS_OUTPUT_RPMPATH=/data/src/github.com/openshift/origin/_output/local/releases/rpms +++ readonly OS_OUTPUT_BINPATH=/data/src/github.com/openshift/origin/_output/local/bin +++ OS_OUTPUT_BINPATH=/data/src/github.com/openshift/origin/_output/local/bin +++ readonly OS_OUTPUT_PKGDIR=/data/src/github.com/openshift/origin/_output/local/pkgdir +++ OS_OUTPUT_PKGDIR=/data/src/github.com/openshift/origin/_output/local/pkgdir +++ readonly OS_GO_PACKAGE=github.com/openshift/origin +++ OS_GO_PACKAGE=github.com/openshift/origin +++ OS_SDN_COMPILE_TARGETS_LINUX=(pkg/network/sdn-cni-plugin vendor/github.com/containernetworking/cni/plugins/ipam/host-local vendor/github.com/containernetworking/cni/plugins/main/loopback) +++ readonly OS_SDN_COMPILE_TARGETS_LINUX +++ OS_IMAGE_COMPILE_TARGETS_LINUX=(cmd/dockerregistry cmd/gitserver vendor/k8s.io/kubernetes/cmd/hyperkube "${OS_SDN_COMPILE_TARGETS_LINUX[@]}") +++ readonly OS_IMAGE_COMPILE_TARGETS_LINUX +++ OS_SCRATCH_IMAGE_COMPILE_TARGETS_LINUX=(images/pod examples/hello-openshift) +++ readonly OS_SCRATCH_IMAGE_COMPILE_TARGETS_LINUX +++ OS_IMAGE_COMPILE_BINARIES=("${OS_SCRATCH_IMAGE_COMPILE_TARGETS_LINUX[@]##*/}" "${OS_IMAGE_COMPILE_TARGETS_LINUX[@]##*/}") +++ readonly OS_IMAGE_COMPILE_BINARIES +++ OS_CROSS_COMPILE_TARGETS=(cmd/openshift cmd/oc cmd/kubefed cmd/template-service-broker) +++ readonly OS_CROSS_COMPILE_TARGETS +++ OS_CROSS_COMPILE_BINARIES=("${OS_CROSS_COMPILE_TARGETS[@]##*/}") +++ readonly OS_CROSS_COMPILE_BINARIES +++ OS_TEST_TARGETS=(test/extended/extended.test) +++ readonly OS_TEST_TARGETS +++ OPENSHIFT_BINARY_SYMLINKS=(openshift-router openshift-deploy openshift-recycle openshift-sti-build openshift-docker-build openshift-git-clone openshift-manage-dockerfile openshift-extract-image-content origin osc oadm osadm kubectl kubernetes kubelet kube-proxy kube-apiserver kube-controller-manager kube-scheduler) +++ readonly OPENSHIFT_BINARY_SYMLINKS +++ OPENSHIFT_BINARY_COPY=(oadm kubelet kube-proxy kube-apiserver kube-controller-manager kube-scheduler) +++ readonly OPENSHIFT_BINARY_COPY +++ OC_BINARY_COPY=(kubectl) +++ readonly OC_BINARY_COPY +++ OS_BINARY_RELEASE_CLIENT_WINDOWS=(oc.exe README.md ./LICENSE) +++ readonly OS_BINARY_RELEASE_CLIENT_WINDOWS +++ OS_BINARY_RELEASE_CLIENT_MAC=(oc README.md ./LICENSE) +++ readonly OS_BINARY_RELEASE_CLIENT_MAC +++ OS_BINARY_RELEASE_CLIENT_LINUX=(./oc ./README.md ./LICENSE) +++ readonly OS_BINARY_RELEASE_CLIENT_LINUX +++ OS_BINARY_RELEASE_SERVER_LINUX=('./*') +++ readonly OS_BINARY_RELEASE_SERVER_LINUX +++ OS_BINARY_RELEASE_CLIENT_EXTRA=(${OS_ROOT}/README.md ${OS_ROOT}/LICENSE) +++ readonly OS_BINARY_RELEASE_CLIENT_EXTRA ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/build/environment.sh +++ readonly -f os::build::environment::create +++ readonly -f os::build::environment::release::workingdir +++ readonly -f os::build::environment::cleanup +++ readonly -f os::build::environment::start +++ readonly -f os::build::environment::withsource +++ readonly -f os::build::environment::volume_name +++ readonly -f os::build::environment::remove_volume +++ readonly -f os::build::environment::run ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ 
source /data/src/github.com/openshift/origin/hack/lib/build/rpm.sh ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/build/version.sh +++ readonly -f os::build::version::get_vars +++ readonly -f os::build::version::openshift_vars +++ readonly -f os::build::version::etcd_vars +++ readonly -f os::build::version::kubernetes_vars +++ readonly -f os::build::version::save_vars ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/cleanup.sh +++ readonly -f os::cleanup::all +++ readonly -f os::cleanup::dump_etcd +++ readonly -f os::cleanup::internal::dump_etcd_v3 +++ readonly -f os::cleanup::prune_etcd +++ readonly -f os::cleanup::containers +++ readonly -f os::cleanup::dump_container_logs +++ readonly -f os::cleanup::internal::list_our_containers +++ readonly -f os::cleanup::internal::list_k8s_containers +++ readonly -f os::cleanup::internal::list_containers +++ readonly -f os::cleanup::tmpdir +++ readonly -f os::cleanup::dump_events +++ readonly -f os::cleanup::find_cache_alterations +++ readonly -f os::cleanup::dump_pprof_output +++ readonly -f os::cleanup::truncate_large_logs +++ readonly -f os::cleanup::processes ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/cmd.sh +++ readonly -f os::cmd::expect_success +++ readonly -f os::cmd::expect_failure +++ readonly -f os::cmd::expect_success_and_text +++ readonly -f os::cmd::expect_failure_and_text +++ readonly -f os::cmd::expect_success_and_not_text +++ readonly -f os::cmd::expect_failure_and_not_text +++ readonly -f os::cmd::expect_code +++ readonly -f os::cmd::expect_code_and_text +++ readonly -f os::cmd::expect_code_and_not_text +++ millisecond=1 +++ second=1000 +++ minute=60000 +++ readonly -f os::cmd::try_until_success +++ readonly -f os::cmd::try_until_failure +++ readonly -f os::cmd::try_until_text +++ readonly -f os::cmd::try_until_text +++ os_cmd_internal_tmpdir=/tmp/openshift +++ os_cmd_internal_tmpout=/tmp/openshift/tmp_stdout.log +++ os_cmd_internal_tmperr=/tmp/openshift/tmp_stderr.log +++ readonly -f os::cmd::internal::expect_exit_code_run_grep +++ readonly -f os::cmd::internal::init_tempdir +++ readonly -f os::cmd::internal::describe_call +++ readonly -f os::cmd::internal::determine_caller +++ readonly -f os::cmd::internal::describe_expectation +++ readonly -f os::cmd::internal::seconds_since_epoch +++ readonly -f os::cmd::internal::run_collecting_output +++ readonly -f os::cmd::internal::success_func +++ readonly -f os::cmd::internal::failure_func +++ readonly -f os::cmd::internal::specific_code_func +++ readonly -f os::cmd::internal::get_results +++ readonly -f os::cmd::internal::get_last_results +++ readonly -f os::cmd::internal::mark_attempt +++ readonly -f os::cmd::internal::compress_output +++ readonly -f os::cmd::internal::print_results +++ readonly -f os::cmd::internal::assemble_causes +++ readonly -f os::cmd::internal::run_until_exit_code +++ readonly -f os::cmd::internal::run_until_text ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/log/stacktrace.sh +++ readonly -f os::log::stacktrace::install +++ readonly -f 
os::log::stacktrace::print ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/log/system.sh +++ readonly -f os::log::system::install_cleanup +++ readonly -f os::log::system::clean_up +++ readonly -f os::log::system::internal::prune_datafile +++ readonly -f os::log::system::internal::plot +++ readonly -f os::log::system::start +++ readonly -f os::log::system::internal::run ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/log/output.sh +++ readonly -f os::log::info +++ readonly -f os::log::warning +++ readonly -f os::log::error +++ readonly -f os::log::fatal +++ readonly -f os::log::debug ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/test/junit.sh +++ readonly -f os::test::junit::declare_suite_start +++ readonly -f os::test::junit::declare_suite_end +++ readonly -f os::test::junit::declare_test_start +++ readonly -f os::test::junit::declare_test_end +++ readonly -f os::test::junit::check_test_counters +++ readonly -f os::test::junit::reconcile_output ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/util/ensure.sh +++ readonly -f os::util::ensure::system_binary_exists +++ readonly -f os::util::ensure::built_binary_exists +++ readonly -f os::util::ensure::gopath_binary_exists +++ readonly -f os::util::ensure::iptables_privileges_exist ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/util/environment.sh +++ readonly -f os::util::environment::use_sudo +++ readonly -f os::util::environment::setup_time_vars +++ readonly -f os::util::environment::setup_all_server_vars +++ readonly -f os::util::environment::update_path_var +++ readonly -f os::util::environment::setup_tmpdir_vars +++ readonly -f os::util::environment::setup_kubelet_vars +++ readonly -f os::util::environment::setup_etcd_vars +++ readonly -f os::util::environment::setup_server_vars +++ readonly -f os::util::environment::setup_images_vars ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/util/find.sh +++ readonly -f os::util::find::system_binary +++ readonly -f os::util::find::built_binary +++ readonly -f os::util::find::gopath_binary ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/util/text.sh +++ readonly -f os::text::reset +++ readonly -f os::text::bold +++ readonly -f os::text::red +++ readonly -f os::text::green +++ readonly -f os::text::blue +++ readonly -f os::text::yellow +++ readonly -f os::text::clear_last_line +++ readonly -f os::text::internal::is_tty +++ readonly -f os::text::print_bold +++ readonly -f os::text::print_red +++ readonly -f os::text::print_red_bold +++ readonly -f os::text::print_green +++ readonly -f os::text::print_green_bold +++ readonly -f os::text::print_blue +++ readonly -f os::text::print_blue_bold +++ 
readonly -f os::text::print_yellow +++ readonly -f os::text::print_yellow_bold ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/util/trap.sh +++ readonly -f os::util::trap::init_err +++ readonly -f os::util::trap::init_exit +++ readonly -f os::util::trap::err_handler +++ readonly -f os::util::trap::exit_handler ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/util/docs.sh +++ readonly -f generate_manual_pages +++ readonly -f generate_documentation +++ readonly -f os::util::gen-docs +++ readonly -f os::util::set-man-placeholder +++ readonly -f os::util::set-docs-placeholder ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/util/golang.sh +++ readonly -f os::golang::verify_go_version ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/util/misc.sh +++ readonly -f os::util::describe_return_code +++ readonly -f os::util::install_describe_return_code +++ [[ -z '' ]] ++++ pwd +++ OS_ORIGINAL_WD=/data/src/github.com/openshift/origin +++ readonly OS_ORIGINAL_WD +++ export OS_ORIGINAL_WD +++ readonly -f os::util::repository_relative_path +++ readonly -f os::util::format_seconds +++ readonly -f os::util::sed +++ readonly -f os::util::base64decode +++ readonly -f os::util::list_go_src_files +++ readonly -f os::util::list_go_src_dirs ++ for library_file in '$( find "${OS_ROOT}/hack/lib" -type f -name '\''*.sh'\'' -not -path '\''*/hack/lib/init.sh'\'' )' ++ source /data/src/github.com/openshift/origin/hack/lib/start.sh +++ readonly -f os::start::configure_server +++ readonly -f os::start::internal::create_master_certs +++ readonly -f os::start::internal::configure_node +++ readonly -f os::start::internal::configure_master +++ readonly -f os::start::internal::patch_master_config +++ readonly -f os::start::server +++ readonly -f os::start::master +++ readonly -f os::start::all_in_one +++ readonly -f os::start::etcd +++ readonly -f os::start::api_server +++ readonly -f os::start::controllers +++ readonly -f os::start::internal::start_node +++ readonly -f os::start::internal::openshift_executable +++ readonly -f os::start::internal::determine_hostnames +++ readonly -f os::start::router +++ readonly -f os::start::registry ++ unset library_files library_file init_source ++ os::log::stacktrace::install ++ set -o errtrace ++ export OS_USE_STACKTRACE=true ++ OS_USE_STACKTRACE=true ++ os::util::trap::init_err ++ trap -p ERR ++ grep -q os::util::trap::err_handler ++ trap 'os::util::trap::err_handler;' ERR ++ os::util::environment::update_path_var ++ local prefix ++ os::util::find::system_binary go +++ os::build::host_platform ++++ go env GOHOSTOS ++++ go env GOHOSTARCH +++ echo linux/amd64 ++ prefix+=/data/src/github.com/openshift/origin/_output/local/bin/linux/amd64: ++ [[ -n /data ]] ++ prefix+=/data/bin: ++ PATH=/data/src/github.com/openshift/origin/_output/local/bin/linux/amd64:/data/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/origin/.local/bin:/home/origin/bin ++ export PATH ++ [[ -z '' ]] +++ basename /tmp/tmp.1kUdaYoF3s .sh ++ os::util::environment::setup_tmpdir_vars tmp.1kUdaYoF3s ++ 
local sub_dir=tmp.1kUdaYoF3s ++ BASETMPDIR=/tmp/openshift/tmp.1kUdaYoF3s ++ export BASETMPDIR ++ VOLUME_DIR=/tmp/openshift/tmp.1kUdaYoF3s/volumes ++ export VOLUME_DIR ++ BASEOUTDIR=/data/src/github.com/openshift/origin/_output/scripts/tmp.1kUdaYoF3s ++ export BASEOUTDIR ++ LOG_DIR=/data/src/github.com/openshift/origin/_output/scripts/tmp.1kUdaYoF3s/logs ++ export LOG_DIR ++ ARTIFACT_DIR=/data/src/github.com/openshift/origin/_output/scripts/tmp.1kUdaYoF3s/artifacts ++ export ARTIFACT_DIR ++ FAKE_HOME_DIR=/data/src/github.com/openshift/origin/_output/scripts/tmp.1kUdaYoF3s/openshift.local.home ++ export FAKE_HOME_DIR ++ mkdir -p /data/src/github.com/openshift/origin/_output/scripts/tmp.1kUdaYoF3s/logs /tmp/openshift/tmp.1kUdaYoF3s/volumes /data/src/github.com/openshift/origin/_output/scripts/tmp.1kUdaYoF3s/artifacts /data/src/github.com/openshift/origin/_output/scripts/tmp.1kUdaYoF3s/openshift.local.home ++ export OS_TMP_ENV_SET=tmp.1kUdaYoF3s ++ OS_TMP_ENV_SET=tmp.1kUdaYoF3s ++ [[ -n '' ]] + os::build::rpm::get_nvra_vars + OS_RPM_NAME=origin ++ uname -i + OS_RPM_ARCHITECTURE=x86_64 + os::build::version::get_vars + [[ -n '' ]] + os::build::version::openshift_vars + git=(git --work-tree "${OS_ROOT}") + local git + [[ -z '' ]] ++ git --work-tree /data/src/github.com/openshift/origin log --merges --grep 'Merge version v.* of Service Catalog from https://github.com/openshift/service-catalog' --pretty=%s -1 + summary_text='Merge version v0.1.2 of Service Catalog from https://github.com/openshift/service-catalog:v0.1.2+origin' + [[ Merge version v0.1.2 of Service Catalog from https://github.com/openshift/service-catalog:v0.1.2+origin =~ Merge[[:space:]]version[[:space:]](v.*)[[:space:]]of[[:space:]]Service[[:space:]]Catalog ]] + OS_GIT_CATALOG_VERSION=v0.1.2 ++ git --work-tree /data/src/github.com/openshift/origin status --porcelain cmd/service-catalog + git_status= + [[ -n '' ]] + [[ -n '' ]] ++ git --work-tree /data/src/github.com/openshift/origin rev-parse --short 'HEAD^{commit}' + OS_GIT_COMMIT=5eda3fa + [[ -z '' ]] ++ git --work-tree /data/src/github.com/openshift/origin status --porcelain + git_status= + [[ -z '' ]] + OS_GIT_TREE_STATE=clean + [[ -n '' ]] ++ git --work-tree /data/src/github.com/openshift/origin describe --long --tags --abbrev=7 --match 'v[0-9]*' '5eda3fa^{commit}' + OS_GIT_VERSION=v3.7.2-5-g5eda3fa + [[ v3.7.2-5-g5eda3fa =~ ^v([0-9]+)\.([0-9]+)\.([0-9]+)(\.[0-9]+)*([-].*)?$ ]] + OS_GIT_MAJOR=3 + OS_GIT_MINOR=7 + OS_GIT_PATCH=2 + [[ -n -5-g5eda3fa ]] + OS_GIT_MINOR+=+ ++ echo v3.7.2-5-g5eda3fa ++ sed 's/-\([0-9]\{1,\}\)-g\([0-9a-f]\{7,40\}\)$/\+\2-\1/' + OS_GIT_VERSION=v3.7.2+5eda3fa-5 ++ echo v3.7.2+5eda3fa-5 ++ sed 's/-0$//' + OS_GIT_VERSION=v3.7.2+5eda3fa-5 + [[ clean == \d\i\r\t\y ]] + os::build::version::kubernetes_vars ++ go run /data/src/github.com/openshift/origin/tools/godepversion/godepversion.go /data/src/github.com/openshift/origin/Godeps/Godeps.json k8s.io/kubernetes/pkg/api comment + KUBE_GIT_VERSION=v1.7.6-166-ga08f5eeb62 ++ go run /data/src/github.com/openshift/origin/tools/godepversion/godepversion.go /data/src/github.com/openshift/origin/Godeps/Godeps.json k8s.io/kubernetes/pkg/api + KUBE_GIT_COMMIT=c84beff ++ echo v1.7.6-166-ga08f5eeb62 ++ sed 's/-\([0-9]\{1,\}\)-g\([0-9a-f]\{7,40\}\)$/\+\2/' + KUBE_GIT_VERSION=v1.7.6+a08f5eeb62 + [[ v1.7.6+a08f5eeb62 =~ ^v([0-9]+)\.([0-9]+)(\.[0-9]+)*([-].*)?$ ]] + os::build::version::etcd_vars ++ go run /data/src/github.com/openshift/origin/tools/godepversion/godepversion.go 
/data/src/github.com/openshift/origin/Godeps/Godeps.json github.com/coreos/etcd/etcdserver comment + ETCD_GIT_VERSION=v3.2.8 ++ go run /data/src/github.com/openshift/origin/tools/godepversion/godepversion.go /data/src/github.com/openshift/origin/Godeps/Godeps.json github.com/coreos/etcd/etcdserver + ETCD_GIT_COMMIT=e211fb6 + [[ v3.7.2+5eda3fa-5 =~ ^v([0-9](\.[0-9]+)*)(.*) ]] + OS_RPM_VERSION=3.7.2 + metadata=+5eda3fa-5 + [[ + == \+ ]] + [[ +5eda3fa-5 =~ ^\+([a-z0-9]{7,40})(-([0-9]+))?(-dirty)?$ ]] + build_sha=5eda3fa + build_num=5 + OS_RPM_RELEASE=1.5.5eda3fa + export OS_RPM_NAME OS_RPM_VERSION OS_RPM_RELEASE OS_RPM_ARCHITECTURE + echo -3.7.2-1.5.5eda3fa + echo 3.7+ + sed s/+// + echo 3.7.2 + cut -d. -f2 ++ echo v3.7+ ++ sed s/+// + tag=v3.7 + echo v3.7 + set +o xtrace ########## FINISHED STAGE: SUCCESS: DETERMINE THE RELEASE COMMIT FOR ORIGIN IMAGES AND VERSION FOR RPMS [00h 00m 31s] ########## [workspace@2] $ /bin/bash /tmp/jenkins4393310428939433025.sh ########## STARTING STAGE: BUILD AN OPENSHIFT-ANSIBLE RELEASE ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.za8q64YPsU + cat + chmod +x /tmp/tmp.za8q64YPsU + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.za8q64YPsU openshiftdevel:/tmp/tmp.za8q64YPsU + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 14400 /tmp/tmp.za8q64YPsU"' + cd /data/src/github.com/openshift/openshift-ansible + tito_tmp_dir=tito + mkdir -p tito + tito tag --offline --accept-auto-changelog Creating output directory: /tmp/tito Tagging new version of openshift-ansible: 3.7.42-1 -> 3.7.43-1 Created tag: openshift-ansible-3.7.43-1 View: git show HEAD Undo: tito tag -u Push: git push --follow-tags origin + tito build --output=tito --rpm --test --offline --quiet Creating output directory: /data/src/github.com/openshift/openshift-ansible/tito Building package [openshift-ansible-3.7.43-1] Wrote: /data/src/github.com/openshift/openshift-ansible/tito/openshift-ansible-git-0.42633e0.tar.gz Successfully built: /data/src/github.com/openshift/openshift-ansible/tito/openshift-ansible-3.7.43-1.git.0.42633e0.el7.src.rpm - /data/src/github.com/openshift/openshift-ansible/tito/noarch/openshift-ansible-3.7.43-1.git.0.42633e0.el7.noarch.rpm - /data/src/github.com/openshift/openshift-ansible/tito/noarch/openshift-ansible-docs-3.7.43-1.git.0.42633e0.el7.noarch.rpm - /data/src/github.com/openshift/openshift-ansible/tito/noarch/openshift-ansible-playbooks-3.7.43-1.git.0.42633e0.el7.noarch.rpm - /data/src/github.com/openshift/openshift-ansible/tito/noarch/openshift-ansible-roles-3.7.43-1.git.0.42633e0.el7.noarch.rpm - 
/data/src/github.com/openshift/openshift-ansible/tito/noarch/openshift-ansible-filter-plugins-3.7.43-1.git.0.42633e0.el7.noarch.rpm - /data/src/github.com/openshift/openshift-ansible/tito/noarch/openshift-ansible-lookup-plugins-3.7.43-1.git.0.42633e0.el7.noarch.rpm - /data/src/github.com/openshift/openshift-ansible/tito/noarch/openshift-ansible-callback-plugins-3.7.43-1.git.0.42633e0.el7.noarch.rpm - /data/src/github.com/openshift/openshift-ansible/tito/noarch/atomic-openshift-utils-3.7.43-1.git.0.42633e0.el7.noarch.rpm + createrepo tito/noarch Spawning worker 0 with 2 pkgs Spawning worker 1 with 2 pkgs Spawning worker 2 with 2 pkgs Spawning worker 3 with 2 pkgs Workers Finished Saving Primary metadata Saving file lists metadata Saving other metadata Generating sqlite DBs Sqlite DBs complete + cat ++ pwd + sudo cp ./openshift-ansible-local-release.repo /etc/yum.repos.d + set +o xtrace ########## FINISHED STAGE: SUCCESS: BUILD AN OPENSHIFT-ANSIBLE RELEASE [00h 00m 52s] ########## [workspace@2] $ /bin/bash /tmp/jenkins6796075778537269215.sh ########## STARTING STAGE: DETERMINE THE RELEASE COMMIT FOR ORIGIN IMAGES AND VERSION FOR RPMS ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.yfs0FS9jaI + cat + chmod +x /tmp/tmp.yfs0FS9jaI + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.yfs0FS9jaI openshiftdevel:/tmp/tmp.yfs0FS9jaI + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 14400 /tmp/tmp.yfs0FS9jaI"' + cd /data/src/github.com/openshift/origin + registry_repo=/data/src/github.com/openshift/image-registry/ + git log -1 --pretty=%h + metrics_repo=/data/src/github.com/openshift/kubernetes-metrics-server/ + git log -1 --pretty=%h + console_repo=/data/src/github.com/openshift/origin-web-console-server/ + git log -1 --pretty=%h + set +o xtrace ########## FINISHED STAGE: SUCCESS: DETERMINE THE RELEASE COMMIT FOR ORIGIN IMAGES AND VERSION FOR RPMS [00h 00m 01s] ########## [workspace@2] $ /bin/bash /tmp/jenkins4198417773279036086.sh ########## STARTING STAGE: BUILD THE IMAGE REGISTRY CONTAINER IMAGE ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ 
PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.TP7ZtfcmK9 + cat + chmod +x /tmp/tmp.TP7ZtfcmK9 + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.TP7ZtfcmK9 openshiftdevel:/tmp/tmp.TP7ZtfcmK9 + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 3600 /tmp/tmp.TP7ZtfcmK9"' + cd /data/src/github.com/openshift/image-registry + make build-images OS_ONLY_BUILD_PLATFORMS='linux/amd64' hack/build-rpms.sh [INFO] Building release RPMs for /data/src/github.com/openshift/image-registry/image-registry.spec ... Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.umyhlO + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd /tmp/openshift/build-rpms/rpm/BUILD + rm -rf origin-dockerregistry-3.10.0 + /usr/bin/gzip -dc /tmp/openshift/build-rpms/rpm/SOURCES/origin-dockerregistry-3.10.0.tar.gz + /usr/bin/tar -xf - + STATUS=0 + '[' 0 -ne 0 ']' + cd origin-dockerregistry-3.10.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . + exit 0 Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.SPT6Iq + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-dockerregistry-3.10.0 + BUILD_PLATFORM=linux/amd64 + OS_ONLY_BUILD_PLATFORMS=linux/amd64 + OS_GIT_COMMIT=eed61d5 + OS_GIT_TREE_STATE=dirty + OS_GIT_VERSION=v3.10.0-alpha.0+eed61d5-4-dirty + OS_GIT_MAJOR=3 + OS_GIT_MINOR=10+ + OS_GIT_PATCH=0 + make build-cross make[1]: Entering directory `/tmp/openshift/build-rpms/rpm/BUILD/origin-dockerregistry-3.10.0' hack/build-cross.sh ++ Building go targets for linux/amd64: cmd/dockerregistry hack/build-cross.sh took 40 seconds make[1]: Leaving directory `/tmp/openshift/build-rpms/rpm/BUILD/origin-dockerregistry-3.10.0' + exit 0 Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.iJg14E + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + '[' /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64 '!=' / ']' + rm -rf /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64 ++ dirname /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64 + mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT + mkdir /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64 + cd origin-dockerregistry-3.10.0 ++ go env GOHOSTOS ++ go env GOHOSTARCH + PLATFORM=linux/amd64 + install -d /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64/usr/bin + for bin in dockerregistry +++ INSTALLING dockerregistry + echo '+++ INSTALLING dockerregistry' + install -p -m 755 _output/local/bin/linux/amd64/dockerregistry /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64/usr/bin/dockerregistry + /usr/lib/rpm/check-buildroot + /usr/lib/rpm/brp-compress Processing files: origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.el7.x86_64 Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.DQilOT + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-dockerregistry-3.10.0 + DOCDIR=/tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64/usr/share/doc/origin-dockerregistry-3.10.0 + export DOCDIR + /usr/bin/mkdir -p 
/tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64/usr/share/doc/origin-dockerregistry-3.10.0 + cp -pr README.md /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64/usr/share/doc/origin-dockerregistry-3.10.0 + exit 0 Executing(%license): /bin/sh -e /var/tmp/rpm-tmp.R52ny8 + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-dockerregistry-3.10.0 + LICENSEDIR=/tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64/usr/share/licenses/origin-dockerregistry-3.10.0 + export LICENSEDIR + /usr/bin/mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64/usr/share/licenses/origin-dockerregistry-3.10.0 + cp -pr LICENSE /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64/usr/share/licenses/origin-dockerregistry-3.10.0 + exit 0 Provides: origin-dockerregistry = 3.10.0-0.alpha.0.4.eed61d5.el7 origin-dockerregistry(x86-64) = 3.10.0-0.alpha.0.4.eed61d5.el7 Requires(interp): /bin/sh Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1 Requires(pre): /bin/sh Requires: libc.so.6()(64bit) libc.so.6(GLIBC_2.2.5)(64bit) libpthread.so.0()(64bit) libpthread.so.0(GLIBC_2.2.5)(64bit) libpthread.so.0(GLIBC_2.3.2)(64bit) Checking for unpackaged file(s): /usr/lib/rpm/check-files /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64 Wrote: /tmp/openshift/build-rpms/rpm/RPMS/x86_64/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.el7.x86_64.rpm Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.Z546MP + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-dockerregistry-3.10.0 + /usr/bin/rm -rf /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.x86_64 + exit 0 make[1]: Entering directory `/data/src/github.com/openshift/image-registry' rm -rf _output make[1]: Leaving directory `/data/src/github.com/openshift/image-registry' Spawning worker 0 with 1 pkgs Spawning worker 1 with 0 pkgs Spawning worker 2 with 0 pkgs Spawning worker 3 with 0 pkgs Workers Finished Saving Primary metadata Saving file lists metadata Saving other metadata Generating sqlite DBs Sqlite DBs complete [INFO] Repository file for `yum` or `dnf` placed at /data/src/github.com/openshift/image-registry/_output/local/releases/rpms/local-release.repo [INFO] Install it with: [INFO] $ mv '/data/src/github.com/openshift/image-registry/_output/local/releases/rpms/local-release.repo' '/etc/yum.repos.d [INFO] hack/build-rpms.sh exited with code 0 after 00h 01m 12s hack/build-images.sh [openshift/origin-docker-registry] --> FROM openshift/origin-base as 0 [openshift/origin-docker-registry] --> RUN INSTALL_PKGS="origin-dockerregistry" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all [openshift/origin-docker-registry] Loaded plugins: fastestmirror, ovl [openshift/origin-docker-registry] Determining fastest mirrors [openshift/origin-docker-registry] * base: mirror.vtti.vt.edu [openshift/origin-docker-registry] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-docker-registry] * updates: mirror.teklinks.com [openshift/origin-docker-registry] Resolving Dependencies [openshift/origin-docker-registry] --> Running transaction check [openshift/origin-docker-registry] ---> Package origin-dockerregistry.x86_64 
0:3.10.0-0.alpha.0.4.eed61d5.el7 will be installed [openshift/origin-docker-registry] --> Finished Dependency Resolution [openshift/origin-docker-registry] Dependencies Resolved [openshift/origin-docker-registry] ================================================================================ [openshift/origin-docker-registry] Package Arch Version Repository Size [openshift/origin-docker-registry] ================================================================================ [openshift/origin-docker-registry] Installing: [openshift/origin-docker-registry] origin-dockerregistry [openshift/origin-docker-registry] x86_64 3.10.0-0.alpha.0.4.eed61d5.el7 origin-local-release 9.1 M [openshift/origin-docker-registry] Transaction Summary [openshift/origin-docker-registry] ================================================================================ [openshift/origin-docker-registry] Install 1 Package [openshift/origin-docker-registry] Total download size: 9.1 M [openshift/origin-docker-registry] Installed size: 49 M [openshift/origin-docker-registry] Downloading packages: [openshift/origin-docker-registry] Running transaction check [openshift/origin-docker-registry] Running transaction test [openshift/origin-docker-registry] Transaction test succeeded [openshift/origin-docker-registry] Running transaction [openshift/origin-docker-registry] Installing : origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.el7.x86_6 1/1 [openshift/origin-docker-registry] Verifying : origin-dockerregistry-3.10.0-0.alpha.0.4.eed61d5.el7.x86_6 1/1 [openshift/origin-docker-registry] Installed: [openshift/origin-docker-registry] origin-dockerregistry.x86_64 0:3.10.0-0.alpha.0.4.eed61d5.el7 [openshift/origin-docker-registry] Complete! [openshift/origin-docker-registry] Loaded plugins: fastestmirror, ovl [openshift/origin-docker-registry] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-docker-registry] Cleaning up everything [openshift/origin-docker-registry] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-docker-registry] Cleaning up list of fastest mirrors [openshift/origin-docker-registry] --> COPY config.yml ${REGISTRY_CONFIGURATION_PATH} [openshift/origin-docker-registry] --> LABEL io.k8s.display-name="OpenShift Container Platform Image Registry" io.k8s.description="This is a component of OpenShift Container Platform and exposes a Docker registry that is integrated with the cluster for authentication and management." io.openshift.tags="openshift,docker,registry" [openshift/origin-docker-registry] --> USER 1001 [openshift/origin-docker-registry] --> EXPOSE 5000 [openshift/origin-docker-registry] --> VOLUME /registry [openshift/origin-docker-registry] --> ENV REGISTRY_CONFIGURATION_PATH=/config.yml [openshift/origin-docker-registry] --> CMD /usr/bin/dockerregistry ${REGISTRY_CONFIGURATION_PATH} [openshift/origin-docker-registry] --> Committing changes to openshift/origin-docker-registry:eed61d5 ... 
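Each image build in this job follows the same pattern: hack/build-rpms.sh publishes the freshly built RPM into a local yum repository named origin-local-release, and the Dockerfile layers that package onto openshift/origin-base with a plain yum install (the RUN step shown above). Outside the image build, the same install could be reproduced roughly as follows; the package and repository names are taken from the log, but the .repo body is only a sketch, since the real file is generated by hack/build-rpms.sh.

# Sketch: consume the locally built registry RPM the way the image build does.
# The .repo contents below are illustrative; hack/build-rpms.sh writes the real file.
sudo tee /etc/yum.repos.d/origin-local-release.repo <<'EOF'
[origin-local-release]
name=Origin local release
baseurl=file:///data/src/github.com/openshift/image-registry/_output/local/releases/rpms/
enabled=1
gpgcheck=0
EOF
sudo yum --enablerepo=origin-local-release install -y origin-dockerregistry
rpm -V origin-dockerregistry   # verify the installed payload, as the RUN step does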
[openshift/origin-docker-registry] --> Tagged as openshift/origin-docker-registry:latest [openshift/origin-docker-registry] --> Done [INFO] hack/build-images.sh exited with code 0 after 00h 00m 17s ++ cat ./ORIGIN_COMMIT + docker tag openshift/origin-docker-registry:latest openshift/origin-docker-registry:5eda3fa + set +o xtrace ########## FINISHED STAGE: SUCCESS: BUILD THE IMAGE REGISTRY CONTAINER IMAGE [00h 01m 30s] ########## [workspace@2] $ /bin/bash /tmp/jenkins93161602667973805.sh ########## STARTING STAGE: BUILD THE KUBERNETES METRICS SERVER CONTAINER IMAGE ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.7qWCEaOF4U + cat + chmod +x /tmp/tmp.7qWCEaOF4U + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.7qWCEaOF4U openshiftdevel:/tmp/tmp.7qWCEaOF4U + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 3600 /tmp/tmp.7qWCEaOF4U"' + cd /data/src/github.com/openshift/kubernetes-metrics-server + make build-images OS_ONLY_BUILD_PLATFORMS='linux/amd64' hack/build-rpms.sh [INFO] Building release RPMs for /data/src/github.com/openshift/kubernetes-metrics-server/metrics-server.spec ... Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.acs0rl + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd /tmp/openshift/build-rpms/rpm/BUILD + rm -rf origin-metrics-server-3.10.0 + /usr/bin/gzip -dc /tmp/openshift/build-rpms/rpm/SOURCES/origin-metrics-server-3.10.0.tar.gz + /usr/bin/tar -xf - + STATUS=0 + '[' 0 -ne 0 ']' + cd origin-metrics-server-3.10.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . 
+ exit 0 Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.RCkR2I + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-metrics-server-3.10.0 + BUILD_PLATFORM=linux/amd64 + OS_ONLY_BUILD_PLATFORMS=linux/amd64 + OS_GIT_COMMIT=9d7d21e + OS_GIT_TREE_STATE=dirty + OS_GIT_VERSION=v3.10.0-alpha.0+9d7d21e-dirty + OS_GIT_MAJOR=3 + OS_GIT_MINOR=10+ + OS_GIT_PATCH=0 + make build-cross make[1]: Entering directory `/tmp/openshift/build-rpms/rpm/BUILD/origin-metrics-server-3.10.0' hack/build-cross.sh ++ Building go targets for linux/amd64: cmd/metrics-server hack/build-cross.sh took 61 seconds make[1]: Leaving directory `/tmp/openshift/build-rpms/rpm/BUILD/origin-metrics-server-3.10.0' + exit 0 Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.gYwn0y + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + '[' /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64 '!=' / ']' + rm -rf /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64 ++ dirname /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64 + mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT + mkdir /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64 + cd origin-metrics-server-3.10.0 ++ go env GOHOSTOS ++ go env GOHOSTARCH + PLATFORM=linux/amd64 + install -d /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64/usr/bin +++ INSTALLING metrics-server + for bin in metrics-server + echo '+++ INSTALLING metrics-server' + install -p -m 755 _output/local/bin/linux/amd64/metrics-server /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64/usr/bin/metrics-server + /usr/lib/rpm/check-buildroot + /usr/lib/rpm/brp-compress Processing files: origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.el7.x86_64 Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.B7rwmp + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-metrics-server-3.10.0 + DOCDIR=/tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64/usr/share/doc/origin-metrics-server-3.10.0 + export DOCDIR + /usr/bin/mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64/usr/share/doc/origin-metrics-server-3.10.0 + cp -pr README.md /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64/usr/share/doc/origin-metrics-server-3.10.0 + exit 0 Executing(%license): /bin/sh -e /var/tmp/rpm-tmp.FfAnJf + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-metrics-server-3.10.0 + LICENSEDIR=/tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64/usr/share/licenses/origin-metrics-server-3.10.0 + export LICENSEDIR + /usr/bin/mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64/usr/share/licenses/origin-metrics-server-3.10.0 + cp -pr LICENSE /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64/usr/share/licenses/origin-metrics-server-3.10.0 + exit 0 Provides: origin-metrics-server = 3.10.0-0.alpha.0.0.9d7d21e.el7 origin-metrics-server(x86-64) = 3.10.0-0.alpha.0.0.9d7d21e.el7 Requires(interp): /bin/sh Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1 Requires(pre): /bin/sh Requires: libc.so.6()(64bit) 
libc.so.6(GLIBC_2.2.5)(64bit) libpthread.so.0()(64bit) libpthread.so.0(GLIBC_2.2.5)(64bit) libpthread.so.0(GLIBC_2.3.2)(64bit) Checking for unpackaged file(s): /usr/lib/rpm/check-files /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64 Wrote: /tmp/openshift/build-rpms/rpm/RPMS/x86_64/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.el7.x86_64.rpm Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.ZlnC7a + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-metrics-server-3.10.0 + /usr/bin/rm -rf /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.x86_64 + exit 0 make[1]: Entering directory `/data/src/github.com/openshift/kubernetes-metrics-server' rm -rf _output make[1]: Leaving directory `/data/src/github.com/openshift/kubernetes-metrics-server' Spawning worker 0 with 1 pkgs Spawning worker 1 with 0 pkgs Spawning worker 2 with 0 pkgs Spawning worker 3 with 0 pkgs Workers Finished Saving Primary metadata Saving file lists metadata Saving other metadata Generating sqlite DBs Sqlite DBs complete [INFO] Repository file for `yum` or `dnf` placed at /data/src/github.com/openshift/kubernetes-metrics-server/_output/local/releases/rpms/local-release.repo [INFO] Install it with: [INFO] $ mv '/data/src/github.com/openshift/kubernetes-metrics-server/_output/local/releases/rpms/local-release.repo' '/etc/yum.repos.d [INFO] hack/build-rpms.sh exited with code 0 after 00h 01m 12s hack/build-images.sh [openshift/origin-metrics-server] --> FROM openshift/origin-base as 0 [openshift/origin-metrics-server] --> RUN INSTALL_PKGS="origin-metrics-server" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all [openshift/origin-metrics-server] Loaded plugins: fastestmirror, ovl [openshift/origin-metrics-server] Determining fastest mirrors [openshift/origin-metrics-server] * base: mirror.vtti.vt.edu [openshift/origin-metrics-server] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-metrics-server] * updates: mirror.us-midwest-1.nexcess.net [openshift/origin-metrics-server] Resolving Dependencies [openshift/origin-metrics-server] --> Running transaction check [openshift/origin-metrics-server] ---> Package origin-metrics-server.x86_64 0:3.10.0-0.alpha.0.0.9d7d21e.el7 will be installed [openshift/origin-metrics-server] --> Finished Dependency Resolution [openshift/origin-metrics-server] Dependencies Resolved [openshift/origin-metrics-server] ================================================================================ [openshift/origin-metrics-server] Package Arch Version Repository Size [openshift/origin-metrics-server] ================================================================================ [openshift/origin-metrics-server] Installing: [openshift/origin-metrics-server] origin-metrics-server [openshift/origin-metrics-server] x86_64 3.10.0-0.alpha.0.0.9d7d21e.el7 origin-local-release 9.5 M [openshift/origin-metrics-server] Transaction Summary [openshift/origin-metrics-server] ================================================================================ [openshift/origin-metrics-server] Install 1 Package [openshift/origin-metrics-server] Total download size: 9.5 M [openshift/origin-metrics-server] Installed size: 55 M [openshift/origin-metrics-server] Downloading packages: [openshift/origin-metrics-server] Running transaction check [openshift/origin-metrics-server] Running transaction test [openshift/origin-metrics-server] Transaction test succeeded 
[openshift/origin-metrics-server] Running transaction [openshift/origin-metrics-server] Installing : origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.el7.x86_6 1/1 [openshift/origin-metrics-server] Verifying : origin-metrics-server-3.10.0-0.alpha.0.0.9d7d21e.el7.x86_6 1/1 [openshift/origin-metrics-server] Installed: [openshift/origin-metrics-server] origin-metrics-server.x86_64 0:3.10.0-0.alpha.0.0.9d7d21e.el7 [openshift/origin-metrics-server] Complete! [openshift/origin-metrics-server] Loaded plugins: fastestmirror, ovl [openshift/origin-metrics-server] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-metrics-server] Cleaning up everything [openshift/origin-metrics-server] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-metrics-server] Cleaning up list of fastest mirrors [openshift/origin-metrics-server] --> LABEL io.k8s.display-name="OpenShift Container Platform Metrics Server" io.k8s.description="This is a component of OpenShift Container Platform and exposes the cluster resources metrics API." io.openshift.tags="openshift,metrics" [openshift/origin-metrics-server] --> USER 1001 [openshift/origin-metrics-server] --> CMD /usr/bin/metrics-server [openshift/origin-metrics-server] --> Committing changes to openshift/origin-metrics-server:9d7d21e ... [openshift/origin-metrics-server] --> Tagged as openshift/origin-metrics-server:latest [openshift/origin-metrics-server] --> Done [INFO] hack/build-images.sh exited with code 0 after 00h 00m 17s ++ cat ./ORIGIN_COMMIT + docker tag openshift/origin-metrics-server:latest openshift/origin-metrics-server:5eda3fa + set +o xtrace ########## FINISHED STAGE: SUCCESS: BUILD THE KUBERNETES METRICS SERVER CONTAINER IMAGE [00h 01m 30s] ########## [workspace@2] $ /bin/bash /tmp/jenkins6054311075170373743.sh ########## STARTING STAGE: BUILD THE ORIGIN WEB CONSOLE SERVER CONTAINER IMAGE ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.NnuQmbq5Bc + cat + chmod +x /tmp/tmp.NnuQmbq5Bc + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.NnuQmbq5Bc openshiftdevel:/tmp/tmp.NnuQmbq5Bc + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 3600 /tmp/tmp.NnuQmbq5Bc"' + cd /data/src/github.com/openshift/origin-web-console-server + make build-images OS_ONLY_BUILD_PLATFORMS='linux/amd64' hack/build-rpms.sh [INFO] Building release RPMs for /data/src/github.com/openshift/origin-web-console-server/origin-web-console-server.spec ... 
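Each of the three image stages in this job (the registry, the metrics server above, and the web console whose RPM build starts here) finishes by re-tagging its :latest image with the Origin commit read from ./ORIGIN_COMMIT, 5eda3fa in this run; that shared tag is what the later install stages reference through oreg_url=openshift/origin-${component}:5eda3fa. A minimal sketch of the re-tag step, with the helper name being illustrative rather than part of the job scripts:

# Re-tag a freshly built :latest image with the shared Origin commit.
# tag_with_origin_commit is an illustrative helper, not a job script.
tag_with_origin_commit() {
  local image="$1"
  local commit
  commit="$(cat ./ORIGIN_COMMIT)"   # 5eda3fa in this run
  docker tag "${image}:latest" "${image}:${commit}"
}
tag_with_origin_commit openshift/origin-metrics-server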
Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.deQN4p + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd /tmp/openshift/build-rpms/rpm/BUILD + rm -rf origin-web-console-3.10.0 + /usr/bin/gzip -dc /tmp/openshift/build-rpms/rpm/SOURCES/origin-web-console-3.10.0.tar.gz + /usr/bin/tar -xf - + STATUS=0 + '[' 0 -ne 0 ']' + cd origin-web-console-3.10.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . + exit 0 Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.nx2E19 + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-web-console-3.10.0 + BUILD_PLATFORM=linux/amd64 + OS_ONLY_BUILD_PLATFORMS=linux/amd64 + OS_GIT_COMMIT=e91f97f + OS_GIT_TREE_STATE=dirty + OS_GIT_VERSION=v3.10.0-alpha.0+e91f97f-5-dirty + OS_GIT_MAJOR=3 + OS_GIT_MINOR=10+ + OS_GIT_PATCH=0 + make build-cross make[1]: Entering directory `/tmp/openshift/build-rpms/rpm/BUILD/origin-web-console-3.10.0' hack/build-cross.sh ++ Building go targets for linux/amd64: cmd/origin-web-console hack/build-cross.sh took 57 seconds make[1]: Leaving directory `/tmp/openshift/build-rpms/rpm/BUILD/origin-web-console-3.10.0' Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.or29fc + exit 0 + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + '[' /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64 '!=' / ']' + rm -rf /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64 ++ dirname /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64 + mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT + mkdir /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64 + cd origin-web-console-3.10.0 ++ go env GOHOSTOS ++ go env GOHOSTARCH +++ INSTALLING origin-web-console + PLATFORM=linux/amd64 + install -d /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64/usr/bin + for bin in origin-web-console + echo '+++ INSTALLING origin-web-console' + install -p -m 755 _output/local/bin/linux/amd64/origin-web-console /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64/usr/bin/origin-web-console + /usr/lib/rpm/check-buildroot + /usr/lib/rpm/brp-compress Processing files: origin-web-console-3.10.0-0.alpha.0.5.e91f97f.el7.x86_64 Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.TR8zWe + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-web-console-3.10.0 + DOCDIR=/tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64/usr/share/doc/origin-web-console-3.10.0 + export DOCDIR + /usr/bin/mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64/usr/share/doc/origin-web-console-3.10.0 + cp -pr README.md /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64/usr/share/doc/origin-web-console-3.10.0 Executing(%license): /bin/sh -e /var/tmp/rpm-tmp.n5OIDh + exit 0 + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-web-console-3.10.0 + LICENSEDIR=/tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64/usr/share/licenses/origin-web-console-3.10.0 + export LICENSEDIR + /usr/bin/mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64/usr/share/licenses/origin-web-console-3.10.0 + cp -pr LICENSE /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64/usr/share/licenses/origin-web-console-3.10.0 + exit 0 
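The web console package is written out next (the Wrote: line just below). Once it exists it can be inspected directly before being published to the local repository; this is only a convenience sketch using standard rpm query flags, with the file name copied from that Wrote: line.

# Inspect the freshly built RPM before it lands in the local yum repository.
rpm -qip /tmp/openshift/build-rpms/rpm/RPMS/x86_64/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.el7.x86_64.rpm   # header/summary
rpm -qlp /tmp/openshift/build-rpms/rpm/RPMS/x86_64/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.el7.x86_64.rpm   # payload file list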
Provides: origin-web-console = 3.10.0-0.alpha.0.5.e91f97f.el7 origin-web-console(x86-64) = 3.10.0-0.alpha.0.5.e91f97f.el7 Requires(interp): /bin/sh Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1 Requires(pre): /bin/sh Requires: libc.so.6()(64bit) libc.so.6(GLIBC_2.2.5)(64bit) libpthread.so.0()(64bit) libpthread.so.0(GLIBC_2.2.5)(64bit) libpthread.so.0(GLIBC_2.3.2)(64bit) Checking for unpackaged file(s): /usr/lib/rpm/check-files /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64 Wrote: /tmp/openshift/build-rpms/rpm/RPMS/x86_64/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.el7.x86_64.rpm Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.VedENJ + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + cd origin-web-console-3.10.0 + /usr/bin/rm -rf /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.5.e91f97f.x86_64 + exit 0 make[1]: Entering directory `/data/src/github.com/openshift/origin-web-console-server' rm -rf _output make[1]: Leaving directory `/data/src/github.com/openshift/origin-web-console-server' Spawning worker 0 with 1 pkgs Spawning worker 1 with 0 pkgs Spawning worker 2 with 0 pkgs Spawning worker 3 with 0 pkgs Workers Finished Saving Primary metadata Saving file lists metadata Saving other metadata Generating sqlite DBs Sqlite DBs complete [INFO] Repository file for `yum` or `dnf` placed at /data/src/github.com/openshift/origin-web-console-server/_output/local/releases/rpms/local-release.repo [INFO] Install it with: [INFO] $ mv '/data/src/github.com/openshift/origin-web-console-server/_output/local/releases/rpms/local-release.repo' '/etc/yum.repos.d [INFO] hack/build-rpms.sh exited with code 0 after 00h 01m 28s hack/build-images.sh [openshift/origin-web-console] --> FROM openshift/origin-base as 0 [openshift/origin-web-console] --> RUN INSTALL_PKGS="origin-web-console" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all [openshift/origin-web-console] Loaded plugins: fastestmirror, ovl [openshift/origin-web-console] Determining fastest mirrors [openshift/origin-web-console] * base: mirror.vtti.vt.edu [openshift/origin-web-console] * extras: mirror.wdc1.us.leaseweb.net [openshift/origin-web-console] * updates: mirrors.tripadvisor.com [openshift/origin-web-console] Resolving Dependencies [openshift/origin-web-console] --> Running transaction check [openshift/origin-web-console] ---> Package origin-web-console.x86_64 0:3.10.0-0.alpha.0.5.e91f97f.el7 will be installed [openshift/origin-web-console] --> Finished Dependency Resolution [openshift/origin-web-console] Dependencies Resolved [openshift/origin-web-console] ================================================================================ [openshift/origin-web-console] Package Arch Version Repository Size [openshift/origin-web-console] ================================================================================ [openshift/origin-web-console] Installing: [openshift/origin-web-console] origin-web-console [openshift/origin-web-console] x86_64 3.10.0-0.alpha.0.5.e91f97f.el7 origin-local-release 16 M [openshift/origin-web-console] Transaction Summary [openshift/origin-web-console] ================================================================================ [openshift/origin-web-console] Install 1 Package [openshift/origin-web-console] Total download size: 16 M [openshift/origin-web-console] Installed size: 78 M 
[openshift/origin-web-console] Downloading packages: [openshift/origin-web-console] Running transaction check [openshift/origin-web-console] Running transaction test [openshift/origin-web-console] Transaction test succeeded [openshift/origin-web-console] Running transaction [openshift/origin-web-console] Installing : origin-web-console-3.10.0-0.alpha.0.5.e91f97f.el7.x86_64 1/1 [openshift/origin-web-console] Verifying : origin-web-console-3.10.0-0.alpha.0.5.e91f97f.el7.x86_64 1/1 [openshift/origin-web-console] Installed: [openshift/origin-web-console] origin-web-console.x86_64 0:3.10.0-0.alpha.0.5.e91f97f.el7 [openshift/origin-web-console] Complete! [openshift/origin-web-console] Loaded plugins: fastestmirror, ovl [openshift/origin-web-console] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-web-console] Cleaning up everything [openshift/origin-web-console] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-web-console] Cleaning up list of fastest mirrors [openshift/origin-web-console] --> LABEL io.k8s.display-name="OpenShift Web Console" io.k8s.description="This is a component of OpenShift Container Platform and provides a web console." io.openshift.tags="openshift" [openshift/origin-web-console] --> USER 1001 [openshift/origin-web-console] --> EXPOSE 5000 [openshift/origin-web-console] --> CMD [ "/usr/bin/origin-web-console" ] [openshift/origin-web-console] --> Committing changes to openshift/origin-web-console:e91f97f ... [openshift/origin-web-console] --> Tagged as openshift/origin-web-console:latest [openshift/origin-web-console] --> Done [INFO] hack/build-images.sh exited with code 0 after 00h 00m 19s ++ cat ./ORIGIN_COMMIT + docker tag openshift/origin-web-console:latest openshift/origin-web-console:5eda3fa + set +o xtrace ########## FINISHED STAGE: SUCCESS: BUILD THE ORIGIN WEB CONSOLE SERVER CONTAINER IMAGE [00h 01m 48s] ########## [workspace@2] $ /bin/bash /tmp/jenkins4938777296202784067.sh ########## STARTING STAGE: INSTALL THE OPENSHIFT-ANSIBLE RELEASE ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.zI3RYilmoD + cat + chmod +x /tmp/tmp.zI3RYilmoD + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.zI3RYilmoD openshiftdevel:/tmp/tmp.zI3RYilmoD + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 3600 /tmp/tmp.zI3RYilmoD"' + cd /data/src/github.com/openshift/openshift-ansible ++ git describe --tags --abbrev=0 --exact-match HEAD + last_tag=openshift-ansible-3.7.43-1 ++ git log -n 1 --pretty=%h + last_commit=42633e0 + [[ release-3.7 == \r\e\l\e\a\s\e\-\3\.\7 ]] + 
sudo yum downgrade -y 'ansible-2.3*' Loaded plugins: amazon-id, rhui-lb, search-disabled-repos Resolving Dependencies --> Running transaction check ---> Package ansible.noarch 0:2.3.2.0-2.el7 will be a downgrade --> Processing Dependency: python-crypto for package: ansible-2.3.2.0-2.el7.noarch ---> Package ansible.noarch 0:2.4.3.0-1.el7ae will be erased --> Running transaction check ---> Package python2-crypto.x86_64 0:2.6.1-15.el7 will be installed --> Processing Dependency: libtomcrypt.so.0()(64bit) for package: python2-crypto-2.6.1-15.el7.x86_64 --> Running transaction check ---> Package libtomcrypt.x86_64 0:1.17-26.el7 will be installed --> Processing Dependency: libtommath >= 0.42.0 for package: libtomcrypt-1.17-26.el7.x86_64 --> Processing Dependency: libtommath.so.0()(64bit) for package: libtomcrypt-1.17-26.el7.x86_64 --> Running transaction check ---> Package libtommath.x86_64 0:0.42.0-6.el7 will be installed --> Finished Dependency Resolution Dependencies Resolved ================================================================================ Package Arch Version Repository Size ================================================================================ Downgrading: ansible noarch 2.3.2.0-2.el7 oso-rhui-rhel-server-extras 5.7 M Installing for dependencies: libtomcrypt x86_64 1.17-26.el7 oso-rhui-rhel-server-extras 224 k libtommath x86_64 0.42.0-6.el7 oso-rhui-rhel-server-extras 36 k python2-crypto x86_64 2.6.1-15.el7 oso-rhui-rhel-server-extras 477 k Transaction Summary ================================================================================ Install ( 3 Dependent packages) Downgrade 1 Package Total download size: 6.4 M Downloading packages: -------------------------------------------------------------------------------- Total 19 MB/s | 6.4 MB 00:00 Running transaction check Running transaction test Transaction test succeeded Running transaction Installing : libtommath-0.42.0-6.el7.x86_64 1/5 Installing : libtomcrypt-1.17-26.el7.x86_64 2/5 Installing : python2-crypto-2.6.1-15.el7.x86_64 3/5 Installing : ansible-2.3.2.0-2.el7.noarch 4/5 Cleanup : ansible-2.4.3.0-1.el7ae.noarch 5/5 Verifying : ansible-2.3.2.0-2.el7.noarch 1/5 Verifying : libtomcrypt-1.17-26.el7.x86_64 2/5 Verifying : libtommath-0.42.0-6.el7.x86_64 3/5 Verifying : python2-crypto-2.6.1-15.el7.x86_64 4/5 Verifying : ansible-2.4.3.0-1.el7ae.noarch 5/5 Removed: ansible.noarch 0:2.4.3.0-1.el7ae Installed: ansible.noarch 0:2.3.2.0-2.el7 Dependency Installed: libtomcrypt.x86_64 0:1.17-26.el7 libtommath.x86_64 0:0.42.0-6.el7 python2-crypto.x86_64 0:2.6.1-15.el7 Complete! 
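The downgrade above is gated on the checked-out openshift-ansible branch being release-3.7 (the [[ release-3.7 == release-3.7 ]] test before it), so the job runs that release against the Ansible 2.3 series rather than the 2.4 build that was previously installed. A quick way to confirm the pin took effect; the expected strings come from the transaction above.

# Confirm the Ansible downgrade took effect (expected values from the transaction above).
ansible --version | head -n1   # expect: ansible 2.3.2.0
rpm -q ansible                 # expect: ansible-2.3.2.0-2.el7.noarch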
+ sudo yum install -y openshift-ansible-3.7.43-1.git.0.42633e0.el7 Loaded plugins: amazon-id, rhui-lb, search-disabled-repos Resolving Dependencies --> Running transaction check ---> Package openshift-ansible.noarch 0:3.7.43-1.git.0.42633e0.el7 will be installed --> Processing Dependency: openshift-ansible-roles = 3.7.43-1.git.0.42633e0.el7 for package: openshift-ansible-3.7.43-1.git.0.42633e0.el7.noarch --> Processing Dependency: openshift-ansible-playbooks = 3.7.43-1.git.0.42633e0.el7 for package: openshift-ansible-3.7.43-1.git.0.42633e0.el7.noarch --> Processing Dependency: openshift-ansible-lookup-plugins = 3.7.43-1.git.0.42633e0.el7 for package: openshift-ansible-3.7.43-1.git.0.42633e0.el7.noarch --> Processing Dependency: openshift-ansible-filter-plugins = 3.7.43-1.git.0.42633e0.el7 for package: openshift-ansible-3.7.43-1.git.0.42633e0.el7.noarch --> Processing Dependency: openshift-ansible-docs = 3.7.43-1.git.0.42633e0.el7 for package: openshift-ansible-3.7.43-1.git.0.42633e0.el7.noarch --> Processing Dependency: openshift-ansible-callback-plugins = 3.7.43-1.git.0.42633e0.el7 for package: openshift-ansible-3.7.43-1.git.0.42633e0.el7.noarch --> Processing Dependency: httpd-tools for package: openshift-ansible-3.7.43-1.git.0.42633e0.el7.noarch --> Running transaction check ---> Package httpd-tools.x86_64 0:2.4.6-67.el7_4.6 will be installed --> Processing Dependency: libaprutil-1.so.0()(64bit) for package: httpd-tools-2.4.6-67.el7_4.6.x86_64 --> Processing Dependency: libapr-1.so.0()(64bit) for package: httpd-tools-2.4.6-67.el7_4.6.x86_64 ---> Package openshift-ansible-callback-plugins.noarch 0:3.7.43-1.git.0.42633e0.el7 will be installed ---> Package openshift-ansible-docs.noarch 0:3.7.43-1.git.0.42633e0.el7 will be installed ---> Package openshift-ansible-filter-plugins.noarch 0:3.7.43-1.git.0.42633e0.el7 will be installed ---> Package openshift-ansible-lookup-plugins.noarch 0:3.7.43-1.git.0.42633e0.el7 will be installed ---> Package openshift-ansible-playbooks.noarch 0:3.7.43-1.git.0.42633e0.el7 will be installed ---> Package openshift-ansible-roles.noarch 0:3.7.43-1.git.0.42633e0.el7 will be installed --> Running transaction check ---> Package apr.x86_64 0:1.4.8-3.el7_4.1 will be installed ---> Package apr-util.x86_64 0:1.5.2-6.el7 will be installed --> Finished Dependency Resolution Dependencies Resolved ================================================================================ Package Arch Version Repository Size ================================================================================ Installing: openshift-ansible noarch 3.7.43-1.git.0.42633e0.el7 openshift-ansible-local-release 338 k Installing for dependencies: apr x86_64 1.4.8-3.el7_4.1 oso-rhui-rhel-server-releases 103 k apr-util x86_64 1.5.2-6.el7 oso-rhui-rhel-server-releases 92 k httpd-tools x86_64 2.4.6-67.el7_4.6 oso-rhui-rhel-server-releases 88 k openshift-ansible-callback-plugins noarch 3.7.43-1.git.0.42633e0.el7 openshift-ansible-local-release 329 k openshift-ansible-docs noarch 3.7.43-1.git.0.42633e0.el7 openshift-ansible-local-release 352 k openshift-ansible-filter-plugins noarch 3.7.43-1.git.0.42633e0.el7 openshift-ansible-local-release 342 k openshift-ansible-lookup-plugins noarch 3.7.43-1.git.0.42633e0.el7 openshift-ansible-local-release 320 k openshift-ansible-playbooks noarch 3.7.43-1.git.0.42633e0.el7 openshift-ansible-local-release 422 k openshift-ansible-roles noarch 3.7.43-1.git.0.42633e0.el7 openshift-ansible-local-release 1.9 M Transaction Summary 
================================================================================ Install 1 Package (+9 Dependent packages) Total download size: 4.2 M Installed size: 30 M Downloading packages: -------------------------------------------------------------------------------- Total 15 MB/s | 4.2 MB 00:00 Running transaction check Running transaction test Transaction test succeeded Running transaction Installing : apr-1.4.8-3.el7_4.1.x86_64 1/10 Installing : apr-util-1.5.2-6.el7.x86_64 2/10 Installing : httpd-tools-2.4.6-67.el7_4.6.x86_64 3/10 Installing : openshift-ansible-docs-3.7.43-1.git.0.42633e0.el7.noarch 4/10 Installing : openshift-ansible-lookup-plugins-3.7.43-1.git.0.42633e0. 5/10 Installing : openshift-ansible-roles-3.7.43-1.git.0.42633e0.el7.noarc 6/10 Installing : openshift-ansible-callback-plugins-3.7.43-1.git.0.42633e 7/10 Installing : openshift-ansible-3.7.43-1.git.0.42633e0.el7.noarch 8/10 Installing : openshift-ansible-filter-plugins-3.7.43-1.git.0.42633e0. 9/10 Installing : openshift-ansible-playbooks-3.7.43-1.git.0.42633e0.el7.n 10/10 Verifying : openshift-ansible-filter-plugins-3.7.43-1.git.0.42633e0. 1/10 Verifying : httpd-tools-2.4.6-67.el7_4.6.x86_64 2/10 Verifying : apr-util-1.5.2-6.el7.x86_64 3/10 Verifying : openshift-ansible-roles-3.7.43-1.git.0.42633e0.el7.noarc 4/10 Verifying : openshift-ansible-docs-3.7.43-1.git.0.42633e0.el7.noarch 5/10 Verifying : openshift-ansible-3.7.43-1.git.0.42633e0.el7.noarch 6/10 Verifying : openshift-ansible-lookup-plugins-3.7.43-1.git.0.42633e0. 7/10 Verifying : openshift-ansible-playbooks-3.7.43-1.git.0.42633e0.el7.n 8/10 Verifying : openshift-ansible-callback-plugins-3.7.43-1.git.0.42633e 9/10 Verifying : apr-1.4.8-3.el7_4.1.x86_64 10/10 Installed: openshift-ansible.noarch 0:3.7.43-1.git.0.42633e0.el7 Dependency Installed: apr.x86_64 0:1.4.8-3.el7_4.1 apr-util.x86_64 0:1.5.2-6.el7 httpd-tools.x86_64 0:2.4.6-67.el7_4.6 openshift-ansible-callback-plugins.noarch 0:3.7.43-1.git.0.42633e0.el7 openshift-ansible-docs.noarch 0:3.7.43-1.git.0.42633e0.el7 openshift-ansible-filter-plugins.noarch 0:3.7.43-1.git.0.42633e0.el7 openshift-ansible-lookup-plugins.noarch 0:3.7.43-1.git.0.42633e0.el7 openshift-ansible-playbooks.noarch 0:3.7.43-1.git.0.42633e0.el7 openshift-ansible-roles.noarch 0:3.7.43-1.git.0.42633e0.el7 Complete! 
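The split openshift-ansible subpackages installed above are what the remaining stages run against: the playbooks land under /usr/share/ansible/openshift-ansible, which is exactly the path the prerequisites and network_manager invocations later in this log use. A quick sanity check of that layout (sketch; the two playbook paths are the ones referenced further down):

# List the installed openshift-ansible subpackages and confirm the playbooks
# that the later stages of this job invoke are in place.
rpm -qa 'openshift-ansible*'
ls /usr/share/ansible/openshift-ansible/playbooks/prerequisites.yml
ls /usr/share/ansible/openshift-ansible/playbooks/byo/openshift-node/network_manager.yml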
+ rpm -V openshift-ansible-3.7.43-1.git.0.42633e0.el7 + set +o xtrace ########## FINISHED STAGE: SUCCESS: INSTALL THE OPENSHIFT-ANSIBLE RELEASE [00h 00m 42s] ########## [workspace@2] $ /bin/bash /tmp/jenkins7772619639158308083.sh ########## STARTING STAGE: INSTALL ANSIBLE PLUGINS ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.ME6wDB3dZE + cat + chmod +x /tmp/tmp.ME6wDB3dZE + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.ME6wDB3dZE openshiftdevel:/tmp/tmp.ME6wDB3dZE + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 600 /tmp/tmp.ME6wDB3dZE"' + cd /data/src/github.com/openshift/origin + sudo chmod o+rw /etc/environment ++ pwd + echo ANSIBLE_JUNIT_DIR=/data/src/github.com/openshift/origin/_output/scripts/ansible_junit + sudo mkdir -p /usr/share/ansible/plugins/callback + for plugin in ''\''default_with_output_lists'\''' ''\''generate_junit'\''' + wget https://raw.githubusercontent.com/openshift/origin-ci-tool/master/oct/ansible/oct/callback_plugins/default_with_output_lists.py --2018-04-06 21:37:44-- https://raw.githubusercontent.com/openshift/origin-ci-tool/master/oct/ansible/oct/callback_plugins/default_with_output_lists.py Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.200.133 Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.200.133|:443... connected. HTTP request sent, awaiting response... 200 OK Length: 1932 (1.9K) [text/plain] Saving to: ‘default_with_output_lists.py’ 0K . 100% 27.8M=0s 2018-04-06 21:37:44 (27.8 MB/s) - ‘default_with_output_lists.py’ saved [1932/1932] + sudo mv default_with_output_lists.py /usr/share/ansible/plugins/callback + for plugin in ''\''default_with_output_lists'\''' ''\''generate_junit'\''' + wget https://raw.githubusercontent.com/openshift/origin-ci-tool/master/oct/ansible/oct/callback_plugins/generate_junit.py --2018-04-06 21:37:44-- https://raw.githubusercontent.com/openshift/origin-ci-tool/master/oct/ansible/oct/callback_plugins/generate_junit.py Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.200.133 Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.200.133|:443... connected. HTTP request sent, awaiting response... 200 OK Length: 11212 (11K) [text/plain] Saving to: ‘generate_junit.py’ 0K .......... 
100% 60.2M=0s 2018-04-06 21:37:44 (60.2 MB/s) - ‘generate_junit.py’ saved [11212/11212] + sudo mv generate_junit.py /usr/share/ansible/plugins/callback + sudo sed -r -i -e 's/^#?stdout_callback.*/stdout_callback = default_with_output_lists/' -e 's/^#?callback_whitelist.*/callback_whitelist = generate_junit/' /etc/ansible/ansible.cfg + set +o xtrace ########## FINISHED STAGE: SUCCESS: INSTALL ANSIBLE PLUGINS [00h 00m 00s] ########## [workspace@2] $ /bin/bash /tmp/jenkins5671994947025824619.sh ########## STARTING STAGE: FORWARD PARAMETERS TO THE REMOTE HOST ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo chmod o+rw /etc/environment + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''OSTREE_COPY='\'' >> /etc/environment' + set +o xtrace ########## FINISHED STAGE: SUCCESS: FORWARD PARAMETERS TO THE REMOTE HOST [00h 00m 01s] ########## [workspace@2] $ /bin/bash /tmp/jenkins2555663148580337112.sh ########## STARTING STAGE: ATOMIC OSTREE COPIES ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.TAAGy74WyD + cat + chmod +x /tmp/tmp.TAAGy74WyD + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.TAAGy74WyD openshiftdevel:/tmp/tmp.TAAGy74WyD + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 14400 /tmp/tmp.TAAGy74WyD"' + cd /data/src/github.com/openshift/aos-cd-jobs + [[ '' == \t\r\u\e ]] + set +o xtrace ########## FINISHED STAGE: SUCCESS: ATOMIC OSTREE COPIES [00h 00m 01s] ########## [workspace@2] $ /bin/bash /tmp/jenkins6419931244504391660.sh ########## STARTING STAGE: ORIGIN PREREQUISITES ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source 
/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.sLiu1YAUoZ + cat + chmod +x /tmp/tmp.sLiu1YAUoZ + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.sLiu1YAUoZ openshiftdevel:/tmp/tmp.sLiu1YAUoZ + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 14400 /tmp/tmp.sLiu1YAUoZ"' + cd /data/src/github.com/openshift/aos-cd-jobs + [[ -f sjb/inventory/release-3.7.cfg ]] + evars='-e @sjb/inventory/base.cfg' ++ cat ./ORIGIN_PKG_VERSION ++ cat ./ORIGIN_RELEASE ++ cat ./ORIGIN_COMMIT + ansible-playbook -vv --become --become-user root --connection local --inventory sjb/inventory/ -e openshift_deployment_type=origin -e openshift_pkg_version=-3.7.2-1.5.5eda3fa -e openshift_release=3.7 -e 'oreg_url=openshift/origin-${component}:5eda3fa' '-e @sjb/inventory/base.cfg' -e skip_sanity_checks=true -e 'openshift_disable_check=*' -e openshift_install_examples=false /usr/share/ansible/openshift-ansible/playbooks/prerequisites.yml Using /etc/ansible/ansible.cfg as config file PLAYBOOK: prerequisites.yml **************************************************** 1 plays in /usr/share/ansible/openshift-ansible/playbooks/prerequisites.yml PLAY [Place holder for prerequisites] ****************************************** META: ran handlers TASK [Debug placeholder] ******************************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/prerequisites.yml:6 ok: [localhost] => { "generated_timestamp": "2018-04-06 21:37:47.550223", "msg": "Prerequisites ran." 
} META: ran handlers META: ran handlers PLAY RECAP ********************************************************************* localhost : ok=1 changed=0 unreachable=0 failed=0 + set +o xtrace ########## FINISHED STAGE: SUCCESS: ORIGIN PREREQUISITES [00h 00m 01s] ########## [workspace@2] $ /bin/bash /tmp/jenkins1085391971284586714.sh ########## STARTING STAGE: INSTALL ORIGIN ########## + [[ -s /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ]] + source /var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66 ++ export PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/437e1037dfc38a9b27d44a96a21bde8b638ccf66/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_openshift_ansible_logging_37/workspace@2/.config ++ mktemp + script=/tmp/tmp.Fp3F75t184 + cat + chmod +x /tmp/tmp.Fp3F75t184 + scp -F ./.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.Fp3F75t184 openshiftdevel:/tmp/tmp.Fp3F75t184 + ssh -F ./.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 7200 /tmp/tmp.Fp3F75t184"' + cd /data/src/github.com/openshift/aos-cd-jobs + playbook_base=/usr/share/ansible/openshift-ansible/playbooks/ + [[ -s /usr/share/ansible/openshift-ansible/playbooks//openshift-node/network_manager.yml ]] + playbook=/usr/share/ansible/openshift-ansible/playbooks/byo/openshift-node/network_manager.yml ++ curl http://169.254.169.254/latest/meta-data/local-ipv4 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 12 100 12 0 0 6880 0 --:--:-- --:--:-- --:--:-- 12000 + local_ip=172.18.1.211 ++ cat ./ORIGIN_PKG_VERSION ++ cat ./ORIGIN_RELEASE ++ cat ./ORIGIN_COMMIT + ansible-playbook -vv --become --become-user root --connection local --inventory sjb/inventory/ -e openshift_deployment_type=origin -e openshift_pkg_version=-3.7.2-1.5.5eda3fa -e openshift_release=3.7 -e 'oreg_url=openshift/origin-${component}:5eda3fa' -e skip_sanity_checks=true -e 'openshift_disable_check=*' -e openshift_install_examples=false /usr/share/ansible/openshift-ansible/playbooks/byo/openshift-node/network_manager.yml Using /etc/ansible/ansible.cfg as config file PLAYBOOK: network_manager.yml ************************************************** 3 plays in /usr/share/ansible/openshift-ansible/playbooks/byo/openshift-node/network_manager.yml PLAY [Create initial host groups for localhost] ******************************** META: ran handlers TASK [include_vars] ************************************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/byo/openshift-cluster/initialize_groups.yml:10 ok: [localhost] => { "ansible_facts": { "g_all_hosts": "{{ g_master_hosts | union(g_node_hosts) | union(g_etcd_hosts) | union(g_new_etcd_hosts) | union(g_lb_hosts) | union(g_nfs_hosts) | union(g_new_node_hosts)| union(g_new_master_hosts) | union(g_glusterfs_hosts) | union(g_glusterfs_registry_hosts) | default([]) }}", "g_etcd_hosts": "{{ groups.etcd | default([]) }}", 
"g_glusterfs_hosts": "{{ groups.glusterfs | default([]) }}", "g_glusterfs_registry_hosts": "{{ groups.glusterfs_registry | default(g_glusterfs_hosts) }}", "g_lb_hosts": "{{ groups.lb | default([]) }}", "g_master_hosts": "{{ groups.masters | default([]) }}", "g_new_etcd_hosts": "{{ groups.new_etcd | default([]) }}", "g_new_master_hosts": "{{ groups.new_masters | default([]) }}", "g_new_node_hosts": "{{ groups.new_nodes | default([]) }}", "g_nfs_hosts": "{{ groups.nfs | default([]) }}", "g_node_hosts": "{{ groups.nodes | default([]) }}" }, "changed": false, "generated_timestamp": "2018-04-06 21:37:48.772335" } META: ran handlers META: ran handlers PLAY [Populate config host groups] ********************************************* META: ran handlers TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:48.799991", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:48.820424", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:18 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:48.840947", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_lb_hosts required] *********************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:48.859524", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_nfs_hosts required] ********************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:28 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:48.877847", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_nfs_hosts is single host] **************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:33 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:48.897147", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_glusterfs_hosts required] **************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:38 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:48.916105", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - Fail if no etcd hosts group is defined] **************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:43 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:48.935286", "skip_reason": "Conditional result was False", "skipped": true } 
TASK [Evaluate oo_all_hosts] *************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:56 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_all_hosts" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:48.981645", "item": "localhost" } TASK [Evaluate oo_masters] ***************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:65 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_masters" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:49.012370", "item": "localhost" } TASK [Evaluate oo_first_master] ************************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:74 creating host via 'add_host': hostname=localhost ok: [localhost] => { "add_host": { "groups": [ "oo_first_master" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:49.042697" } TASK [Evaluate oo_new_etcd_to_config] ****************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:83 TASK [Evaluate oo_masters_to_config] ******************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:92 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_masters_to_config" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:49.089845", "item": "localhost" } TASK [Evaluate oo_etcd_to_config] ********************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:101 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_etcd_to_config" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:49.119032", "item": "localhost" } TASK [Evaluate oo_first_etcd] ************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:110 creating host via 'add_host': hostname=localhost ok: [localhost] => { "add_host": { "groups": [ "oo_first_etcd" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:49.147001" } TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:122 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_etcd_hosts_to_upgrade" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:49.173015", "item": "localhost" } TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:129 creating host via 
'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_etcd_hosts_to_backup" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:49.199008", "item": "localhost" } TASK [Evaluate oo_nodes_to_config] ********************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:136 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_nodes_to_config" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:49.229524", "item": "localhost" } TASK [Add master to oo_nodes_to_config] **************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:146 skipping: [localhost] => (item=localhost) => { "changed": false, "generated_timestamp": "2018-04-06 21:37:49.254765", "item": "localhost", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate oo_lb_to_config] ************************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:156 TASK [Evaluate oo_nfs_to_config] *********************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:165 TASK [Evaluate oo_glusterfs_to_config] ***************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:174 TASK [Evaluate oo_etcd_to_migrate] ********************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:183 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_etcd_to_migrate" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:49.332542", "item": "localhost" } META: ran handlers META: ran handlers PLAY [Install and configure NetworkManager] ************************************ TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [install NetworkManager] ************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-node/network_manager.yml:8 ok: [localhost] => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:37:51.779329", "msg": "", "rc": 0, "results": [ "NetworkManager-1:1.8.0-11.el7_4.x86_64 providing NetworkManager is already installed" ] } TASK [configure NetworkManager] ************************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-node/network_manager.yml:15 changed: [localhost] => (item=USE_PEERDNS) => { "backup": "", "changed": true, "generated_timestamp": "2018-04-06 21:37:52.039836", "item": "USE_PEERDNS", "msg": "line added" } changed: [localhost] => (item=NM_CONTROLLED) => { "backup": "", "changed": true, "generated_timestamp": "2018-04-06 21:37:52.186924", "item": "NM_CONTROLLED", "msg": "line added" } TASK [enable and start NetworkManager] ***************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-node/network_manager.yml:26 ok: [localhost] => { 
"changed": false, "enabled": true, "generated_timestamp": "2018-04-06 21:37:52.461732", "name": "NetworkManager", "state": "started", "status": { "ActiveEnterTimestamp": "Fri 2018-04-06 20:57:15 UTC", "ActiveEnterTimestampMonotonic": "24341948", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "network-pre.target system.slice dbus.service systemd-journald.socket basic.target cloud-init-local.service", "AllowIsolate": "no", "AmbientCapabilities": "0", "AssertResult": "yes", "AssertTimestamp": "Fri 2018-04-06 20:57:13 UTC", "AssertTimestampMonotonic": "22945398", "Before": "shutdown.target cloud-init.service NetworkManager-wait-online.service network.service multi-user.target network.target", "BlockIOAccounting": "no", "BlockIOWeight": "18446744073709551615", "BusName": "org.freedesktop.NetworkManager", "CPUAccounting": "yes", "CPUQuotaPerSecUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "18446744073709551615", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "537212130", "ConditionResult": "yes", "ConditionTimestamp": "Fri 2018-04-06 20:57:13 UTC", "ConditionTimestampMonotonic": "22945397", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/NetworkManager.service", "ControlPID": "0", "DefaultDependencies": "yes", "Delegate": "no", "Description": "Network Manager", "DevicePolicy": "auto", "Documentation": "man:NetworkManager(8)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "683", "ExecMainStartTimestamp": "Fri 2018-04-06 20:57:13 UTC", "ExecMainStartTimestampMonotonic": "22946208", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/NetworkManager ; argv[]=/usr/sbin/NetworkManager --no-daemon ; ignore_errors=no ; start_time=[Fri 2018-04-06 20:57:13 UTC] ; stop_time=[n/a] ; pid=683 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/NetworkManager.service", "GuessMainPID": "yes", "IOScheduling": "0", "Id": "NetworkManager.service", "IgnoreOnIsolate": "no", "IgnoreOnSnapshot": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Fri 2018-04-06 20:57:13 UTC", "InactiveExitTimestampMonotonic": "22946237", "JobTimeoutAction": "none", "JobTimeoutUSec": "0", "KillMode": "process", "KillSignal": "15", "LimitAS": "18446744073709551615", "LimitCORE": "18446744073709551615", "LimitCPU": "18446744073709551615", "LimitDATA": "18446744073709551615", "LimitFSIZE": "18446744073709551615", "LimitLOCKS": "18446744073709551615", "LimitMEMLOCK": "65536", "LimitMSGQUEUE": "819200", "LimitNICE": "0", "LimitNOFILE": "4096", "LimitNPROC": "63327", "LimitRSS": "18446744073709551615", "LimitRTPRIO": "0", "LimitRTTIME": "18446744073709551615", "LimitSIGPENDING": "63327", "LimitSTACK": "18446744073709551615", "LoadState": "loaded", "MainPID": "683", "MemoryAccounting": "yes", "MemoryCurrent": "21774336", "MemoryLimit": "18446744073709551615", "MountFlags": "0", "Names": "NetworkManager.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "PrivateDevices": "no", "PrivateNetwork": "no", "PrivateTmp": "no", 
"ProtectHome": "read-only", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RequiredBy": "NetworkManager-wait-online.service", "Requires": "basic.target", "Restart": "on-failure", "RestartUSec": "100ms", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitInterval": "10000000", "StartupBlockIOWeight": "18446744073709551615", "StartupCPUShares": "18446744073709551615", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "no", "TasksCurrent": "4", "TasksMax": "18446744073709551615", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "WantedBy": "multi-user.target", "Wants": "network.target system.slice", "WatchdogTimestamp": "Fri 2018-04-06 20:57:15 UTC", "WatchdogTimestampMonotonic": "24341884", "WatchdogUSec": "0" } } META: ran handlers META: ran handlers PLAY RECAP ********************************************************************* localhost : ok=15 changed=1 unreachable=0 failed=0 + [[ -s /usr/share/ansible/openshift-ansible/playbooks/deploy_cluster.yml ]] + playbook=/usr/share/ansible/openshift-ansible/playbooks/byo/config.yml + [[ -f sjb/inventory/release-3.7.cfg ]] + evars='-e @sjb/inventory/base.cfg' ++ cat ./ORIGIN_PKG_VERSION ++ cat ./ORIGIN_RELEASE ++ cat ./ORIGIN_COMMIT + ansible-playbook -vv --become --become-user root --connection local --inventory sjb/inventory/ -e openshift_deployment_type=origin -e etcd_data_dir=/tmp/etcd -e openshift_master_default_subdomain=172.18.1.211.nip.io -e openshift_pkg_version=-3.7.2-1.5.5eda3fa -e openshift_release=3.7 -e 'oreg_url=openshift/origin-${component}:5eda3fa' -e openshift_node_port_range=30000-32000 -e 'osm_controller_args={"enable-hostpath-provisioner":["true"]}' '-e @sjb/inventory/base.cfg' -e skip_sanity_checks=true -e 'openshift_disable_check=*' -e openshift_install_examples=false /usr/share/ansible/openshift-ansible/playbooks/byo/config.yml Using /etc/ansible/ansible.cfg as config file statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml statically included: 
/usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_rpm.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_rpm.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/ca.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/drop_etcdctl.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/auxiliary/drop_etcdctl.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/system_container.yml statically included: 
/usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_nfs/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_loadbalancer/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_cloud_provider/tasks/openstack.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_cloud_provider/tasks/aws.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_cloud_provider/tasks/gce.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/journald.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/systemd_units.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/upgrade_facts.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/system_container.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/bootstrap_settings.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/set_loopback_context.yml statically included: 
/usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/check_master_api_is_ready.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/bootstrap.yml statically included: /usr/share/ansible/openshift-ansible/roles/nuage_master/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/nuage_master/tasks/serviceaccount.yml statically included: /usr/share/ansible/openshift-ansible/roles/nuage_master/tasks/certificates.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml statically included: /usr/share/ansible/openshift-ansible/roles/kuryr/tasks/serviceaccount.yaml statically included: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/tasks/wire_aggregator.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_master_cluster/tasks/configure.yml statically included: /usr/share/ansible/openshift-ansible/roles/cockpit/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_cloud_provider/tasks/openstack.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_cloud_provider/tasks/aws.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_cloud_provider/tasks/gce.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node_dnsmasq/tasks/./network-manager.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node_dnsmasq/tasks/./no-network-manager.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node_dnsmasq/tasks/network-manager.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/install.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/systemd_units.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/install-node-deps-docker-service-file.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/install-ovs-service-env-file.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/node_system_container.yml statically included: 
/usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/openvswitch_system_container.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/install-ovs-docker-service-file.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/configure-node-settings.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/configure-proxy-settings.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/aws.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/storage_plugins/nfs.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/storage_plugins/glusterfs.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/storage_plugins/ceph.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/storage_plugins/iscsi.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/workaround-bz1331590-ovs-oom-fix.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/bootstrap.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/aws.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml statically included: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_cloud_provider/tasks/openstack.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_cloud_provider/tasks/aws.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_cloud_provider/tasks/gce.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node_dnsmasq/tasks/./network-manager.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node_dnsmasq/tasks/./no-network-manager.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node_dnsmasq/tasks/network-manager.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/install.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/registry_auth.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/systemd_units.yml statically included: 
/usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/install-node-deps-docker-service-file.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/install-ovs-service-env-file.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/node_system_container.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/openvswitch_system_container.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/install-ovs-docker-service-file.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/configure-node-settings.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/configure-proxy-settings.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/aws.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/storage_plugins/nfs.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/storage_plugins/glusterfs.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/storage_plugins/ceph.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/storage_plugins/iscsi.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/config/workaround-bz1331590-ovs-oom-fix.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/bootstrap.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_node/tasks/aws.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml statically included: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml statically included: /usr/share/ansible/openshift-ansible/roles/nuage_node/tasks/certificates.yml statically included: /usr/share/ansible/openshift-ansible/roles/nuage_node/tasks/iptables.yml statically included: /usr/share/ansible/openshift-ansible/roles/nuage_node/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/glusterfs_config.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/glusterfs_config_facts.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/glusterfs_common.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/glusterfs_deploy.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/heketi_deploy_part1.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/heketi_deploy_part2.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/glusterblock_deploy.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/gluster_s3_deploy.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/glusterfs_registry.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/glusterfs_registry_facts.yml statically included: 
/usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/glusterfs_common.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/glusterfs_deploy.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/heketi_deploy_part1.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/heketi_deploy_part2.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/glusterblock_deploy.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_storage_glusterfs/tasks/gluster_s3_deploy.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_hosted/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_hosted/tasks/wait_for_pod.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_hosted/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_hosted/tasks/storage/glusterfs_endpoints.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_hosted/tasks/wait_for_pod.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_hosted/tasks/storage/glusterfs.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/install_metrics.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/pre_install.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/install_heapster.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/generate_heapster_secrets.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/install_hosa.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/update_master_config.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/stop_metrics.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/start_metrics.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/uninstall_metrics.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/stop_metrics.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_metrics/tasks/uninstall_hosa.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_logging/tasks/install_logging.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_logging/tasks/set_defaults_from_current.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_logging/tasks/generate_certs.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_logging/tasks/generate_jks.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_logging/tasks/annotate_ops_projects.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_logging/tasks/update_master_config.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_logging/tasks/delete_logging.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_prometheus/tasks/install_prometheus.yaml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_service_catalog/tasks/install.yml statically 
included: /usr/share/ansible/openshift-ansible/roles/openshift_service_catalog/tasks/generate_certs.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_service_catalog/tasks/start_api_server.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_service_catalog/tasks/remove.yml statically included: /usr/share/ansible/openshift-ansible/roles/ansible_service_broker/tasks/install.yml statically included: /usr/share/ansible/openshift-ansible/roles/ansible_service_broker/tasks/validate_facts.yml statically included: /usr/share/ansible/openshift-ansible/roles/ansible_service_broker/tasks/generate_certs.yml statically included: /usr/share/ansible/openshift-ansible/roles/ansible_service_broker/tasks/remove.yml statically included: /usr/share/ansible/openshift-ansible/roles/template_service_broker/tasks/install.yml statically included: /usr/share/ansible/openshift-ansible/roles/template_service_broker/tasks/remove.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_management/tasks/validate.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_management/tasks/accounts.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_management/tasks/storage/nfs_server.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_management/tasks/storage/nfs.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_nfs/tasks/firewall.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_management/tasks/template.yml statically included: /usr/share/ansible/openshift-ansible/roles/openshift_management/tasks/storage/create_nfs_pvs.yml PLAYBOOK: config.yml *********************************************************** 90 plays in /usr/share/ansible/openshift-ansible/playbooks/byo/config.yml PLAY [Create initial host groups for localhost] ******************************** META: ran handlers TASK [include_vars] ************************************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/byo/openshift-cluster/initialize_groups.yml:10 ok: [localhost] => { "ansible_facts": { "g_all_hosts": "{{ g_master_hosts | union(g_node_hosts) | union(g_etcd_hosts) | union(g_new_etcd_hosts) | union(g_lb_hosts) | union(g_nfs_hosts) | union(g_new_node_hosts)| union(g_new_master_hosts) | union(g_glusterfs_hosts) | union(g_glusterfs_registry_hosts) | default([]) }}", "g_etcd_hosts": "{{ groups.etcd | default([]) }}", "g_glusterfs_hosts": "{{ groups.glusterfs | default([]) }}", "g_glusterfs_registry_hosts": "{{ groups.glusterfs_registry | default(g_glusterfs_hosts) }}", "g_lb_hosts": "{{ groups.lb | default([]) }}", "g_master_hosts": "{{ groups.masters | default([]) }}", "g_new_etcd_hosts": "{{ groups.new_etcd | default([]) }}", "g_new_master_hosts": "{{ groups.new_masters | default([]) }}", "g_new_node_hosts": "{{ groups.new_nodes | default([]) }}", "g_nfs_hosts": "{{ groups.nfs | default([]) }}", "g_node_hosts": "{{ groups.nodes | default([]) }}" }, "changed": false, "generated_timestamp": "2018-04-06 21:37:58.350097" } META: ran handlers META: ran handlers PLAY [Initialization Checkpoint Start] ***************************************** skipping: no hosts matched PLAY [Populate config host groups] ********************************************* META: ran handlers TASK [Evaluate groups - g_etcd_hosts or g_new_etcd_hosts required] ************* task path: 
/usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:58.389491", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_master_hosts or g_new_master_hosts required] ********* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:58.414894", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_node_hosts or g_new_node_hosts required] ************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:18 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:58.440895", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_lb_hosts required] *********************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:58.464183", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_nfs_hosts required] ********************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:28 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:58.487332", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_nfs_hosts is single host] **************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:33 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:58.510898", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - g_glusterfs_hosts required] **************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:38 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:58.534194", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate groups - Fail if no etcd hosts group is defined] **************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:43 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:37:58.557376", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate oo_all_hosts] *************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:56 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_all_hosts" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:58.614859", "item": "localhost" } TASK [Evaluate oo_masters] ***************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:65 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_masters" ], "host_name": "localhost", "host_vars": {} }, "changed": false, 
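The "Evaluate oo_*" tasks here populate in-memory host groups with add_host so that later plays can target them, and each add_host is forced to report "changed": false. A minimal sketch of that pattern follows; the group expressions are shortened for illustration and are not the role's actual ones.

---
# Sketch: copy hosts from the static inventory groups into runtime oo_* groups.
- name: Populate config host groups
  hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Evaluate oo_all_hosts
      add_host:
        name: "{{ item }}"
        groups: oo_all_hosts
      with_items: "{{ (groups['masters'] | default([]))
                      | union(groups['nodes'] | default([]))
                      | union(groups['etcd'] | default([])) }}"
      changed_when: false

    - name: Evaluate oo_first_master
      add_host:
        name: "{{ (groups['masters'] | default(['localhost']))[0] }}"
        groups: oo_first_master
      changed_when: false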
"generated_timestamp": "2018-04-06 21:37:58.653426", "item": "localhost" } TASK [Evaluate oo_first_master] ************************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:74 creating host via 'add_host': hostname=localhost ok: [localhost] => { "add_host": { "groups": [ "oo_first_master" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:58.685958" } TASK [Evaluate oo_new_etcd_to_config] ****************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:83 TASK [Evaluate oo_masters_to_config] ******************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:92 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_masters_to_config" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:58.742680", "item": "localhost" } TASK [Evaluate oo_etcd_to_config] ********************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:101 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_etcd_to_config" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:58.778413", "item": "localhost" } TASK [Evaluate oo_first_etcd] ************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:110 creating host via 'add_host': hostname=localhost ok: [localhost] => { "add_host": { "groups": [ "oo_first_etcd" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:58.811067" } TASK [Evaluate oo_etcd_hosts_to_upgrade] *************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:122 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_etcd_hosts_to_upgrade" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:58.842347", "item": "localhost" } TASK [Evaluate oo_etcd_hosts_to_backup] **************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:129 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_etcd_hosts_to_backup" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:58.874243", "item": "localhost" } TASK [Evaluate oo_nodes_to_config] ********************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:136 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_nodes_to_config" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:58.910756", "item": "localhost" } TASK [Add master to oo_nodes_to_config] **************************************** task path: 
/usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:146 skipping: [localhost] => (item=localhost) => { "changed": false, "generated_timestamp": "2018-04-06 21:37:58.942814", "item": "localhost", "skip_reason": "Conditional result was False", "skipped": true } TASK [Evaluate oo_lb_to_config] ************************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:156 TASK [Evaluate oo_nfs_to_config] *********************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:165 TASK [Evaluate oo_glusterfs_to_config] ***************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:174 TASK [Evaluate oo_etcd_to_migrate] ********************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/evaluate_groups.yml:183 creating host via 'add_host': hostname=localhost ok: [localhost] => (item=localhost) => { "add_host": { "groups": [ "oo_etcd_to_migrate" ], "host_name": "localhost", "host_vars": {} }, "changed": false, "generated_timestamp": "2018-04-06 21:37:59.040036", "item": "localhost" } META: ran handlers META: ran handlers PLAY [Ensure that all non-node hosts are accessible] *************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers META: ran handlers META: ran handlers PLAY [Initialize host facts] *************************************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [os_firewall : Detecting Atomic Host Operating System] ******************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/main.yml:2 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.112961", "stat": { "exists": false } } TASK [os_firewall : Set fact r_os_firewall_is_atomic] ************************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/main.yml:7 ok: [localhost] => { "ansible_facts": { "r_os_firewall_is_atomic": false }, "changed": false, "generated_timestamp": "2018-04-06 21:38:01.141902" } TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] ********** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.169225", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Install firewalld packages] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:9 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.197842", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Ensure iptables services are not enabled] ****************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:17 skipping: [localhost] => (item=iptables) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.229073", "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=ip6tables) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.239505", 
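The os_firewall role first checks for Atomic Host, records the result as r_os_firewall_is_atomic, and then runs either its firewalld or its iptables task file; in this run every firewalld task is skipped. A condensed sketch of that detect-and-branch pattern, with the stat path and task-file names assumed:

---
# Sketch: detect Atomic Host, then pick a firewall backend.
- hosts: all
  become: true
  vars:
    os_firewall_use_firewalld: false      # this run uses iptables
  tasks:
    - name: Detecting Atomic Host Operating System
      stat:
        path: /run/ostree-booted          # assumed marker file
      register: ostree_booted

    - name: Set fact r_os_firewall_is_atomic
      set_fact:
        r_os_firewall_is_atomic: "{{ ostree_booted.stat.exists }}"

    - name: Fail - Firewalld is not supported on Atomic Host
      fail:
        msg: firewalld is not supported on Atomic Host
      when:
        - r_os_firewall_is_atomic | bool
        - os_firewall_use_firewalld | bool

    - include_tasks: firewalld.yml        # assumed file names
      when: os_firewall_use_firewalld | bool

    - include_tasks: iptables.yml
      when: not os_firewall_use_firewalld | bool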
"item": "ip6tables", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Wait 10 seconds after disabling iptables] ****************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:29 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.267846", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Start and enable firewalld service] ************************ task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:34 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.295206", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] *** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:43 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.323381", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Restart polkitd] ******************************************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:48 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.350384", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Wait for polkit action to have been created] *************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:55 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.378138", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Ensure firewalld service is not enabled] ******************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:3 ok: [localhost] => { "changed": false, "failed": false, "failed_when_result": false, "generated_timestamp": "2018-04-06 21:38:01.819453", "msg": "Could not find the requested service firewalld: host" } TASK [os_firewall : Wait 10 seconds after disabling firewalld] ***************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:12 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:01.854480", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Install iptables packages] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:17 ok: [localhost] => (item=iptables) => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:38:02.729685", "item": "iptables", "msg": "", "rc": 0, "results": [ "iptables-1.4.21-18.3.el7_4.x86_64 providing iptables is already installed" ] } changed: [localhost] => (item=iptables-services) => { "attempts": 1, "changed": true, "generated_timestamp": "2018-04-06 21:38:07.719221", "item": "iptables-services", "msg": "", "rc": 0, "results": [ "Loaded plugins: amazon-id, rhui-lb, search-disabled-repos\nResolving Dependencies\n--> Running transaction check\n---> Package iptables-services.x86_64 0:1.4.21-18.3.el7_4 will be installed\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository 
Size\n================================================================================\nInstalling:\n iptables-services x86_64 1.4.21-18.3.el7_4 oso-rhui-rhel-server-releases 51 k\n\nTransaction Summary\n================================================================================\nInstall 1 Package\n\nTotal download size: 51 k\nInstalled size: 25 k\nDownloading packages:\nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Installing : iptables-services-1.4.21-18.3.el7_4.x86_64 1/1 \n Verifying : iptables-services-1.4.21-18.3.el7_4.x86_64 1/1 \n\nInstalled:\n iptables-services.x86_64 0:1.4.21-18.3.el7_4 \n\nComplete!\n" ] } TASK [os_firewall : Start and enable iptables service] ************************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:28 changed: [localhost -> localhost] => (item=localhost) => { "changed": true, "enabled": true, "generated_timestamp": "2018-04-06 21:38:08.095680", "item": "localhost", "name": "iptables", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket syslog.target basic.target system.slice", "AllowIsolate": "no", "AmbientCapabilities": "0", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target network.service ip6tables.service", "BlockIOAccounting": "no", "BlockIOWeight": "18446744073709551615", "CPUAccounting": "yes", "CPUQuotaPerSecUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "18446744073709551615", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "18446744073709551615", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "Delegate": "no", "Description": "IPv4 firewall with iptables", "DevicePolicy": "auto", "Environment": "BOOTUP=serial CONSOLETYPE=serial", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/usr/libexec/iptables/iptables.init ; argv[]=/usr/libexec/iptables/iptables.init reload ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/libexec/iptables/iptables.init ; argv[]=/usr/libexec/iptables/iptables.init start ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/iptables/iptables.init ; argv[]=/usr/libexec/iptables/iptables.init stop ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/iptables.service", "GuessMainPID": "yes", "IOScheduling": "0", "Id": "iptables.service", "IgnoreOnIsolate": "no", "IgnoreOnSnapshot": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobTimeoutAction": "none", "JobTimeoutUSec": "0", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "18446744073709551615", "LimitCORE": "18446744073709551615", "LimitCPU": "18446744073709551615", "LimitDATA": "18446744073709551615", "LimitFSIZE": "18446744073709551615", "LimitLOCKS": "18446744073709551615", "LimitMEMLOCK": "65536", "LimitMSGQUEUE": "819200", 
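The iptables tasks in this run install iptables-services, start and enable the unit, and then deliberately wait 10 seconds (the "need to pause here" task a little further down) so the firewall restart cannot disrupt the active ssh session. A minimal sketch of that enable-then-pause pattern:

---
# Sketch: enable iptables, then pause briefly so ssh is not dropped
# while the firewall service (re)starts.
- hosts: all
  become: true
  tasks:
    - name: Install iptables packages
      package:
        name:
          - iptables
          - iptables-services
        state: present

    - name: Start and enable iptables service
      systemd:
        name: iptables
        state: started
        enabled: true
      register: iptables_service

    - name: Pause after starting iptables
      pause:
        seconds: 10
      when: iptables_service.changed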
"LimitNICE": "0", "LimitNOFILE": "4096", "LimitNPROC": "63327", "LimitRSS": "18446744073709551615", "LimitRTPRIO": "0", "LimitRTTIME": "18446744073709551615", "LimitSIGPENDING": "63327", "LimitSTACK": "18446744073709551615", "LoadState": "loaded", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "18446744073709551615", "MemoryLimit": "18446744073709551615", "MountFlags": "0", "Names": "iptables.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "PrivateDevices": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "ProtectHome": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "Requires": "basic.target", "Restart": "no", "RestartUSec": "100ms", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "syslog", "StandardInput": "null", "StandardOutput": "syslog", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitInterval": "10000000", "StartupBlockIOWeight": "18446744073709551615", "StartupCPUShares": "18446744073709551615", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "no", "TasksCurrent": "18446744073709551615", "TasksMax": "18446744073709551615", "TimeoutStartUSec": "0", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "Wants": "system.slice", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] *** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:40 Pausing for 10 seconds (ctrl+C then 'C' = continue early, ctrl+C then 'A' = abort) ok: [localhost] => { "changed": false, "delta": 10, "generated_timestamp": "2018-04-06 21:38:18.139055", "rc": 0, "start": "2018-04-06 21:38:08.136357", "stderr": [], "stdout": [ "Paused for 10.0 seconds" ], "stop": "2018-04-06 21:38:18.136529", "user_input": "" } TASK [openshift_sanitize_inventory : Check for usage of deprecated variables] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml:3 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:18.250403", "msg": "No deprecations found" } TASK [openshift_sanitize_inventory : debug] ************************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml:11 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:18.277789", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : pause] ************************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml:12 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:18.305514", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : Assign deprecated 
variables to correct counterparts] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/deprecations.yml:18 included: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml for localhost included: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml for localhost TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml:5 ok: [localhost] => { "ansible_facts": {}, "changed": false, "generated_timestamp": "2018-04-06 21:38:18.545133" } TASK [openshift_sanitize_inventory : set_fact] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_logging.yml:42 ok: [localhost] => { "ansible_facts": { "openshift_logging_elasticsearch_ops_pvc_dynamic": "", "openshift_logging_elasticsearch_ops_pvc_prefix": "", "openshift_logging_elasticsearch_ops_pvc_size": "", "openshift_logging_elasticsearch_pvc_dynamic": "", "openshift_logging_elasticsearch_pvc_prefix": "", "openshift_logging_elasticsearch_pvc_size": "" }, "changed": false, "generated_timestamp": "2018-04-06 21:38:18.585487" } TASK [openshift_sanitize_inventory : conditional_set_fact] ********************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/__deprecations_metrics.yml:5 ok: [localhost] => { "ansible_facts": {}, "changed": false, "generated_timestamp": "2018-04-06 21:38:18.694205" } TASK [openshift_sanitize_inventory : Abort when conflicting deployment type variables are set] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/main.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:18.717523", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : Standardize on latest variable names] ***** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/main.yml:20 ok: [localhost] => { "ansible_facts": { "deployment_subtype": "basic", "deployment_type": "origin", "openshift_deployment_subtype": "basic", "openshift_deployment_type": "origin" }, "changed": false, "generated_timestamp": "2018-04-06 21:38:18.748601" } TASK [openshift_sanitize_inventory : Abort when deployment type is invalid] **** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/main.yml:30 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:18.772587", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : Normalize openshift_release] ************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/main.yml:38 ok: [localhost] => { "ansible_facts": { "openshift_release": "3.7" }, "changed": false, "generated_timestamp": "2018-04-06 21:38:18.804002" } TASK [openshift_sanitize_inventory : Abort when openshift_release is invalid] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/main.yml:48 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:18.830391", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : Ensure that openshift_use_dnsmasq is true] *** 
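The openshift_sanitize_inventory tasks shown here normalize a handful of inventory variables before anything gets installed: deprecated names are mapped forward, deployment_type is standardized, openshift_release is normalized, and conflicting switches abort the run. A compressed sketch of the same idea; the regex and defaults are illustrative, not the role's exact logic.

---
# Sketch: normalize inventory variables up front and fail fast on conflicts.
- hosts: localhost
  connection: local
  gather_facts: false
  vars:
    openshift_deployment_type: origin
    openshift_release: v3.7
  tasks:
    - name: Standardize on latest variable names
      set_fact:
        deployment_type: "{{ openshift_deployment_type }}"

    - name: Normalize openshift_release
      set_fact:
        openshift_release: "{{ openshift_release | string | regex_replace('^v', '') }}"

    - name: Abort when deployment type is invalid
      fail:
        msg: "Unknown deployment_type {{ deployment_type }}"
      when: deployment_type not in ['origin', 'openshift-enterprise']

    - name: Ensure broker install/remove switches are mutually exclusive
      fail:
        msg: ansible_service_broker_install and ansible_service_broker_remove are mutually exclusive
      when:
        - ansible_service_broker_install | default(false) | bool
        - ansible_service_broker_remove | default(false) | bool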
task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml:5 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:18.857858", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : Ensure that openshift_node_dnsmasq_install_network_manager_hook is true] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml:14 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:18.885339", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : set_fact] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml:22 TASK [openshift_sanitize_inventory : Ensure that dynamic provisioning is set if using dynamic storage] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml:29 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.008251", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml:45 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.039284", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : Ensure the hosted registry's GlusterFS storage is configured correctly] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/unsupported.yml:57 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.066400", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : Ensure clusterid is set along with the cloudprovider] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/main.yml:61 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.092377", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : Ensure ansible_service_broker_remove and ansible_service_broker_install are mutually exclusive] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/main.yml:74 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.116029", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_sanitize_inventory : Ensure template_service_broker_remove and template_service_broker_install are mutually exclusive] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_sanitize_inventory/tasks/main.yml:83 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.139169", "skip_reason": "Conditional result was False", "skipped": true } TASK [Detecting Operating System from ostree_booted] *************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:27 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.303197", "stat": { "exists": false } } TASK [initialize_facts set fact l_is_atomic] *********************************** task path: 
/usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:33 ok: [localhost] => { "ansible_facts": { "l_is_atomic": false }, "changed": false, "generated_timestamp": "2018-04-06 21:38:19.329902" } TASK [initialize_facts set fact for containerized and l_is_*_system_container] *** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:37 ok: [localhost] => { "ansible_facts": { "l_is_containerized": false, "l_is_etcd_system_container": false, "l_is_master_system_container": false, "l_is_node_system_container": false, "l_is_openvswitch_system_container": false }, "changed": false, "generated_timestamp": "2018-04-06 21:38:19.368049" } TASK [initialize_facts set facts for l_any_system_container] ******************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:45 ok: [localhost] => { "ansible_facts": { "l_any_system_container": false }, "changed": false, "generated_timestamp": "2018-04-06 21:38:19.395113" } TASK [initialize_facts set fact for l_etcd_runtime] **************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:49 ok: [localhost] => { "ansible_facts": { "l_etcd_runtime": "host" }, "changed": false, "generated_timestamp": "2018-04-06 21:38:19.422797" } TASK [Validate python version - ans_dist is fedora and python is v3] *********** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:55 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.446121", "skip_reason": "Conditional result was False", "skipped": true } TASK [Validate python version - ans_dist not Fedora and python must be v2] ***** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:66 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.472427", "skip_reason": "Conditional result was False", "skipped": true } TASK [Determine Atomic Host Docker Version] ************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:85 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.495800", "skip_reason": "Conditional result was False", "skipped": true } TASK [assert atomic host docker version is 1.12 or later] ********************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:89 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:19.518163", "skip_reason": "Conditional result was False", "skipped": true } TASK [Ensure openshift-ansible installer package deps are installed] *********** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:98 ok: [localhost] => (item=iproute) => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:38:20.218593", "item": "iproute", "msg": "", "rc": 0, "results": [ "iproute-3.10.0-87.el7.x86_64 providing iproute is already installed" ] } ok: [localhost] => (item=dbus-python) => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:38:20.890667", "item": "dbus-python", "msg": "", "rc": 0, "results": [ "dbus-python-1.1.1-9.el7.x86_64 providing dbus-python is already installed" ] } ok: [localhost] => (item=PyYAML) => { "attempts": 1, 
"changed": false, "generated_timestamp": "2018-04-06 21:38:21.569568", "item": "PyYAML", "msg": "", "rc": 0, "results": [ "PyYAML-3.10-11.el7.x86_64 providing PyYAML is already installed" ] } ok: [localhost] => (item=python-ipaddress) => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:38:22.239566", "item": "python-ipaddress", "msg": "", "rc": 0, "results": [ "python-ipaddress-1.0.16-2.el7.noarch providing python-ipaddress is already installed" ] } ok: [localhost] => (item=yum-utils) => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:38:22.917777", "item": "yum-utils", "msg": "", "rc": 0, "results": [ "yum-utils-1.1.31-42.el7.noarch providing yum-utils is already installed" ] } TASK [Ensure various deps for running system containers are installed] ********* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:113 skipping: [localhost] => (item=atomic) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:22.947106", "item": "atomic", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=ostree) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:22.955255", "item": "ostree", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=runc) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:22.964304", "item": "runc", "skip_reason": "Conditional result was False", "skipped": true } TASK [Default system_images_registry to a enterprise registry] ***************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:126 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:22.989538", "skip_reason": "Conditional result was False", "skipped": true } TASK [Default system_images_registry to community registry] ******************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:133 ok: [localhost] => { "ansible_facts": { "system_images_registry": "docker.io" }, "changed": false, "generated_timestamp": "2018-04-06 21:38:23.021874" } TASK [Gather Cluster facts and set is_containerized if needed] ***************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:140 changed: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "54.152.227.161", "ec2-54-152-227-161.compute-1.amazonaws.com", "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": 
"openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "options": "--log-driver=journald", "service_name": "docker", "version": "1.13.1" }, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": true, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": true, "generated_timestamp": "2018-04-06 21:38:25.098027" } TASK [Set fact of no_proxy_internal_hostnames] 
********************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:164 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:25.123004", "skip_reason": "Conditional result was False", "skipped": true } TASK [initialize_facts set_fact repoquery command] ***************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:177 ok: [localhost] => { "ansible_facts": { "repoquery_cmd": "repoquery --plugins" }, "changed": false, "generated_timestamp": "2018-04-06 21:38:25.150356" } TASK [initialize_facts set_fact on openshift_docker_hosted_registry_network] *** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/initialize_facts.yml:181 ok: [localhost] => { "ansible_facts": { "openshift_docker_hosted_registry_network": "172.30.0.0/16" }, "changed": false, "generated_timestamp": "2018-04-06 21:38:25.237000" } META: ran handlers META: ran handlers PLAY [Verify Requirements] ***************************************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:5 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:25.973918", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:9 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:25.999040", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:26.024189", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:17 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:26.048785", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:21 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:26.073643", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:25 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:26.098652", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:29 skipping: [localhost] => { 
"changed": false, "generated_timestamp": "2018-04-06 21:38:26.123504", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:33 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:26.147880", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:37 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:26.172011", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:41 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:26.196799", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:45 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:26.221298", "skip_reason": "Conditional result was False", "skipped": true } TASK [fail] ******************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/sanity_checks.yml:49 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:26.244641", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers META: ran handlers PLAY [Validate node hostnames] ************************************************* TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [Query DNS for IP address of ip-172-18-1-211.ec2.internal] **************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/validate_hostnames.yml:6 ok: [localhost] => { "changed": false, "cmd": "getent ahostsv4 ip-172-18-1-211.ec2.internal | head -n 1 | awk '{ print $1 }'", "delta": "0:00:00.006651", "end": "2018-04-06 21:38:27.391190", "failed": false, "failed_when_result": false, "generated_timestamp": "2018-04-06 21:38:27.408695", "rc": 0, "start": "2018-04-06 21:38:27.384539", "stderr": [], "stdout": [ "172.18.1.211" ] } TASK [Validate openshift_hostname when defined] ******************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/validate_hostnames.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:27.436662", "skip_reason": "Conditional result was False", "skipped": true } TASK [Validate openshift_ip exists on node when defined] *********************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/validate_hostnames.yml:30 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:27.460068", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers META: ran handlers PLAY [Setup yum repositories for all hosts] ************************************ META: ran handlers TASK [openshift_repos : 
openshift_repos detect ostree] ************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_repos/tasks/main.yaml:2 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:27.637491", "stat": { "exists": false } } TASK [openshift_repos : Ensure libselinux-python is installed] ***************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_repos/tasks/main.yaml:10 ok: [localhost] => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:38:28.346716", "msg": "", "rc": 0, "results": [ "libselinux-python-2.5-11.el7.x86_64 providing libselinux-python is already installed" ] } TASK [openshift_repos : Remove openshift_additional.repo file] ***************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_repos/tasks/main.yaml:15 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:28.801421", "path": "/etc/yum.repos.d/openshift_additional.repo", "state": "absent" } TASK [openshift_repos : Create any additional repos that are defined] ********** task path: /usr/share/ansible/openshift-ansible/roles/openshift_repos/tasks/main.yaml:20 TASK [openshift_repos : Configure origin gpg keys] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml:7 NOTIFIED HANDLER refresh cache changed: [localhost] => { "changed": true, "checksum": "e350bf139f91980ff9cc39877e14643ba3139da1", "dest": "/etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-SIG-PaaS", "generated_timestamp": "2018-04-06 21:38:29.463072", "gid": 0, "group": "root", "md5sum": "dcc03fee597ca1939365152258676a1e", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:cert_t:s0", "size": 1037, "src": "/home/origin/.ansible/tmp/ansible-tmp-1523050709.03-35719536209353/source", "state": "file", "uid": 0 } TASK [openshift_repos : Configure correct origin release repository] *********** task path: /usr/share/ansible/openshift-ansible/roles/openshift_repos/tasks/centos_repos.yml:17 changed: [localhost] => (item=/usr/share/ansible/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin37.repo.j2) => { "changed": true, "checksum": "3d20b33d01580da93028447876c79f30bcff5a23", "dest": "/etc/yum.repos.d/CentOS-OpenShift-Origin37.repo", "generated_timestamp": "2018-04-06 21:38:30.011036", "gid": 0, "group": "root", "item": "/usr/share/ansible/openshift-ansible/roles/openshift_repos/templates/CentOS-OpenShift-Origin37.repo.j2", "md5sum": "27cb58c0db2d1bfe2dd0ef918dd7dc42", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:system_conf_t:s0", "size": 883, "src": "/home/origin/.ansible/tmp/ansible-tmp-1523050709.72-84082984074869/source", "state": "file", "uid": 0 } TASK [openshift_repos : Ensure clean repo cache in the event repos have been changed manually] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_repos/tasks/main.yaml:47 ok: [localhost] => { "generated_timestamp": "2018-04-06 21:38:30.053619", "msg": "First run of openshift_repos" } TASK [openshift_repos : Record that openshift_repos already ran] *************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_repos/tasks/main.yaml:53 ok: [localhost] => { "ansible_facts": { "r_openshift_repos_has_run": true }, "changed": false, "generated_timestamp": "2018-04-06 21:38:30.092056" } RUNNING HANDLER [openshift_repos : refresh cache] ****************************** changed: [localhost] => { "changed": true, "cmd": [ "yum", "clean", "all" ], "delta": 
"0:00:00.359954", "end": "2018-04-06 21:38:30.601691", "generated_timestamp": "2018-04-06 21:38:30.619220", "rc": 0, "start": "2018-04-06 21:38:30.241737", "stderr": [], "stdout": [ "Loaded plugins: amazon-id, rhui-lb, search-disabled-repos", "Cleaning repos: centos-openshift-origin37 charlie", " : openshift-ansible-local-release origin-deps-rhel7", " : origin-local-release oso-rhui-rhel-server-extras", " : oso-rhui-rhel-server-releases", " : oso-rhui-rhel-server-releases-optional", " : oso-rhui-rhel-server-rhscl rhel-7-server-ansible-2.4-rpms", " : rhel-7-server-ose-3.1-rpms rhel-7-server-ose-3.2-rpms", " : rhel-7-server-ose-3.3-rpms rhel-7-server-ose-3.4-rpms", " : rhel-7-server-ose-3.5-rpms rhel-7-server-ose-3.6-rpms", " : rhel-7-server-ose-3.7-rpms rhui-REGION-client-config-server-7", " : rhui-REGION-rhel-server-releases", " : rhui-REGION-rhel-server-rh-common", "Cleaning up everything", "Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos" ] } META: ran handlers META: ran handlers META: ran handlers PLAY [Determine openshift_version to configure on first master] **************** TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [openshift_docker_facts : Set docker facts] ******************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:2 changed: [localhost] => (item={u'local_facts': {u'use_crio': False, u'log_driver': u'', u'disable_push_dockerhub': u'', u'selinux_enabled': u'', u'hosted_registry_insecure': False, u'hosted_registry_network': u'172.30.0.0/16', u'log_options': u'', u'options': u'', u'use_system_container': False}, u'role': u'docker'}) => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "54.152.227.161", "ec2-54-152-227-161.compute-1.amazonaws.com", "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", 
"service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": true, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": true, "generated_timestamp": "2018-04-06 21:38:33.008805", "item": { "local_facts": { "disable_push_dockerhub": "", "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "log_driver": "", "log_options": "", "options": "", "selinux_enabled": "", "use_crio": false, "use_system_container": false }, "role": "docker" } } ok: [localhost] => (item={u'local_facts': {u'sdn_mtu': u''}, u'role': u'node'}) => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "54.152.227.161", "ec2-54-152-227-161.compute-1.amazonaws.com", "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "cli_image": "openshift/origin", "client_binary": "oc", 
"config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": true, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": 
"vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": false, "generated_timestamp": "2018-04-06 21:38:34.568792", "item": { "local_facts": { "sdn_mtu": "" }, "role": "node" } } TASK [openshift_docker_facts : set_fact] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:22 ok: [localhost] => { "ansible_facts": { "docker_push_dockerhub": false }, "changed": false, "generated_timestamp": "2018-04-06 21:38:34.609825" } TASK [openshift_docker_facts : set_fact] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:32 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.636751", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_docker_facts : set_fact] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:37 ok: [localhost] => { "ansible_facts": { "docker_options": "--log-driver=journald" }, "changed": false, "generated_timestamp": "2018-04-06 21:38:34.669258" } TASK [docker : Getting current systemd-udevd exec command] ********************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml:3 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.697157", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Assure systemd-udevd.service.d directory exists] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.725406", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create systemd-udevd override file] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.753075", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.780188", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Add enterprise registry, if necessary] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.806283", "skip_reason": 
"Conditional result was False", "skipped": true } TASK [docker : Get current installed Docker version] *************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.833892", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Error out if Docker pre-installed but too old] ****************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:10 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.861431", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Error out if requested Docker is too old] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:15 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.889397", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Fail if Docker version requested but downgrade is required] ***** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:22 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.918235", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Error out if attempting to upgrade Docker across the 1.10 boundary] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:29 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.946717", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install Docker] ************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:36 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:34.975376", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure docker.service.d directory exists] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:44 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.002659", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Configure Docker service unit file] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:49 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.030257", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : stat] *********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:57 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.058880", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set registry params] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:60 skipping: [localhost] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []}) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.111903", "item": { "reg_conf_var": "ADD_REGISTRY", "reg_fact_val": [], "reg_flag": "--add-registry" }, "skip_reason": "Conditional result was 
False", "skipped": true } skipping: [localhost] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []}) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.120868", "item": { "reg_conf_var": "BLOCK_REGISTRY", "reg_fact_val": [], "reg_flag": "--block-registry" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []}) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.129971", "item": { "reg_conf_var": "INSECURE_REGISTRY", "reg_fact_val": [], "reg_flag": "--insecure-registry" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Place additional/blocked/insecure registries in /etc/containers/registries.conf] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:82 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.158201", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set Proxy Settings] ********************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:90 skipping: [localhost] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''}) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.193599", "item": { "reg_conf_var": "HTTP_PROXY", "reg_fact_val": "" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''}) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.202553", "item": { "reg_conf_var": "HTTPS_PROXY", "reg_fact_val": "" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''}) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.211546", "item": { "reg_conf_var": "NO_PROXY", "reg_fact_val": "" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set various Docker options] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:108 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.240966", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : stat] *********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:123 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.269285", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Configure Docker Network OPTIONS] ******************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:126 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.296227", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Detect if docker is already started] **************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:140 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.323679", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Start the Docker service] 
*************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:145 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.351481", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:156 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.379240", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers TASK [docker : Check for credentials file for registry auth] ******************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.408593", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.437717", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth (alternative)] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.466433", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure /var/lib/containers exists] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:28 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.495096", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Fix SELinux Permissions on /var/lib/containers] ***************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:33 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.523917", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:3 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.553107", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.581208", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.609930", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Fail quickly if openshift_docker_options are set] *************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:21 skipping: [localhost] => { "changed": false, "generated_timestamp": 
"2018-04-06 21:38:35.639558", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure container-selinux is installed] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:32 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.666805", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure atomic is installed] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:41 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.695672", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure runc is installed] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:51 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.724352", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install Docker so we can use the client] ************************ task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:60 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.752365", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Disable Docker] ************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:67 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.780268", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add http_proxy to /etc/atomic.conf] ******************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.810092", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add https_proxy to /etc/atomic.conf] ****************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:16 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.839856", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add no_proxy to /etc/atomic.conf] ********************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:25 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.868835", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set to default prepend] ***************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:87 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.896374", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set container engine image tag] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:92 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.924917", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use Red Hat Registry for image when distribution is Red 
Hat] **** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:98 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.951839", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use Fedora Registry for image when distribution is Fedora] ****** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:103 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:35.979529", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set the full image name] **************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:108 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.008214", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use a specific image if requested] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:113 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.036255", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : debug] ********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:121 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.063945", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Check for credentials file for registry auth] ******************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.093288", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.122825", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth (alternative)] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.151573", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Pre-pull Container Engine System Container image] *************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:131 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.180237", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure container-engine.service.d directory exists] ************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:138 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.209837", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure /etc/docker directory exists] **************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:143 skipping: [localhost] => { "changed": false, 
"generated_timestamp": "2018-04-06 21:38:36.236804", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install Container Engine System Container] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:148 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.264824", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Configure Container Engine Service File] ************************ task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:154 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.292717", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:161 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.320813", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Configure Container Engine] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:171 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.348702", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Start the Container Engine service] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:177 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.376598", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:188 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.404282", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:4 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.431987", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.459704", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:10 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.488561", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.516929", "skip_reason": "Conditional result was False", "skipped": true } TASK 
[docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:17 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.545008", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:21 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.572950", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:27 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.601555", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure container-selinux is installed] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:33 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.631855", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Check we are not using node as a Docker container with CRI-O] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:41 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.661128", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure atomic is installed] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:48 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.690400", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure runc is installed] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:58 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.718396", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Check that overlay is in the kernel] **************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:67 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.746354", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Add overlay to modprobe.d] ************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:76 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.775726", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Manually modprobe overlay into the kernel] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:82 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.803146", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Enable and start systemd-modules-load] ************************** task path: 
/usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:85 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.832425", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add http_proxy to /etc/atomic.conf] ******************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.862759", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add https_proxy to /etc/atomic.conf] ****************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:16 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.890750", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add no_proxy to /etc/atomic.conf] ********************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:25 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.919599", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set CRI-O image defaults] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:99 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.947417", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use Centos based image when distribution is CentOS] ************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:105 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:36.975395", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set CRI-O image tag] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:110 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.002715", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use RHEL based image when distribution is Red Hat] ************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:116 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.030609", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set the full image name] **************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:122 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.059216", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use a specific image if requested] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:127 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.086271", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : debug] ********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:135 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 
21:38:37.114123", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Pre-pull CRI-O System Container image] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:139 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.143007", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install CRI-O System Container] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:146 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.171219", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove CRI-O default configuration files] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:152 skipping: [localhost] => (item=/etc/cni/net.d/200-loopback.conf) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.202283", "item": "/etc/cni/net.d/200-loopback.conf", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=/etc/cni/net.d/100-crio-bridge.conf) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.210520", "item": "/etc/cni/net.d/100-crio-bridge.conf", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create the CRI-O configuration] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:160 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.238826", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure CNI configuration directory exists] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:166 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.266815", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Add iptables allow rules] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:4 skipping: [localhost] => (item={u'port': u'10010/tcp', u'service': u'crio'}) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.305316", "item": { "port": "10010/tcp", "service": "crio" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove iptables rules] ****************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:13 TASK [docker : Add firewalld allow rules] ************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:24 skipping: [localhost] => (item={u'port': u'10010/tcp', u'service': u'crio'}) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.369067", "item": { "port": "10010/tcp", "service": "crio" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove firewalld allow rules] *********************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:33 TASK [docker : Configure the CNI network] ************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:175 skipping: [localhost] => { 
"changed": false, "generated_timestamp": "2018-04-06 21:38:37.422258", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Start the CRI-O service] **************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:180 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.451274", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers TASK [docker : Check for credentials file for registry auth] ******************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.480543", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.509245", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth (alternative)] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.539082", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : stat the docker data dir] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:49 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.565863", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : stop the current running docker] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:58 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.593052", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure /var/lib/containers/docker exists] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:63 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.622377", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set the selinux context on /var/lib/containers/docker] ********** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:68 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.652322", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : restorecon the /var/lib/containers/docker] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:75 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.681821", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove the old docker location] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:78 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.710215", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Setup the link] ************************************************* task path: 
/usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:83 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.739541", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : start docker] *************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:89 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.768923", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:4 ok: [localhost] => { "ansible_facts": { "is_atomic": false, "is_containerized": false }, "changed": false, "generated_timestamp": "2018-04-06 21:38:37.805578" } TASK [openshift_version : Abort when we cannot safely guess what Origin image version the user wanted] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:11 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.830248", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:25 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.858691", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:31 ok: [localhost] => { "ansible_facts": { "openshift_release": "3.7" }, "changed": false, "generated_timestamp": "2018-04-06 21:38:37.890874" } TASK [openshift_version : (Origin) Verify openshift_image_tag is valid] ******** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:48 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.916212", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : (Enterprise) Verify openshift_image_tag is valid] **** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:65 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.941763", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:76 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.966955", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Use openshift.common.version fact as version to configure if already installed] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:82 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:37.993058", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Set rpm version to configure if openshift_pkg_version specified] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_rpm.yml:2 ok: [localhost] => { "ansible_facts": { "openshift_version": "3.7.2" }, "changed": false, 
"generated_timestamp": "2018-04-06 21:38:38.042490" } TASK [openshift_version : Get available origin version] ************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_rpm.yml:11 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.079395", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : fail] ************************************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_rpm.yml:17 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.112187", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_rpm.yml:21 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.145906", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.176949", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Set containerized version to configure if openshift_image_tag specified] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml:5 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.207074", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Set containerized version to configure if openshift_release specified] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml:14 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.236938", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Lookup latest containerized version if no version specified] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml:21 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.268082", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml:30 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.298067", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml:38 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.327961", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Set precise containerized version to configure if openshift_release specified] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml:44 skipping: [localhost] => { "changed": false, 
"generated_timestamp": "2018-04-06 21:38:38.359686", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml:53 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.390715", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/set_version_containerized.yml:67 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.420834", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Get available origin version] ************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:104 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.453416", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : fail] ************************************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:109 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.482933", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:112 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.512789", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Fail if rpm version and docker image version are different] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:114 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.542703", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Warn if openshift_image_tag is defined when not doing a containerized install] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:129 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.575040", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : debug] *********************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:144 ok: [localhost] => { "generated_timestamp": "2018-04-06 21:38:38.616940", "msg": "openshift_image_tag was not defined. 
Falling back to v3.7.2" } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:147 ok: [localhost] => { "ansible_facts": { "openshift_image_tag": "v3.7.2" }, "changed": false, "generated_timestamp": "2018-04-06 21:38:38.659303" } TASK [openshift_version : debug] *********************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:153 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.689538", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : set_fact] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:156 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.719505", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Abort if openshift_version was not set] ************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:163 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.749230", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Abort if openshift_image_tag was not set] ************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:168 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.779015", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Abort if openshift_pkg_version was not set] ********** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:173 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.808237", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : Abort if openshift_pkg_version was not set] ********** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:181 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.839977", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_version : For an RPM install, abort when the release requested does not match the available version.] 
*** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:190 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:38.885564", "msg": "All assertions passed" } TASK [openshift_version : debug] *********************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:206 ok: [localhost] => { "generated_timestamp": "2018-04-06 21:38:38.919386", "openshift_release": "3.7" } TASK [openshift_version : debug] *********************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:208 ok: [localhost] => { "generated_timestamp": "2018-04-06 21:38:38.953372", "openshift_image_tag": "v3.7.2" } TASK [openshift_version : debug] *********************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/main.yml:210 ok: [localhost] => { "generated_timestamp": "2018-04-06 21:38:38.988227", "openshift_pkg_version": "-3.7.2-1.5.5eda3fa" } META: ran handlers META: ran handlers PLAY [Set openshift_version for etcd, node, and master hosts] ****************** skipping: no hosts matched PLAY [Initialization Checkpoint End] ******************************************* META: ran handlers TASK [Set install initialization 'Complete'] *********************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-cluster/std_include.yml:42 ok: [localhost] => { "ansible_stats": { "aggregate": false, "data": { "installer_phase_initialize": "Complete" }, "per_host": false }, "changed": false, "generated_timestamp": "2018-04-06 21:38:39.023763" } META: ran handlers META: ran handlers PLAY [Health Check Checkpoint Start] ******************************************* META: ran handlers TASK [Set Health Check 'In Progress'] ****************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-checks/install.yml:6 ok: [localhost] => { "ansible_stats": { "aggregate": false, "data": { "installer_phase_health": "In Progress" }, "per_host": false }, "changed": false, "generated_timestamp": "2018-04-06 21:38:39.056624" } META: ran handlers META: ran handlers PLAY [OpenShift Health Checks] ************************************************* TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers META: ran handlers TASK [Run health checks (install) - EL] **************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-checks/install.yml:20 CHECK [docker_storage : localhost] ********************************************* CHECK [disk_availability : localhost] ****************************************** CHECK [package_availability : localhost] *************************************** CHECK [package_version : localhost] ******************************************** CHECK [docker_image_availability : localhost] ********************************** CHECK [memory_availability : localhost] **************************************** ok: [localhost] => { "changed": false, "checks": { "disk_availability": { "skipped": true, "skipped_reason": "Disabled by user request" }, "docker_image_availability": { "skipped": true, "skipped_reason": "Disabled by user request" }, "docker_storage": { "skipped": true, "skipped_reason": "Disabled by user request" }, "memory_availability": { "skipped": true, "skipped_reason": "Disabled by user request" }, 
"package_availability": { "skipped": true, "skipped_reason": "Disabled by user request" }, "package_version": { "skipped": true, "skipped_reason": "Disabled by user request" } }, "generated_timestamp": "2018-04-06 21:38:39.812098", "playbook_context": "install" } TASK [Run health checks (install) - Fedora] ************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-checks/install.yml:32 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:39.837818", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers PLAY [Health Check Checkpoint End] ********************************************* META: ran handlers TASK [Set Health Check 'Complete'] ********************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-checks/install.yml:43 ok: [localhost] => { "ansible_stats": { "aggregate": false, "data": { "installer_phase_health": "Complete" }, "per_host": false }, "changed": false, "generated_timestamp": "2018-04-06 21:38:39.871184" } META: ran handlers META: ran handlers PLAY [etcd Install Checkpoint Start] ******************************************* META: ran handlers TASK [Set etcd install 'In Progress'] ****************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-etcd/config.yml:6 ok: [localhost] => { "ansible_stats": { "aggregate": false, "data": { "installer_phase_etcd": "In Progress" }, "per_host": false }, "changed": false, "generated_timestamp": "2018-04-06 21:38:39.905007" } META: ran handlers META: ran handlers PLAY [Generate new etcd CA] **************************************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [openshift_etcd_facts : openshift_facts] ********************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_etcd_facts/tasks/main.yml:2 changed: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "54.152.227.161", "ec2-54-152-227-161.compute-1.amazonaws.com", "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, 
"current_config": { "roles": [ "node", "docker", "etcd" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": true, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": true, "generated_timestamp": "2018-04-06 21:38:42.235245" } TASK [etcd : Install openssl] ************************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:2 ok: [localhost -> localhost] => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:38:43.042573", "msg": "", "rc": 0, "results": [ 
"openssl-1:1.0.2k-8.el7.x86_64 providing openssl is already installed" ] } TASK [etcd : file] ************************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:12 changed: [localhost -> localhost] => (item=/etc/etcd/ca/certs) => { "changed": true, "generated_timestamp": "2018-04-06 21:38:43.254567", "gid": 0, "group": "root", "item": "/etc/etcd/ca/certs", "mode": "0700", "owner": "root", "path": "/etc/etcd/ca/certs", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } changed: [localhost -> localhost] => (item=/etc/etcd/ca/crl) => { "changed": true, "generated_timestamp": "2018-04-06 21:38:43.418287", "gid": 0, "group": "root", "item": "/etc/etcd/ca/crl", "mode": "0700", "owner": "root", "path": "/etc/etcd/ca/crl", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } changed: [localhost -> localhost] => (item=/etc/etcd/ca/fragments) => { "changed": true, "generated_timestamp": "2018-04-06 21:38:43.582735", "gid": 0, "group": "root", "item": "/etc/etcd/ca/fragments", "mode": "0700", "owner": "root", "path": "/etc/etcd/ca/fragments", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [etcd : command] ********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:25 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "cp", "/etc/pki/tls/openssl.cnf", "./" ], "delta": "0:00:00.003086", "end": "2018-04-06 21:38:43.765984", "generated_timestamp": "2018-04-06 21:38:43.783116", "rc": 0, "start": "2018-04-06 21:38:43.762898", "stderr": [], "stdout": [] } TASK [etcd : template] ********************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:32 changed: [localhost -> localhost] => { "changed": true, "checksum": "89d806189e2eeb08170b2b3ad59048fba712608d", "dest": "/etc/etcd/ca/fragments/openssl_append.cnf", "generated_timestamp": "2018-04-06 21:38:44.275818", "gid": 0, "group": "root", "md5sum": "987645e41bb6e5b5236b6595d1704ece", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1624, "src": "/home/origin/.ansible/tmp/ansible-tmp-1523050723.98-239003081303517/source", "state": "file", "uid": 0 } TASK [etcd : assemble] ********************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:39 changed: [localhost -> localhost] => { "changed": true, "checksum": "0cfa42aed961a813b9d5a7bd78d2b3a030c2a3b7", "dest": "/etc/etcd/ca/openssl.cnf", "generated_timestamp": "2018-04-06 21:38:44.759056", "gid": 0, "group": "root", "md5sum": "2ff7c09c59d7481b970ad5010da1143f", "mode": "0644", "msg": "OK", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 12547, "src": "/etc/etcd/ca/fragments", "state": "file", "uid": 0 } TASK [etcd : Check etcd_ca_db exist] ******************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:45 ok: [localhost -> localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:44.954232", "stat": { "exists": false } } TASK [etcd : Touch etcd_ca_db file] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:52 
changed: [localhost -> localhost] => { "changed": true, "dest": "/etc/etcd/ca/index.txt", "generated_timestamp": "2018-04-06 21:38:45.156200", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [etcd : copy] ************************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:60 changed: [localhost -> localhost] => { "changed": true, "checksum": "ddfe163345d338193ac2bdc183f8e9dcff904b43", "dest": "/etc/etcd/ca/serial", "generated_timestamp": "2018-04-06 21:38:45.505772", "gid": 0, "group": "root", "md5sum": "96a3be3cf272e017046d1b2674a52bd3", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 2, "src": "/home/origin/.ansible/tmp/ansible-tmp-1523050725.35-189214426388541/source", "state": "file", "uid": 0 } TASK [etcd : Create etcd CA certificate] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:67 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "openssl", "req", "-config", "/etc/etcd/ca/openssl.cnf", "-newkey", "rsa:4096", "-keyout", "/etc/etcd/ca/ca.key", "-new", "-out", "/etc/etcd/ca/ca.crt", "-x509", "-extensions", "etcd_v3_ca_self", "-batch", "-nodes", "-days", "1825", "-subj", "/CN=etcd-signer@1523050720" ], "delta": "0:00:00.957844", "end": "2018-04-06 21:38:46.648099", "generated_timestamp": "2018-04-06 21:38:46.666669", "rc": 0, "start": "2018-04-06 21:38:45.690255", "stderr": [ "Generating a 4096 bit RSA private key", "...................................................................................................................++", "............................................++", "writing new private key to '/etc/etcd/ca/ca.key'", "-----" ], "stdout": [] } META: ran handlers META: ran handlers PLAY [Create etcd server certificates for etcd hosts] ************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [openshift_etcd_facts : openshift_facts] ********************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_etcd_facts/tasks/main.yml:2 ok: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "54.152.227.161", "ec2-54-152-227-161.compute-1.amazonaws.com", "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", 
"system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "docker", "etcd" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": true, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": false, "generated_timestamp": "2018-04-06 21:38:48.977227" } META: ran handlers TASK [etcd : Install 
openssl] ************************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:2 ok: [localhost -> localhost] => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:38:49.768996", "msg": "", "rc": 0, "results": [ "openssl-1:1.0.2k-8.el7.x86_64 providing openssl is already installed" ] } TASK [etcd : file] ************************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:12 ok: [localhost -> localhost] => (item=/etc/etcd/ca/certs) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:49.983559", "gid": 0, "group": "root", "item": "/etc/etcd/ca/certs", "mode": "0700", "owner": "root", "path": "/etc/etcd/ca/certs", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } ok: [localhost -> localhost] => (item=/etc/etcd/ca/crl) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:50.143580", "gid": 0, "group": "root", "item": "/etc/etcd/ca/crl", "mode": "0700", "owner": "root", "path": "/etc/etcd/ca/crl", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } ok: [localhost -> localhost] => (item=/etc/etcd/ca/fragments) => { "changed": false, "generated_timestamp": "2018-04-06 21:38:50.307797", "gid": 0, "group": "root", "item": "/etc/etcd/ca/fragments", "mode": "0700", "owner": "root", "path": "/etc/etcd/ca/fragments", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 51, "state": "directory", "uid": 0 } TASK [etcd : command] ********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:25 ok: [localhost -> localhost] => { "changed": false, "cmd": "cp /etc/pki/tls/openssl.cnf ./", "generated_timestamp": "2018-04-06 21:38:50.498906", "rc": 0, "stdout": [ "skipped, since /etc/etcd/ca/fragments/openssl.cnf exists" ] } TASK [etcd : template] ********************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:32 ok: [localhost -> localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:51.010987", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/etcd/ca/fragments/openssl_append.cnf", "secontext": "system_u:object_r:etc_t:s0", "size": 1624, "state": "file", "uid": 0 } TASK [etcd : assemble] ********************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:39 ok: [localhost -> localhost] => { "changed": false, "checksum": "0cfa42aed961a813b9d5a7bd78d2b3a030c2a3b7", "dest": "/etc/etcd/ca/openssl.cnf", "generated_timestamp": "2018-04-06 21:38:51.207185", "gid": 0, "group": "root", "md5sum": "2ff7c09c59d7481b970ad5010da1143f", "mode": "0644", "msg": "OK", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 12547, "src": "/etc/etcd/ca/fragments", "state": "file", "uid": 0 } TASK [etcd : Check etcd_ca_db exist] ******************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:45 ok: [localhost -> localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:51.409633", "stat": { "atime": 1523050725.1375995, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": 
"da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1523050725.1375995, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 83886562, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "d41d8cd98f00b204e9800998ecf8427e", "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1523050725.1375995, "nlink": 1, "path": "/etc/etcd/ca/index.txt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "335547684", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [etcd : Touch etcd_ca_db file] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:52 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:38:51.451411", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : copy] ************************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:60 ok: [localhost -> localhost] => { "changed": false, "dest": "/etc/etcd/ca/serial", "generated_timestamp": "2018-04-06 21:38:51.656829", "src": "/tmp/tmps4IGPj" } TASK [etcd : Create etcd CA certificate] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/deploy_ca.yml:67 ok: [localhost -> localhost] => { "changed": false, "cmd": "openssl req -config /etc/etcd/ca/openssl.cnf -newkey rsa:4096 -keyout /etc/etcd/ca/ca.key -new -out /etc/etcd/ca/ca.crt -x509 -extensions etcd_v3_ca_self -batch -nodes -days 1825 -subj /CN=etcd-signer@1523050727", "generated_timestamp": "2018-04-06 21:38:51.855936", "rc": 0, "stdout": [ "skipped, since /etc/etcd/ca/ca.crt exists" ] } TASK [etcd : Install etcd] ***************************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:2 changed: [localhost] => { "attempts": 1, "changed": true, "generated_timestamp": "2018-04-06 21:39:24.103275", "msg": "", "rc": 0, "results": [ "Loaded plugins: amazon-id, rhui-lb, search-disabled-repos\nResolving Dependencies\n--> Running transaction check\n---> Package etcd.x86_64 0:3.2.15-1.el7 will be installed\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository Size\n================================================================================\nInstalling:\n etcd x86_64 3.2.15-1.el7 oso-rhui-rhel-server-extras 9.2 M\n\nTransaction Summary\n================================================================================\nInstall 1 Package\n\nTotal download size: 9.2 M\nInstalled size: 41 M\nDownloading packages:\nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Installing : etcd-3.2.15-1.el7.x86_64 1/1 \n Verifying : etcd-3.2.15-1.el7.x86_64 1/1 \n\nInstalled:\n etcd.x86_64 0:3.2.15-1.el7 \n\nComplete!\n" ] } TASK [etcd : Check status of etcd certificates] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:10 ok: [localhost] => (item=/etc/etcd/server.crt) => { "changed": 
false, "generated_timestamp": "2018-04-06 21:39:24.293176", "item": "/etc/etcd/server.crt", "stat": { "exists": false } } ok: [localhost] => (item=/etc/etcd/peer.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:24.441877", "item": "/etc/etcd/peer.crt", "stat": { "exists": false } } ok: [localhost] => (item=/etc/etcd/ca.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:24.591743", "item": "/etc/etcd/ca.crt", "stat": { "exists": false } } TASK [etcd : set_fact] ********************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:20 ok: [localhost] => { "ansible_facts": { "etcd_server_certs_missing": true }, "changed": false, "generated_timestamp": "2018-04-06 21:39:24.627813" } TASK [etcd : Ensure generated_certs directory present] ************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:27 changed: [localhost -> localhost] => { "changed": true, "generated_timestamp": "2018-04-06 21:39:24.825386", "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/etcd/generated_certs/etcd-ip-172-18-1-211.ec2.internal", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [etcd : Create the server csr] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:35 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "openssl", "req", "-new", "-keyout", "server.key", "-config", "/etc/etcd/ca/openssl.cnf", "-out", "server.csr", "-reqexts", "etcd_v3_req", "-batch", "-nodes", "-subj", "/CN=ip-172-18-1-211.ec2.internal" ], "delta": "0:00:00.072991", "end": "2018-04-06 21:39:25.081435", "generated_timestamp": "2018-04-06 21:39:25.099789", "rc": 0, "start": "2018-04-06 21:39:25.008444", "stderr": [ "Generating a 2048 bit RSA private key", "..................................+++", ".......+++", "writing new private key to 'server.key'", "-----" ], "stdout": [] } TASK [etcd : Sign and create the server crt] *********************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:53 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "openssl", "ca", "-name", "etcd_ca", "-config", "/etc/etcd/ca/openssl.cnf", "-out", "server.crt", "-in", "server.csr", "-extensions", "etcd_v3_ca_server", "-batch" ], "delta": "0:00:00.018814", "end": "2018-04-06 21:39:25.585764", "generated_timestamp": "2018-04-06 21:39:25.603381", "iterated": 0, "rc": 0, "start": "2018-04-06 21:39:25.566950", "stderr": [ "Using configuration from /etc/etcd/ca/openssl.cnf", "Check that the request matches the signature", "Signature ok", "Certificate Details:", " Serial Number: 1 (0x1)", " Validity", " Not Before: Apr 6 21:39:25 2018 GMT", " Not After : Apr 5 21:39:25 2023 GMT", " Subject:", " commonName = ip-172-18-1-211.ec2.internal", " X509v3 extensions:", " X509v3 Authority Key Identifier: ", " keyid:B8:6E:C6:AF:B3:40:1F:0C:E7:7F:38:36:A3:D1:82:72:9C:76:8F:A0", " DirName:/CN=etcd-signer@1523050720", " serial:CA:39:2A:4B:32:43:B2:90", "", " X509v3 Basic Constraints: critical", " CA:FALSE", " X509v3 Extended Key Usage: ", " TLS Web Server Authentication", " X509v3 Key Usage: ", " Digital Signature, Key Encipherment", " X509v3 Subject Key Identifier: ", " 
F1:B7:63:FF:21:B0:9B:0F:45:6E:41:61:A7:C1:F5:A3:17:A2:88:0A", " X509v3 Subject Alternative Name: ", " IP Address:172.18.1.211, DNS:ip-172-18-1-211.ec2.internal", "Certificate is to be certified until Apr 5 21:39:25 2023 GMT (1825 days)", "", "Write out database with 1 new entries", "Data Base Updated" ], "stdout": [] } TASK [etcd : Create the peer csr] ********************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:68 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "openssl", "req", "-new", "-keyout", "peer.key", "-config", "/etc/etcd/ca/openssl.cnf", "-out", "peer.csr", "-reqexts", "etcd_v3_req", "-batch", "-nodes", "-subj", "/CN=ip-172-18-1-211.ec2.internal" ], "delta": "0:00:00.122282", "end": "2018-04-06 21:39:25.909767", "generated_timestamp": "2018-04-06 21:39:25.927244", "rc": 0, "start": "2018-04-06 21:39:25.787485", "stderr": [ "Generating a 2048 bit RSA private key", "......................................+++", ".....................................+++", "writing new private key to 'peer.key'", "-----" ], "stdout": [] } TASK [etcd : Sign and create the peer crt] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:86 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "openssl", "ca", "-name", "etcd_ca", "-config", "/etc/etcd/ca/openssl.cnf", "-out", "peer.crt", "-in", "peer.csr", "-extensions", "etcd_v3_ca_peer", "-batch" ], "delta": "0:00:00.018692", "end": "2018-04-06 21:39:26.131798", "generated_timestamp": "2018-04-06 21:39:26.149765", "iterated": 0, "rc": 0, "start": "2018-04-06 21:39:26.113106", "stderr": [ "Using configuration from /etc/etcd/ca/openssl.cnf", "Check that the request matches the signature", "Signature ok", "Certificate Details:", " Serial Number: 2 (0x2)", " Validity", " Not Before: Apr 6 21:39:26 2018 GMT", " Not After : Apr 5 21:39:26 2023 GMT", " Subject:", " commonName = ip-172-18-1-211.ec2.internal", " X509v3 extensions:", " X509v3 Authority Key Identifier: ", " keyid:B8:6E:C6:AF:B3:40:1F:0C:E7:7F:38:36:A3:D1:82:72:9C:76:8F:A0", " DirName:/CN=etcd-signer@1523050720", " serial:CA:39:2A:4B:32:43:B2:90", "", " X509v3 Basic Constraints: critical", " CA:FALSE", " X509v3 Extended Key Usage: ", " TLS Web Client Authentication, TLS Web Server Authentication", " X509v3 Key Usage: ", " Digital Signature, Key Encipherment", " X509v3 Subject Key Identifier: ", " CD:B1:01:65:FB:FA:E4:5B:B8:F2:47:04:3F:93:EA:6C:57:11:23:7C", " X509v3 Subject Alternative Name: ", " IP Address:172.18.1.211, DNS:ip-172-18-1-211.ec2.internal", "Certificate is to be certified until Apr 5 21:39:26 2023 GMT (1825 days)", "", "Write out database with 1 new entries", "Data Base Updated" ], "stdout": [] } TASK [etcd : file] ************************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:101 changed: [localhost -> localhost] => { "changed": true, "dest": "/etc/etcd/generated_certs/etcd-ip-172-18-1-211.ec2.internal/ca.crt", "generated_timestamp": "2018-04-06 21:39:26.353168", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 1895, "src": "/etc/etcd/ca/ca.crt", "state": "hard", "uid": 0 } TASK [etcd : Create local temp directory for syncing certs] ******************** task path: 
/usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:108 ok: [localhost -> localhost] => { "changed": false, "cmd": [ "mktemp", "-d", "/tmp/etcd_certificates-XXXXXXX" ], "delta": "0:00:00.002717", "end": "2018-04-06 21:39:26.516790", "generated_timestamp": "2018-04-06 21:39:26.532757", "rc": 0, "start": "2018-04-06 21:39:26.514073", "stderr": [], "stdout": [ "/tmp/etcd_certificates-jIsfIhK" ] } TASK [etcd : Create a tarball of the etcd certs] ******************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:115 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "tar", "-czvf", "/etc/etcd/generated_certs/etcd-ip-172-18-1-211.ec2.internal.tgz", "-C", "/etc/etcd/generated_certs/etcd-ip-172-18-1-211.ec2.internal", "." ], "delta": "0:00:00.004937", "end": "2018-04-06 21:39:26.713892", "generated_timestamp": "2018-04-06 21:39:26.732021", "rc": 0, "start": "2018-04-06 21:39:26.708955", "stderr": [], "stdout": [ "./", "./server.key", "./server.csr", "./server.crt", "./peer.key", "./peer.csr", "./peer.crt", "./ca.crt" ] } TASK [etcd : Retrieve etcd cert tarball] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:127 changed: [localhost -> localhost] => { "changed": true, "checksum": "51191d80487bf24b474d8b489a69c9856742d938", "dest": "/tmp/etcd_certificates-jIsfIhK/etcd-ip-172-18-1-211.ec2.internal.tgz", "generated_timestamp": "2018-04-06 21:39:27.208198", "md5sum": "b628abd4ef163aa5ae074c672cff10fd", "remote_checksum": "51191d80487bf24b474d8b489a69c9856742d938", "remote_md5sum": null } TASK [etcd : Ensure certificate directory exists] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:137 ok: [localhost] => (item=/etc/etcd) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:27.404283", "gid": 0, "group": "root", "item": "/etc/etcd", "mode": "0755", "owner": "root", "path": "/etc/etcd", "secontext": "system_u:object_r:etc_t:s0", "size": 56, "state": "directory", "uid": 0 } TASK [etcd : Unarchive cert tarball] ******************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:145 changed: [localhost] => { "changed": true, "dest": "/etc/etcd", "extract_results": { "cmd": [ "/bin/gtar", "--extract", "-C", "/etc/etcd", "-z", "-f", "/home/origin/.ansible/tmp/ansible-tmp-1523050767.44-117677747323259/source" ], "err": "", "out": "", "rc": 0 }, "generated_timestamp": "2018-04-06 21:39:28.337444", "gid": 0, "group": "root", "handler": "TgzArchive", "mode": "0700", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 172, "src": "/home/origin/.ansible/tmp/ansible-tmp-1523050767.44-117677747323259/source", "state": "directory", "uid": 0 } TASK [etcd : Create a tarball of the etcd ca certs] **************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:151 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "tar", "-czvf", "/etc/etcd/generated_certs/etcd_ca.tgz", "-C", "/etc/etcd/ca", "." 
], "delta": "0:00:00.005593", "end": "2018-04-06 21:39:28.516520", "generated_timestamp": "2018-04-06 21:39:28.534089", "rc": 0, "start": "2018-04-06 21:39:28.510927", "stderr": [], "stdout": [ "./", "./certs/", "./certs/01.pem", "./certs/02.pem", "./crl/", "./fragments/", "./fragments/openssl.cnf", "./fragments/openssl_append.cnf", "./openssl.cnf", "./ca.key", "./ca.crt", "./serial.old", "./index.txt.old", "./serial", "./index.txt", "./index.txt.attr.old", "./index.txt.attr" ] } TASK [etcd : Retrieve etcd ca cert tarball] ************************************ task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:161 changed: [localhost -> localhost] => { "changed": true, "checksum": "aad6bf1803741eb86f37d9844a2dbe25a640656a", "dest": "/tmp/etcd_certificates-jIsfIhK/etcd_ca.tgz", "generated_timestamp": "2018-04-06 21:39:28.729021", "md5sum": "f7399b52abcd67ceefe23df1a2684383", "remote_checksum": "aad6bf1803741eb86f37d9844a2dbe25a640656a", "remote_md5sum": null } TASK [etcd : Ensure ca directory exists] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:171 ok: [localhost] => (item=/etc/etcd/ca) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:28.917599", "gid": 0, "group": "root", "item": "/etc/etcd/ca", "mode": "0700", "owner": "root", "path": "/etc/etcd/ca", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 212, "state": "directory", "uid": 0 } TASK [etcd : Delete temporary directory] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:179 ok: [localhost -> localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:29.095811", "path": "/tmp/etcd_certificates-jIsfIhK", "state": "absent" } TASK [etcd : Validate permissions on certificate files] ************************ task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:185 changed: [localhost] => (item=/etc/etcd/ca.crt) => { "changed": true, "generated_timestamp": "2018-04-06 21:39:29.290250", "gid": 993, "group": "etcd", "item": "/etc/etcd/ca.crt", "mode": "0600", "owner": "etcd", "path": "/etc/etcd/ca.crt", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 1895, "state": "file", "uid": 996 } changed: [localhost] => (item=/etc/etcd/server.crt) => { "changed": true, "generated_timestamp": "2018-04-06 21:39:29.452881", "gid": 993, "group": "etcd", "item": "/etc/etcd/server.crt", "mode": "0600", "owner": "etcd", "path": "/etc/etcd/server.crt", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 5933, "state": "file", "uid": 996 } changed: [localhost] => (item=/etc/etcd/server.key) => { "changed": true, "generated_timestamp": "2018-04-06 21:39:29.616682", "gid": 993, "group": "etcd", "item": "/etc/etcd/server.key", "mode": "0600", "owner": "etcd", "path": "/etc/etcd/server.key", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 1704, "state": "file", "uid": 996 } TASK [etcd : Validate permissions on peer certificate files] ******************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:197 ok: [localhost] => (item=/etc/etcd/ca.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:29.816943", "gid": 993, "group": "etcd", "item": "/etc/etcd/ca.crt", "mode": "0600", "owner": "etcd", 
"path": "/etc/etcd/ca.crt", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 1895, "state": "file", "uid": 996 } changed: [localhost] => (item=/etc/etcd/peer.crt) => { "changed": true, "generated_timestamp": "2018-04-06 21:39:29.974497", "gid": 993, "group": "etcd", "item": "/etc/etcd/peer.crt", "mode": "0600", "owner": "etcd", "path": "/etc/etcd/peer.crt", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 5976, "state": "file", "uid": 996 } changed: [localhost] => (item=/etc/etcd/peer.key) => { "changed": true, "generated_timestamp": "2018-04-06 21:39:30.141086", "gid": 993, "group": "etcd", "item": "/etc/etcd/peer.key", "mode": "0600", "owner": "etcd", "path": "/etc/etcd/peer.key", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 1704, "state": "file", "uid": 996 } TASK [etcd : Validate permissions on the config dir] *************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_server_certificates_from_ca.yml:209 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-04-06 21:39:30.328649", "gid": 993, "group": "etcd", "mode": "0700", "owner": "etcd", "path": "/etc/etcd", "secontext": "system_u:object_r:etc_t:s0", "size": 172, "state": "directory", "uid": 996 } META: ran handlers PLAY [Create etcd client certificates for master hosts] ************************ TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [openshift_etcd_facts : openshift_facts] ********************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_etcd_facts/tasks/main.yml:2 ok: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "54.152.227.161", "ec2-54-152-227-161.compute-1.amazonaws.com", "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "docker", "etcd" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" 
}, "etcd": { "etcd_data_dir": "/var/lib/etcd/default.etcd" }, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": true, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": false, "generated_timestamp": "2018-04-06 21:39:32.667529" } TASK [etcd : Ensure CA certificate exists on etcd_ca_host] ********************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:2 ok: [localhost -> localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:32.874548", "stat": { "atime": 1523050766.7120495, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "361139addd8af78e27c76ff68b64f3453b43c0cb", "ctime": 1523050766.3340638, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 83886564, "isblk": false, "ischr": 
false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "35b3681e2c296f72abd9c57bb6b5005f", "mimetype": "text/plain", "mode": "0644", "mtime": 1523050726.6455433, "nlink": 2, "path": "/etc/etcd/ca/ca.crt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1895, "uid": 0, "version": "18446744073591937737", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [etcd : fail] ************************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:9 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:32.906969", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Check status of external etcd certificatees] ********************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:17 ok: [localhost] => (item=master.etcd-client.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:33.103964", "item": "master.etcd-client.crt", "stat": { "exists": false } } ok: [localhost] => (item=master.etcd-client.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:33.259858", "item": "master.etcd-client.key", "stat": { "exists": false } } ok: [localhost] => (item=master.etcd-ca.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:33.422149", "item": "master.etcd-ca.crt", "stat": { "exists": false } } TASK [etcd : set_fact] ********************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:27 ok: [localhost] => { "ansible_facts": { "etcd_client_certs_missing": true }, "changed": false, "generated_timestamp": "2018-04-06 21:39:33.462169" } TASK [etcd : Ensure generated_certs directory present] ************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:34 changed: [localhost -> localhost] => { "changed": true, "generated_timestamp": "2018-04-06 21:39:33.666380", "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/etcd/generated_certs/openshift-master-ip-172-18-1-211.ec2.internal", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [etcd : Create the client csr] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:42 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "openssl", "req", "-new", "-keyout", "master.etcd-client.key", "-config", "/etc/etcd/ca/openssl.cnf", "-out", "master.etcd-client.csr", "-reqexts", "etcd_v3_req", "-batch", "-nodes", "-subj", "/CN=ip-172-18-1-211.ec2.internal" ], "delta": "0:00:00.221392", "end": "2018-04-06 21:39:34.076578", "generated_timestamp": "2018-04-06 21:39:34.094524", "rc": 0, "start": "2018-04-06 21:39:33.855186", "stderr": [ "Generating a 2048 bit RSA private key", ".................................................................................+++", "...............................................................+++", "writing new private key to 'master.etcd-client.key'", "-----" ], "stdout": [] } TASK [etcd : Sign and create the client crt] 
*********************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:60 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "openssl", "ca", "-name", "etcd_ca", "-config", "/etc/etcd/ca/openssl.cnf", "-out", "master.etcd-client.crt", "-in", "master.etcd-client.csr", "-batch" ], "delta": "0:00:00.018675", "end": "2018-04-06 21:39:34.302810", "generated_timestamp": "2018-04-06 21:39:34.320356", "iterated": 0, "rc": 0, "start": "2018-04-06 21:39:34.284135", "stderr": [ "Using configuration from /etc/etcd/ca/openssl.cnf", "Check that the request matches the signature", "Signature ok", "Certificate Details:", " Serial Number: 3 (0x3)", " Validity", " Not Before: Apr 6 21:39:34 2018 GMT", " Not After : Apr 5 21:39:34 2023 GMT", " Subject:", " commonName = ip-172-18-1-211.ec2.internal", " X509v3 extensions:", " X509v3 Authority Key Identifier: ", " keyid:B8:6E:C6:AF:B3:40:1F:0C:E7:7F:38:36:A3:D1:82:72:9C:76:8F:A0", " DirName:/CN=etcd-signer@1523050720", " serial:CA:39:2A:4B:32:43:B2:90", "", " X509v3 Basic Constraints: critical", " CA:FALSE", " X509v3 Extended Key Usage: ", " TLS Web Client Authentication", " X509v3 Key Usage: ", " Digital Signature, Key Encipherment", " X509v3 Subject Key Identifier: ", " D4:7B:7C:1F:41:96:6F:3C:B6:78:94:FA:A9:04:FA:CD:8F:D1:51:DF", " X509v3 Subject Alternative Name: ", " IP Address:172.18.1.211, DNS:ip-172-18-1-211.ec2.internal", "Certificate is to be certified until Apr 5 21:39:34 2023 GMT (1825 days)", "", "Write out database with 1 new entries", "Data Base Updated" ], "stdout": [] } TASK [etcd : file] ************************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:75 changed: [localhost -> localhost] => { "changed": true, "dest": "/etc/etcd/generated_certs/openshift-master-ip-172-18-1-211.ec2.internal/master.etcd-ca.crt", "generated_timestamp": "2018-04-06 21:39:34.525295", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 1895, "src": "/etc/etcd/ca/ca.crt", "state": "hard", "uid": 0 } TASK [etcd : Create local temp directory for syncing certs] ******************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:82 ok: [localhost -> localhost] => { "changed": false, "cmd": [ "mktemp", "-d", "/tmp/etcd_certificates-XXXXXXX" ], "delta": "0:00:00.002741", "end": "2018-04-06 21:39:34.695196", "generated_timestamp": "2018-04-06 21:39:34.711859", "rc": 0, "start": "2018-04-06 21:39:34.692455", "stderr": [], "stdout": [ "/tmp/etcd_certificates-WhGNiow" ] } TASK [etcd : Create a tarball of the etcd certs] ******************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:89 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "tar", "-czvf", "/etc/etcd/generated_certs/openshift-master-ip-172-18-1-211.ec2.internal.tgz", "-C", "/etc/etcd/generated_certs/openshift-master-ip-172-18-1-211.ec2.internal", "." 
], "delta": "0:00:00.004639", "end": "2018-04-06 21:39:34.900148", "generated_timestamp": "2018-04-06 21:39:34.918571", "rc": 0, "start": "2018-04-06 21:39:34.895509", "stderr": [], "stdout": [ "./", "./master.etcd-client.key", "./master.etcd-client.csr", "./master.etcd-client.crt", "./master.etcd-ca.crt" ] } TASK [etcd : Retrieve the etcd cert tarballs] ********************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:101 changed: [localhost -> localhost] => { "changed": true, "checksum": "bd11b1af0a588c5e8ca0f4973245640896f63576", "dest": "/tmp/etcd_certificates-WhGNiow/openshift-master-ip-172-18-1-211.ec2.internal.tgz", "generated_timestamp": "2018-04-06 21:39:35.116072", "md5sum": "185929247ae73c8dc35c3afeeb228edf", "remote_checksum": "bd11b1af0a588c5e8ca0f4973245640896f63576", "remote_md5sum": null } TASK [etcd : Ensure certificate directory exists] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:111 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-04-06 21:39:35.306620", "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/origin/master", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [etcd : Unarchive etcd cert tarballs] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:117 changed: [localhost] => { "changed": true, "dest": "/etc/origin/master", "extract_results": { "cmd": [ "/bin/gtar", "--extract", "-C", "/etc/origin/master", "-z", "-f", "/home/origin/.ansible/tmp/ansible-tmp-1523050775.35-39244872904056/source" ], "err": "", "out": "", "rc": 0 }, "generated_timestamp": "2018-04-06 21:39:35.866665", "gid": 0, "group": "root", "handler": "TgzArchive", "mode": "0700", "owner": "root", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 122, "src": "/home/origin/.ansible/tmp/ansible-tmp-1523050775.35-39244872904056/source", "state": "directory", "uid": 0 } TASK [etcd : file] ************************************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:123 changed: [localhost] => (item=master.etcd-client.crt) => { "changed": true, "generated_timestamp": "2018-04-06 21:39:36.064976", "gid": 0, "group": "root", "item": "master.etcd-client.crt", "mode": "0600", "owner": "root", "path": "/etc/origin/master/master.etcd-client.crt", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 5933, "state": "file", "uid": 0 } changed: [localhost] => (item=master.etcd-client.key) => { "changed": true, "generated_timestamp": "2018-04-06 21:39:36.228725", "gid": 0, "group": "root", "item": "master.etcd-client.key", "mode": "0600", "owner": "root", "path": "/etc/origin/master/master.etcd-client.key", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 1708, "state": "file", "uid": 0 } changed: [localhost] => (item=master.etcd-ca.crt) => { "changed": true, "generated_timestamp": "2018-04-06 21:39:36.392448", "gid": 0, "group": "root", "item": "master.etcd-ca.crt", "mode": "0600", "owner": "root", "path": "/etc/origin/master/master.etcd-ca.crt", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 1895, "state": "file", "uid": 0 } TASK [etcd : Delete temporary directory] *************************************** task path: 
/usr/share/ansible/openshift-ansible/roles/etcd/tasks/certificates/fetch_client_certificates_from_ca.yml:134 ok: [localhost -> localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:36.573438", "path": "/tmp/etcd_certificates-WhGNiow", "state": "absent" } META: ran handlers META: ran handlers PLAY [Configure etcd] ********************************************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [os_firewall : Detecting Atomic Host Operating System] ******************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/main.yml:2 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:37.701197", "stat": { "exists": false } } TASK [os_firewall : Set fact r_os_firewall_is_atomic] ************************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/main.yml:7 ok: [localhost] => { "ansible_facts": { "r_os_firewall_is_atomic": false }, "changed": false, "generated_timestamp": "2018-04-06 21:39:37.734796" } TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] ********** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:37.765782", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Install firewalld packages] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:9 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:37.798561", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Ensure iptables services are not enabled] ****************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:17 skipping: [localhost] => (item=iptables) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:37.832904", "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=ip6tables) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:37.841854", "item": "ip6tables", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Wait 10 seconds after disabling iptables] ****************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:29 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:37.873041", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Start and enable firewalld service] ************************ task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:34 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:37.903687", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] *** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:43 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:37.934816", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Restart polkitd] ******************************************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:48 
skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:37.967083", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Wait for polkit action to have been created] *************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:55 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:37.997548", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Ensure firewalld service is not enabled] ******************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:3 ok: [localhost] => { "changed": false, "enabled": false, "failed": false, "failed_when_result": false, "generated_timestamp": "2018-04-06 21:39:38.207883", "name": "firewalld", "state": "stopped", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "AllowIsolate": "no", "AmbientCapabilities": "0", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BlockIOAccounting": "no", "BlockIOWeight": "18446744073709551615", "CPUAccounting": "yes", "CPUQuotaPerSecUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "18446744073709551615", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "18446744073709551615", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ControlPID": "0", "DefaultDependencies": "yes", "Delegate": "no", "Description": "firewalld.service", "DevicePolicy": "auto", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/dev/null", "GuessMainPID": "yes", "IOScheduling": "0", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreOnSnapshot": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobTimeoutAction": "none", "JobTimeoutUSec": "0", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "18446744073709551615", "LimitCORE": "18446744073709551615", "LimitCPU": "18446744073709551615", "LimitDATA": "18446744073709551615", "LimitFSIZE": "18446744073709551615", "LimitLOCKS": "18446744073709551615", "LimitMEMLOCK": "65536", "LimitMSGQUEUE": "819200", "LimitNICE": "0", "LimitNOFILE": "65536", "LimitNPROC": "63327", "LimitRSS": "18446744073709551615", "LimitRTPRIO": "0", "LimitRTTIME": "18446744073709551615", "LimitSIGPENDING": "63327", "LimitSTACK": "18446744073709551615", "LoadState": "masked", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "18446744073709551615", "MemoryLimit": "18446744073709551615", "MountFlags": "0", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "PrivateDevices": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "ProtectHome": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "Restart": "no", "RestartUSec": "100ms", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "StandardError": "inherit", "StandardInput": "null", 
"StandardOutput": "inherit", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitInterval": "10000000", "StartupBlockIOWeight": "18446744073709551615", "StartupCPUShares": "18446744073709551615", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "no", "TasksCurrent": "18446744073709551615", "TasksMax": "18446744073709551615", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "UMask": "0022", "UnitFileState": "bad", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [os_firewall : Wait 10 seconds after disabling firewalld] ***************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:12 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:38.248379", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Install iptables packages] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:17 ok: [localhost] => (item=iptables) => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:39:38.999421", "item": "iptables", "msg": "", "rc": 0, "results": [ "iptables-1.4.21-18.3.el7_4.x86_64 providing iptables is already installed" ] } ok: [localhost] => (item=iptables-services) => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:39:39.707055", "item": "iptables-services", "msg": "", "rc": 0, "results": [ "iptables-services-1.4.21-18.3.el7_4.x86_64 providing iptables-services is already installed" ] } TASK [os_firewall : Start and enable iptables service] ************************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:28 ok: [localhost -> localhost] => (item=localhost) => { "changed": false, "enabled": true, "generated_timestamp": "2018-04-06 21:39:39.999244", "item": "localhost", "name": "iptables", "state": "started", "status": { "ActiveEnterTimestamp": "Fri 2018-04-06 21:38:08 UTC", "ActiveEnterTimestampMonotonic": "2477349213", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target systemd-journald.socket syslog.target system.slice", "AllowIsolate": "no", "AmbientCapabilities": "0", "AssertResult": "yes", "AssertTimestamp": "Fri 2018-04-06 21:38:08 UTC", "AssertTimestampMonotonic": "2477293553", "Before": "shutdown.target network.service ip6tables.service", "BlockIOAccounting": "no", "BlockIOWeight": "18446744073709551615", "CPUAccounting": "yes", "CPUQuotaPerSecUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "18446744073709551615", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "18446744073709551615", "ConditionResult": "yes", "ConditionTimestamp": "Fri 2018-04-06 21:38:08 UTC", "ConditionTimestampMonotonic": "2477293552", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "Delegate": "no", "Description": "IPv4 firewall with iptables", "DevicePolicy": "auto", "Environment": "BOOTUP=serial CONSOLETYPE=serial", "ExecMainCode": "1", "ExecMainExitTimestamp": "Fri 2018-04-06 21:38:08 UTC", "ExecMainExitTimestampMonotonic": "2477344445", "ExecMainPID": "20527", "ExecMainStartTimestamp": "Fri 2018-04-06 
21:38:08 UTC", "ExecMainStartTimestampMonotonic": "2477297983", "ExecMainStatus": "0", "ExecReload": "{ path=/usr/libexec/iptables/iptables.init ; argv[]=/usr/libexec/iptables/iptables.init reload ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/libexec/iptables/iptables.init ; argv[]=/usr/libexec/iptables/iptables.init start ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/iptables/iptables.init ; argv[]=/usr/libexec/iptables/iptables.init stop ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/iptables.service", "GuessMainPID": "yes", "IOScheduling": "0", "Id": "iptables.service", "IgnoreOnIsolate": "no", "IgnoreOnSnapshot": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Fri 2018-04-06 21:38:08 UTC", "InactiveExitTimestampMonotonic": "2477298016", "JobTimeoutAction": "none", "JobTimeoutUSec": "0", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "18446744073709551615", "LimitCORE": "18446744073709551615", "LimitCPU": "18446744073709551615", "LimitDATA": "18446744073709551615", "LimitFSIZE": "18446744073709551615", "LimitLOCKS": "18446744073709551615", "LimitMEMLOCK": "65536", "LimitMSGQUEUE": "819200", "LimitNICE": "0", "LimitNOFILE": "4096", "LimitNPROC": "63327", "LimitRSS": "18446744073709551615", "LimitRTPRIO": "0", "LimitRTTIME": "18446744073709551615", "LimitSIGPENDING": "63327", "LimitSTACK": "18446744073709551615", "LoadState": "loaded", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "18446744073709551615", "MemoryLimit": "18446744073709551615", "MountFlags": "0", "Names": "iptables.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "PrivateDevices": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "ProtectHome": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "Requires": "basic.target", "Restart": "no", "RestartUSec": "100ms", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "syslog", "StandardInput": "null", "StandardOutput": "syslog", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitInterval": "10000000", "StartupBlockIOWeight": "18446744073709551615", "StartupCPUShares": "18446744073709551615", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "no", "TasksCurrent": "18446744073709551615", "TasksMax": "18446744073709551615", "TimeoutStartUSec": "0", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "WantedBy": "basic.target", "Wants": "system.slice", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail] *** task path: 
/usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:40 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:40.040498", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_etcd_facts : openshift_facts] ********************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_etcd_facts/tasks/main.yml:2 ok: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "54.152.227.161", "ec2-54-152-227-161.compute-1.amazonaws.com", "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "172.18.1.211", "ip-172-18-1-211.ec2.internal" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "docker", "etcd" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": { "etcd_data_dir": "/var/lib/etcd/default.etcd" }, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": true, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", 
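The provider metadata repeated in these facts (AMI, instance id and type, VPC/subnet, public and private addresses) mirrors what the EC2 instance metadata service exposes on the host. For reference it can be queried directly (a sketch; 169.254.169.254 is the standard EC2 metadata endpoint, not something this playbook defines, and these calls are not part of the job):
  curl -s http://169.254.169.254/latest/meta-data/instance-id
  curl -s http://169.254.169.254/latest/meta-data/instance-type
  curl -s http://169.254.169.254/latest/meta-data/placement/availability-zone
  curl -s http://169.254.169.254/latest/meta-data/public-hostname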
"local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": false, "generated_timestamp": "2018-04-06 21:39:41.633897" } TASK [openshift_clock : Determine if chrony is installed] ********************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_clock/tasks/main.yaml:2 [WARNING]: Consider using yum, dnf or zypper module rather than running rpm changed: [localhost] => { "changed": true, "cmd": [ "rpm", "-q", "chrony" ], "delta": "0:00:00.034991", "end": "2018-04-06 21:39:41.832025", "failed": false, "failed_when_result": false, "generated_timestamp": "2018-04-06 21:39:41.852716", "rc": 0, "start": "2018-04-06 21:39:41.797034", "stderr": [], "stdout": [ "chrony-3.1-2.el7.x86_64" ] } TASK [openshift_clock : Install ntp package] *********************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_clock/tasks/main.yaml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:41.885429", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_clock : Start and enable ntpd/chronyd] ************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_clock/tasks/main.yaml:15 changed: [localhost] => { "changed": true, "cmd": [ "timedatectl", "set-ntp", "true" ], "delta": "0:00:00.321306", "end": "2018-04-06 21:39:42.377901", "generated_timestamp": "2018-04-06 21:39:42.396217", "rc": 0, "start": "2018-04-06 21:39:42.056595", "stderr": [], "stdout": [] } TASK [openshift_docker_facts : Set docker facts] ******************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:2 skipping: [localhost] => (item={u'local_facts': {u'use_crio': False, u'log_driver': u'', u'disable_push_dockerhub': u'', u'selinux_enabled': u'', u'hosted_registry_insecure': False, u'hosted_registry_network': u'172.30.0.0/16', u'log_options': u'', u'options': u'', u'use_system_container': False}, u'role': u'docker'}) => { "changed": false, 
"generated_timestamp": "2018-04-06 21:39:42.444821", "item": { "local_facts": { "disable_push_dockerhub": "", "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "log_driver": "", "log_options": "", "options": "", "selinux_enabled": "", "use_crio": false, "use_system_container": false }, "role": "docker" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'local_facts': {u'sdn_mtu': u''}, u'role': u'node'}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.452326", "item": { "local_facts": { "sdn_mtu": "" }, "role": "node" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_docker_facts : set_fact] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:22 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.480920", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_docker_facts : set_fact] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:32 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.510880", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_docker_facts : set_fact] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:37 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.540044", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Getting current systemd-udevd exec command] ********************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml:3 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.569779", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Assure systemd-udevd.service.d directory exists] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.599344", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create systemd-udevd override file] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.629046", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.657436", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Add enterprise registry, if necessary] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.685619", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Get current installed Docker version] *************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:2 skipping: [localhost] => { 
"changed": false, "generated_timestamp": "2018-04-06 21:39:42.715792", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Error out if Docker pre-installed but too old] ****************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:10 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.747450", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Error out if requested Docker is too old] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:15 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.775691", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Fail if Docker version requested but downgrade is required] ***** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:22 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.805545", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Error out if attempting to upgrade Docker across the 1.10 boundary] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:29 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.835395", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install Docker] ************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:36 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.865383", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure docker.service.d directory exists] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:44 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.894283", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Configure Docker service unit file] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:49 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.923389", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : stat] *********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:57 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:42.953375", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set registry params] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:60 skipping: [localhost] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.008517", "item": { "reg_conf_var": "ADD_REGISTRY", "reg_fact_val": [], "reg_flag": "--add-registry" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.016376", "item": { 
"reg_conf_var": "BLOCK_REGISTRY", "reg_fact_val": [], "reg_flag": "--block-registry" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.024452", "item": { "reg_conf_var": "INSECURE_REGISTRY", "reg_fact_val": [], "reg_flag": "--insecure-registry" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Place additional/blocked/insecure registries in /etc/containers/registries.conf] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:82 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.055109", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set Proxy Settings] ********************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:90 skipping: [localhost] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.091717", "item": { "reg_conf_var": "HTTP_PROXY", "reg_fact_val": "" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.099484", "item": { "reg_conf_var": "HTTPS_PROXY", "reg_fact_val": "" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.107550", "item": { "reg_conf_var": "NO_PROXY", "reg_fact_val": "" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set various Docker options] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:108 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.136831", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : stat] *********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:123 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.165534", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Configure Docker Network OPTIONS] ******************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:126 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.194816", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Detect if docker is already started] **************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:140 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.224177", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Start the Docker service] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:145 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.253878", "skip_reason": "Conditional 
result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:156 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.281906", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers TASK [docker : Check for credentials file for registry auth] ******************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.312751", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.344576", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth (alternative)] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.373101", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure /var/lib/containers exists] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:28 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.401525", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Fix SELinux Permissions on /var/lib/containers] ***************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:33 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.429925", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:3 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.458380", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.487267", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.516034", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Fail quickly if openshift_docker_options are set] *************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:21 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.544908", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure container-selinux is installed] ************************** task path: 
/usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:32 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.572975", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure atomic is installed] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:41 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.601717", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure runc is installed] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:51 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.632635", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install Docker so we can use the client] ************************ task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:60 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.660713", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Disable Docker] ************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:67 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.691231", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add http_proxy to /etc/atomic.conf] ******************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.723732", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add https_proxy to /etc/atomic.conf] ****************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:16 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.755667", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add no_proxy to /etc/atomic.conf] ********************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:25 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.786835", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set to default prepend] ***************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:87 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.816638", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set container engine image tag] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:92 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.845795", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use Red Hat Registry for image when distribution is Red Hat] **** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:98 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 
21:39:43.875359", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use Fedora Registry for image when distribution is Fedora] ****** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:103 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.904949", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set the full image name] **************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:108 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.934899", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use a specific image if requested] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:113 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.962940", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : debug] ********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:121 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:43.992661", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Check for credentials file for registry auth] ******************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.023310", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.053878", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth (alternative)] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.085277", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Pre-pull Container Engine System Container image] *************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:131 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.115959", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure container-engine.service.d directory exists] ************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:138 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.145406", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure /etc/docker directory exists] **************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:143 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.175222", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install Container Engine System Container] ********************** 
task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:148 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.204540", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Configure Container Engine Service File] ************************ task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:154 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.234373", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:161 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.262528", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Configure Container Engine] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:171 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.291341", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Start the Container Engine service] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:177 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.321210", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:188 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.350214", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:4 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.379404", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.408050", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:10 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.437542", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.466307", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:17 skipping: [localhost] => { 
"changed": false, "generated_timestamp": "2018-04-06 21:39:44.494998", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:21 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.524688", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:27 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.552973", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure container-selinux is installed] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:33 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.581546", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Check we are not using node as a Docker container with CRI-O] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:41 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.611241", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure atomic is installed] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:48 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.638772", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure runc is installed] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:58 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.667697", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Check that overlay is in the kernel] **************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:67 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.697824", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Add overlay to modprobe.d] ************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:76 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.727224", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Manually modprobe overlay into the kernel] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:82 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.756016", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Enable and start systemd-modules-load] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:85 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.784930", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add 
http_proxy to /etc/atomic.conf] ******************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.814769", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add https_proxy to /etc/atomic.conf] ****************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:16 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.847080", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add no_proxy to /etc/atomic.conf] ********************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:25 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.879702", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set CRI-O image defaults] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:99 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.910370", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use Centos based image when distribution is CentOS] ************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:105 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.940547", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set CRI-O image tag] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:110 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:44.970398", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use RHEL based image when distribution is Red Hat] ************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:116 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.000609", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set the full image name] **************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:122 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.029380", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use a specific image if requested] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:127 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.059242", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : debug] ********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:135 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.088148", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Pre-pull CRI-O System Container image] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:139 skipping: [localhost] => { 
"changed": false, "generated_timestamp": "2018-04-06 21:39:45.117236", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install CRI-O System Container] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:146 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.145919", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove CRI-O default configuration files] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:152 skipping: [localhost] => (item=/etc/cni/net.d/200-loopback.conf) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.177732", "item": "/etc/cni/net.d/200-loopback.conf", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=/etc/cni/net.d/100-crio-bridge.conf) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.185731", "item": "/etc/cni/net.d/100-crio-bridge.conf", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create the CRI-O configuration] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:160 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.213737", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure CNI configuration directory exists] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:166 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.242557", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Add iptables allow rules] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:4 skipping: [localhost] => (item={u'port': u'10010/tcp', u'service': u'crio'}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.283125", "item": { "port": "10010/tcp", "service": "crio" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove iptables rules] ****************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:13 TASK [docker : Add firewalld allow rules] ************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:24 skipping: [localhost] => (item={u'port': u'10010/tcp', u'service': u'crio'}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.348369", "item": { "port": "10010/tcp", "service": "crio" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove firewalld allow rules] *********************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:33 TASK [docker : Configure the CNI network] ************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:175 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.404369", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Start the CRI-O service] **************************************** task path: 
/usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:180 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.432379", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers TASK [docker : Check for credentials file for registry auth] ******************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.462907", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.494160", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth (alternative)] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.522523", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : stat the docker data dir] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:49 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.550883", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : stop the current running docker] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:58 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.579183", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure /var/lib/containers/docker exists] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:63 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.609048", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set the selinux context on /var/lib/containers/docker] ********** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:68 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.639453", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : restorecon the /var/lib/containers/docker] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:75 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.669880", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove the old docker location] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:78 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.696791", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Setup the link] ************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:83 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.724920", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : start docker] 
*************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:89 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:45.753193", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Set hostname and ip facts] **************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:2 ok: [localhost] => { "ansible_facts": { "etcd_hostname": "ip-172-18-1-211.ec2.internal", "etcd_ip": "172.18.1.211" }, "changed": false, "generated_timestamp": "2018-04-06 21:39:45.788537" } TASK [etcd : Add iptables allow rules] ***************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/firewall.yml:4 changed: [localhost] => (item={u'port': u'2379/tcp', u'service': u'etcd'}) => { "changed": true, "generated_timestamp": "2018-04-06 21:39:46.198966", "item": { "port": "2379/tcp", "service": "etcd" }, "output": [ "", "Successfully created chain OS_FIREWALL_ALLOW", "iptables: Saving firewall rules to /etc/sysconfig/iptables: [ OK ]\r\n", "", "iptables: Saving firewall rules to /etc/sysconfig/iptables: [ OK ]\r\n", "", "iptables: Saving firewall rules to /etc/sysconfig/iptables: [ OK ]\r\n" ] } changed: [localhost] => (item={u'port': u'2380/tcp', u'service': u'etcd peering'}) => { "changed": true, "generated_timestamp": "2018-04-06 21:39:46.400347", "item": { "port": "2380/tcp", "service": "etcd peering" }, "output": [ "", "iptables: Saving firewall rules to /etc/sysconfig/iptables: [ OK ]\r\n" ] } TASK [etcd : Remove iptables rules] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/firewall.yml:13 TASK [etcd : Add firewalld allow rules] **************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/firewall.yml:24 skipping: [localhost] => (item={u'port': u'2379/tcp', u'service': u'etcd'}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:46.468218", "item": { "port": "2379/tcp", "service": "etcd" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'port': u'2380/tcp', u'service': u'etcd peering'}) => { "changed": false, "generated_timestamp": "2018-04-06 21:39:46.480788", "item": { "port": "2380/tcp", "service": "etcd peering" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Remove firewalld allow rules] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/firewall.yml:33 TASK [etcd : Install etcd] ***************************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:13 ok: [localhost] => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:39:47.225732", "msg": "", "rc": 0, "results": [ "etcd-3.2.15-1.el7.x86_64 providing etcd is already installed" ] } TASK [etcd : Install etcd for etcdctl] ***************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/auxiliary/drop_etcdctl.yml:2 ok: [localhost] => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:39:47.949000", "msg": "", "rc": 0, "results": [ "etcd-3.2.15-1.el7.x86_64 providing etcd is already installed" ] } TASK [etcd : Configure etcd profile.d aliases] ********************************* task path: 
/usr/share/ansible/openshift-ansible/roles/etcd/tasks/auxiliary/drop_etcdctl.yml:8 changed: [localhost] => { "changed": true, "checksum": "f5e974c8afe68412c9386c21e1275b938eba48be", "dest": "/etc/profile.d/etcdctl.sh", "generated_timestamp": "2018-04-06 21:39:48.419924", "gid": 0, "group": "root", "md5sum": "1d82b8368272155d0c57e64d1ec4ad68", "mode": "0755", "owner": "root", "secontext": "system_u:object_r:bin_t:s0", "size": 546, "src": "/home/origin/.ansible/tmp/ansible-tmp-1523050788.13-30250585621966/source", "state": "file", "uid": 0 } TASK [etcd : Pull etcd container] ********************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:24 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.450697", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Install etcd container service file] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:29 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.481613", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Create configuration directory] *********************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:40 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.502531", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Copy service file for etcd instance] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:47 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.523363", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Create third party etcd service.d directory exists] *************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:53 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.545426", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Configure third part etcd service unit file] ********************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:58 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.566528", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Ensure etcd datadir exists] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:65 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.597558", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Ensure etcd datadir ownership for thirdparty datadir] ************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:72 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.625369", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : command] ********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:83 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.653073", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Disable system etcd when containerized] *************************** task path: 
/usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:88 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.683049", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Install etcd container service file] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:99 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.713690", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add http_proxy to /etc/atomic.conf] ******************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.747047", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add https_proxy to /etc/atomic.conf] ****************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:16 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.777842", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add no_proxy to /etc/atomic.conf] ********************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:25 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.810254", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Pull etcd system container] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/system_container.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.841492", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Set initial Etcd cluster] ***************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/system_container.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.873220", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Check etcd system container package] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/system_container.yml:25 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.904226", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Unmask etcd service] ********************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/system_container.yml:30 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.935512", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Disable etcd_container] ******************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/system_container.yml:41 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.967374", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Remove etcd_container.service] ************************************ task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/system_container.yml:50 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:48.998531", "skip_reason": "Conditional result was False", "skipped": true } TASK 
[etcd : Systemd reload configuration] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/system_container.yml:55 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:49.029514", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Install or Update Etcd system container package] ****************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/system_container.yml:58 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:49.060846", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Ensure etcd datadir ownership for the system container] *********** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/system_container.yml:83 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:49.091060", "skip_reason": "Conditional result was False", "skipped": true } TASK [etcd : Validate permissions on the config dir] *************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:110 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:49.274577", "gid": 993, "group": "etcd", "mode": "0700", "owner": "etcd", "path": "/etc/etcd", "secontext": "system_u:object_r:etc_t:s0", "size": 172, "state": "directory", "uid": 996 } TASK [etcd : Write etcd global config file] ************************************ task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:118 NOTIFIED HANDLER restart etcd changed: [localhost] => { "backup_file": "/etc/etcd/etcd.conf.26594.2018-04-06@21:39:49~", "changed": true, "checksum": "2b169813384a333fc4a2e8589a13ae150b73027c", "dest": "/etc/etcd/etcd.conf", "generated_timestamp": "2018-04-06 21:39:49.864313", "gid": 0, "group": "root", "md5sum": "2c5c13fae99408f314a3279ce5dca6af", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1483, "src": "/home/origin/.ansible/tmp/ansible-tmp-1523050789.55-259630365567928/source", "state": "file", "uid": 0 } TASK [etcd : Enable etcd] ****************************************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:126 changed: [localhost] => { "changed": true, "enabled": true, "generated_timestamp": "2018-04-06 21:39:51.678801", "name": "etcd", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount network.target network-online.target system.slice basic.target systemd-journald.socket", "AllowIsolate": "no", "AmbientCapabilities": "0", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "18446744073709551615", "CPUAccounting": "yes", "CPUQuotaPerSecUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "18446744073709551615", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "18446744073709551615", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "Delegate": "no", "Description": "Etcd Server", "DevicePolicy": "auto", "EnvironmentFile": "/etc/etcd/etcd.conf (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", 
"ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/bin/bash ; argv[]=/bin/bash -c GOMAXPROCS=$(nproc) /usr/bin/etcd --name=\"${ETCD_NAME}\" --data-dir=\"${ETCD_DATA_DIR}\" --listen-client-urls=\"${ETCD_LISTEN_CLIENT_URLS}\" ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/etcd.service", "GuessMainPID": "yes", "IOScheduling": "0", "Id": "etcd.service", "IgnoreOnIsolate": "no", "IgnoreOnSnapshot": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobTimeoutAction": "none", "JobTimeoutUSec": "0", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "18446744073709551615", "LimitCORE": "18446744073709551615", "LimitCPU": "18446744073709551615", "LimitDATA": "18446744073709551615", "LimitFSIZE": "18446744073709551615", "LimitLOCKS": "18446744073709551615", "LimitMEMLOCK": "65536", "LimitMSGQUEUE": "819200", "LimitNICE": "0", "LimitNOFILE": "65536", "LimitNPROC": "63327", "LimitRSS": "18446744073709551615", "LimitRTPRIO": "0", "LimitRTTIME": "18446744073709551615", "LimitSIGPENDING": "63327", "LimitSTACK": "18446744073709551615", "LoadState": "loaded", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "18446744073709551615", "MemoryLimit": "18446744073709551615", "MountFlags": "0", "Names": "etcd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "PrivateDevices": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "ProtectHome": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "Requires": "-.mount basic.target", "RequiresMountsFor": "/var/lib/etcd", "Restart": "on-failure", "RestartUSec": "100ms", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitInterval": "10000000", "StartupBlockIOWeight": "18446744073709551615", "StartupCPUShares": "18446744073709551615", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "no", "TasksCurrent": "18446744073709551615", "TasksMax": "18446744073709551615", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "User": "etcd", "Wants": "network-online.target system.slice", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "/var/lib/etcd" } } TASK [etcd : Set fact etcd_service_status_changed] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/etcd/tasks/main.yml:133 ok: [localhost] => { "ansible_facts": { "etcd_service_status_changed": true }, "changed": false, "generated_timestamp": "2018-04-06 21:39:51.712803" } TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] ******* task path: 
/usr/share/ansible/openshift-ansible/roles/nickhammond.logrotate/tasks/main.yml:2 ok: [localhost] => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:39:52.432834", "msg": "", "rc": 0, "results": [ "logrotate-3.8.6-14.el7.x86_64 providing logrotate is already installed" ] } TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] *** task path: /usr/share/ansible/openshift-ansible/roles/nickhammond.logrotate/tasks/main.yml:8 RUNNING HANDLER [etcd : restart etcd] ****************************************** skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:52.485223", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers META: ran handlers PLAY [etcd Install Checkpoint End] ********************************************* META: ran handlers TASK [Set etcd install 'Complete'] ********************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-etcd/config.yml:32 ok: [localhost] => { "ansible_stats": { "aggregate": false, "data": { "installer_phase_etcd": "Complete" }, "per_host": false }, "changed": false, "generated_timestamp": "2018-04-06 21:39:52.520252" } META: ran handlers META: ran handlers PLAY [NFS Install Checkpoint Start] ******************************************** META: ran handlers TASK [Set NFS install 'In Progress'] ******************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-nfs/config.yml:6 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:52.555624", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers META: ran handlers PLAY [Configure nfs] *********************************************************** skipping: no hosts matched PLAY [NFS Install Checkpoint End] ********************************************** META: ran handlers TASK [Set NFS install 'Complete'] ********************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-nfs/config.yml:22 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:52.592618", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers META: ran handlers PLAY [Load Balancer Install Checkpoint Start] ********************************** META: ran handlers TASK [Set load balancer install 'In Progress'] ********************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-loadbalancer/config.yml:6 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:52.626802", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers META: ran handlers PLAY [Configure firewall and docker for load balancers] ************************ skipping: no hosts matched PLAY [Configure load balancers] ************************************************ skipping: no hosts matched PLAY [Load Balancer Install Checkpoint End] ************************************ META: ran handlers TASK [Set load balancer install 'Complete'] ************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-loadbalancer/config.yml:43 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:52.666659", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers META: ran handlers PLAY [Master Install Checkpoint Start] 
***************************************** META: ran handlers TASK [Set Master install 'In Progress'] **************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:6 ok: [localhost] => { "ansible_stats": { "aggregate": false, "data": { "installer_phase_master": "In Progress" }, "per_host": false }, "changed": false, "generated_timestamp": "2018-04-06 21:39:52.701503" } META: ran handlers META: ran handlers PLAY [Create OpenShift certificates for master hosts] ************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [openshift_master_facts : Migrate legacy osm_default_subdomain fact] ****** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:3 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:53.504343", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_facts : Verify required variables are set] ************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:53.531698", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_facts : Set g_metrics_hostname] ************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:20 ok: [localhost] => { "ansible_facts": { "g_metrics_hostname": "hawkular-metrics.172.18.1.211.nip.io" }, "changed": false, "generated_timestamp": "2018-04-06 21:39:53.563082" } TASK [openshift_master_facts : set_fact] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:26 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:53.590570", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_facts : Set master facts] ******************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:30 changed: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "172.30.0.1", "ec2-54-152-227-161.compute-1.amazonaws.com", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "54.152.227.161", "openshift.default.svc", "kubernetes.default", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "kubernetes.default", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "openshift.default.svc", "172.30.0.1", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": 
false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "master", "etcd", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "master": { "access_token_max_seconds": 86400, "admission_plugin_config": { "openshift.io/ImagePolicy": { "configuration": { "apiVersion": "v1", "executionRules": [ { "matchImageAnnotations": [ { "key": "images.openshift.io/deny-execution", "value": "true" } ], "name": "execution-denied", "onResources": [ { "resource": "pods" }, { "resource": "builds" } ], "reject": true, "skipOnResolutionFailure": true } ], "kind": "ImagePolicyConfig" } } }, "api_port": "8443", "api_url": "https://ip-172-18-1-211.ec2.internal:8443", "api_use_ssl": true, "audit_config": { "enabled": true }, "auth_token_max_seconds": 500, "bind_addr": "0.0.0.0", "cluster_method": "native", "console_path": "/console", "console_port": "8443", "console_url": "https://ip-172-18-1-211.ec2.internal:8443/console", "console_use_ssl": true, "controller_args": { "enable-hostpath-provisioner": [ "true" ] }, "controllers_port": "8444", "default_node_selector": "region=infra", "dns_port": 8053, "dynamic_provisioning_enabled": true, "embedded_dns": true, "embedded_etcd": true, "embedded_kube": true, "etcd_hosts": "", "etcd_port": "2379", "etcd_urls": [ "https://ip-172-18-1-211.ec2.internal:2379" ], "etcd_use_ssl": true, "identity_providers": [ { "challenge": "true", "kind": "AllowAllPasswordIdentityProvider", "login": "true", "name": "allow_all" } ], "loopback_api_url": "https://ip-172-18-1-211.ec2.internal:8443", "loopback_cluster_name": "ip-172-18-1-211-ec2-internal:8443", "loopback_context_name": "default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "loopback_user": "system:openshift-master/ip-172-18-1-211-ec2-internal:8443", "manage_htpasswd": true, "master_image": "openshift/origin", "master_system_image": "openshift/origin", "max_requests_inflight": 500, "mcs_allocator_range": "s0:/2", "mcs_labels_per_project": 5, "oauth_grant_method": "auto", "portal_net": "172.30.0.0/16", "project_request_message": "", "project_request_template": "", "public_api_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", "public_console_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443/console", "registry_url": "openshift/origin-${component}:5eda3fa", "sdn_cluster_network_cidr": "10.128.0.0/14", "sdn_host_subnet_length": "9", "session_max_seconds": 3600, "session_name": "ssn", "session_secrets_file": "", 
"uid_allocator_range": "1000000000-1999999999/10000" }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": false, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": true, "generated_timestamp": "2018-04-06 21:39:55.251918" } TASK [openshift_master_facts : Determine if scheduler config present] ********** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:105 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:55.429948", "stat": { "exists": false } } TASK [openshift_master_facts : Set Default scheduler predicates and priorities] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:110 ok: [localhost] => { "ansible_facts": { "openshift_master_scheduler_default_predicates": [ { "name": "NoVolumeZoneConflict" }, { "name": "MaxEBSVolumeCount" }, { "name": "MaxGCEPDVolumeCount" }, { "name": "MaxAzureDiskVolumeCount" }, { "name": "MatchInterPodAffinity" }, { "name": "NoDiskConflict" }, { "name": "GeneralPredicates" }, { "name": "PodToleratesNodeTaints" }, { "name": 
"CheckNodeMemoryPressure" }, { "name": "CheckNodeDiskPressure" }, { "name": "NoVolumeNodeConflict" }, { "argument": { "serviceAffinity": { "labels": [ "region" ] } }, "name": "Region" } ], "openshift_master_scheduler_default_priorities": [ { "name": "SelectorSpreadPriority", "weight": 1 }, { "name": "InterPodAffinityPriority", "weight": 1 }, { "name": "LeastRequestedPriority", "weight": 1 }, { "name": "BalancedResourceAllocation", "weight": 1 }, { "name": "NodePreferAvoidPodsPriority", "weight": 10000 }, { "name": "NodeAffinityPriority", "weight": 1 }, { "name": "TaintTolerationPriority", "weight": 1 }, { "argument": { "serviceAntiAffinity": { "label": "zone" } }, "name": "Zone", "weight": 2 } ] }, "changed": false, "generated_timestamp": "2018-04-06 21:39:55.467808" } TASK [openshift_master_facts : Retrieve current scheduler config] ************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:116 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:55.495766", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_facts : Set openshift_master_scheduler_current_config] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:121 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:55.524410", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_facts : Test if scheduler config is readable] *********** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:125 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:55.550220", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_facts : Set current scheduler predicates and priorities] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_facts/tasks/main.yml:130 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:55.577525", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_named_certificates : set_fact] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_named_certificates/tasks/main.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:55.614545", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_named_certificates : openshift_facts] ************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_named_certificates/tasks/main.yml:9 changed: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "172.30.0.1", "ec2-54-152-227-161.compute-1.amazonaws.com", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "54.152.227.161", "openshift.default.svc", "kubernetes.default", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "kubernetes.default", 
"kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "openshift.default.svc", "172.30.0.1", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "master", "etcd", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "master": { "access_token_max_seconds": 86400, "admission_plugin_config": { "openshift.io/ImagePolicy": { "configuration": { "apiVersion": "v1", "executionRules": [ { "matchImageAnnotations": [ { "key": "images.openshift.io/deny-execution", "value": "true" } ], "name": "execution-denied", "onResources": [ { "resource": "pods" }, { "resource": "builds" } ], "reject": true, "skipOnResolutionFailure": true } ], "kind": "ImagePolicyConfig" } } }, "api_port": "8443", "api_url": "https://ip-172-18-1-211.ec2.internal:8443", "api_use_ssl": true, "audit_config": { "enabled": true }, "auth_token_max_seconds": 500, "bind_addr": "0.0.0.0", "cluster_method": "native", "console_path": "/console", "console_port": "8443", "console_url": "https://ip-172-18-1-211.ec2.internal:8443/console", "console_use_ssl": true, "controller_args": { "enable-hostpath-provisioner": [ "true" ] }, "controllers_port": "8444", "default_node_selector": "region=infra", "dns_port": 8053, "dynamic_provisioning_enabled": true, "embedded_dns": true, "embedded_etcd": true, "embedded_kube": true, "etcd_hosts": "", "etcd_port": "2379", "etcd_urls": [ "https://ip-172-18-1-211.ec2.internal:2379" ], "etcd_use_ssl": true, "identity_providers": [ { "challenge": "true", "kind": "AllowAllPasswordIdentityProvider", "login": "true", "name": "allow_all" } ], "loopback_api_url": "https://ip-172-18-1-211.ec2.internal:8443", "loopback_cluster_name": "ip-172-18-1-211-ec2-internal:8443", "loopback_context_name": "default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "loopback_user": "system:openshift-master/ip-172-18-1-211-ec2-internal:8443", "manage_htpasswd": true, "master_image": "openshift/origin", "master_system_image": "openshift/origin", "max_requests_inflight": 500, "mcs_allocator_range": "s0:/2", "mcs_labels_per_project": 5, "named_certificates": [], "oauth_grant_method": "auto", 
"portal_net": "172.30.0.0/16", "project_request_message": "", "project_request_template": "", "public_api_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", "public_console_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443/console", "registry_url": "openshift/origin-${component}:5eda3fa", "sdn_cluster_network_cidr": "10.128.0.0/14", "sdn_host_subnet_length": "9", "session_max_seconds": 3600, "session_name": "ssn", "session_secrets_file": "", "uid_allocator_range": "1000000000-1999999999/10000" }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": false, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": true, "generated_timestamp": "2018-04-06 21:39:57.193264" } TASK [openshift_named_certificates : Clear named certificates] ***************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_named_certificates/tasks/main.yml:16 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:39:57.224955", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_named_certificates : Ensure 
named certificate directory exists] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_named_certificates/tasks/main.yml:22 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-04-06 21:39:57.410011", "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/origin/master/named_certificates/", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [openshift_named_certificates : Land named certificates] ****************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_named_certificates/tasks/main.yml:28 TASK [openshift_named_certificates : Land named certificate keys] ************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_named_certificates/tasks/main.yml:34 TASK [openshift_named_certificates : Land named CA certificates] *************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_named_certificates/tasks/main.yml:41 TASK [openshift_docker_facts : Set docker facts] ******************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:2 ok: [localhost] => (item={u'local_facts': {u'use_crio': False, u'log_driver': u'', u'disable_push_dockerhub': u'', u'selinux_enabled': u'', u'hosted_registry_insecure': False, u'hosted_registry_network': u'172.30.0.0/16', u'log_options': u'', u'options': u'', u'use_system_container': False}, u'role': u'docker'}) => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "172.30.0.1", "ec2-54-152-227-161.compute-1.amazonaws.com", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "54.152.227.161", "openshift.default.svc", "kubernetes.default", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "kubernetes.default", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "openshift.default.svc", "172.30.0.1", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "master", "etcd", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", 
"disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "master": { "access_token_max_seconds": 86400, "admission_plugin_config": { "openshift.io/ImagePolicy": { "configuration": { "apiVersion": "v1", "executionRules": [ { "matchImageAnnotations": [ { "key": "images.openshift.io/deny-execution", "value": "true" } ], "name": "execution-denied", "onResources": [ { "resource": "pods" }, { "resource": "builds" } ], "reject": true, "skipOnResolutionFailure": true } ], "kind": "ImagePolicyConfig" } } }, "api_port": "8443", "api_url": "https://ip-172-18-1-211.ec2.internal:8443", "api_use_ssl": true, "audit_config": { "enabled": true }, "auth_token_max_seconds": 500, "bind_addr": "0.0.0.0", "cluster_method": "native", "console_path": "/console", "console_port": "8443", "console_url": "https://ip-172-18-1-211.ec2.internal:8443/console", "console_use_ssl": true, "controller_args": { "enable-hostpath-provisioner": [ "true" ] }, "controllers_port": "8444", "default_node_selector": "region=infra", "dns_port": 8053, "dynamic_provisioning_enabled": true, "embedded_dns": true, "embedded_etcd": true, "embedded_kube": true, "etcd_hosts": "", "etcd_port": "2379", "etcd_urls": [ "https://ip-172-18-1-211.ec2.internal:2379" ], "etcd_use_ssl": true, "identity_providers": [ { "challenge": "true", "kind": "AllowAllPasswordIdentityProvider", "login": "true", "name": "allow_all" } ], "loopback_api_url": "https://ip-172-18-1-211.ec2.internal:8443", "loopback_cluster_name": "ip-172-18-1-211-ec2-internal:8443", "loopback_context_name": "default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "loopback_user": "system:openshift-master/ip-172-18-1-211-ec2-internal:8443", "manage_htpasswd": true, "master_image": "openshift/origin", "master_system_image": "openshift/origin", "max_requests_inflight": 500, "mcs_allocator_range": "s0:/2", "mcs_labels_per_project": 5, "named_certificates": [], "oauth_grant_method": "auto", "portal_net": "172.30.0.0/16", "project_request_message": "", "project_request_template": "", "public_api_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", "public_console_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443/console", "registry_url": "openshift/origin-${component}:5eda3fa", "sdn_cluster_network_cidr": "10.128.0.0/14", "sdn_host_subnet_length": "9", "session_max_seconds": 3600, "session_name": "ssn", "session_secrets_file": "", "uid_allocator_range": "1000000000-1999999999/10000" }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": false, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": 
"ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": false, "generated_timestamp": "2018-04-06 21:39:59.107722", "item": { "local_facts": { "disable_push_dockerhub": "", "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "log_driver": "", "log_options": "", "options": "", "selinux_enabled": "", "use_crio": false, "use_system_container": false }, "role": "docker" } } ok: [localhost] => (item={u'local_facts': {u'sdn_mtu': u''}, u'role': u'node'}) => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "172.30.0.1", "ec2-54-152-227-161.compute-1.amazonaws.com", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "54.152.227.161", "openshift.default.svc", "kubernetes.default", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [], "internal_hostnames": [ "kubernetes.default", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "openshift.default.svc", "172.30.0.1", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, 
"is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "master", "etcd", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "master": { "access_token_max_seconds": 86400, "admission_plugin_config": { "openshift.io/ImagePolicy": { "configuration": { "apiVersion": "v1", "executionRules": [ { "matchImageAnnotations": [ { "key": "images.openshift.io/deny-execution", "value": "true" } ], "name": "execution-denied", "onResources": [ { "resource": "pods" }, { "resource": "builds" } ], "reject": true, "skipOnResolutionFailure": true } ], "kind": "ImagePolicyConfig" } } }, "api_port": "8443", "api_url": "https://ip-172-18-1-211.ec2.internal:8443", "api_use_ssl": true, "audit_config": { "enabled": true }, "auth_token_max_seconds": 500, "bind_addr": "0.0.0.0", "cluster_method": "native", "console_path": "/console", "console_port": "8443", "console_url": "https://ip-172-18-1-211.ec2.internal:8443/console", "console_use_ssl": true, "controller_args": { "enable-hostpath-provisioner": [ "true" ] }, "controllers_port": "8444", "default_node_selector": "region=infra", "dns_port": 8053, "dynamic_provisioning_enabled": true, "embedded_dns": true, "embedded_etcd": true, "embedded_kube": true, "etcd_hosts": "", "etcd_port": "2379", "etcd_urls": [ "https://ip-172-18-1-211.ec2.internal:2379" ], "etcd_use_ssl": true, "identity_providers": [ { "challenge": "true", "kind": "AllowAllPasswordIdentityProvider", "login": "true", "name": "allow_all" } ], "loopback_api_url": "https://ip-172-18-1-211.ec2.internal:8443", "loopback_cluster_name": "ip-172-18-1-211-ec2-internal:8443", "loopback_context_name": "default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "loopback_user": "system:openshift-master/ip-172-18-1-211-ec2-internal:8443", "manage_htpasswd": true, "master_image": "openshift/origin", "master_system_image": "openshift/origin", "max_requests_inflight": 500, "mcs_allocator_range": "s0:/2", "mcs_labels_per_project": 5, "named_certificates": [], "oauth_grant_method": "auto", "portal_net": "172.30.0.0/16", "project_request_message": "", "project_request_template": "", "public_api_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", "public_console_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443/console", "registry_url": "openshift/origin-${component}:5eda3fa", "sdn_cluster_network_cidr": 
"10.128.0.0/14", "sdn_host_subnet_length": "9", "session_max_seconds": 3600, "session_name": "ssn", "session_secrets_file": "", "uid_allocator_range": "1000000000-1999999999/10000" }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": false, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": false, "generated_timestamp": "2018-04-06 21:40:00.660851", "item": { "local_facts": { "sdn_mtu": "" }, "role": "node" } } TASK [openshift_docker_facts : set_fact] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:22 ok: [localhost] => { "ansible_facts": { "docker_push_dockerhub": false }, "changed": false, "generated_timestamp": "2018-04-06 21:40:00.714098" } TASK [openshift_docker_facts : set_fact] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:32 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:00.747648", "skip_reason": "Conditional result was False", "skipped": true } TASK 
[openshift_docker_facts : set_fact] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_docker_facts/tasks/main.yml:37 ok: [localhost] => { "ansible_facts": { "docker_options": "--log-driver=journald" }, "changed": false, "generated_timestamp": "2018-04-06 21:40:00.787426" } TASK [docker : Getting current systemd-udevd exec command] ********************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml:3 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:00.819718", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Assure systemd-udevd.service.d directory exists] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:00.852202", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create systemd-udevd override file] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/udev_workaround.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:00.885074", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:8 ok: [localhost] => { "ansible_facts": { "l_use_crio": false, "l_use_crio_only": false, "l_use_system_container": false }, "changed": false, "generated_timestamp": "2018-04-06 21:40:00.928439" } TASK [docker : Add enterprise registry, if necessary] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:00.960956", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Get current installed Docker version] *************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:2 ok: [localhost] => { "attempts": 1, "changed": false, "cmd": [ "repoquery", "--plugins", "--installed", "--qf", "%{version}", "docker" ], "delta": "0:00:00.252504", "end": "2018-04-06 21:40:01.394681", "generated_timestamp": "2018-04-06 21:40:01.416595", "rc": 0, "start": "2018-04-06 21:40:01.142177", "stderr": [], "stdout": [ "1.13.1" ] } TASK [docker : Error out if Docker pre-installed but too old] ****************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:10 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:01.460642", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Error out if requested Docker is too old] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:15 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:01.507940", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Fail if Docker version requested but downgrade is required] ***** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:22 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:01.560236", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Error out if attempting 
to upgrade Docker across the 1.10 boundary] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:29 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:01.606083", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install Docker] ************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:36 ok: [localhost] => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:40:02.358211", "msg": "", "rc": 0, "results": [ "docker-2:1.13.1-53.git774336d.el7.x86_64 providing docker is already installed" ] } TASK [docker : Ensure docker.service.d directory exists] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:44 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-04-06 21:40:02.567165", "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/systemd/system/docker.service.d", "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [docker : Configure Docker service unit file] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:49 NOTIFIED HANDLER restart docker changed: [localhost] => { "changed": true, "checksum": "9e1e25c0262550dd6c750b8b58bfd898ccca1cb2", "dest": "/etc/systemd/system/docker.service.d/custom.conf", "generated_timestamp": "2018-04-06 21:40:03.062960", "gid": 0, "group": "root", "md5sum": "3c142b6e4f182f866ab2cf8c98dc71ca", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:container_unit_file_t:s0", "size": 345, "src": "/home/origin/.ansible/tmp/ansible-tmp-1523050802.76-233382622273343/source", "state": "file", "uid": 0 } TASK [docker : stat] *********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:57 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:03.265762", "stat": { "atime": 1523048233.699, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "d74f4d62e9e709daacb521216b739e4e797ce244", "ctime": 1520548153.1781683, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 13067715, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "42b205ccd461488ba8f2f47638b30c8e", "mimetype": "text/plain", "mode": "0644", "mtime": 1520548153.1771684, "nlink": 1, "path": "/etc/sysconfig/docker", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1382, "uid": 0, "version": "1532570339", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [docker : Set registry params] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:60 skipping: [localhost] => (item={u'reg_conf_var': u'ADD_REGISTRY', u'reg_flag': u'--add-registry', u'reg_fact_val': []}) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:03.348690", "item": { "reg_conf_var": "ADD_REGISTRY", "reg_fact_val": [], "reg_flag": "--add-registry" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: 
[localhost] => (item={u'reg_conf_var': u'BLOCK_REGISTRY', u'reg_flag': u'--block-registry', u'reg_fact_val': []}) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:03.382238", "item": { "reg_conf_var": "BLOCK_REGISTRY", "reg_fact_val": [], "reg_flag": "--block-registry" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'reg_conf_var': u'INSECURE_REGISTRY', u'reg_flag': u'--insecure-registry', u'reg_fact_val': []}) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:03.414240", "item": { "reg_conf_var": "INSECURE_REGISTRY", "reg_fact_val": [], "reg_flag": "--insecure-registry" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Place additional/blocked/insecure registries in /etc/containers/registries.conf] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:82 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:03.458204", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set Proxy Settings] ********************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:90 ok: [localhost] => (item={u'reg_conf_var': u'HTTP_PROXY', u'reg_fact_val': u''}) => { "backup": "", "changed": false, "found": 0, "generated_timestamp": "2018-04-06 21:40:03.782943", "item": { "reg_conf_var": "HTTP_PROXY", "reg_fact_val": "" }, "msg": "" } ok: [localhost] => (item={u'reg_conf_var': u'HTTPS_PROXY', u'reg_fact_val': u''}) => { "backup": "", "changed": false, "found": 0, "generated_timestamp": "2018-04-06 21:40:03.970277", "item": { "reg_conf_var": "HTTPS_PROXY", "reg_fact_val": "" }, "msg": "" } ok: [localhost] => (item={u'reg_conf_var': u'NO_PROXY', u'reg_fact_val': u''}) => { "backup": "", "changed": false, "found": 0, "generated_timestamp": "2018-04-06 21:40:04.157034", "item": { "reg_conf_var": "NO_PROXY", "reg_fact_val": "" }, "msg": "" } TASK [docker : Set various Docker options] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:108 changed: [localhost] => { "backup": "", "changed": true, "generated_timestamp": "2018-04-06 21:40:04.373742", "msg": "line replaced" } TASK [docker : stat] *********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:123 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:04.572753", "stat": { "atime": 1523048239.6510875, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "d30da59f8bb0969a680f73f836c3f8123deba6cc", "ctime": 1520548148.0851715, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4331624, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "d4fa8a35c813dac393f629dc2ce2852e", "mimetype": "text/plain", "mode": "0644", "mtime": 1519152255.0, "nlink": 1, "path": "/etc/sysconfig/docker-network", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 56, "uid": 0, "version": "18446744072507462951", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [docker : Configure Docker Network OPTIONS] ******************************* 
task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:126 changed: [localhost] => { "backup": "", "changed": true, "generated_timestamp": "2018-04-06 21:40:04.781541", "msg": "line replaced" } TASK [docker : Detect if docker is already started] **************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:140 ok: [localhost] => { "changed": false, "cmd": [ "systemctl", "show", "docker", "-p", "ActiveState" ], "delta": "0:00:00.008525", "end": "2018-04-06 21:40:04.961201", "generated_timestamp": "2018-04-06 21:40:04.979961", "rc": 0, "start": "2018-04-06 21:40:04.952676", "stderr": [], "stdout": [ "ActiveState=active" ] } TASK [docker : Start the Docker service] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:145 ok: [localhost] => { "attempts": 1, "changed": false, "enabled": true, "generated_timestamp": "2018-04-06 21:40:05.262811", "name": "docker", "state": "started", "status": { "ActiveEnterTimestamp": "Fri 2018-04-06 21:30:50 UTC", "ActiveEnterTimestampMonotonic": "2039409790", "ActiveExitTimestamp": "Fri 2018-04-06 21:30:46 UTC", "ActiveExitTimestampMonotonic": "2035729408", "ActiveState": "active", "After": "iptables.service system.slice systemd-journald.socket network.target rhel-push-plugin.socket docker-storage-setup.service registries.service basic.target", "AllowIsolate": "no", "AmbientCapabilities": "0", "AssertResult": "yes", "AssertTimestamp": "Fri 2018-04-06 21:30:48 UTC", "AssertTimestampMonotonic": "2037916575", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "18446744073709551615", "CPUAccounting": "yes", "CPUQuotaPerSecUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "18446744073709551615", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "18446744073709551615", "ConditionResult": "yes", "ConditionTimestamp": "Fri 2018-04-06 21:30:48 UTC", "ConditionTimestampMonotonic": "2037916574", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/docker.service", "ControlPID": "0", "DefaultDependencies": "yes", "Delegate": "no", "Description": "Docker Application Container Engine", "DevicePolicy": "auto", "Documentation": "http://docs.docker.com", "DropInPaths": "/etc/systemd/system/docker.service.d/custom.conf", "Environment": "GOTRACEBACK=crash DOCKER_HTTP_HOST_COMPAT=1 PATH=/usr/libexec/docker:/usr/bin:/usr/sbin", "EnvironmentFile": "/etc/sysconfig/docker-network (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "20613", "ExecMainStartTimestamp": "Fri 2018-04-06 21:30:48 UTC", "ExecMainStartTimestampMonotonic": "2037918930", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -s HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/bin/dockerd-current ; argv[]=/usr/bin/dockerd-current --add-runtime docker-runc=/usr/libexec/docker/docker-runc-current --default-runtime=docker-runc --authorization-plugin=rhel-push-plugin --exec-opt native.cgroupdriver=systemd --userland-proxy-path=/usr/libexec/docker/docker-proxy-current --seccomp-profile=/etc/docker/seccomp.json $OPTIONS $DOCKER_STORAGE_OPTIONS $DOCKER_NETWORK_OPTIONS $ADD_REGISTRY $BLOCK_REGISTRY $INSECURE_REGISTRY $REGISTRIES ; ignore_errors=no 
; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/docker.service", "GuessMainPID": "yes", "IOScheduling": "0", "Id": "docker.service", "IgnoreOnIsolate": "no", "IgnoreOnSnapshot": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Fri 2018-04-06 21:30:48 UTC", "InactiveEnterTimestampMonotonic": "2037530560", "InactiveExitTimestamp": "Fri 2018-04-06 21:30:48 UTC", "InactiveExitTimestampMonotonic": "2037918964", "JobTimeoutAction": "none", "JobTimeoutUSec": "0", "KillMode": "process", "KillSignal": "15", "LimitAS": "18446744073709551615", "LimitCORE": "18446744073709551615", "LimitCPU": "18446744073709551615", "LimitDATA": "18446744073709551615", "LimitFSIZE": "18446744073709551615", "LimitLOCKS": "18446744073709551615", "LimitMEMLOCK": "65536", "LimitMSGQUEUE": "819200", "LimitNICE": "0", "LimitNOFILE": "1048576", "LimitNPROC": "1048576", "LimitRSS": "18446744073709551615", "LimitRTPRIO": "0", "LimitRTTIME": "18446744073709551615", "LimitSIGPENDING": "63327", "LimitSTACK": "18446744073709551615", "LoadState": "loaded", "MainPID": "20613", "MemoryAccounting": "yes", "MemoryCurrent": "18446744073709551615", "MemoryLimit": "18446744073709551615", "MountFlags": "524288", "Names": "docker.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PartOf": "iptables.service", "PermissionsStartOnly": "no", "PrivateDevices": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "ProtectHome": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RequiredBy": "docker-cleanup.service", "Requires": "basic.target registries.service docker-cleanup.timer rhel-push-plugin.socket", "Restart": "on-abnormal", "RestartUSec": "100ms", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitInterval": "10000000", "StartupBlockIOWeight": "18446744073709551615", "StartupCPUShares": "18446744073709551615", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "no", "TasksCurrent": "18446744073709551615", "TasksMax": "18446744073709551615", "TimeoutStartUSec": "10min", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "WantedBy": "multi-user.target", "Wants": "iptables.service system.slice docker-storage-setup.service", "WatchdogTimestamp": "Fri 2018-04-06 21:30:50 UTC", "WatchdogTimestampMonotonic": "2039409646", "WatchdogUSec": "0" } } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/package_docker.yml:156 ok: [localhost] => { "ansible_facts": { "docker_service_status_changed": false }, "changed": false, "generated_timestamp": "2018-04-06 21:40:05.314699" } RUNNING HANDLER [docker : restart docker] ************************************** changed: 
[localhost] => { "attempts": 1, "changed": true, "generated_timestamp": "2018-04-06 21:40:08.163002", "name": "docker", "state": "started", "status": { "ActiveEnterTimestamp": "Fri 2018-04-06 21:30:50 UTC", "ActiveEnterTimestampMonotonic": "2039409790", "ActiveExitTimestamp": "Fri 2018-04-06 21:30:46 UTC", "ActiveExitTimestampMonotonic": "2035729408", "ActiveState": "active", "After": "network.target system.slice rhel-push-plugin.socket basic.target systemd-journald.socket registries.service docker-storage-setup.service iptables.service", "AllowIsolate": "no", "AmbientCapabilities": "0", "AssertResult": "yes", "AssertTimestamp": "Fri 2018-04-06 21:30:48 UTC", "AssertTimestampMonotonic": "2037916575", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "18446744073709551615", "CPUAccounting": "yes", "CPUQuotaPerSecUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "18446744073709551615", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "18446744073709551615", "ConditionResult": "yes", "ConditionTimestamp": "Fri 2018-04-06 21:30:48 UTC", "ConditionTimestampMonotonic": "2037916574", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/docker.service", "ControlPID": "0", "DefaultDependencies": "yes", "Delegate": "no", "Description": "Docker Application Container Engine", "DevicePolicy": "auto", "Documentation": "http://docs.docker.com", "DropInPaths": "/etc/systemd/system/docker.service.d/custom.conf", "Environment": "GOTRACEBACK=crash DOCKER_HTTP_HOST_COMPAT=1 PATH=/usr/libexec/docker:/usr/bin:/usr/sbin", "EnvironmentFile": "/etc/sysconfig/docker-network (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "20613", "ExecMainStartTimestamp": "Fri 2018-04-06 21:30:48 UTC", "ExecMainStartTimestampMonotonic": "2037918930", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -s HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/bin/dockerd-current ; argv[]=/usr/bin/dockerd-current --add-runtime docker-runc=/usr/libexec/docker/docker-runc-current --default-runtime=docker-runc --authorization-plugin=rhel-push-plugin --exec-opt native.cgroupdriver=systemd --userland-proxy-path=/usr/libexec/docker/docker-proxy-current --seccomp-profile=/etc/docker/seccomp.json $OPTIONS $DOCKER_STORAGE_OPTIONS $DOCKER_NETWORK_OPTIONS $ADD_REGISTRY $BLOCK_REGISTRY $INSECURE_REGISTRY $REGISTRIES ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/docker.service", "GuessMainPID": "yes", "IOScheduling": "0", "Id": "docker.service", "IgnoreOnIsolate": "no", "IgnoreOnSnapshot": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Fri 2018-04-06 21:30:48 UTC", "InactiveEnterTimestampMonotonic": "2037530560", "InactiveExitTimestamp": "Fri 2018-04-06 21:30:48 UTC", "InactiveExitTimestampMonotonic": "2037918964", "JobTimeoutAction": "none", "JobTimeoutUSec": "0", "KillMode": "process", "KillSignal": "15", "LimitAS": "18446744073709551615", "LimitCORE": "18446744073709551615", "LimitCPU": "18446744073709551615", "LimitDATA": "18446744073709551615", "LimitFSIZE": "18446744073709551615", "LimitLOCKS": "18446744073709551615", "LimitMEMLOCK": "65536", "LimitMSGQUEUE": 
"819200", "LimitNICE": "0", "LimitNOFILE": "1048576", "LimitNPROC": "1048576", "LimitRSS": "18446744073709551615", "LimitRTPRIO": "0", "LimitRTTIME": "18446744073709551615", "LimitSIGPENDING": "63327", "LimitSTACK": "18446744073709551615", "LoadState": "loaded", "MainPID": "20613", "MemoryAccounting": "yes", "MemoryCurrent": "18446744073709551615", "MemoryLimit": "18446744073709551615", "MountFlags": "524288", "Names": "docker.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PartOf": "iptables.service", "PermissionsStartOnly": "no", "PrivateDevices": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "ProtectHome": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RequiredBy": "docker-cleanup.service", "Requires": "docker-cleanup.timer rhel-push-plugin.socket registries.service basic.target", "Restart": "on-abnormal", "RestartUSec": "100ms", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitInterval": "10000000", "StartupBlockIOWeight": "18446744073709551615", "StartupCPUShares": "18446744073709551615", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "no", "TasksCurrent": "18446744073709551615", "TasksMax": "18446744073709551615", "TimeoutStartUSec": "10min", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "WantedBy": "multi-user.target", "Wants": "docker-storage-setup.service system.slice iptables.service", "WatchdogTimestamp": "Fri 2018-04-06 21:30:50 UTC", "WatchdogTimestampMonotonic": "2039409646", "WatchdogUSec": "0" } } META: ran handlers TASK [docker : Check for credentials file for registry auth] ******************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.214224", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.266673", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth (alternative)] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.316258", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure /var/lib/containers exists] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:28 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-04-06 21:40:08.513934", "gid": 0, "group": "root", "mode": "0755", "owner": 
"root", "path": "/var/lib/containers", "secontext": "unconfined_u:object_r:container_var_lib_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [docker : Fix SELinux Permissions on /var/lib/containers] ***************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:33 ok: [localhost] => { "changed": false, "cmd": [ "restorecon", "-R", "/var/lib/containers/" ], "delta": "0:00:00.010558", "end": "2018-04-06 21:40:08.690108", "generated_timestamp": "2018-04-06 21:40:08.707408", "rc": 0, "start": "2018-04-06 21:40:08.679550", "stderr": [], "stdout": [] } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:3 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.736827", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.766508", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.794944", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Fail quickly if openshift_docker_options are set] *************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:21 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.824667", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure container-selinux is installed] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:32 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.854705", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure atomic is installed] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:41 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.884104", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure runc is installed] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:51 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.914198", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install Docker so we can use the client] ************************ task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:60 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:08.944762", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Disable Docker] ************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:67 skipping: [localhost] => { "changed": false, 
"generated_timestamp": "2018-04-06 21:40:08.975252", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add http_proxy to /etc/atomic.conf] ******************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.006092", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add https_proxy to /etc/atomic.conf] ****************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:16 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.036843", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add no_proxy to /etc/atomic.conf] ********************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:25 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.067527", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set to default prepend] ***************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:87 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.095563", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set container engine image tag] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:92 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.124679", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use Red Hat Registry for image when distribution is Red Hat] **** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:98 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.154025", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use Fedora Registry for image when distribution is Fedora] ****** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:103 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.182885", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set the full image name] **************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:108 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.213533", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use a specific image if requested] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:113 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.243976", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : debug] ********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:121 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.273357", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Check for credentials file 
for registry auth] ******************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.304313", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.334947", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth (alternative)] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.365201", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Pre-pull Container Engine System Container image] *************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:131 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.393162", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure container-engine.service.d directory exists] ************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:138 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.422378", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure /etc/docker directory exists] **************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:143 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.451731", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install Container Engine System Container] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:148 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.480293", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Configure Container Engine Service File] ************************ task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:154 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.509365", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:161 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.538668", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Configure Container Engine] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:171 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.569193", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Start the Container Engine service] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:177 skipping: [localhost] => { 
"changed": false, "generated_timestamp": "2018-04-06 21:40:09.597983", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_docker.yml:188 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.627086", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:4 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.656777", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.685348", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:10 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.714734", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:13 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.743911", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:17 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.773716", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:21 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.803945", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : set_fact] ******************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:27 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.834239", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure container-selinux is installed] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:33 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.866815", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Check we are not using node as a Docker container with CRI-O] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:41 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.896232", "skip_reason": "Conditional result was False", "skipped": true } TASK 
[docker : Ensure atomic is installed] ************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:48 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.926390", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure runc is installed] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:58 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.956232", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Check that overlay is in the kernel] **************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:67 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:09.985561", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Add overlay to modprobe.d] ************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:76 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.015237", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Manually modprobe overlay into the kernel] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:82 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.044897", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Enable and start systemd-modules-load] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:85 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.074422", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add http_proxy to /etc/atomic.conf] ******************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.106191", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add https_proxy to /etc/atomic.conf] ****************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:16 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.138382", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_atomic : Add no_proxy to /etc/atomic.conf] ********************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_atomic/tasks/proxy.yml:25 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.170136", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set CRI-O image defaults] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:99 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.200103", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use Centos based image when distribution is CentOS] ************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:105 
skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.229786", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set CRI-O image tag] ******************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:110 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.260745", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use RHEL based image when distribution is Red Hat] ************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:116 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.289963", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set the full image name] **************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:122 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.319673", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Use a specific image if requested] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:127 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.349705", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : debug] ********************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:135 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.378782", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Pre-pull CRI-O System Container image] ************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:139 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.410574", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Install CRI-O System Container] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:146 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.442210", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove CRI-O default configuration files] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:152 skipping: [localhost] => (item=/etc/cni/net.d/200-loopback.conf) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.476764", "item": "/etc/cni/net.d/200-loopback.conf", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=/etc/cni/net.d/100-crio-bridge.conf) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.484825", "item": "/etc/cni/net.d/100-crio-bridge.conf", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create the CRI-O configuration] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:160 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.514665", "skip_reason": "Conditional result was False", 
"skipped": true } TASK [docker : Ensure CNI configuration directory exists] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:166 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.544520", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Add iptables allow rules] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:4 skipping: [localhost] => (item={u'port': u'10010/tcp', u'service': u'crio'}) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.585672", "item": { "port": "10010/tcp", "service": "crio" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove iptables rules] ****************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:13 TASK [docker : Add firewalld allow rules] ************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:24 skipping: [localhost] => (item={u'port': u'10010/tcp', u'service': u'crio'}) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.656142", "item": { "port": "10010/tcp", "service": "crio" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove firewalld allow rules] *********************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/crio_firewall.yml:33 TASK [docker : Configure the CNI network] ************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:175 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.715082", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Start the CRI-O service] **************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/systemcontainer_crio.yml:180 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.744972", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers TASK [docker : Check for credentials file for registry auth] ******************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.777959", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth] **************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:8 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.807707", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Create credentials for docker cli registry auth (alternative)] *** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/registry_auth.yml:23 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:10.838869", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : stat the docker data dir] *************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:49 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:11.031514", "stat": { "atime": 1519152438.0, 
"attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1523050806.774556, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 83924512, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0711", "mtime": 1520548159.5401645, "nlink": 11, "path": "/var/lib/docker", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 139, "uid": 0, "version": "18446744073709362428", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [docker : stop the current running docker] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:58 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:11.060160", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Ensure /var/lib/containers/docker exists] *********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:63 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:11.090362", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Set the selinux context on /var/lib/containers/docker] ********** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:68 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:11.121331", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : restorecon the /var/lib/containers/docker] ********************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:75 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:11.152234", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Remove the old docker location] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:78 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:11.182215", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : Setup the link] ************************************************* task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:83 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:11.211394", "skip_reason": "Conditional result was False", "skipped": true } TASK [docker : start docker] *************************************************** task path: /usr/share/ansible/openshift-ansible/roles/docker/tasks/main.yml:89 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:11.240092", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_cli : set_fact] ************************************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_cli/tasks/main.yml:2 ok: [localhost] => { "ansible_facts": { "l_is_system_container_image": false, "l_use_crio_only": false }, "changed": false, "generated_timestamp": "2018-04-06 21:40:11.276743" } TASK [openshift_cli : set_fact] ************************************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_cli/tasks/main.yml:5 ok: [localhost] => { "ansible_facts": { 
"l_use_cli_atomic_image": false }, "changed": false, "generated_timestamp": "2018-04-06 21:40:11.310992" } TASK [openshift_cli : Install clients] ***************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_cli/tasks/main.yml:8 changed: [localhost] => { "attempts": 1, "changed": true, "generated_timestamp": "2018-04-06 21:40:24.270593", "msg": "", "rc": 0, "results": [ "Loaded plugins: amazon-id, rhui-lb, search-disabled-repos\nResolving Dependencies\n--> Running transaction check\n---> Package origin-clients.x86_64 0:3.7.2-1.5.5eda3fa will be installed\n--> Processing Dependency: bash-completion for package: origin-clients-3.7.2-1.5.5eda3fa.x86_64\n--> Running transaction check\n---> Package bash-completion.noarch 1:2.1-6.el7 will be installed\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository Size\n================================================================================\nInstalling:\n origin-clients x86_64 3.7.2-1.5.5eda3fa origin-local-release 42 M\nInstalling for dependencies:\n bash-completion noarch 1:2.1-6.el7 oso-rhui-rhel-server-releases 85 k\n\nTransaction Summary\n================================================================================\nInstall 1 Package (+1 Dependent package)\n\nTotal download size: 42 M\nInstalled size: 275 M\nDownloading packages:\n--------------------------------------------------------------------------------\nTotal 125 MB/s | 42 MB 00:00 \nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Installing : 1:bash-completion-2.1-6.el7.noarch 1/2 \n Installing : origin-clients-3.7.2-1.5.5eda3fa.x86_64 2/2 \n Verifying : origin-clients-3.7.2-1.5.5eda3fa.x86_64 1/2 \n Verifying : 1:bash-completion-2.1-6.el7.noarch 2/2 \n\nInstalled:\n origin-clients.x86_64 0:3.7.2-1.5.5eda3fa \n\nDependency Installed:\n bash-completion.noarch 1:2.1-6.el7 \n\nComplete!\n" ] } TASK [openshift_cli : Pull CLI Image] ****************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_cli/tasks/main.yml:15 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:24.300342", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_cli : Copy client binaries/symlinks out of CLI image for use on the host] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_cli/tasks/main.yml:21 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:24.330679", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_cli : Pull CLI Image] ****************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_cli/tasks/main.yml:31 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:24.359161", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_cli : Copy client binaries/symlinks out of CLI image for use on the host] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_cli/tasks/main.yml:37 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:24.388675", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_cli : Reload facts to pick up installed OpenShift version] ***** task path: 
/usr/share/ansible/openshift-ansible/roles/openshift_cli/tasks/main.yml:46 ok: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "172.30.0.1", "ec2-54-152-227-161.compute-1.amazonaws.com", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "54.152.227.161", "openshift.default.svc", "kubernetes.default", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.6", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [ "origin-clients" ], "internal_hostnames": [ "kubernetes.default", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "openshift.default.svc", "172.30.0.1", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "system_images_registry": "docker.io", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": false }, "current_config": { "roles": [ "node", "master", "etcd", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "master": { "access_token_max_seconds": 86400, "admission_plugin_config": { "openshift.io/ImagePolicy": { "configuration": { "apiVersion": "v1", "executionRules": [ { "matchImageAnnotations": [ { "key": "images.openshift.io/deny-execution", "value": "true" } ], "name": "execution-denied", "onResources": [ { "resource": "pods" }, { "resource": "builds" } ], "reject": true, "skipOnResolutionFailure": true } ], "kind": "ImagePolicyConfig" } } }, "api_port": "8443", "api_url": "https://ip-172-18-1-211.ec2.internal:8443", "api_use_ssl": true, "audit_config": { "enabled": true }, "auth_token_max_seconds": 500, "bind_addr": "0.0.0.0", "cluster_method": "native", "console_path": "/console", "console_port": "8443", "console_url": "https://ip-172-18-1-211.ec2.internal:8443/console", "console_use_ssl": true, "controller_args": { "enable-hostpath-provisioner": [ "true" ] }, "controllers_port": "8444", 
"default_node_selector": "region=infra", "dns_port": 8053, "dynamic_provisioning_enabled": true, "embedded_dns": true, "embedded_etcd": true, "embedded_kube": true, "etcd_hosts": "", "etcd_port": "2379", "etcd_urls": [ "https://ip-172-18-1-211.ec2.internal:2379" ], "etcd_use_ssl": true, "identity_providers": [ { "challenge": "true", "kind": "AllowAllPasswordIdentityProvider", "login": "true", "name": "allow_all" } ], "loopback_api_url": "https://ip-172-18-1-211.ec2.internal:8443", "loopback_cluster_name": "ip-172-18-1-211-ec2-internal:8443", "loopback_context_name": "default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "loopback_user": "system:openshift-master/ip-172-18-1-211-ec2-internal:8443", "manage_htpasswd": true, "master_image": "openshift/origin", "master_system_image": "openshift/origin", "max_requests_inflight": 500, "mcs_allocator_range": "s0:/2", "mcs_labels_per_project": 5, "named_certificates": [], "oauth_grant_method": "auto", "portal_net": "172.30.0.0/16", "project_request_message": "", "project_request_template": "", "public_api_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", "public_console_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443/console", "registry_url": "openshift/origin-${component}:5eda3fa", "sdn_cluster_network_cidr": "10.128.0.0/14", "sdn_host_subnet_length": "9", "session_max_seconds": 3600, "session_name": "ssn", "session_secrets_file": "", "uid_allocator_range": "1000000000-1999999999/10000" }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": false, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", 
"reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": false, "generated_timestamp": "2018-04-06 21:40:25.995278" } TASK [openshift_cli : Install bash completion for oc tools] ******************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_cli/tasks/main.yml:49 ok: [localhost] => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:40:26.709211", "msg": "", "rc": 0, "results": [ "bash-completion-1:2.1-6.el7.noarch providing bash-completion is already installed" ] } TASK [openshift_ca : fail] ***************************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:26.737796", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_ca : fail] ***************************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:6 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:26.767797", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_ca : Install the base package for admin tooling] *************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:10 changed: [localhost -> localhost] => { "attempts": 1, "changed": true, "generated_timestamp": "2018-04-06 21:40:39.851576", "msg": "", "rc": 0, "results": [ "Loaded plugins: amazon-id, rhui-lb, search-disabled-repos\nResolving Dependencies\n--> Running transaction check\n---> Package origin.x86_64 0:3.7.2-1.5.5eda3fa will be installed\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository Size\n================================================================================\nInstalling:\n origin x86_64 3.7.2-1.5.5eda3fa origin-local-release 47 M\n\nTransaction Summary\n================================================================================\nInstall 1 Package\n\nTotal download size: 47 M\nInstalled size: 374 M\nDownloading packages:\nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Installing : origin-3.7.2-1.5.5eda3fa.x86_64 1/1 \n Verifying : origin-3.7.2-1.5.5eda3fa.x86_64 1/1 \n\nInstalled:\n origin.x86_64 0:3.7.2-1.5.5eda3fa \n\nComplete!\n" ] } TASK [openshift_ca : Reload generated facts] *********************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:20 ok: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "172.30.0.1", "ec2-54-152-227-161.compute-1.amazonaws.com", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "54.152.227.161", "openshift.default.svc", "kubernetes.default", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", 
"172.18.1.211" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.7", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [ "origin", "origin-clients" ], "internal_hostnames": [ "kubernetes.default", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "openshift.default.svc", "172.30.0.1", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "short_version": "3.7", "system_images_registry": "docker.io", "version": "3.7.2", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": true }, "current_config": { "roles": [ "node", "master", "etcd", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "master": { "access_token_max_seconds": 86400, "admission_plugin_config": { "openshift.io/ImagePolicy": { "configuration": { "apiVersion": "v1", "executionRules": [ { "matchImageAnnotations": [ { "key": "images.openshift.io/deny-execution", "value": "true" } ], "name": "execution-denied", "onResources": [ { "resource": "pods" }, { "resource": "builds" } ], "reject": true, "skipOnResolutionFailure": true } ], "kind": "ImagePolicyConfig" } } }, "api_port": "8443", "api_url": "https://ip-172-18-1-211.ec2.internal:8443", "api_use_ssl": true, "audit_config": { "enabled": true }, "auth_token_max_seconds": 500, "bind_addr": "0.0.0.0", "cluster_method": "native", "console_path": "/console", "console_port": "8443", "console_url": "https://ip-172-18-1-211.ec2.internal:8443/console", "console_use_ssl": true, "controller_args": { "enable-hostpath-provisioner": [ "true" ] }, "controllers_port": "8444", "default_node_selector": "region=infra", "dns_port": 8053, "dynamic_provisioning_enabled": true, "embedded_dns": true, "embedded_etcd": true, "embedded_kube": true, "etcd_hosts": "", "etcd_port": "2379", "etcd_urls": [ "https://ip-172-18-1-211.ec2.internal:2379" ], "etcd_use_ssl": true, "identity_providers": [ { "challenge": "true", "kind": "AllowAllPasswordIdentityProvider", "login": "true", "name": "allow_all" } ], "loopback_api_url": 
"https://ip-172-18-1-211.ec2.internal:8443", "loopback_cluster_name": "ip-172-18-1-211-ec2-internal:8443", "loopback_context_name": "default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "loopback_user": "system:openshift-master/ip-172-18-1-211-ec2-internal:8443", "manage_htpasswd": true, "master_image": "openshift/origin", "master_system_image": "openshift/origin", "max_requests_inflight": 500, "mcs_allocator_range": "s0:/2", "mcs_labels_per_project": 5, "named_certificates": [], "oauth_grant_method": "auto", "portal_net": "172.30.0.0/16", "project_request_message": "", "project_request_template": "", "public_api_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", "public_console_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443/console", "registry_url": "openshift/origin-${component}:5eda3fa", "sdn_cluster_network_cidr": "10.128.0.0/14", "sdn_host_subnet_length": "9", "session_max_seconds": 3600, "session_name": "ssn", "session_secrets_file": "", "uid_allocator_range": "1000000000-1999999999/10000" }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": false, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": 
"ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": false, "generated_timestamp": "2018-04-06 21:40:41.762809" } TASK [openshift_ca : Create openshift_ca_config_dir if it does not exist] ****** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:25 ok: [localhost -> localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:41.958505", "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/origin/master", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 148, "state": "directory", "uid": 0 } TASK [openshift_ca : Determine if CA must be created] ************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:32 ok: [localhost -> localhost] => (item=ca-bundle.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:42.155694", "item": "ca-bundle.crt", "stat": { "exists": false } } ok: [localhost -> localhost] => (item=ca.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:42.305233", "item": "ca.crt", "stat": { "exists": false } } ok: [localhost -> localhost] => (item=ca.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:42.455301", "item": "ca.key", "stat": { "exists": false } } TASK [openshift_ca : set_fact] ************************************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:43 ok: [localhost] => { "ansible_facts": { "master_ca_missing": true }, "changed": false, "generated_timestamp": "2018-04-06 21:40:42.489129" } TASK [openshift_ca : Retain original serviceaccount keys] ********************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:49 skipping: [localhost] => (item=/etc/origin/master/serviceaccounts.private.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:42.523500", "item": "/etc/origin/master/serviceaccounts.private.key", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=/etc/origin/master/serviceaccounts.public.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:42.530117", "item": "/etc/origin/master/serviceaccounts.public.key", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_ca : Deploy master ca certificate] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:59 skipping: [localhost] => (item={u'dest': u'ca.crt', u'src': u''}) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:42.579376", "item": { "dest": "ca.crt", "src": "" }, "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item={u'dest': u'ca.key', u'src': u''}) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:42.588047", "item": { "dest": "ca.key", "src": "" }, "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_ca : Create ca serial] ***************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:73 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:42.630234", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_ca : find] ***************************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:82 ok: [localhost] => { "changed": false, 
"examined": 0, "files": [], "generated_timestamp": "2018-04-06 21:40:42.910954", "matched": 0, "msg": "/etc/origin/master/legacy-ca/ was skipped as it does not seem to be a valid directory or it cannot be accessed\n" } TASK [openshift_ca : Create the master certificates if they do not already exist] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:89 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "oc", "adm", "ca", "create-master-certs", "--hostnames=172.30.0.1,ec2-54-152-227-161.compute-1.amazonaws.com,kubernetes.default.svc.cluster.local,kubernetes,openshift.default,54.152.227.161,openshift.default.svc,kubernetes.default,ip-172-18-1-211.ec2.internal,openshift.default.svc.cluster.local,kubernetes.default.svc,openshift,172.18.1.211", "--master=https://ip-172-18-1-211.ec2.internal:8443", "--public-master=https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", "--cert-dir=/etc/origin/master", "--expire-days=730", "--signer-expire-days=1825", "--overwrite=false" ], "delta": "0:00:02.193029", "end": "2018-04-06 21:40:45.357222", "generated_timestamp": "2018-04-06 21:40:45.374755", "rc": 0, "start": "2018-04-06 21:40:43.164193", "stderr": [], "stdout": [ "Generated new key pair as /etc/origin/master/serviceaccounts.public.key and /etc/origin/master/serviceaccounts.private.key" ] } TASK [openshift_ca : command] ************************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:116 changed: [localhost -> localhost] => { "changed": true, "cmd": [ "mktemp", "-d", "/tmp/openshift-ansible-XXXXXX" ], "delta": "0:00:00.002795", "end": "2018-04-06 21:40:45.554591", "generated_timestamp": "2018-04-06 21:40:45.572671", "rc": 0, "start": "2018-04-06 21:40:45.551796", "stderr": [], "stdout": [ "/tmp/openshift-ansible-99A0t1" ] } TASK [openshift_ca : copy] ***************************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:119 TASK [openshift_ca : copy] ***************************************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:126 changed: [localhost -> localhost] => { "changed": true, "checksum": "0aa274c2e4e9308760fb2140d5fd2a0121428957", "dest": "/tmp/openshift-ansible-99A0t1/ca.crt", "generated_timestamp": "2018-04-06 21:40:45.804132", "gid": 0, "group": "root", "md5sum": "010999aa71ec575114257fa775236639", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 1070, "src": "/etc/origin/master/ca.crt", "state": "file", "uid": 0 } TASK [openshift_ca : assemble] ************************************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:132 changed: [localhost -> localhost] => { "changed": true, "checksum": "0aa274c2e4e9308760fb2140d5fd2a0121428957", "dest": "/etc/origin/master/client-ca-bundle.crt", "generated_timestamp": "2018-04-06 21:40:46.005484", "gid": 0, "group": "root", "md5sum": "010999aa71ec575114257fa775236639", "mode": "0644", "msg": "OK", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1070, "src": "/tmp/openshift-ansible-99A0t1", "state": "file", "uid": 0 } TASK [openshift_ca : Test local loopback context] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:141 ok: [localhost -> localhost] => { "changed": false, "cmd": [ "oc", "config", "view", 
"--config=/etc/origin/master/openshift-master.kubeconfig" ], "delta": "0:00:00.168099", "end": "2018-04-06 21:40:46.416420", "generated_timestamp": "2018-04-06 21:40:46.437122", "rc": 0, "start": "2018-04-06 21:40:46.248321", "stderr": [], "stdout": [ "apiVersion: v1", "clusters:", "- cluster:", " certificate-authority-data: REDACTED", " server: https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", " name: ec2-54-152-227-161-compute-1-amazonaws-com:8443", "- cluster:", " certificate-authority-data: REDACTED", " server: https://ip-172-18-1-211.ec2.internal:8443", " name: ip-172-18-1-211-ec2-internal:8443", "contexts:", "- context:", " cluster: ec2-54-152-227-161-compute-1-amazonaws-com:8443", " namespace: default", " user: system:openshift-master/ip-172-18-1-211-ec2-internal:8443", " name: default/ec2-54-152-227-161-compute-1-amazonaws-com:8443/system:openshift-master", "- context:", " cluster: ip-172-18-1-211-ec2-internal:8443", " namespace: default", " user: system:openshift-master/ip-172-18-1-211-ec2-internal:8443", " name: default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "current-context: default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "kind: Config", "preferences: {}", "users:", "- name: system:openshift-master/ip-172-18-1-211-ec2-internal:8443", " user:", " client-certificate-data: REDACTED", " client-key-data: REDACTED" ] } TASK [openshift_ca : Create temp directory for loopback master client config] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:155 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:46.480342", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_ca : Generate the loopback master client config] *************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:158 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:46.523682", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_ca : Copy generated loopback master client config to master config dir] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:177 skipping: [localhost] => (item=openshift-master.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:46.572445", "item": "openshift-master.crt", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=openshift-master.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:46.581214", "item": "openshift-master.key", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=openshift-master.kubeconfig) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:46.590131", "item": "openshift-master.kubeconfig", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_ca : Delete temp directory] ************************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:186 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:46.637389", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_ca : Restore original serviceaccount keys] ********************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:194 skipping: [localhost] => (item=/etc/origin/master/serviceaccounts.private.key) => { "changed": false, "generated_timestamp": 
"2018-04-06 21:40:46.675633", "item": "/etc/origin/master/serviceaccounts.private.key", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=/etc/origin/master/serviceaccounts.public.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:46.682544", "item": "/etc/origin/master/serviceaccounts.public.key", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_ca : Remove backup serviceaccount keys] ************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_ca/tasks/main.yml:204 skipping: [localhost] => (item=/etc/origin/master/serviceaccounts.private.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:46.719046", "item": "/etc/origin/master/serviceaccounts.private.key", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=/etc/origin/master/serviceaccounts.public.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:46.726093", "item": "/etc/origin/master/serviceaccounts.public.key", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_certificates : set_fact] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:2 ok: [localhost] => { "ansible_facts": { "openshift_master_certs_etcd": [ "master.etcd-client.crt" ], "openshift_master_certs_no_etcd": [ "admin.crt", "master.kubelet-client.crt", "master.proxy-client.crt", "master.server.crt", "openshift-master.crt", "openshift-registry.crt", "openshift-router.crt", "etcd.server.crt" ] }, "changed": false, "generated_timestamp": "2018-04-06 21:40:46.759897" } TASK [openshift_master_certificates : set_fact] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:15 ok: [localhost] => { "ansible_facts": { "openshift_master_certs": [ "admin.crt", "master.kubelet-client.crt", "master.proxy-client.crt", "master.server.crt", "openshift-master.crt", "openshift-registry.crt", "openshift-router.crt", "etcd.server.crt", "master.etcd-client.crt" ] }, "changed": false, "generated_timestamp": "2018-04-06 21:40:46.864126" } TASK [openshift_master_certificates : Check status of master certificates] ***** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:18 ok: [localhost] => (item=admin.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:47.064706", "item": "admin.crt", "stat": { "atime": 1523050845.269121, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "603ea51b654e35c32fafbb95ee05320403bee818", "ctime": 1523050845.2681208, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 205830388, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "777cc756828766b073ec61f5aa5c0a9b", "mimetype": "text/plain", "mode": "0644", "mtime": 1523050845.2681208, "nlink": 1, "path": "/etc/origin/master/admin.crt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1119, "uid": 0, "version": "1205017676", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } ok: [localhost] => (item=master.kubelet-client.crt) => { "changed": false, 
"generated_timestamp": "2018-04-06 21:40:47.234127", "item": "master.kubelet-client.crt", "stat": { "atime": 1523050844.3951535, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "7d79514fdc1ee4a9e979a3bd0c9d9aaa03d51517", "ctime": 1523050844.3951535, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 205830373, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "38c62141c022712f310236903b08fb6f", "mimetype": "text/plain", "mode": "0644", "mtime": 1523050844.3951535, "nlink": 1, "path": "/etc/origin/master/master.kubelet-client.crt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1135, "uid": 0, "version": "18446744073668544713", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } ok: [localhost] => (item=master.proxy-client.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:47.399141", "item": "master.proxy-client.crt", "stat": { "atime": 1523050845.3521178, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1a314a5a11ed3ebd5ea0d86bbb039cf8a8dbe494", "ctime": 1523050845.3521178, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 205830391, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "f538fab7a24edf3b36abf9e83403b242", "mimetype": "text/plain", "mode": "0644", "mtime": 1523050845.3521178, "nlink": 1, "path": "/etc/origin/master/master.proxy-client.crt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1086, "uid": 0, "version": "1086202064", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } ok: [localhost] => (item=master.server.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:47.566443", "item": "master.server.crt", "stat": { "atime": 1523050844.5051494, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "605d8afa03cd04744e1144741950ecbb0c0ee066", "ctime": 1523050844.5051494, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 205830379, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "6f5263e7809121ea2f6cca4e6bcd8bf3", "mimetype": "text/plain", "mode": "0644", "mtime": 1523050844.5051494, "nlink": 1, "path": "/etc/origin/master/master.server.crt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2603, "uid": 0, "version": "18446744072676673324", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } ok: [localhost] => (item=openshift-master.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:47.733487", "item": "openshift-master.crt", "stat": { "atime": 1523050844.48515, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "375a459f090a0636a4ad8474e66a784c0ce79293", "ctime": 1523050844.4841502, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 
0, "gr_name": "root", "inode": 205830377, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "537bdb2ae7b5147f80099916435669a8", "mimetype": "text/plain", "mode": "0644", "mtime": 1523050844.4841502, "nlink": 1, "path": "/etc/origin/master/openshift-master.crt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1123, "uid": 0, "version": "360493743", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } ok: [localhost] => (item=openshift-registry.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:47.897636", "item": "openshift-registry.crt", "stat": { "exists": false } } ok: [localhost] => (item=openshift-router.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.053834", "item": "openshift-router.crt", "stat": { "exists": false } } ok: [localhost] => (item=etcd.server.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.220067", "item": "etcd.server.crt", "stat": { "atime": 1523050844.8081381, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "4617a0af62573c00e0e3222eaf962e0e53da3d50", "ctime": 1523050844.8081381, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 205830384, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "f882fdc65458ac12f448c4d5f88ef5e7", "mimetype": "text/plain", "mode": "0644", "mtime": 1523050844.8081381, "nlink": 1, "path": "/etc/origin/master/etcd.server.crt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2603, "uid": 0, "version": "18446744072228694659", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } ok: [localhost] => (item=master.etcd-client.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.392629", "item": "master.etcd-client.crt", "stat": { "atime": 1523050843.895172, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 16, "charset": "us-ascii", "checksum": "606b56ef23d0ea5c4183a837ced9a9309c793a2e", "ctime": 1523050776.0477016, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 205830364, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "a095f1f8b4d128a96f3aa562483059f3", "mimetype": "text/plain", "mode": "0600", "mtime": 1523050774.0, "nlink": 1, "path": "/etc/origin/master/master.etcd-client.crt", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 5933, "uid": 0, "version": "2066816280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [openshift_master_certificates : set_fact] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:26 ok: [localhost] => { "ansible_facts": { "master_certs_missing": true }, "changed": false, "generated_timestamp": "2018-04-06 21:40:48.430193" } TASK [openshift_master_certificates : Ensure the generated_configs directory present] *** task path: 
/usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:33 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.472416", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_certificates : find] ************************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:41 ok: [localhost -> localhost] => { "changed": false, "examined": 0, "files": [], "generated_timestamp": "2018-04-06 21:40:48.665581", "matched": 0, "msg": "/etc/origin/master/legacy-ca/ was skipped as it does not seem to be a valid directory or it cannot be accessed\n" } TASK [openshift_master_certificates : Create the master server certificate] **** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:48 skipping: [localhost] => (item=localhost) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.780061", "item": "localhost", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_certificates : Generate the loopback master client config] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:74 skipping: [localhost] => (item=localhost) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.843607", "item": "localhost", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_certificates : file] ************************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:102 skipping: [localhost] => (item=admin.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.926523", "item": "admin.crt", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=admin.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.935232", "item": "admin.key", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=admin.kubeconfig) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.943800", "item": "admin.kubeconfig", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=master.kubelet-client.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.952114", "item": "master.kubelet-client.crt", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=master.kubelet-client.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.960712", "item": "master.kubelet-client.key", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=ca.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.969206", "item": "ca.crt", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=ca.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.978964", "item": "ca.key", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=ca-bundle.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.987507", "item": "ca-bundle.crt", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=client-ca-bundle.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:48.995929", "item": "client-ca-bundle.crt", "skip_reason": 
"Conditional result was False", "skipped": true } skipping: [localhost] => (item=serviceaccounts.private.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.003378", "item": "serviceaccounts.private.key", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=serviceaccounts.public.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.012085", "item": "serviceaccounts.public.key", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=master.proxy-client.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.020799", "item": "master.proxy-client.crt", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=master.proxy-client.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.029322", "item": "master.proxy-client.key", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=service-signer.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.037832", "item": "service-signer.crt", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=service-signer.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.045305", "item": "service-signer.key", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_certificates : Remove generated etcd client certs when using external etcd] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:112 ok: [localhost -> localhost] => (item=master.etcd-client.crt) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.259527", "item": "master.etcd-client.crt", "path": "/etc/origin/generated-configs/master-ip-172-18-1-211.ec2.internal/master.etcd-client.crt", "state": "absent" } ok: [localhost -> localhost] => (item=master.etcd-client.key) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.431643", "item": "master.etcd-client.key", "path": "/etc/origin/generated-configs/master-ip-172-18-1-211.ec2.internal/master.etcd-client.key", "state": "absent" } TASK [openshift_master_certificates : Create local temp directory for syncing certs] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:122 ok: [localhost -> localhost] => { "changed": false, "cmd": [ "mktemp", "-d", "/tmp/openshift-ansible-XXXXXXX" ], "delta": "0:00:00.002746", "end": "2018-04-06 21:40:49.599067", "generated_timestamp": "2018-04-06 21:40:49.615804", "rc": 0, "start": "2018-04-06 21:40:49.596321", "stderr": [], "stdout": [ "/tmp/openshift-ansible-WuONQvx" ] } TASK [openshift_master_certificates : Create a tarball of the master certs] **** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:129 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.655919", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_certificates : Retrieve the master cert tarball from the master] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:138 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.695978", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_certificates : Ensure certificate directory exists] ***** task 
path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:148 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.727158", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_certificates : Unarchive the tarball on the master] ***** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:154 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.755989", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_certificates : Delete local temp directory] ************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:160 ok: [localhost -> localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:49.940637", "path": "/tmp/openshift-ansible-WuONQvx", "state": "absent" } TASK [openshift_master_certificates : Lookup default group for ansible_ssh_user] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:166 ok: [localhost] => { "changed": false, "cmd": [ "/usr/bin/id", "-g", "ec2-user" ], "delta": "0:00:00.002844", "end": "2018-04-06 21:40:50.108050", "generated_timestamp": "2018-04-06 21:40:50.125739", "rc": 0, "start": "2018-04-06 21:40:50.105206", "stderr": [], "stdout": [ "1000" ] } TASK [openshift_master_certificates : set_fact] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:171 ok: [localhost] => { "ansible_facts": { "client_users": [ "ec2-user", "root" ] }, "changed": false, "generated_timestamp": "2018-04-06 21:40:50.160271" } TASK [openshift_master_certificates : Create the client config dir(s)] ********* task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:174 changed: [localhost] => (item=ec2-user) => { "changed": true, "generated_timestamp": "2018-04-06 21:40:50.349776", "gid": 1000, "group": "ec2-user", "item": "ec2-user", "mode": "0700", "owner": "ec2-user", "path": "/home/ec2-user/.kube", "secontext": "unconfined_u:object_r:user_home_t:s0", "size": 6, "state": "directory", "uid": 1000 } changed: [localhost] => (item=root) => { "changed": true, "generated_timestamp": "2018-04-06 21:40:50.505342", "gid": 0, "group": "root", "item": "root", "mode": "0700", "owner": "root", "path": "/root/.kube", "secontext": "unconfined_u:object_r:admin_home_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [openshift_master_certificates : Copy the admin client config(s)] ********* task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:185 changed: [localhost] => (item=ec2-user) => { "changed": true, "checksum": "195c206a6aa6694e41fc2ca8c97bbb2494a05145", "dest": "/home/ec2-user/.kube/config", "generated_timestamp": "2018-04-06 21:40:50.696287", "gid": 0, "group": "root", "item": "ec2-user", "md5sum": "a6f68691d4096e7092db23921c3b6b8d", "mode": "0644", "owner": "root", "secontext": "unconfined_u:object_r:user_home_t:s0", "size": 7580, "src": "/etc/origin/master/admin.kubeconfig", "state": "file", "uid": 0 } changed: [localhost] => (item=root) => { "changed": true, "checksum": "195c206a6aa6694e41fc2ca8c97bbb2494a05145", "dest": "/root/.kube/config", "generated_timestamp": "2018-04-06 21:40:50.855939", "gid": 0, "group": "root", "item": "root", "md5sum": "a6f68691d4096e7092db23921c3b6b8d", "mode": 
"0644", "owner": "root", "secontext": "system_u:object_r:admin_home_t:s0", "size": 7580, "src": "/etc/origin/master/admin.kubeconfig", "state": "file", "uid": 0 } TASK [openshift_master_certificates : Update the permissions on the admin client config(s)] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:193 changed: [localhost] => (item=ec2-user) => { "changed": true, "generated_timestamp": "2018-04-06 21:40:51.048696", "gid": 1000, "group": "ec2-user", "item": "ec2-user", "mode": "0700", "owner": "ec2-user", "path": "/home/ec2-user/.kube/config", "secontext": "unconfined_u:object_r:user_home_t:s0", "size": 7580, "state": "file", "uid": 1000 } changed: [localhost] => (item=root) => { "changed": true, "generated_timestamp": "2018-04-06 21:40:51.210684", "gid": 0, "group": "root", "item": "root", "mode": "0700", "owner": "root", "path": "/root/.kube/config", "secontext": "system_u:object_r:admin_home_t:s0", "size": 7580, "state": "file", "uid": 0 } TASK [openshift_master_certificates : Check for ca-bundle.crt] ***************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:203 ok: [localhost] => { "changed": false, "failed": false, "failed_when_result": false, "generated_timestamp": "2018-04-06 21:40:51.405548", "stat": { "atime": 1523050843.8941722, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "0aa274c2e4e9308760fb2140d5fd2a0121428957", "ctime": 1523050843.8941722, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 205830372, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "010999aa71ec575114257fa775236639", "mimetype": "text/plain", "mode": "0644", "mtime": 1523050843.8941722, "nlink": 1, "path": "/etc/origin/master/ca-bundle.crt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1070, "uid": 0, "version": "18446744071652100005", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [openshift_master_certificates : Check for ca.crt] ************************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:209 ok: [localhost] => { "changed": false, "failed": false, "failed_when_result": false, "generated_timestamp": "2018-04-06 21:40:51.599325", "stat": { "atime": 1523050843.8941722, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "0aa274c2e4e9308760fb2140d5fd2a0121428957", "ctime": 1523050843.5811837, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 205830366, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "md5": "010999aa71ec575114257fa775236639", "mimetype": "text/plain", "mode": "0644", "mtime": 1523050843.5811837, "nlink": 1, "path": "/etc/origin/master/ca.crt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1070, "uid": 0, "version": "791564464", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [openshift_master_certificates : Migrate ca.crt to ca-bundle.crt] ********* task path: 
/usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:215 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:51.627368", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_master_certificates : Link ca.crt to ca-bundle.crt] ************ task path: /usr/share/ansible/openshift-ansible/roles/openshift_master_certificates/tasks/main.yml:221 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:51.656690", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers META: ran handlers PLAY [Disable excluders] ******************************************************* META: ran handlers TASK [openshift_excluder : Detecting Atomic Host Operating System] ************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/main.yml:2 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:51.843126", "stat": { "exists": false } } TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/main.yml:9 ok: [localhost] => { "generated_timestamp": "2018-04-06 21:40:51.881115", "r_openshift_excluder_enable_docker_excluder": "false" } TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/main.yml:13 ok: [localhost] => { "generated_timestamp": "2018-04-06 21:40:51.919114", "r_openshift_excluder_enable_openshift_excluder": "false" } TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/main.yml:17 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:51.950100", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Fail if r_openshift_excluder_service_type is not defined] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/main.yml:22 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:51.982699", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/main.yml:27 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:52.012839", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Include main action task file] ********************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/main.yml:34 included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for localhost TASK [openshift_excluder : Include verify_upgrade.yml when upgrading] ********** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/disable.yml:4 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:52.100107", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Disable excluders before the upgrade to remove older excluding expressions] *** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/disable.yml:8 included: 
/usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml for localhost TASK [openshift_excluder : Check for docker-excluder] ************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml:6 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:52.334280", "stat": { "exists": false } } TASK [openshift_excluder : disable docker excluder] **************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml:11 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:52.368094", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Check for openshift excluder] *********************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml:17 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:52.553420", "stat": { "exists": false } } TASK [openshift_excluder : disable openshift excluder] ************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml:22 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:52.587245", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Include install.yml] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/disable.yml:17 included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/install.yml for localhost TASK [openshift_excluder : Install docker excluder - yum] ********************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/install.yml:9 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:52.680360", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Install docker excluder - dnf] ********************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/install.yml:24 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:52.720604", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Install openshift excluder - yum] ******************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/install.yml:34 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:52.760371", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Install openshift excluder - dnf] ******************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/install.yml:48 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:52.801117", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : set_fact] ******************************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/install.yml:58 ok: [localhost] => { "ansible_facts": { "r_openshift_excluder_install_ran": true }, "changed": false, "generated_timestamp": "2018-04-06 21:40:52.843769" } TASK [openshift_excluder : Include exclude.yml] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/disable.yml:22 included: 
/usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/exclude.yml for localhost TASK [openshift_excluder : Check for docker-excluder] ************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/exclude.yml:2 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:53.093257", "stat": { "exists": false } } TASK [openshift_excluder : Enable docker excluder] ***************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/exclude.yml:7 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:53.127728", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Check for openshift excluder] *********************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/exclude.yml:13 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:53.316268", "stat": { "exists": false } } TASK [openshift_excluder : Enable openshift excluder] ************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/exclude.yml:18 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:53.348997", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Include unexclude.yml] ****************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/disable.yml:32 included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml for localhost TASK [openshift_excluder : Check for docker-excluder] ************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml:6 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:53.597606", "stat": { "exists": false } } TASK [openshift_excluder : disable docker excluder] **************************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml:11 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:53.629756", "skip_reason": "Conditional result was False", "skipped": true } TASK [openshift_excluder : Check for openshift excluder] *********************** task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml:17 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:53.819581", "stat": { "exists": false } } TASK [openshift_excluder : disable openshift excluder] ************************* task path: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml:22 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:53.853848", "skip_reason": "Conditional result was False", "skipped": true } META: ran handlers META: ran handlers PLAY [Gather and set facts for master hosts] *********************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [Detect if this host is a new master in a scale up] *********************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:30 ok: [localhost] => { "ansible_facts": { "g_openshift_master_is_scaleup": false }, "changed": false, "generated_timestamp": "2018-04-06 21:40:54.624528" } TASK [Scaleup Detection] ******************************************************* task 
path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:34 ok: [localhost] => { "g_openshift_master_is_scaleup": false, "generated_timestamp": "2018-04-06 21:40:54.653152" } TASK [Check for RPM generated config marker file .config_managed] ************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:38 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:54.825851", "stat": { "exists": false } } TASK [Remove RPM generated config files if present] **************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:43 skipping: [localhost] => (item=master) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:54.853141", "item": "master", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=node) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:54.858502", "item": "node", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=.config_managed) => { "changed": false, "generated_timestamp": "2018-04-06 21:40:54.864550", "item": ".config_managed", "skip_reason": "Conditional result was False", "skipped": true } TASK [set_fact] **************************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:55 ok: [localhost] => { "ansible_facts": { "openshift_master_etcd_hosts": [ "ip-172-18-1-211.ec2.internal" ], "openshift_master_etcd_port": "2379" }, "changed": false, "generated_timestamp": "2018-04-06 21:40:54.965398" } META: ran handlers META: ran handlers TASK [openshift_facts] ********************************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:65 changed: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "172.30.0.1", "ec2-54-152-227-161.compute-1.amazonaws.com", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "54.152.227.161", "openshift.default.svc", "kubernetes.default", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.7", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [ "origin", "origin-clients" ], "internal_hostnames": [ "kubernetes.default", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "openshift.default.svc", "172.30.0.1", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": 
"openshift/origin-haproxy-router", "service_type": "origin", "short_version": "3.7", "system_images_registry": "docker.io", "version": "3.7.2", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": true }, "current_config": { "roles": [ "node", "master", "etcd", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "master": { "access_token_max_seconds": 86400, "admission_plugin_config": { "openshift.io/ImagePolicy": { "configuration": { "apiVersion": "v1", "executionRules": [ { "matchImageAnnotations": [ { "key": "images.openshift.io/deny-execution", "value": "true" } ], "name": "execution-denied", "onResources": [ { "resource": "pods" }, { "resource": "builds" } ], "reject": true, "skipOnResolutionFailure": true } ], "kind": "ImagePolicyConfig" } } }, "api_port": "8443", "api_url": "https://ip-172-18-1-211.ec2.internal:8443", "api_use_ssl": true, "audit_config": { "enabled": true }, "auth_token_max_seconds": 500, "bind_addr": "0.0.0.0", "cluster_method": "native", "console_path": "/console", "console_port": "8443", "console_url": "https://ip-172-18-1-211.ec2.internal:8443/console", "console_use_ssl": true, "controller_args": { "enable-hostpath-provisioner": [ "true" ] }, "controllers_port": "8444", "default_node_selector": "region=infra", "dns_port": 8053, "dynamic_provisioning_enabled": true, "embedded_dns": true, "embedded_etcd": true, "embedded_kube": true, "etcd_hosts": "", "etcd_port": "2379", "etcd_urls": [ "https://ip-172-18-1-211.ec2.internal:2379" ], "etcd_use_ssl": true, "ha": false, "identity_providers": [ { "challenge": "true", "kind": "AllowAllPasswordIdentityProvider", "login": "true", "name": "allow_all" } ], "loopback_api_url": "https://ip-172-18-1-211.ec2.internal:8443", "loopback_cluster_name": "ip-172-18-1-211-ec2-internal:8443", "loopback_context_name": "default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "loopback_user": "system:openshift-master/ip-172-18-1-211-ec2-internal:8443", "manage_htpasswd": true, "master_count": "1", "master_image": "openshift/origin", "master_system_image": "openshift/origin", "max_requests_inflight": 500, "mcs_allocator_range": "s0:/2", "mcs_labels_per_project": 5, "named_certificates": [], "oauth_grant_method": "auto", "portal_net": "172.30.0.0/16", "project_request_message": "", "project_request_template": "", "public_api_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", "public_console_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443/console", "registry_url": "openshift/origin-${component}:5eda3fa", "sdn_cluster_network_cidr": "10.128.0.0/14", "sdn_host_subnet_length": "9", "session_max_seconds": 3600, "session_name": "ssn", "session_secrets_file": "", "uid_allocator_range": "1000000000-1999999999/10000" }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, 
"local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": false, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": true, "generated_timestamp": "2018-04-06 21:40:56.821383" } META: ran handlers PLAY [Inspect state of first master config settings] *************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers META: ran handlers TASK [openshift_facts] ********************************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:88 ok: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "172.30.0.1", "ec2-54-152-227-161.compute-1.amazonaws.com", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "54.152.227.161", "openshift.default.svc", "kubernetes.default", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": 
"origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.7", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [ "origin", "origin-clients" ], "internal_hostnames": [ "kubernetes.default", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "openshift.default.svc", "172.30.0.1", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "short_version": "3.7", "system_images_registry": "docker.io", "version": "3.7.2", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": true }, "current_config": { "roles": [ "node", "master", "etcd", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "master": { "access_token_max_seconds": 86400, "admission_plugin_config": { "openshift.io/ImagePolicy": { "configuration": { "apiVersion": "v1", "executionRules": [ { "matchImageAnnotations": [ { "key": "images.openshift.io/deny-execution", "value": "true" } ], "name": "execution-denied", "onResources": [ { "resource": "pods" }, { "resource": "builds" } ], "reject": true, "skipOnResolutionFailure": true } ], "kind": "ImagePolicyConfig" } } }, "api_port": "8443", "api_url": "https://ip-172-18-1-211.ec2.internal:8443", "api_use_ssl": true, "audit_config": { "enabled": true }, "auth_token_max_seconds": 500, "bind_addr": "0.0.0.0", "cluster_method": "native", "console_path": "/console", "console_port": "8443", "console_url": "https://ip-172-18-1-211.ec2.internal:8443/console", "console_use_ssl": true, "controller_args": { "enable-hostpath-provisioner": [ "true" ] }, "controllers_port": "8444", "default_node_selector": "region=infra", "dns_port": 8053, "dynamic_provisioning_enabled": true, "embedded_dns": true, "embedded_etcd": true, "embedded_kube": true, "etcd_hosts": "", "etcd_port": "2379", "etcd_urls": [ "https://ip-172-18-1-211.ec2.internal:2379" ], "etcd_use_ssl": true, "ha": false, "identity_providers": [ { "challenge": "true", "kind": "AllowAllPasswordIdentityProvider", "login": "true", "name": "allow_all" } ], "loopback_api_url": "https://ip-172-18-1-211.ec2.internal:8443", "loopback_cluster_name": "ip-172-18-1-211-ec2-internal:8443", "loopback_context_name": 
"default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "loopback_user": "system:openshift-master/ip-172-18-1-211-ec2-internal:8443", "manage_htpasswd": true, "master_count": "1", "master_image": "openshift/origin", "master_system_image": "openshift/origin", "max_requests_inflight": 500, "mcs_allocator_range": "s0:/2", "mcs_labels_per_project": 5, "named_certificates": [], "oauth_grant_method": "auto", "portal_net": "172.30.0.0/16", "project_request_message": "", "project_request_template": "", "public_api_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", "public_console_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443/console", "registry_url": "openshift/origin-${component}:5eda3fa", "sdn_cluster_network_cidr": "10.128.0.0/14", "sdn_host_subnet_length": "9", "session_max_seconds": 3600, "session_name": "ssn", "session_secrets_file": "", "uid_allocator_range": "1000000000-1999999999/10000" }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": false, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": false, "generated_timestamp": 
"2018-04-06 21:40:59.399871" } TASK [Check for existing configuration] **************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:93 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:59.574559", "stat": { "exists": false } } TASK [Set clean install fact] ************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:98 ok: [localhost] => { "ansible_facts": { "l_clean_install": true }, "changed": false, "generated_timestamp": "2018-04-06 21:40:59.606811" } TASK [Determine if etcd3 storage is in use] ************************************ task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:102 ok: [localhost] => { "changed": false, "cmd": [ "grep", "-Pzo", "storage-backend:\\n.*etcd3", "/etc/origin/master/master-config.yaml", "-q" ], "delta": "0:00:00.003196", "end": "2018-04-06 21:40:59.767126", "failed": false, "failed_when_result": false, "generated_timestamp": "2018-04-06 21:40:59.785590", "rc": 2, "start": "2018-04-06 21:40:59.763930", "stderr": [ "grep: /etc/origin/master/master-config.yaml: No such file or directory" ], "stdout": [] } TASK [Set etcd3 fact] ********************************************************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:108 ok: [localhost] => { "ansible_facts": { "l_etcd3_enabled": false }, "changed": false, "generated_timestamp": "2018-04-06 21:40:59.817521" } TASK [Check if atomic-openshift-master sysconfig exists yet] ******************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:112 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:40:59.994870", "stat": { "exists": false } } TASK [Preserve OPENSHIFT_DEFAULT_REGISTRY master parameter if present] ********* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:117 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:00.021873", "skip_reason": "Conditional result was False", "skipped": true } TASK [Check if atomic-openshift-master-api sysconfig exists yet] *************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:122 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:00.194440", "stat": { "exists": false } } TASK [Preserve OPENSHIFT_DEFAULT_REGISTRY master-api parameter if present] ***** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:127 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:00.219616", "skip_reason": "Conditional result was False", "skipped": true } TASK [Check if atomic-openshift-master-controllers sysconfig exists yet] ******* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:132 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:00.394373", "stat": { "exists": false } } TASK [Preserve OPENSHIFT_DEFAULT_REGISTRY master-controllers parameter if present] *** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:137 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:00.421757", "skip_reason": "Conditional result was False", "skipped": true } TASK [Update facts with 
OPENSHIFT_DEFAULT_REGISTRY value] ********************** task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:142 ok: [localhost] => { "ansible_facts": { "l_default_registry_value": "", "l_default_registry_value_api": "", "l_default_registry_value_controllers": "" }, "changed": false, "generated_timestamp": "2018-04-06 21:41:00.455149" } META: ran handlers PLAY [Generate master session secrets] ***************************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [openshift_facts] ********************************************************* task path: /usr/share/ansible/openshift-ansible/playbooks/common/openshift-master/config.yml:157 changed: [localhost] => { "ansible_facts": { "openshift": { "common": { "admin_binary": "oadm", "all_hostnames": [ "172.30.0.1", "ec2-54-152-227-161.compute-1.amazonaws.com", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "54.152.227.161", "openshift.default.svc", "kubernetes.default", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "cli_image": "openshift/origin", "client_binary": "oc", "config_base": "/etc/origin", "deployer_image": "openshift/origin-deployer", "deployment_subtype": "basic", "deployment_type": "origin", "dns_domain": "cluster.local", "etcd_runtime": "host", "examples_content_version": "v3.7", "generate_no_proxy_hosts": true, "hostname": "ip-172-18-1-211.ec2.internal", "installed_variant_rpms": [ "origin", "origin-clients" ], "internal_hostnames": [ "kubernetes.default", "kubernetes.default.svc.cluster.local", "kubernetes", "openshift.default", "openshift.default.svc", "172.30.0.1", "ip-172-18-1-211.ec2.internal", "openshift.default.svc.cluster.local", "kubernetes.default.svc", "openshift", "172.18.1.211" ], "ip": "172.18.1.211", "is_atomic": false, "is_containerized": false, "is_etcd_system_container": false, "is_master_system_container": false, "is_node_system_container": false, "is_openvswitch_system_container": false, "kube_svc_ip": "172.30.0.1", "pod_image": "openshift/origin-pod", "portal_net": "172.30.0.0/16", "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161", "registry_image": "openshift/origin-docker-registry", "router_image": "openshift/origin-haproxy-router", "service_type": "origin", "short_version": "3.7", "system_images_registry": "docker.io", "version": "3.7.2", "version_gte_3_1_1_or_1_1_1": true, "version_gte_3_1_or_1_1": true, "version_gte_3_2_or_1_2": true, "version_gte_3_3_or_1_3": true, "version_gte_3_4_or_1_4": true, "version_gte_3_5_or_1_5": true, "version_gte_3_6": true, "version_gte_3_7": true }, "current_config": { "roles": [ "node", "master", "etcd", "docker" ] }, "docker": { "api_version": "1.26 (minimum version 1.12)", "disable_push_dockerhub": false, "gte_1_10": true, "hosted_registry_insecure": false, "hosted_registry_network": "172.30.0.0/16", "options": "--log-driver=journald", "service_name": "docker", "use_crio": false, "use_system_container": false, "version": "1.13.1" }, "etcd": {}, "hosted": { "etcd": { "selector": null }, "metrics": { "selector": null }, "registry": { "selector": "region=infra" }, "router": { "selector": "region=infra" } }, "logging": { "selector": null }, "master": { "access_token_max_seconds": 86400, "admission_plugin_config": { "openshift.io/ImagePolicy": { "configuration": { "apiVersion": "v1", 
"executionRules": [ { "matchImageAnnotations": [ { "key": "images.openshift.io/deny-execution", "value": "true" } ], "name": "execution-denied", "onResources": [ { "resource": "pods" }, { "resource": "builds" } ], "reject": true, "skipOnResolutionFailure": true } ], "kind": "ImagePolicyConfig" } } }, "api_port": "8443", "api_url": "https://ip-172-18-1-211.ec2.internal:8443", "api_use_ssl": true, "audit_config": { "enabled": true }, "auth_token_max_seconds": 500, "bind_addr": "0.0.0.0", "cluster_method": "native", "console_path": "/console", "console_port": "8443", "console_url": "https://ip-172-18-1-211.ec2.internal:8443/console", "console_use_ssl": true, "controller_args": { "enable-hostpath-provisioner": [ "true" ] }, "controllers_port": "8444", "default_node_selector": "region=infra", "dns_port": 8053, "dynamic_provisioning_enabled": true, "embedded_dns": true, "embedded_etcd": true, "embedded_kube": true, "etcd_hosts": "", "etcd_port": "2379", "etcd_urls": [ "https://ip-172-18-1-211.ec2.internal:2379" ], "etcd_use_ssl": true, "ha": false, "identity_providers": [ { "challenge": "true", "kind": "AllowAllPasswordIdentityProvider", "login": "true", "name": "allow_all" } ], "loopback_api_url": "https://ip-172-18-1-211.ec2.internal:8443", "loopback_cluster_name": "ip-172-18-1-211-ec2-internal:8443", "loopback_context_name": "default/ip-172-18-1-211-ec2-internal:8443/system:openshift-master", "loopback_user": "system:openshift-master/ip-172-18-1-211-ec2-internal:8443", "manage_htpasswd": true, "master_count": "1", "master_image": "openshift/origin", "master_system_image": "openshift/origin", "max_requests_inflight": 500, "mcs_allocator_range": "s0:/2", "mcs_labels_per_project": 5, "named_certificates": [], "oauth_grant_method": "auto", "portal_net": "172.30.0.0/16", "project_request_message": "", "project_request_template": "", "public_api_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443", "public_console_url": "https://ec2-54-152-227-161.compute-1.amazonaws.com:8443/console", "registry_url": "openshift/origin-${component}:5eda3fa", "sdn_cluster_network_cidr": "10.128.0.0/14", "sdn_host_subnet_length": "9", "session_auth_secrets": [ "4SxCubhJ657PONICBPuxsbrOXBONN87L" ], "session_encryption_secrets": [ "4SxCubhJ657PONICBPuxsbrOXBONN87L" ], "session_max_seconds": 3600, "session_name": "ssn", "session_secrets_file": "", "uid_allocator_range": "1000000000-1999999999/10000" }, "node": { "annotations": {}, "iptables_sync_period": "30s", "kubelet_args": { "node-labels": [] }, "labels": {}, "local_quota_per_fsgroup": "", "node_image": "openshift/node", "node_system_image": "openshift/node", "nodename": "ip-172-18-1-211.ec2.internal", "ovs_image": "openshift/openvswitch", "ovs_system_image": "openshift/openvswitch", "registry_url": "openshift/origin-${component}:${version}", "schedulable": false, "sdn_mtu": "8951", "set_node_ip": false, "storage_plugin_deps": [ "ceph", "glusterfs", "iscsi" ] }, "prometheus": { "alertbuffer": { "selector": null }, "alertmanager": { "selector": null }, "selector": null }, "provider": { "metadata": { "ami-id": "ami-069c0ca6cc091e8fa", "ami-launch-index": "0", "ami-manifest-path": "(unknown)", "block-device-mapping": { "ami": "/dev/sda1", "ebs12": "sdb", "root": "/dev/sda1" }, "hostname": "ip-172-18-1-211.ec2.internal", "instance-action": "none", "instance-id": "i-0cd57e9e471604c34", "instance-type": "m4.xlarge", "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "metrics": { "vhostmd": "<?xml 
version=\"1.0\" encoding=\"UTF-8\"?>" }, "network": { "interfaces": { "macs": { "0e:25:a6:6d:04:5a": { "device-number": "0", "interface-id": "eni-04b95497687bf9813", "ipv4-associations": { "54.152.227.161": "172.18.1.211" }, "local-hostname": "ip-172-18-1-211.ec2.internal", "local-ipv4s": "172.18.1.211", "mac": "0e:25:a6:6d:04:5a", "owner-id": "531415883065", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4s": "54.152.227.161", "security-group-ids": "sg-7e73221a", "security-groups": "default", "subnet-id": "subnet-cf57c596", "subnet-ipv4-cidr-block": "172.18.0.0/20", "vpc-id": "vpc-69705d0c", "vpc-ipv4-cidr-block": "172.18.0.0/16", "vpc-ipv4-cidr-blocks": "172.18.0.0/16" } } } }, "placement": { "availability-zone": "us-east-1d" }, "profile": "default-hvm", "public-hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public-ipv4": "54.152.227.161", "public-keys/": "0=libra", "reservation-id": "r-0f638cc4ebb0e4aae", "security-groups": "default", "services": { "domain": "amazonaws.com", "partition": "aws" } }, "name": "aws", "network": { "hostname": "ip-172-18-1-211.ec2.internal", "interfaces": [ { "ips": [ "172.18.1.211" ], "network_id": "subnet-cf57c596", "network_type": "vpc", "public_ips": [ "54.152.227.161" ] } ], "ip": "172.18.1.211", "ipv6_enabled": false, "public_hostname": "ec2-54-152-227-161.compute-1.amazonaws.com", "public_ip": "54.152.227.161" }, "zone": "us-east-1d" } } }, "changed": true, "generated_timestamp": "2018-04-06 21:41:03.034634" } META: ran handlers META: ran handlers PLAY [Configure masters] ******************************************************* TASK [Gathering Facts] ********************************************************* ok: [localhost] META: ran handlers TASK [os_firewall : Detecting Atomic Host Operating System] ******************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/main.yml:2 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.002399", "stat": { "exists": false } } TASK [os_firewall : Set fact r_os_firewall_is_atomic] ************************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/main.yml:7 ok: [localhost] => { "ansible_facts": { "r_os_firewall_is_atomic": false }, "changed": false, "generated_timestamp": "2018-04-06 21:41:04.037547" } TASK [os_firewall : Fail - Firewalld is not supported on Atomic Host] ********** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:2 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.068919", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Install firewalld packages] ******************************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:9 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.099995", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Ensure iptables services are not enabled] ****************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:17 skipping: [localhost] => (item=iptables) => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.133126", "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true } skipping: [localhost] => (item=ip6tables) => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.142054", "item": "ip6tables", "skip_reason": "Conditional result 
was False", "skipped": true } TASK [os_firewall : Wait 10 seconds after disabling iptables] ****************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:29 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.174345", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Start and enable firewalld service] ************************ task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:34 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.205597", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : need to pause here, otherwise the firewalld service starting can sometimes cause ssh to fail] *** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:43 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.236597", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Restart polkitd] ******************************************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:48 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.268068", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Wait for polkit action to have been created] *************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/firewalld.yml:55 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.299548", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Ensure firewalld service is not enabled] ******************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:3 ok: [localhost] => { "changed": false, "enabled": false, "failed": false, "failed_when_result": false, "generated_timestamp": "2018-04-06 21:41:04.511403", "name": "firewalld", "state": "stopped", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "AllowIsolate": "no", "AmbientCapabilities": "0", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BlockIOAccounting": "no", "BlockIOWeight": "18446744073709551615", "CPUAccounting": "yes", "CPUQuotaPerSecUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "18446744073709551615", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "18446744073709551615", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ControlPID": "0", "DefaultDependencies": "yes", "Delegate": "no", "Description": "firewalld.service", "DevicePolicy": "auto", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/dev/null", "GuessMainPID": "yes", "IOScheduling": "0", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreOnSnapshot": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobTimeoutAction": "none", "JobTimeoutUSec": "0", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "18446744073709551615", "LimitCORE": "18446744073709551615", "LimitCPU": "18446744073709551615", 
"LimitDATA": "18446744073709551615", "LimitFSIZE": "18446744073709551615", "LimitLOCKS": "18446744073709551615", "LimitMEMLOCK": "65536", "LimitMSGQUEUE": "819200", "LimitNICE": "0", "LimitNOFILE": "65536", "LimitNPROC": "63327", "LimitRSS": "18446744073709551615", "LimitRTPRIO": "0", "LimitRTTIME": "18446744073709551615", "LimitSIGPENDING": "63327", "LimitSTACK": "18446744073709551615", "LoadState": "masked", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "18446744073709551615", "MemoryLimit": "18446744073709551615", "MountFlags": "0", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "PrivateDevices": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "ProtectHome": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "Restart": "no", "RestartUSec": "100ms", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "inherit", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitInterval": "10000000", "StartupBlockIOWeight": "18446744073709551615", "StartupCPUShares": "18446744073709551615", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "no", "TasksCurrent": "18446744073709551615", "TasksMax": "18446744073709551615", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "UMask": "0022", "UnitFileState": "bad", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [os_firewall : Wait 10 seconds after disabling firewalld] ***************** task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:12 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-04-06 21:41:04.552845", "skip_reason": "Conditional result was False", "skipped": true } TASK [os_firewall : Install iptables packages] ********************************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:17 ok: [localhost] => (item=iptables) => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:41:05.306583", "item": "iptables", "msg": "", "rc": 0, "results": [ "iptables-1.4.21-18.3.el7_4.x86_64 providing iptables is already installed" ] } ok: [localhost] => (item=iptables-services) => { "attempts": 1, "changed": false, "generated_timestamp": "2018-04-06 21:41:06.017975", "item": "iptables-services", "msg": "", "rc": 0, "results": [ "iptables-services-1.4.21-18.3.el7_4.x86_64 providing iptables-services is already installed" ] } TASK [os_firewall : Start and enable iptables service] ************************* task path: /usr/share/ansible/openshift-ansible/roles/os_firewall/tasks/iptables.yml:28 ok: [localhost -> localhost] => (item=localhost) => { "changed": false, "enabled": true, "generated_timestamp": "2018-04-06 21:41:06.324803", "item": "localhost", "name": "iptables", "state": "started", "status": { "ActiveEnterTimestamp": "Fri 2018-04-06 21:38:08 UTC", "ActiveEnterTimestampMonotonic": "2477349213", "ActiveExitTimestampMonotonic": "0", 
"ActiveState": "active", "After": "systemd-journald.socket system.slice basic.target syslog.target", "AllowIsolate": "no", "AmbientCapabilities": "0", "AssertResult": "yes", "AssertTimestamp": "Fri 2018-04-06 21:38:08 UTC", "AssertTimestampMonotonic": "2477293553", "Before": "ip6tables.service shutdown.target network.service docker.service", "BlockIOAccounting": "no", "BlockIOWeight": "18446744073709551615", "CPUAccounting": "yes", "CPUQuotaPerSecUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "18446744073709551615", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "18446744073709551615", "ConditionResult": "yes", "ConditionTimestamp": "Fri 2018-04-06 21:38:08 UTC", "ConditionTimestampMonotonic": "2477293552", "Conflicts": "shutdown.target", "ConsistsOf": "docker.service", "ControlGroup": "/system.slice/iptables.service", "ControlPID": "0", "DefaultDependencies": "yes", "Delegate": "no", "Description": "IPv4 firewall with iptables", "DevicePolicy": "auto", "Environment": "BOOTUP=serial CONSOLETYPE=serial", "ExecMainCode": "1", "ExecMainExitTimestamp": "Fri 2018-04-06 21:38:08 UTC", "ExecMainExitTimestampMonotonic": "2477344445", "ExecMainPID": "20527", "ExecMainStartTimestamp": "Fri 2018-04-06 21:38:08 UTC", "ExecMainStartTimestampMonotonic": "2477297983", "ExecMainStatus": "0", "ExecReload": "{ path=/usr/libexec/iptables/iptables.init ; argv[]=/usr/libexec/iptables/iptables.init reload ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/libexec/iptables/iptables.init ; argv[]=/usr/libexec/iptables/iptables.init start ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/iptables/iptables.init ; argv[]=/usr/libexec/iptables/iptables.init stop ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/iptables.service", "GuessMainPID": "yes", "IOScheduling": "0", "Id": "iptables.service", "IgnoreOnIsolate": "no", "IgnoreOnSnapshot": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Fri 2018-04-06 21:38:08 UTC", "InactiveExitTimestampMonotonic": "2477298016", "JobTimeoutAction": "none", "JobTimeoutUSec": "0", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "18446744073709551615", "LimitCORE": "18446744073709551615", "LimitCPU": "18446744073709551615", "LimitDATA": "18446744073709551615", "LimitFSIZE": "18446744073709551615", "LimitLOCKS": "18446744073709551615", "LimitMEMLOCK": "65536", "LimitMSGQUEUE": "819200", "LimitNICE": "0", "LimitNOFILE": "4096", "LimitNPROC": "63327", "LimitRSS": "18446744073709551615", "LimitRTPRIO": "0", "LimitRTTIME": "18446744073709551615", "LimitSIGPENDING": "63327", "LimitSTACK": "18446744073709551615", "LoadState": "loaded", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "18446744073709551615", "MemoryLimit": "18446744073709551615", "MountFlags": "0", "Names": "iptables.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "PrivateDevices": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "ProtectHome": "no", "ProtectSystem": "no", 
"RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "Requires": "basic.target", "Restart": "no", "RestartUSec": "100ms", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "syslog", "StandardInput": "null", "StandardOutput": "syslog", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitInterval": "10000000", "StartupBlockIOWeight": "18446744073709551615", "StartupCPUShares": "18446744073709551615", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "