Console Output

[Skipping 561 KB of earlier log output]
changed: [40.121.60.103]

TASK [container_runtime : Detect if docker is already started] *****************
Wednesday 20 June 2018  15:02:02 +0000 (0:00:00.243)       0:04:56.792 ******** 
ok: [40.121.60.103]

TASK [container_runtime : Start the Docker service] ****************************
Wednesday 20 June 2018  15:02:02 +0000 (0:00:00.245)       0:04:57.037 ******** 
changed: [40.121.60.103]

TASK [container_runtime : set_fact] ********************************************
Wednesday 20 June 2018  15:02:04 +0000 (0:00:02.082)       0:04:59.119 ******** 
ok: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:04 +0000 (0:00:00.067)       0:04:59.187 ******** 
included: /usr/share/ansible/openshift-ansible/roles/container_runtime/tasks/common/post.yml for 40.121.60.103

TASK [container_runtime : Ensure /var/lib/containers exists] *******************
Wednesday 20 June 2018  15:02:04 +0000 (0:00:00.065)       0:04:59.252 ******** 
changed: [40.121.60.103]

TASK [container_runtime : Fix SELinux Permissions on /var/lib/containers] ******
Wednesday 20 June 2018  15:02:04 +0000 (0:00:00.223)       0:04:59.476 ******** 
ok: [40.121.60.103]

RUNNING HANDLER [container_runtime : restart container runtime] ****************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.220)       0:04:59.696 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.048)       0:04:59.744 ******** 
included: /usr/share/ansible/openshift-ansible/roles/container_runtime/tasks/registry_auth.yml for 40.121.60.103

TASK [container_runtime : Check for credentials file for registry auth] ********
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.059)       0:04:59.804 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Create credentials for docker cli registry auth] *****
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.043)       0:04:59.847 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Create credentials for docker cli registry auth (alternative)] ***
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.041)       0:04:59.889 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : stat the docker data dir] ****************************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.039)       0:04:59.928 ******** 
ok: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.218)       0:05:00.147 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Fail quickly if openshift_docker_options are set] ****
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.039)       0:05:00.187 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.033)       0:05:00.221 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.033)       0:05:00.254 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Install Docker so we can use the client] *************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.033)       0:05:00.288 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Disable Docker] **************************************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.033)       0:05:00.322 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Ensure proxies are in the atomic.conf] ***************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.034)       0:05:00.356 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : debug] ***********************************************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.041)       0:05:00.397 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.033)       0:05:00.431 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Pre-pull Container Engine System Container image] ****
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.033)       0:05:00.465 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Ensure container-engine.service.d directory exists] ***
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.034)       0:05:00.499 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Ensure /etc/docker directory exists] *****************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.033)       0:05:00.533 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Install Container Engine System Container] ***********
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.034)       0:05:00.568 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Configure Container Engine Service File] *************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.034)       0:05:00.602 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Configure Container Engine] **************************
Wednesday 20 June 2018  15:02:05 +0000 (0:00:00.033)       0:05:00.636 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Start the container-engine service] ******************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.034)       0:05:00.671 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : set_fact] ********************************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.034)       0:05:00.705 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:00.739 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Check we are not using node as a Docker container with CRI-O] ***
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.041)       0:05:00.780 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:00.814 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.034)       0:05:00.848 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Check that overlay is in the kernel] *****************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.034)       0:05:00.882 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Add overlay to modprobe.d] ***************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.034)       0:05:00.917 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Manually modprobe overlay into the kernel] ***********
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:00.950 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Enable and start systemd-modules-load] ***************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.034)       0:05:00.984 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Ensure proxies are in the atomic.conf] ***************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.034)       0:05:01.019 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : debug] ***********************************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:01.052 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Pre-pull CRI-O System Container image] ***************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:01.086 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Install CRI-O System Container] **********************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:01.119 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Remove CRI-O default configuration files] ************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.040)       0:05:01.160 ******** 
skipping: [40.121.60.103] => (item=/etc/cni/net.d/200-loopback.conf) 
skipping: [40.121.60.103] => (item=/etc/cni/net.d/100-crio-bridge.conf) 

TASK [container_runtime : Create the CRI-O configuration] **********************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.044)       0:05:01.205 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Ensure CNI configuration directory exists] ***********
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.034)       0:05:01.239 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Add iptables allow rules] ****************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:01.272 ******** 
skipping: [40.121.60.103] => (item={u'port': u'10010/tcp', u'service': u'crio'}) 

TASK [container_runtime : Remove iptables rules] *******************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.041)       0:05:01.313 ******** 

TASK [container_runtime : Add firewalld allow rules] ***************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:01.347 ******** 
skipping: [40.121.60.103] => (item={u'port': u'10010/tcp', u'service': u'crio'}) 

TASK [container_runtime : Remove firewalld allow rules] ************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.040)       0:05:01.387 ******** 

TASK [container_runtime : Configure the CNI network] ***************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.032)       0:05:01.420 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Create /etc/sysconfig/crio-storage] ******************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:01.454 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Create /etc/sysconfig/crio-network] ******************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:01.488 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : debug] ***********************************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:01.522 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Start the CRI-O service] *****************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.040)       0:05:01.563 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.033)       0:05:01.596 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Fail if Atomic Host since this is an rpm request] ****
Wednesday 20 June 2018  15:02:06 +0000 (0:00:00.034)       0:05:01.631 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.035)       0:05:01.667 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Check that overlay is in the kernel] *****************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.035)       0:05:01.702 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Add overlay to modprobe.d] ***************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.033)       0:05:01.736 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Manually modprobe overlay into the kernel] ***********
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.033)       0:05:01.770 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Enable and start systemd-modules-load] ***************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.033)       0:05:01.803 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Install cri-o] ***************************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.033)       0:05:01.837 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Remove CRI-O default configuration files] ************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.033)       0:05:01.871 ******** 
skipping: [40.121.60.103] => (item=/etc/cni/net.d/200-loopback.conf) 
skipping: [40.121.60.103] => (item=/etc/cni/net.d/100-crio-bridge.conf) 

TASK [container_runtime : Create the CRI-O configuration] **********************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.048)       0:05:01.919 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Ensure CNI configuration directory exists] ***********
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.041)       0:05:01.961 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Add iptables allow rules] ****************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.036)       0:05:01.997 ******** 
skipping: [40.121.60.103] => (item={u'port': u'10010/tcp', u'service': u'crio'}) 

TASK [container_runtime : Remove iptables rules] *******************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.039)       0:05:02.037 ******** 

TASK [container_runtime : Add firewalld allow rules] ***************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.032)       0:05:02.070 ******** 
skipping: [40.121.60.103] => (item={u'port': u'10010/tcp', u'service': u'crio'}) 

TASK [container_runtime : Remove firewalld allow rules] ************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.040)       0:05:02.111 ******** 

TASK [container_runtime : Configure the CNI network] ***************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.032)       0:05:02.143 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Create /etc/sysconfig/crio-network] ******************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.033)       0:05:02.177 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : Start the CRI-O service] *****************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.033)       0:05:02.211 ******** 
skipping: [40.121.60.103]

TASK [container_runtime : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.033)       0:05:02.244 ******** 
skipping: [40.121.60.103]

PLAY [Determine openshift_version to configure on first master] ****************

TASK [include_role] ************************************************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.047)       0:05:02.292 ******** 

TASK [openshift_version : Use openshift.common.version fact as version to configure if already installed] ***
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.075)       0:05:02.367 ******** 
skipping: [40.121.60.103]

TASK [openshift_version : include_tasks] ***************************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.034)       0:05:02.402 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/first_master_rpm_version.yml for 40.121.60.103

TASK [openshift_version : Set rpm version to configure if openshift_pkg_version specified] ***
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.054)       0:05:02.456 ******** 
skipping: [40.121.60.103]

TASK [openshift_version : Set openshift_version for rpm installation] **********
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.039)       0:05:02.495 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_version/tasks/check_available_rpms.yml for 40.121.60.103

TASK [openshift_version : Get available RPM version] ***************************
Wednesday 20 June 2018  15:02:07 +0000 (0:00:00.090)       0:05:02.586 ******** 
ok: [40.121.60.103]

TASK [openshift_version : fail] ************************************************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:01.448)       0:05:04.035 ******** 
skipping: [40.121.60.103]

TASK [openshift_version : set_fact] ********************************************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:00.037)       0:05:04.072 ******** 
ok: [40.121.60.103]

TASK [openshift_version : debug] ***********************************************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:00.063)       0:05:04.136 ******** 
ok: [40.121.60.103] => {
    "msg": "openshift_pkg_version was not defined. Falling back to -3.9.0"
}

TASK [openshift_version : set_fact] ********************************************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:00.062)       0:05:04.198 ******** 
ok: [40.121.60.103]

TASK [openshift_version : debug] ***********************************************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:00.062)       0:05:04.261 ******** 
ok: [40.121.60.103] => {
    "msg": "openshift_image_tag was not defined. Falling back to v3.9.0"
}

TASK [openshift_version : set_fact] ********************************************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:00.062)       0:05:04.324 ******** 
ok: [40.121.60.103]

TASK [openshift_version : debug] ***********************************************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:00.062)       0:05:04.387 ******** 
ok: [40.121.60.103] => {
    "openshift_release": "VARIABLE IS NOT DEFINED!"
}

TASK [openshift_version : debug] ***********************************************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:00.060)       0:05:04.447 ******** 
ok: [40.121.60.103] => {
    "openshift_image_tag": "v3.9.0"
}

TASK [openshift_version : debug] ***********************************************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:00.059)       0:05:04.506 ******** 
ok: [40.121.60.103] => {
    "openshift_pkg_version": "-3.9.0"
}

TASK [openshift_version : debug] ***********************************************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:00.061)       0:05:04.568 ******** 
ok: [40.121.60.103] => {
    "openshift_version": "3.9.0"
}
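
The debug output above shows the installer falling back to defaults because neither openshift_release, openshift_pkg_version nor openshift_image_tag was supplied. A minimal sketch of how these could be pinned explicitly in the inventory instead (hypothetical [OSEv3:vars] snippet; values taken from the fallbacks above):

    [OSEv3:vars]
    # Pin the release line and the exact rpm/image versions
    openshift_release=3.9
    openshift_pkg_version=-3.9.0
    openshift_image_tag=v3.9.0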

PLAY [Set openshift_version for etcd, node, and master hosts] ******************
skipping: no hosts matched

PLAY [Ensure the requested version packages are available.] ********************
skipping: no hosts matched

PLAY [Disable excluders] *******************************************************

TASK [openshift_excluder : Detecting Atomic Host Operating System] *************
Wednesday 20 June 2018  15:02:09 +0000 (0:00:00.080)       0:05:04.648 ******** 
ok: [40.121.60.103]

TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] ***
Wednesday 20 June 2018  15:02:10 +0000 (0:00:00.210)       0:05:04.859 ******** 
ok: [40.121.60.103] => {
    "r_openshift_excluder_enable_docker_excluder": true
}

TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] ***
Wednesday 20 June 2018  15:02:10 +0000 (0:00:00.069)       0:05:04.929 ******** 
ok: [40.121.60.103] => {
    "r_openshift_excluder_enable_openshift_excluder": true
}

TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] ***
Wednesday 20 June 2018  15:02:10 +0000 (0:00:00.067)       0:05:04.996 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] ***
Wednesday 20 June 2018  15:02:10 +0000 (0:00:00.036)       0:05:05.033 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Include main action task file] **********************
Wednesday 20 June 2018  15:02:10 +0000 (0:00:00.037)       0:05:05.071 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/disable.yml for 40.121.60.103

TASK [openshift_excluder : Include verify_upgrade.yml when upgrading] **********
Wednesday 20 June 2018  15:02:10 +0000 (0:00:00.070)       0:05:05.142 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Disable excluders before the upgrade to remove older excluding expressions] ***
Wednesday 20 June 2018  15:02:10 +0000 (0:00:00.035)       0:05:05.177 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml for 40.121.60.103

TASK [openshift_excluder : Check for docker-excluder] **************************
Wednesday 20 June 2018  15:02:10 +0000 (0:00:00.057)       0:05:05.235 ******** 
ok: [40.121.60.103]

TASK [openshift_excluder : disable docker excluder] ****************************
Wednesday 20 June 2018  15:02:10 +0000 (0:00:00.232)       0:05:05.468 ******** 
changed: [40.121.60.103]

TASK [openshift_excluder : Check for openshift excluder] ***********************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.266)       0:05:05.735 ******** 
ok: [40.121.60.103]

TASK [openshift_excluder : disable openshift excluder] *************************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.218)       0:05:05.953 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Include install.yml] ********************************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.050)       0:05:06.004 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/install.yml for 40.121.60.103

TASK [openshift_excluder : Install docker excluder - yum] **********************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.060)       0:05:06.064 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Install docker excluder - dnf] **********************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.041)       0:05:06.105 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Install openshift excluder - yum] *******************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.041)       0:05:06.146 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Install openshift excluder - dnf] *******************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.041)       0:05:06.188 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : set_fact] *******************************************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.041)       0:05:06.229 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Include exclude.yml] ********************************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.039)       0:05:06.269 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/exclude.yml for 40.121.60.103

TASK [openshift_excluder : Check for docker-excluder] **************************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.063)       0:05:06.332 ******** 
ok: [40.121.60.103]

TASK [openshift_excluder : Enable docker excluder] *****************************
Wednesday 20 June 2018  15:02:11 +0000 (0:00:00.226)       0:05:06.558 ******** 
changed: [40.121.60.103]

TASK [openshift_excluder : Check for openshift excluder] ***********************
Wednesday 20 June 2018  15:02:12 +0000 (0:00:00.266)       0:05:06.825 ******** 
ok: [40.121.60.103]

TASK [openshift_excluder : Enable openshift excluder] **************************
Wednesday 20 June 2018  15:02:12 +0000 (0:00:00.225)       0:05:07.051 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Include unexclude.yml] ******************************
Wednesday 20 June 2018  15:02:12 +0000 (0:00:00.044)       0:05:07.096 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/unexclude.yml for 40.121.60.103

TASK [openshift_excluder : Check for docker-excluder] **************************
Wednesday 20 June 2018  15:02:12 +0000 (0:00:00.061)       0:05:07.158 ******** 
ok: [40.121.60.103]

TASK [openshift_excluder : disable docker excluder] ****************************
Wednesday 20 June 2018  15:02:12 +0000 (0:00:00.220)       0:05:07.378 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Check for openshift excluder] ***********************
Wednesday 20 June 2018  15:02:12 +0000 (0:00:00.038)       0:05:07.417 ******** 
ok: [40.121.60.103]

TASK [openshift_excluder : disable openshift excluder] *************************
Wednesday 20 June 2018  15:02:12 +0000 (0:00:00.206)       0:05:07.623 ******** 
skipping: [40.121.60.103]

PLAY [Evaluate node groups] ****************************************************

TASK [Gathering Facts] *********************************************************
Wednesday 20 June 2018  15:02:13 +0000 (0:00:00.042)       0:05:07.666 ******** 
ok: [localhost]

TASK [Evaluate oo_containerized_master_nodes] **********************************
Wednesday 20 June 2018  15:02:18 +0000 (0:00:05.804)       0:05:13.471 ******** 
skipping: [localhost] => (item=40.121.60.103) 
 [WARNING]: Could not match supplied host pattern, ignoring:
oo_containerized_master_nodes

PLAY [Configure nodes] *********************************************************

TASK [openshift_clock : Determine if chrony is installed] **********************
Wednesday 20 June 2018  15:02:18 +0000 (0:00:00.132)       0:05:13.603 ******** 
changed: [40.121.60.103]
 [WARNING]: Consider using the yum, dnf or zypper module rather than running
rpm.  If you need to use command because yum, dnf or zypper is insufficient you
can add warn=False to this command task or set command_warnings=False in
ansible.cfg to get rid of this message.
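
The warning itself names both ways to silence it; an illustrative sketch of each (the actual task definition is not shown in this log):

    # Per task, e.g. a detection task similar to the one above:
    - name: Determine if chrony is installed
      command: rpm -q chrony          # hypothetical command
      args:
        warn: false                   # suppress the "use the yum/dnf module" hint
      register: chrony_installed
      failed_when: false

    # Or globally, in ansible.cfg:
    # [defaults]
    # command_warnings = False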

TASK [openshift_clock : Install ntp package] ***********************************
Wednesday 20 June 2018  15:02:19 +0000 (0:00:00.257)       0:05:13.860 ******** 
skipping: [40.121.60.103]

TASK [openshift_clock : Start and enable ntpd/chronyd] *************************
Wednesday 20 June 2018  15:02:19 +0000 (0:00:00.035)       0:05:13.896 ******** 
changed: [40.121.60.103]

TASK [openshift_cloud_provider : Set cloud provider facts] *********************
Wednesday 20 June 2018  15:02:19 +0000 (0:00:00.309)       0:05:14.206 ******** 
skipping: [40.121.60.103]

TASK [openshift_cloud_provider : Create cloudprovider config dir] **************
Wednesday 20 June 2018  15:02:19 +0000 (0:00:00.035)       0:05:14.241 ******** 
skipping: [40.121.60.103]

TASK [openshift_cloud_provider : include the defined cloud provider files] *****
Wednesday 20 June 2018  15:02:19 +0000 (0:00:00.033)       0:05:14.275 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : fail] ***************************************************
Wednesday 20 June 2018  15:02:19 +0000 (0:00:00.033)       0:05:14.308 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Check for NetworkManager service] ***********************
Wednesday 20 June 2018  15:02:19 +0000 (0:00:00.034)       0:05:14.343 ******** 
ok: [40.121.60.103]

TASK [openshift_node : Set fact using_network_manager] *************************
Wednesday 20 June 2018  15:02:20 +0000 (0:00:00.634)       0:05:14.977 ******** 
ok: [40.121.60.103]

TASK [openshift_node : Install dnsmasq] ****************************************
Wednesday 20 June 2018  15:02:20 +0000 (0:00:00.058)       0:05:15.035 ******** 
changed: [40.121.60.103]

TASK [openshift_node : ensure origin/node directory exists] ********************
Wednesday 20 June 2018  15:02:23 +0000 (0:00:03.252)       0:05:18.288 ******** 
changed: [40.121.60.103] => (item=/etc/origin)
changed: [40.121.60.103] => (item=/etc/origin/node)

TASK [openshift_node : Install node-dnsmasq.conf] ******************************
Wednesday 20 June 2018  15:02:24 +0000 (0:00:00.479)       0:05:18.767 ******** 
changed: [40.121.60.103]

TASK [openshift_node : fail] ***************************************************
Wednesday 20 June 2018  15:02:24 +0000 (0:00:00.686)       0:05:19.454 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Install NetworkManager during node_bootstrap provisioning] ***
Wednesday 20 June 2018  15:02:24 +0000 (0:00:00.035)       0:05:19.490 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Install network manager dispatch script] ****************
Wednesday 20 June 2018  15:02:24 +0000 (0:00:00.033)       0:05:19.523 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Install dnsmasq configuration] **************************
Wednesday 20 June 2018  15:02:24 +0000 (0:00:00.035)       0:05:19.558 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Deploy additional dnsmasq.conf] *************************
Wednesday 20 June 2018  15:02:25 +0000 (0:00:00.659)       0:05:20.218 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Enable dnsmasq] *****************************************
Wednesday 20 June 2018  15:02:25 +0000 (0:00:00.034)       0:05:20.253 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Install network manager dispatch script] ****************
Wednesday 20 June 2018  15:02:26 +0000 (0:00:00.627)       0:05:20.880 ******** 
changed: [40.121.60.103]

RUNNING HANDLER [openshift_node : restart NetworkManager] **********************
Wednesday 20 June 2018  15:02:26 +0000 (0:00:00.761)       0:05:21.642 ******** 
skipping: [40.121.60.103]

RUNNING HANDLER [openshift_node : restart dnsmasq] *****************************
Wednesday 20 June 2018  15:02:27 +0000 (0:00:00.032)       0:05:21.675 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Add iptables allow rules] *******************************
Wednesday 20 June 2018  15:02:27 +0000 (0:00:00.050)       0:05:21.725 ******** 
changed: [40.121.60.103] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'})
changed: [40.121.60.103] => (item={u'port': u'80/tcp', u'service': u'http'})
changed: [40.121.60.103] => (item={u'port': u'443/tcp', u'service': u'https'})
changed: [40.121.60.103] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'})
skipping: [40.121.60.103] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) 
skipping: [40.121.60.103] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) 
skipping: [40.121.60.103] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) 

TASK [openshift_node : Remove iptables rules] **********************************
Wednesday 20 June 2018  15:02:28 +0000 (0:00:01.580)       0:05:23.305 ******** 

TASK [openshift_node : Add firewalld allow rules] ******************************
Wednesday 20 June 2018  15:02:28 +0000 (0:00:00.032)       0:05:23.338 ******** 
skipping: [40.121.60.103] => (item={u'port': u'10250/tcp', u'service': u'Kubernetes kubelet'}) 
skipping: [40.121.60.103] => (item={u'port': u'80/tcp', u'service': u'http'}) 
skipping: [40.121.60.103] => (item={u'port': u'443/tcp', u'service': u'https'}) 
skipping: [40.121.60.103] => (item={u'cond': u'openshift_use_openshift_sdn | bool', u'port': u'4789/udp', u'service': u'OpenShift OVS sdn'}) 
skipping: [40.121.60.103] => (item={u'cond': False, u'port': u'179/tcp', u'service': u'Calico BGP Port'}) 
skipping: [40.121.60.103] => (item={u'cond': False, u'port': u'/tcp', u'service': u'Kubernetes service NodePort TCP'}) 
skipping: [40.121.60.103] => (item={u'cond': False, u'port': u'/udp', u'service': u'Kubernetes service NodePort UDP'}) 

TASK [openshift_node : Remove firewalld allow rules] ***************************
Wednesday 20 June 2018  15:02:28 +0000 (0:00:00.112)       0:05:23.450 ******** 

TASK [openshift_node : Checking for journald.conf] *****************************
Wednesday 20 June 2018  15:02:28 +0000 (0:00:00.032)       0:05:23.483 ******** 
ok: [40.121.60.103]

TASK [openshift_node : Create journald persistence directories] ****************
Wednesday 20 June 2018  15:02:29 +0000 (0:00:00.328)       0:05:23.811 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Update journald setup] **********************************
Wednesday 20 June 2018  15:02:29 +0000 (0:00:00.290)       0:05:24.102 ******** 
changed: [40.121.60.103] => (item={u'var': u'Storage', u'val': u'persistent'})
changed: [40.121.60.103] => (item={u'var': u'Compress', u'val': True})
changed: [40.121.60.103] => (item={u'var': u'SyncIntervalSec', u'val': u'1s'})
changed: [40.121.60.103] => (item={u'var': u'RateLimitInterval', u'val': u'1s'})
changed: [40.121.60.103] => (item={u'var': u'RateLimitBurst', u'val': 10000})
changed: [40.121.60.103] => (item={u'var': u'SystemMaxUse', u'val': u'8G'})
ok: [40.121.60.103] => (item={u'var': u'SystemKeepFree', u'val': u'20%'})
changed: [40.121.60.103] => (item={u'var': u'SystemMaxFileSize', u'val': u'10M'})
changed: [40.121.60.103] => (item={u'var': u'MaxRetentionSec', u'val': u'1month'})
ok: [40.121.60.103] => (item={u'var': u'MaxFileSec', u'val': u'1day'})
changed: [40.121.60.103] => (item={u'var': u'ForwardToSyslog', u'val': False})
changed: [40.121.60.103] => (item={u'var': u'ForwardToWall', u'val': False})
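
For reference, the var/val pairs above correspond to journald settings roughly as follows (a sketch of the resulting /etc/systemd/journald.conf entries; how the playbook renders the boolean values is assumed here):

    [Journal]
    Storage=persistent
    Compress=yes
    SyncIntervalSec=1s
    RateLimitInterval=1s
    RateLimitBurst=10000
    SystemMaxUse=8G
    SystemKeepFree=20%
    SystemMaxFileSize=10M
    MaxRetentionSec=1month
    MaxFileSec=1day
    ForwardToSyslog=no
    ForwardToWall=no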

TASK [openshift_node : Restart journald] ***************************************
Wednesday 20 June 2018  15:02:33 +0000 (0:00:04.208)       0:05:28.311 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Disable swap] *******************************************
Wednesday 20 June 2018  15:02:34 +0000 (0:00:00.396)       0:05:28.707 ******** 
ok: [40.121.60.103]

TASK [openshift_node : Install Node package, sdn-ovs, conntrack packages] ******
Wednesday 20 June 2018  15:02:34 +0000 (0:00:00.431)       0:05:29.138 ******** 
changed: [40.121.60.103] => (item={u'name': u'origin-node-3.9.0'})
changed: [40.121.60.103] => (item={u'name': u'origin-sdn-ovs-3.9.0', u'install': True})
ok: [40.121.60.103] => (item={u'name': u'conntrack-tools'})

TASK [openshift_node : Pre-pull node image when containerized] *****************
Wednesday 20 June 2018  15:03:53 +0000 (0:01:19.214)       0:06:48.353 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Restart cri-o] ******************************************
Wednesday 20 June 2018  15:03:53 +0000 (0:00:00.033)       0:06:48.386 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : restart NetworkManager to ensure resolv.conf is present] ***
Wednesday 20 June 2018  15:03:53 +0000 (0:00:00.034)       0:06:48.420 ******** 
changed: [40.121.60.103]

TASK [openshift_node : sysctl] *************************************************
Wednesday 20 June 2018  15:03:54 +0000 (0:00:00.372)       0:06:48.793 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Setting sebool container_manage_cgroup] *****************
Wednesday 20 June 2018  15:03:54 +0000 (0:00:00.413)       0:06:49.207 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Check for credentials file for registry auth] ***********
Wednesday 20 June 2018  15:03:55 +0000 (0:00:01.115)       0:06:50.323 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Create credentials for registry auth] *******************
Wednesday 20 June 2018  15:03:55 +0000 (0:00:00.034)       0:06:50.358 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Create credentials for registry auth (alternative)] *****
Wednesday 20 June 2018  15:03:55 +0000 (0:00:00.041)       0:06:50.399 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Setup ro mount of /root/.docker for containerized hosts] ***
Wednesday 20 June 2018  15:03:55 +0000 (0:00:00.040)       0:06:50.440 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Install Node service file] ******************************
Wednesday 20 June 2018  15:03:55 +0000 (0:00:00.034)       0:06:50.474 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Install Node dependencies docker service file] **********
Wednesday 20 June 2018  15:03:56 +0000 (0:00:00.647)       0:06:51.121 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Create the openvswitch service env file] ****************
Wednesday 20 June 2018  15:03:56 +0000 (0:00:00.036)       0:06:51.158 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Install OpenvSwitch docker service file] ****************
Wednesday 20 June 2018  15:03:56 +0000 (0:00:00.035)       0:06:51.194 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Configure Node settings] ********************************
Wednesday 20 June 2018  15:03:56 +0000 (0:00:00.040)       0:06:51.234 ******** 
changed: [40.121.60.103] => (item={u'regex': u'^OPTIONS=', u'line': u'OPTIONS=--loglevel=2 '})
ok: [40.121.60.103] => (item={u'regex': u'^CONFIG_FILE=', u'line': u'CONFIG_FILE=/etc/origin/node/node-config.yaml'})
changed: [40.121.60.103] => (item={u'regex': u'^IMAGE_VERSION=', u'line': u'IMAGE_VERSION=v3.9.0'})
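
The three regex/line pairs above amount to a node sysconfig file along these lines (the target path is not shown in the log; /etc/sysconfig/origin-node is assumed for an Origin rpm install):

    OPTIONS=--loglevel=2
    CONFIG_FILE=/etc/origin/node/node-config.yaml
    IMAGE_VERSION=v3.9.0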

TASK [openshift_node : Configure Proxy Settings] *******************************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.692)       0:06:51.927 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Pre-pull node system container image] *******************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.043)       0:06:51.970 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Install or Update node system container] ****************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.035)       0:06:52.006 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : set_fact] ***********************************************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.041)       0:06:52.048 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : set_fact] ***********************************************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.035)       0:06:52.083 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Pre-pull OpenVSwitch system container image] ************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.035)       0:06:52.119 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Install or Update OpenVSwitch system container] *********
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.035)       0:06:52.154 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Pre-pull openvswitch image] *****************************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.034)       0:06:52.188 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Start and enable openvswitch service] *******************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.035)       0:06:52.223 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : set_fact] ***********************************************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.033)       0:06:52.257 ******** 
ok: [40.121.60.103]

TASK [openshift_node : Create kublet args config dir] **************************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.060)       0:06:52.318 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Create the Node config] *********************************
Wednesday 20 June 2018  15:03:57 +0000 (0:00:00.048)       0:06:52.366 ******** 
[DEPRECATION WARNING]: Using tests as filters is deprecated. Instead of using 
`result|version_compare` instead use `result is version_compare`. This feature 
will be removed in version 2.9. Deprecation warnings can be disabled by setting
 deprecation_warnings=False in ansible.cfg.
changed: [40.121.60.103]
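
The deprecation warning concerns Jinja2 test syntax in the role's conditionals, not anything on the host; an illustrative before/after with a hypothetical variable:

    # Deprecated (test used as a filter), removed in Ansible 2.9:
    when: openshift_version | version_compare('3.9', '>=')
    # Preferred from Ansible 2.5 on:
    when: openshift_version is version_compare('3.9', '>=')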

TASK [openshift_node : Configure Node Environment Variables] *******************
Wednesday 20 June 2018  15:03:58 +0000 (0:00:00.643)       0:06:53.009 ******** 

TASK [openshift_node : Configure AWS Cloud Provider Settings] ******************
Wednesday 20 June 2018  15:03:58 +0000 (0:00:00.035)       0:06:53.044 ******** 
skipping: [40.121.60.103] => (item=None) 
skipping: [40.121.60.103] => (item=None) 
skipping: [40.121.60.103]

TASK [openshift_node : Wait for master API to become available before proceeding] ***
Wednesday 20 June 2018  15:03:58 +0000 (0:00:00.053)       0:06:53.098 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Start and enable node dep] ******************************
Wednesday 20 June 2018  15:03:58 +0000 (0:00:00.035)       0:06:53.133 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Start and enable node] **********************************
Wednesday 20 June 2018  15:03:58 +0000 (0:00:00.034)       0:06:53.167 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Dump logs from node service if it failed] ***************
Wednesday 20 June 2018  15:03:58 +0000 (0:00:00.033)       0:06:53.201 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Abort if node failed to start] **************************
Wednesday 20 June 2018  15:03:58 +0000 (0:00:00.033)       0:06:53.235 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : set_fact] ***********************************************
Wednesday 20 June 2018  15:03:58 +0000 (0:00:00.034)       0:06:53.270 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : Install NFS storage plugin dependencies] ****************
Wednesday 20 June 2018  15:03:58 +0000 (0:00:00.033)       0:06:53.304 ******** 
ok: [40.121.60.103]

TASK [openshift_node : Check for existence of nfs sebooleans] ******************
Wednesday 20 June 2018  15:03:59 +0000 (0:00:00.466)       0:06:53.771 ******** 
ok: [40.121.60.103] => (item=virt_use_nfs)
ok: [40.121.60.103] => (item=virt_sandbox_use_nfs)

TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers] ***
Wednesday 20 June 2018  15:03:59 +0000 (0:00:00.409)       0:06:54.180 ******** 
ok: [40.121.60.103] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-06-20 15:03:59.327824', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.021011', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-06-20 15:03:59.306813', '_ansible_ignore_errors': None, 'failed': False})
skipping: [40.121.60.103] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-06-20 15:03:59.494040', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.002732', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-06-20 15:03:59.491308', '_ansible_ignore_errors': None, 'failed': False}) 

TASK [openshift_node : Set seboolean to allow nfs storage plugin access from containers (python 3)] ***
Wednesday 20 June 2018  15:03:59 +0000 (0:00:00.237)       0:06:54.418 ******** 
skipping: [40.121.60.103] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-06-20 15:03:59.327824', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_use_nfs'], u'rc': 0, 'item': u'virt_use_nfs', u'delta': u'0:00:00.021011', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-06-20 15:03:59.306813', '_ansible_ignore_errors': None, 'failed': False}) 
skipping: [40.121.60.103] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-06-20 15:03:59.494040', '_ansible_no_log': False, u'stdout': u'virt_use_nfs --> on', u'cmd': [u'getsebool', u'virt_sandbox_use_nfs'], u'rc': 0, 'item': u'virt_sandbox_use_nfs', u'delta': u'0:00:00.002732', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_nfs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_nfs --> on'], 'failed_when_result': False, u'start': u'2018-06-20 15:03:59.491308', '_ansible_ignore_errors': None, 'failed': False}) 

TASK [openshift_node : Install GlusterFS storage plugin dependencies] **********
Wednesday 20 June 2018  15:03:59 +0000 (0:00:00.067)       0:06:54.485 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Check for existence of fusefs sebooleans] ***************
Wednesday 20 June 2018  15:04:03 +0000 (0:00:04.123)       0:06:58.609 ******** 
ok: [40.121.60.103] => (item=virt_use_fusefs)
ok: [40.121.60.103] => (item=virt_sandbox_use_fusefs)

TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers] ***
Wednesday 20 June 2018  15:04:04 +0000 (0:00:00.391)       0:06:59.001 ******** 
changed: [40.121.60.103] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-06-20 15:04:04.145804', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> off', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.004311', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> off'], 'failed_when_result': False, u'start': u'2018-06-20 15:04:04.141493', '_ansible_ignore_errors': None, 'failed': False})
changed: [40.121.60.103] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-06-20 15:04:04.313342', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> off', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.003061', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> off'], 'failed_when_result': False, u'start': u'2018-06-20 15:04:04.310281', '_ansible_ignore_errors': None, 'failed': False})

TASK [openshift_node : Set seboolean to allow gluster storage plugin access from containers (python 3)] ***
Wednesday 20 June 2018  15:04:06 +0000 (0:00:01.770)       0:07:00.771 ******** 
skipping: [40.121.60.103] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-06-20 15:04:04.145804', '_ansible_no_log': False, u'stdout': u'virt_use_fusefs --> off', u'cmd': [u'getsebool', u'virt_use_fusefs'], u'rc': 0, 'item': u'virt_use_fusefs', u'delta': u'0:00:00.004311', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_use_fusefs --> off'], 'failed_when_result': False, u'start': u'2018-06-20 15:04:04.141493', '_ansible_ignore_errors': None, 'failed': False}) 
skipping: [40.121.60.103] => (item={'_ansible_parsed': True, 'stderr_lines': [], '_ansible_item_result': True, u'end': u'2018-06-20 15:04:04.313342', '_ansible_no_log': False, u'stdout': u'virt_sandbox_use_fusefs --> off', u'cmd': [u'getsebool', u'virt_sandbox_use_fusefs'], u'rc': 0, 'item': u'virt_sandbox_use_fusefs', u'delta': u'0:00:00.003061', u'stderr': u'', u'changed': False, u'invocation': {u'module_args': {u'creates': None, u'executable': None, u'_uses_shell': False, u'_raw_params': u'getsebool virt_sandbox_use_fusefs', u'removes': None, u'warn': True, u'chdir': None, u'stdin': None}}, 'stdout_lines': [u'virt_sandbox_use_fusefs --> off'], 'failed_when_result': False, u'start': u'2018-06-20 15:04:04.310281', '_ansible_ignore_errors': None, 'failed': False}) 

TASK [openshift_node : Install Ceph storage plugin dependencies] ***************
Wednesday 20 June 2018  15:04:06 +0000 (0:00:00.079)       0:07:00.850 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Install iSCSI storage plugin dependencies] **************
Wednesday 20 June 2018  15:04:22 +0000 (0:00:16.335)       0:07:17.186 ******** 
changed: [40.121.60.103] => (item=iscsi-initiator-utils)
changed: [40.121.60.103] => (item=device-mapper-multipath)

TASK [openshift_node : restart services] ***************************************
Wednesday 20 June 2018  15:04:28 +0000 (0:00:06.286)       0:07:23.473 ******** 
changed: [40.121.60.103] => (item=multipathd)
changed: [40.121.60.103] => (item=rpcbind)
changed: [40.121.60.103] => (item=iscsid)

TASK [openshift_node : Template multipath configuration] ***********************
Wednesday 20 June 2018  15:04:29 +0000 (0:00:00.835)       0:07:24.308 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Enable and start multipath] *****************************
Wednesday 20 June 2018  15:04:30 +0000 (0:00:00.586)       0:07:24.895 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Create OpenvSwitch service.d directory] *****************
Wednesday 20 June 2018  15:04:30 +0000 (0:00:00.496)       0:07:25.391 ******** 
changed: [40.121.60.103]

TASK [openshift_node : Install OpenvSwitch service OOM fix] ********************
Wednesday 20 June 2018  15:04:31 +0000 (0:00:00.288)       0:07:25.680 ******** 
changed: [40.121.60.103]

TASK [tuned : Check for tuned package] *****************************************
Wednesday 20 June 2018  15:04:31 +0000 (0:00:00.683)       0:07:26.364 ******** 
ok: [40.121.60.103]

TASK [tuned : Set tuned OpenShift variables] ***********************************
Wednesday 20 June 2018  15:04:32 +0000 (0:00:00.310)       0:07:26.674 ******** 
ok: [40.121.60.103]

TASK [tuned : Ensure directory structure exists] *******************************
Wednesday 20 June 2018  15:04:32 +0000 (0:00:00.121)       0:07:26.795 ******** 
changed: [40.121.60.103] => (item={'group': u'root', 'uid': 0, 'ctime': 1529506423.4496388, 'state': 'directory', 'gid': 0, 'mode': '02777', 'mtime': 1529504513.0, 'owner': 'root', 'path': u'openshift', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 24})
changed: [40.121.60.103] => (item={'group': u'root', 'uid': 0, 'ctime': 1529506423.4496388, 'state': 'directory', 'gid': 0, 'mode': '02777', 'mtime': 1528290887.0, 'owner': 'root', 'path': u'openshift-control-plane', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 24})
changed: [40.121.60.103] => (item={'group': u'root', 'uid': 0, 'ctime': 1529506423.4496388, 'state': 'directory', 'gid': 0, 'mode': '02777', 'mtime': 1528290887.0, 'owner': 'root', 'path': u'openshift-node', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 24})
skipping: [40.121.60.103] => (item={'src': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'ctime': 1529506423.374639, 'state': 'file', 'gid': 0, 'mode': '0666', 'mtime': 1529504513.0, 'owner': 'root', 'path': u'recommend.conf', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 268}) 
skipping: [40.121.60.103] => (item={'src': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'ctime': 1529506423.374639, 'state': 'file', 'gid': 0, 'mode': '0666', 'mtime': 1529504513.0, 'owner': 'root', 'path': u'openshift/tuned.conf', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 593}) 
skipping: [40.121.60.103] => (item={'src': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'ctime': 1529506423.374639, 'state': 'file', 'gid': 0, 'mode': '02666', 'mtime': 1528290887.0, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 744}) 
skipping: [40.121.60.103] => (item={'src': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'ctime': 1529506423.374639, 'state': 'file', 'gid': 0, 'mode': '02666', 'mtime': 1528290887.0, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 135}) 

TASK [tuned : Ensure files are populated from templates] ***********************
Wednesday 20 June 2018  15:04:32 +0000 (0:00:00.618)       0:07:27.414 ******** 
skipping: [40.121.60.103] => (item={'group': u'root', 'uid': 0, 'ctime': 1529506423.4496388, 'state': 'directory', 'gid': 0, 'mode': '02777', 'mtime': 1529504513.0, 'owner': 'root', 'path': u'openshift', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 24}) 
skipping: [40.121.60.103] => (item={'group': u'root', 'uid': 0, 'ctime': 1529506423.4496388, 'state': 'directory', 'gid': 0, 'mode': '02777', 'mtime': 1528290887.0, 'owner': 'root', 'path': u'openshift-control-plane', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 24}) 
skipping: [40.121.60.103] => (item={'group': u'root', 'uid': 0, 'ctime': 1529506423.4496388, 'state': 'directory', 'gid': 0, 'mode': '02777', 'mtime': 1528290887.0, 'owner': 'root', 'path': u'openshift-node', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 24}) 
changed: [40.121.60.103] => (item={'src': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates/recommend.conf', 'group': u'root', 'uid': 0, 'ctime': 1529506423.374639, 'state': 'file', 'gid': 0, 'mode': '0666', 'mtime': 1529504513.0, 'owner': 'root', 'path': u'recommend.conf', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 268})
changed: [40.121.60.103] => (item={'src': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates/openshift/tuned.conf', 'group': u'root', 'uid': 0, 'ctime': 1529506423.374639, 'state': 'file', 'gid': 0, 'mode': '0666', 'mtime': 1529504513.0, 'owner': 'root', 'path': u'openshift/tuned.conf', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 593})
changed: [40.121.60.103] => (item={'src': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates/openshift-control-plane/tuned.conf', 'group': u'root', 'uid': 0, 'ctime': 1529506423.374639, 'state': 'file', 'gid': 0, 'mode': '02666', 'mtime': 1528290887.0, 'owner': 'root', 'path': u'openshift-control-plane/tuned.conf', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 744})
changed: [40.121.60.103] => (item={'src': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates/openshift-node/tuned.conf', 'group': u'root', 'uid': 0, 'ctime': 1529506423.374639, 'state': 'file', 'gid': 0, 'mode': '02666', 'mtime': 1528290887.0, 'owner': 'root', 'path': u'openshift-node/tuned.conf', 'root': u'/usr/share/ansible/openshift-ansible/roles/tuned/templates', 'size': 135})

TASK [tuned : Make tuned use the recommended tuned profile on restart] *********
Wednesday 20 June 2018  15:04:35 +0000 (0:00:02.397)       0:07:29.811 ******** 
changed: [40.121.60.103] => (item=/etc/tuned/active_profile)
changed: [40.121.60.103] => (item=/etc/tuned/profile_mode)

TASK [tuned : Restart tuned service] *******************************************
Wednesday 20 June 2018  15:04:35 +0000 (0:00:00.465)       0:07:30.277 ******** 
changed: [40.121.60.103]

TASK [nickhammond.logrotate : nickhammond.logrotate | Install logrotate] *******
Wednesday 20 June 2018  15:04:37 +0000 (0:00:01.566)       0:07:31.843 ******** 
ok: [40.121.60.103]

TASK [nickhammond.logrotate : nickhammond.logrotate | Setup logrotate.d scripts] ***
Wednesday 20 June 2018  15:04:37 +0000 (0:00:00.507)       0:07:32.351 ******** 

RUNNING HANDLER [openshift_node : restart openvswitch] *************************
Wednesday 20 June 2018  15:04:37 +0000 (0:00:00.018)       0:07:32.369 ******** 
skipping: [40.121.60.103]

RUNNING HANDLER [openshift_node : restart node] ********************************
Wednesday 20 June 2018  15:04:37 +0000 (0:00:00.033)       0:07:32.403 ******** 
skipping: [40.121.60.103]

RUNNING HANDLER [openshift_node : reload systemd units] ************************
Wednesday 20 June 2018  15:04:37 +0000 (0:00:00.033)       0:07:32.437 ******** 
skipping: [40.121.60.103]

PLAY [node bootstrap config] ***************************************************

TASK [openshift_node : install needed rpm(s)] **********************************
Wednesday 20 June 2018  15:04:37 +0000 (0:00:00.054)       0:07:32.491 ******** 
changed: [40.121.60.103] => (item=origin-master)
ok: [40.121.60.103] => (item=origin-node)
ok: [40.121.60.103] => (item=origin-docker-excluder)
changed: [40.121.60.103] => (item=ansible)
ok: [40.121.60.103] => (item=openvswitch)
ok: [40.121.60.103] => (item=docker)
changed: [40.121.60.103] => (item=etcd)
changed: [40.121.60.103] => (item=haproxy)
ok: [40.121.60.103] => (item=dnsmasq)
changed: [40.121.60.103] => (item=ntp)
ok: [40.121.60.103] => (item=logrotate)
changed: [40.121.60.103] => (item=httpd-tools)
changed: [40.121.60.103] => (item=bind)
ok: [40.121.60.103] => (item=firewalld)
ok: [40.121.60.103] => (item=libselinux-python)
ok: [40.121.60.103] => (item=conntrack-tools)
ok: [40.121.60.103] => (item=openssl)
ok: [40.121.60.103] => (item=iproute)
ok: [40.121.60.103] => (item=python-dbus)
ok: [40.121.60.103] => (item=PyYAML)
ok: [40.121.60.103] => (item=yum-utils)
ok: [40.121.60.103] => (item=glusterfs-fuse)
ok: [40.121.60.103] => (item=device-mapper-multipath)
ok: [40.121.60.103] => (item=nfs-utils)
changed: [40.121.60.103] => (item=flannel)
ok: [40.121.60.103] => (item=bash-completion)
changed: [40.121.60.103] => (item=cockpit-ws)
changed: [40.121.60.103] => (item=cockpit-system)
ok: [40.121.60.103] => (item=cockpit-bridge)
changed: [40.121.60.103] => (item=cockpit-docker)
ok: [40.121.60.103] => (item=iscsi-initiator-utils)
ok: [40.121.60.103] => (item=ceph-common)

TASK [openshift_node : install SDN package] ************************************
Wednesday 20 June 2018  15:05:45 +0000 (0:01:08.041)       0:08:40.533 ******** 
[DEPRECATION WARNING]: Using tests as filters is deprecated. Instead of using 
`result|success`, use `result is success`. This feature will be removed in 
version 2.9. Deprecation warnings can be disabled by setting 
deprecation_warnings=False in ansible.cfg.
ok: [40.121.60.103]

TASK [openshift_node : create the directory for node] **************************
Wednesday 20 June 2018  15:05:46 +0000 (0:00:00.500)       0:08:41.033 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : laydown systemd override] *******************************
Wednesday 20 June 2018  15:05:46 +0000 (0:00:00.039)       0:08:41.072 ******** 
skipping: [40.121.60.103]

TASK [openshift_node : update the sysconfig to have necessary variables] *******
Wednesday 20 June 2018  15:05:46 +0000 (0:00:00.040)       0:08:41.113 ******** 
changed: [40.121.60.103] => (item={u'regexp': u'^KUBECONFIG=.*', u'line': u'KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig'})
ok: [40.121.60.103] => (item={u'regexp': u'^CONFIG_FILE=.*', u'line': u'CONFIG_FILE=/etc/origin/node/node-config.yaml'})
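
A minimal shell sketch of what this lineinfile task effectively does on the node; the sysconfig path /etc/sysconfig/origin-node is an assumption (the log does not print it), and the values come from the two items above.
# Assumed env file path; the regexp/line pairs mirror the task items logged above.
sudo sed -i \
  -e 's|^KUBECONFIG=.*|KUBECONFIG=/etc/origin/node/bootstrap.kubeconfig|' \
  -e 's|^CONFIG_FILE=.*|CONFIG_FILE=/etc/origin/node/node-config.yaml|' \
  /etc/sysconfig/origin-node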

TASK [openshift_node : Configure AWS Cloud Provider Settings] ******************
Wednesday 20 June 2018  15:05:46 +0000 (0:00:00.426)       0:08:41.539 ******** 
skipping: [40.121.60.103] => (item=None) 
skipping: [40.121.60.103] => (item=None) 
skipping: [40.121.60.103]

TASK [openshift_node : disable origin-node] ************************************
Wednesday 20 June 2018  15:05:46 +0000 (0:00:00.057)       0:08:41.597 ******** 
ok: [40.121.60.103] => (item=origin-node.service)

TASK [openshift_node : Check for RPM generated config marker file .config_managed] ***
Wednesday 20 June 2018  15:05:47 +0000 (0:00:00.255)       0:08:41.852 ******** 
ok: [40.121.60.103]

TASK [openshift_node : create directories for bootstrapping] *******************
Wednesday 20 June 2018  15:05:47 +0000 (0:00:00.249)       0:08:42.101 ******** 
changed: [40.121.60.103] => (item=/root/openshift_bootstrap)
changed: [40.121.60.103] => (item=/var/lib/origin/openshift.local.config)
changed: [40.121.60.103] => (item=/var/lib/origin/openshift.local.config/node)
changed: [40.121.60.103] => (item=/etc/docker/certs.d/docker-registry.default.svc:5000)

TASK [openshift_node : laydown the bootstrap.yml file for on boot configuration] ***
Wednesday 20 June 2018  15:05:48 +0000 (0:00:00.713)       0:08:42.814 ******** 
changed: [40.121.60.103]

TASK [openshift_node : symlink master ca for docker-registry] ******************
Wednesday 20 June 2018  15:05:48 +0000 (0:00:00.663)       0:08:43.478 ******** 
changed: [40.121.60.103] => (item=/etc/origin/node/node-client-ca.crt)
 [WARNING]: Cannot set fs attributes on a non-existent symlink target. follow
should be set to False to avoid this.

TASK [openshift_node : Remove RPM generated config files if present] ***********
Wednesday 20 June 2018  15:05:49 +0000 (0:00:00.214)       0:08:43.692 ******** 
changed: [40.121.60.103] => (item=master)
changed: [40.121.60.103] => (item=.config_managed)

TASK [openshift_node : find all files in /etc/origin/node so we can remove them] ***
Wednesday 20 June 2018  15:05:49 +0000 (0:00:00.388)       0:08:44.081 ******** 
ok: [40.121.60.103]

TASK [openshift_node : Remove everything except the resolv.conf required for node] ***
Wednesday 20 June 2018  15:05:49 +0000 (0:00:00.330)       0:08:44.412 ******** 
skipping: [40.121.60.103] => (item={u'uid': 0, u'woth': False, u'mtime': 1529506944.5158784, u'inode': 16888072, u'isgid': False, u'size': 63, u'roth': True, u'isuid': False, u'isreg': True, u'pw_name': u'root', u'gid': 0, u'ischr': False, u'wusr': True, u'xoth': False, u'rusr': True, u'nlink': 1, u'issock': False, u'rgrp': True, u'gr_name': u'root', u'path': u'/etc/origin/node/node-dnsmasq.conf', u'xusr': False, u'atime': 1529506944.7268698, u'isdir': False, u'ctime': 1529506944.7308695, u'wgrp': False, u'xgrp': False, u'dev': 2049, u'isblk': False, u'isfifo': False, u'mode': u'0644', u'islnk': False}) 
skipping: [40.121.60.103] => (item={u'uid': 0, u'woth': False, u'mtime': 1529507034.3643477, u'inode': 8782093, u'isgid': False, u'size': 25, u'roth': False, u'isuid': False, u'isreg': True, u'pw_name': u'root', u'gid': 0, u'ischr': False, u'wusr': True, u'xoth': False, u'rusr': True, u'nlink': 1, u'issock': False, u'rgrp': False, u'gr_name': u'root', u'path': u'/etc/origin/node/resolv.conf', u'xusr': False, u'atime': 1529507034.3633478, u'isdir': False, u'ctime': 1529507034.3643477, u'wgrp': False, u'xgrp': False, u'dev': 2049, u'isblk': False, u'isfifo': False, u'mode': u'0600', u'islnk': False}) 
changed: [40.121.60.103] => (item={u'uid': 0, u'woth': False, u'mtime': 1529507038.097364, u'inode': 10628322, u'isgid': False, u'size': 1280, u'roth': False, u'isuid': False, u'isreg': True, u'pw_name': u'root', u'gid': 0, u'ischr': False, u'wusr': True, u'xoth': False, u'rusr': True, u'nlink': 1, u'issock': False, u'rgrp': False, u'gr_name': u'root', u'path': u'/etc/origin/node/node-config.yaml', u'xusr': False, u'atime': 1529507076.5215302, u'isdir': False, u'ctime': 1529507038.2813647, u'wgrp': False, u'xgrp': False, u'dev': 2049, u'isblk': False, u'isfifo': False, u'mode': u'0600', u'islnk': False})

PLAY [Re-enable excluder if it was previously enabled] *************************

TASK [openshift_excluder : Detecting Atomic Host Operating System] *************
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.286)       0:08:44.699 ******** 
ok: [40.121.60.103]

TASK [openshift_excluder : Debug r_openshift_excluder_enable_docker_excluder] ***
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.209)       0:08:44.908 ******** 
ok: [40.121.60.103] => {
    "r_openshift_excluder_enable_docker_excluder": true
}

TASK [openshift_excluder : Debug r_openshift_excluder_enable_openshift_excluder] ***
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.066)       0:08:44.975 ******** 
ok: [40.121.60.103] => {
    "r_openshift_excluder_enable_openshift_excluder": true
}

TASK [openshift_excluder : Fail if invalid openshift_excluder_action provided] ***
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.065)       0:08:45.041 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Fail if r_openshift_excluder_upgrade_target is not defined] ***
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.037)       0:08:45.078 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Include main action task file] **********************
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.037)       0:08:45.116 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/enable.yml for 40.121.60.103

TASK [openshift_excluder : Install excluders] **********************************
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.055)       0:08:45.171 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/install.yml for 40.121.60.103

TASK [openshift_excluder : Install docker excluder - yum] **********************
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.058)       0:08:45.229 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Install docker excluder - dnf] **********************
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.042)       0:08:45.272 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Install openshift excluder - yum] *******************
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.040)       0:08:45.313 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Install openshift excluder - dnf] *******************
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.039)       0:08:45.353 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : set_fact] *******************************************
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.040)       0:08:45.393 ******** 
skipping: [40.121.60.103]

TASK [openshift_excluder : Enable excluders] ***********************************
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.039)       0:08:45.432 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_excluder/tasks/exclude.yml for 40.121.60.103

TASK [openshift_excluder : Check for docker-excluder] **************************
Wednesday 20 June 2018  15:05:50 +0000 (0:00:00.057)       0:08:45.489 ******** 
ok: [40.121.60.103]

TASK [openshift_excluder : Enable docker excluder] *****************************
Wednesday 20 June 2018  15:05:51 +0000 (0:00:00.226)       0:08:45.716 ******** 
changed: [40.121.60.103]
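
A sketch of how the excluder can be inspected or toggled by hand; the script path and verbs are assumptions about how the origin-docker-excluder package is usually laid out, not values printed by this run.
# Assumed helper script shipped by origin-docker-excluder.
sudo /sbin/origin-docker-excluder status      # show whether docker is currently excluded from yum
sudo /sbin/origin-docker-excluder exclude     # what "Enable docker excluder" amounts to
sudo /sbin/origin-docker-excluder unexclude   # lift the exclusion again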

TASK [openshift_excluder : Check for openshift excluder] ***********************
Wednesday 20 June 2018  15:05:51 +0000 (0:00:00.273)       0:08:45.989 ******** 
ok: [40.121.60.103]

TASK [openshift_excluder : Enable openshift excluder] **************************
Wednesday 20 June 2018  15:05:51 +0000 (0:00:00.223)       0:08:46.213 ******** 
skipping: [40.121.60.103]

PLAY [Configure nodes] *********************************************************

TASK [Remove any ansible facts created during AMI creation] ********************
Wednesday 20 June 2018  15:05:51 +0000 (0:00:00.050)       0:08:46.264 ******** 
changed: [40.121.60.103] => (item=openshift.fact)
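
Equivalent manual cleanup, as a sketch; /etc/ansible/facts.d is Ansible's default local-facts directory and openshift.fact is the item shown above.
# Drop the cached local fact left over from image creation (default facts.d path assumed).
sudo rm -f /etc/ansible/facts.d/openshift.fact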

PLAY [nodes] *******************************************************************

TASK [openshift_master_facts : Verify required variables are set] **************
Wednesday 20 June 2018  15:05:51 +0000 (0:00:00.256)       0:08:46.520 ******** 
skipping: [40.121.60.103]

TASK [openshift_master_facts : Set g_metrics_hostname] *************************
Wednesday 20 June 2018  15:05:51 +0000 (0:00:00.035)       0:08:46.556 ******** 
ok: [40.121.60.103]

TASK [openshift_master_facts : set_fact] ***************************************
Wednesday 20 June 2018  15:05:51 +0000 (0:00:00.063)       0:08:46.620 ******** 
skipping: [40.121.60.103]

TASK [openshift_master_facts : Set master facts] *******************************
Wednesday 20 June 2018  15:05:52 +0000 (0:00:00.035)       0:08:46.655 ******** 
changed: [40.121.60.103]

TASK [openshift_master_facts : Determine if scheduler config present] **********
Wednesday 20 June 2018  15:05:53 +0000 (0:00:01.188)       0:08:47.843 ******** 
ok: [40.121.60.103]

TASK [openshift_master_facts : Set Default scheduler predicates and priorities] ***
Wednesday 20 June 2018  15:05:53 +0000 (0:00:00.212)       0:08:48.056 ******** 
ok: [40.121.60.103]

TASK [openshift_master_facts : Retrieve current scheduler config] **************
Wednesday 20 June 2018  15:05:53 +0000 (0:00:00.071)       0:08:48.127 ******** 
skipping: [40.121.60.103]

TASK [openshift_master_facts : Set openshift_master_scheduler_current_config] ***
Wednesday 20 June 2018  15:05:53 +0000 (0:00:00.035)       0:08:48.163 ******** 
skipping: [40.121.60.103]

TASK [openshift_master_facts : Test if scheduler config is readable] ***********
Wednesday 20 June 2018  15:05:53 +0000 (0:00:00.038)       0:08:48.201 ******** 
skipping: [40.121.60.103]

TASK [openshift_master_facts : Set current scheduler predicates and priorities] ***
Wednesday 20 June 2018  15:05:53 +0000 (0:00:00.035)       0:08:48.237 ******** 
skipping: [40.121.60.103]

TASK [openshift_master : include push_via_dns.yml tasks] ***********************
Wednesday 20 June 2018  15:05:53 +0000 (0:00:00.035)       0:08:48.272 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/push_via_dns.yml for 40.121.60.103

TASK [openshift_master : check whether our docker-registry setting exists in the env file] ***
Wednesday 20 June 2018  15:05:53 +0000 (0:00:00.051)       0:08:48.324 ******** 
ok: [40.121.60.103]

TASK [openshift_master : set_fact] *********************************************
Wednesday 20 June 2018  15:05:53 +0000 (0:00:00.223)       0:08:48.548 ******** 
ok: [40.121.60.103]

TASK [openshift_master : Set HA Service Info for containerized installs] *******
Wednesday 20 June 2018  15:05:53 +0000 (0:00:00.065)       0:08:48.613 ******** 
skipping: [40.121.60.103]

TASK [openshift_master : include registry_auth tasks] **************************
Wednesday 20 June 2018  15:05:53 +0000 (0:00:00.035)       0:08:48.648 ******** 
included: /usr/share/ansible/openshift-ansible/roles/openshift_master/tasks/registry_auth.yml for 40.121.60.103

TASK [openshift_master : Check for credentials file for registry auth] *********
Wednesday 20 June 2018  15:05:54 +0000 (0:00:00.057)       0:08:48.706 ******** 
skipping: [40.121.60.103]

TASK [openshift_master : Create credentials for registry auth] *****************
Wednesday 20 June 2018  15:05:54 +0000 (0:00:00.036)       0:08:48.743 ******** 
skipping: [40.121.60.103]

TASK [openshift_master : Create credentials for registry auth (alternative)] ***
Wednesday 20 June 2018  15:05:54 +0000 (0:00:00.040)       0:08:48.783 ******** 
skipping: [40.121.60.103]

TASK [openshift_master : Setup ro mount of /root/.docker for containerized hosts] ***
Wednesday 20 June 2018  15:05:54 +0000 (0:00:00.039)       0:08:48.822 ******** 
skipping: [40.121.60.103]

TASK [openshift_master : Disable the legacy master service if it exists] *******
Wednesday 20 June 2018  15:05:54 +0000 (0:00:00.034)       0:08:48.856 ******** 
changed: [40.121.60.103]

TASK [openshift_master : Remove the legacy master service if it exists] ********
Wednesday 20 June 2018  15:05:54 +0000 (0:00:00.323)       0:08:49.180 ******** 
changed: [40.121.60.103]

TASK [openshift_master : Pre-pull master image] ********************************
Wednesday 20 June 2018  15:05:54 +0000 (0:00:00.290)       0:08:49.471 ******** 
skipping: [40.121.60.103]

TASK [openshift_master : Create the ha systemd unit files] *********************
Wednesday 20 June 2018  15:05:54 +0000 (0:00:00.034)       0:08:49.506 ******** 
changed: [40.121.60.103] => (item=api)
changed: [40.121.60.103] => (item=controllers)

TASK [openshift_master : command] **********************************************
Wednesday 20 June 2018  15:05:56 +0000 (0:00:01.240)       0:08:50.746 ******** 
changed: [40.121.60.103]

TASK [openshift_master : enable master services] *******************************
Wednesday 20 June 2018  15:05:56 +0000 (0:00:00.278)       0:08:51.024 ******** 
ok: [40.121.60.103] => (item=api)
ok: [40.121.60.103] => (item=controllers)
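
A sketch of the equivalent systemctl call; the unit names origin-master-api and origin-master-controllers are assumptions inferred from the api/controllers items used by the HA unit-file task above.
# Assumed HA master unit names.
sudo systemctl enable origin-master-api.service origin-master-controllers.service
sudo systemctl status origin-master-api.service --no-pager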

TASK [openshift_master : Preserve Master API Proxy Config options] *************
Wednesday 20 June 2018  15:05:56 +0000 (0:00:00.504)       0:08:51.529 ******** 
ok: [40.121.60.103]

TASK [openshift_master : Preserve Master API AWS options] **********************
Wednesday 20 June 2018  15:05:57 +0000 (0:00:00.275)       0:08:51.804 ******** 
ok: [40.121.60.103]

TASK [openshift_master : Create the master api service env file] ***************
Wednesday 20 June 2018  15:05:57 +0000 (0:00:00.292)       0:08:52.097 ******** 
changed: [40.121.60.103]

TASK [openshift_master : Restore Master API Proxy Config Options] **************
Wednesday 20 June 2018  15:05:58 +0000 (0:00:00.667)       0:08:52.764 ******** 

TASK [openshift_master : Restore Master API AWS Options] ***********************
Wednesday 20 June 2018  15:05:58 +0000 (0:00:00.034)       0:08:52.799 ******** 
skipping: [40.121.60.103]

TASK [openshift_master : Preserve Master Controllers Proxy Config options] *****
Wednesday 20 June 2018  15:05:58 +0000 (0:00:00.033)       0:08:52.833 ******** 
ok: [40.121.60.103]

TASK [openshift_master : Preserve Master Controllers AWS options] **************
Wednesday 20 June 2018  15:05:58 +0000 (0:00:00.270)       0:08:53.104 ******** 
ok: [40.121.60.103]

TASK [openshift_master : Create the master controllers service env file] *******
Wednesday 20 June 2018  15:05:58 +0000 (0:00:00.285)       0:08:53.389 ******** 
changed: [40.121.60.103]

TASK [openshift_master : Restore Master Controllers Proxy Config Options] ******
Wednesday 20 June 2018  15:05:59 +0000 (0:00:00.697)       0:08:54.087 ******** 

TASK [openshift_master : Restore Master Controllers AWS Options] ***************
Wednesday 20 June 2018  15:05:59 +0000 (0:00:00.034)       0:08:54.122 ******** 
skipping: [40.121.60.103]

RUNNING HANDLER [openshift_master : restart master api] ************************
Wednesday 20 June 2018  15:05:59 +0000 (0:00:00.019)       0:08:54.141 ******** 
skipping: [40.121.60.103]

RUNNING HANDLER [openshift_master : restart master controllers] ****************
Wednesday 20 June 2018  15:05:59 +0000 (0:00:00.035)       0:08:54.177 ******** 
skipping: [40.121.60.103]

TASK [remove yum client certificate] *******************************************
Wednesday 20 June 2018  15:05:59 +0000 (0:00:00.050)       0:08:54.228 ******** 
skipping: [40.121.60.103] => (item={u'name': u'client-cert.pem'}) 
skipping: [40.121.60.103] => (item={u'name': u'client-key.pem'}) 

TASK [remove yum repositories] *************************************************
Wednesday 20 June 2018  15:05:59 +0000 (0:00:00.042)       0:08:54.270 ******** 
changed: [40.121.60.103] => (item={u'gpgkey': u'file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-SIG-PaaS', u'enabled': True, u'name': u'openshift-origin39', u'baseurl': u'http://mirror.centos.org/centos/7/paas/x86_64/openshift-origin39/'})
changed: [40.121.60.103] => (item={'gpgcheck': False, 'enabled': True, 'name': u'install_repo', 'baseurl': u'https://storage.googleapis.com/origin-ci-test/pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39_test/6/artifacts/rpms'})

TASK [record installed rpms] ***************************************************
Wednesday 20 June 2018  15:06:00 +0000 (0:00:00.429)       0:08:54.699 ******** 
ok: [40.121.60.103]

TASK [persist oreg_url] ********************************************************
Wednesday 20 June 2018  15:06:01 +0000 (0:00:01.233)       0:08:55.933 ******** 
changed: [40.121.60.103]

TASK [run waagent deprovision] *************************************************
Wednesday 20 June 2018  15:06:01 +0000 (0:00:00.615)       0:08:56.548 ******** 
changed: [40.121.60.103]

PLAY [localhost] ***************************************************************

TASK [set_fact] ****************************************************************
Wednesday 20 June 2018  15:06:03 +0000 (0:00:01.145)       0:08:57.694 ******** 
ok: [localhost]

TASK [deallocate vm] ***********************************************************
Wednesday 20 June 2018  15:06:03 +0000 (0:00:00.083)       0:08:57.777 ******** 
changed: [localhost]

TASK [generalize vm] ***********************************************************
Wednesday 20 June 2018  15:31:11 +0000 (0:25:08.201)       0:34:05.979 ******** 
changed: [localhost]

TASK [get vm details] **********************************************************
Wednesday 20 June 2018  15:31:13 +0000 (0:00:01.770)       0:34:07.749 ******** 
changed: [localhost]

TASK [create image resource group] *********************************************
Wednesday 20 June 2018  15:31:15 +0000 (0:00:02.197)       0:34:09.947 ******** 
ok: [localhost]

TASK [create image] ************************************************************
Wednesday 20 June 2018  15:31:16 +0000 (0:00:00.875)       0:34:10.823 ******** 
changed: [localhost]
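
The deallocate/generalize/create-image sequence above maps onto the Azure CLI roughly as follows; the resource group, VM, and image names are placeholders, not values from this job.
# Hedged az CLI sketch of the image-capture flow.
az vm deallocate --resource-group <build-rg> --name <build-vm>
az vm generalize --resource-group <build-rg> --name <build-vm>
az group create  --name <image-rg> --location eastus
az image create  --resource-group <image-rg> --name centos7-origin \
  --source <full resource ID of build-vm>   # full ID needed when the VM lives in another group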

TASK [get input image tags] ****************************************************
Wednesday 20 June 2018  15:31:50 +0000 (0:00:34.158)       0:34:44.981 ******** 
changed: [localhost]

TASK [remove valid tag from input image tags] **********************************
Wednesday 20 June 2018  15:31:52 +0000 (0:00:02.251)       0:34:47.232 ******** 
ok: [localhost] => (item={'value': u'3.10.0-693.el7.x86_64', 'key': u'kernel'})
skipping: [localhost] => (item={'value': u'true', 'key': u'valid'}) 
ok: [localhost] => (item={'value': u'centos7-root-201804191712', 'key': u'root_image'})

TASK [calculate final tags] ****************************************************
Wednesday 20 June 2018  15:31:52 +0000 (0:00:00.066)       0:34:47.299 ******** 
ok: [localhost]

TASK [tag image] ***************************************************************
Wednesday 20 June 2018  15:31:52 +0000 (0:00:00.081)       0:34:47.380 ******** 
changed: [localhost]

TASK [get vm details] **********************************************************
Wednesday 20 June 2018  15:32:27 +0000 (0:00:34.396)       0:35:21.777 ******** 
skipping: [localhost]

TASK [get storage account key] *************************************************
Wednesday 20 June 2018  15:32:27 +0000 (0:00:00.029)       0:35:21.807 ******** 
skipping: [localhost]

TASK [get disk sas url] ********************************************************
Wednesday 20 June 2018  15:32:27 +0000 (0:00:00.030)       0:35:21.837 ******** 
skipping: [localhost]

TASK [start copy] **************************************************************
Wednesday 20 June 2018  15:32:27 +0000 (0:00:00.029)       0:35:21.867 ******** 
skipping: [localhost]

TASK [get copy status] *********************************************************
Wednesday 20 June 2018  15:32:27 +0000 (0:00:00.030)       0:35:21.897 ******** 
skipping: [localhost]

TASK [revoke disk sas url] *****************************************************
Wednesday 20 June 2018  15:32:27 +0000 (0:00:00.030)       0:35:21.927 ******** 
skipping: [localhost]

PLAY RECAP *********************************************************************
40.121.60.103              : ok=180  changed=73   unreachable=0    failed=0   
localhost                  : ok=21   changed=12   unreachable=0    failed=0   
{}                         : ok=2    changed=0    unreachable=0    failed=0   


INSTALLER STATUS ***************************************************************
Initialization             : Complete (0:00:16)

Wednesday 20 June 2018  15:32:27 +0000 (0:00:00.024)       0:35:21.952 ******** 
=============================================================================== 
deallocate vm -------------------------------------------------------- 1508.20s
create vm ------------------------------------------------------------- 158.75s
openshift_node : Install Node package, sdn-ovs, conntrack packages ----- 79.21s
openshift_node : install needed rpm(s) --------------------------------- 68.04s
container_runtime : Install Docker ------------------------------------- 43.04s
tag image -------------------------------------------------------------- 34.40s
create image ----------------------------------------------------------- 34.16s
openshift_node : Install Ceph storage plugin dependencies -------------- 16.34s
create vnet ------------------------------------------------------------ 14.97s
Ensure openshift-ansible installer package deps are installed ---------- 11.85s
os_firewall : need to pause here, otherwise the iptables service starting can sometimes cause ssh to fail -- 10.14s
os_firewall : Wait 10 seconds after disabling firewalld ---------------- 10.06s
install centos-release-paas-common rpm ---------------------------------- 8.12s
openshift_node : Install iSCSI storage plugin dependencies -------------- 6.29s
Gathering Facts --------------------------------------------------------- 5.80s
os_firewall : Install iptables packages --------------------------------- 5.51s
os_update_latest : Update all packages ---------------------------------- 5.09s
create subnet ----------------------------------------------------------- 4.44s
openshift_node : Update journald setup ---------------------------------- 4.21s
openshift_node : Install GlusterFS storage plugin dependencies ---------- 4.12s
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: BUILD NODE IMAGE [00h 35m 26s] ##########
[workspace] $ /bin/bash /tmp/jenkins7108215145744869639.sh
########## STARTING STAGE: PROVISION TEST CLUSTER ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ mktemp
+ script=/tmp/tmp.BML5rmwpKj
+ cat
+ chmod +x /tmp/tmp.BML5rmwpKj
+ scp -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.BML5rmwpKj openshiftdevel:/tmp/tmp.BML5rmwpKj
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 14400 /tmp/tmp.BML5rmwpKj"'
+ cd /data/src/github.com/openshift/release
+ cd cluster/test-deploy/azure
+ TYPE=azure
+ ../../bin/ansible.sh ansible-playbook -e openshift_azure_resource_group_name=ci-test_pr_origin_extended_c_azure_39_test-6 -e openshift_azure_resource_location=eastus -e openshift_azure_input_image_ns=ci-test_pr_origin_extended_c_azure_39_test-6 -e openshift_azure_input_image_name=centos7-origin playbooks/azure/openshift-cluster/launch.yml

PLAY [localhost] ***************************************************************

TASK [create temporary directory] **********************************************
Wednesday 20 June 2018  15:32:30 +0000 (0:00:00.084)       0:00:00.084 ******** 
changed: [localhost]

TASK [download acs-engine] *****************************************************
Wednesday 20 June 2018  15:32:30 +0000 (0:00:00.242)       0:00:00.327 ******** 
changed: [localhost] => (item=http://acs-engine-build-acs-engine.svc.ci.openshift.org/acs-engine)
changed: [localhost] => (item=http://acs-engine-build-acs-engine.svc.ci.openshift.org/openshift.json)

TASK [make acs-engine executable] **********************************************
Wednesday 20 June 2018  15:32:32 +0000 (0:00:01.362)       0:00:01.689 ******** 
changed: [localhost]

TASK [configure acs-engine] ****************************************************
Wednesday 20 June 2018  15:32:32 +0000 (0:00:00.258)       0:00:01.947 ******** 
changed: [localhost]

TASK [run acs-engine deploy] ***************************************************
Wednesday 20 June 2018  15:32:32 +0000 (0:00:00.369)       0:00:02.317 ******** 
changed: [localhost]

TASK [delete temporary directory] **********************************************
Wednesday 20 June 2018  15:46:31 +0000 (0:13:58.266)       0:14:00.583 ******** 
changed: [localhost]

TASK [get azure deployment message] ********************************************
Wednesday 20 June 2018  15:46:31 +0000 (0:00:00.150)       0:14:00.734 ******** 
skipping: [localhost]

TASK [debug] *******************************************************************
Wednesday 20 June 2018  15:46:31 +0000 (0:00:00.020)       0:14:00.755 ******** 
skipping: [localhost]

TASK [assert] ******************************************************************
Wednesday 20 June 2018  15:46:31 +0000 (0:00:00.020)       0:14:00.775 ******** 
skipping: [localhost]

PLAY RECAP *********************************************************************
localhost                  : ok=6    changed=6    unreachable=0    failed=0   

Wednesday 20 June 2018  15:46:31 +0000 (0:00:00.016)       0:14:00.792 ******** 
=============================================================================== 
run acs-engine deploy ------------------------------------------------- 838.27s
download acs-engine ----------------------------------------------------- 1.36s
configure acs-engine ---------------------------------------------------- 0.37s
make acs-engine executable ---------------------------------------------- 0.26s
create temporary directory ---------------------------------------------- 0.24s
delete temporary directory ---------------------------------------------- 0.15s
get azure deployment message -------------------------------------------- 0.02s
debug ------------------------------------------------------------------- 0.02s
assert ------------------------------------------------------------------ 0.02s
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: PROVISION TEST CLUSTER [00h 14m 04s] ##########
[workspace] $ /bin/bash /tmp/jenkins6306815090636633611.sh
########## STARTING STAGE: RUN THE ACS-ENGINE E2E TEST ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ mktemp
+ script=/tmp/tmp.1TMdjQAMvb
+ cat
+ chmod +x /tmp/tmp.1TMdjQAMvb
+ scp -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.1TMdjQAMvb openshiftdevel:/tmp/tmp.1TMdjQAMvb
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 14400 /tmp/tmp.1TMdjQAMvb"'
+ cd /home/origin
+ set -a +o xtrace
+ cd /data/src/github.com/Azure/acs-engine
+ export GOPATH=/data
+ GOPATH=/data
+ export PATH=/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/origin/.local/bin:/home/origin/bin:/data/bin
+ PATH=/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/home/origin/.local/bin:/home/origin/bin:/data/bin
+ make build
go get -u github.com/Masterminds/glide
go get -u github.com/mitchellh/gox
go get github.com/go-bindata/go-bindata/...
go get -u github.com/alecthomas/gometalinter
gometalinter --install
Installing:
  deadcode
  dupl
  errcheck
  gas
  goconst
  gocyclo
  goimports
  golint
  gosimple
  gotype
  gotypex
  ineffassign
  interfacer
  lll
  maligned
  megacheck
  misspell
  nakedret
  safesql
  staticcheck
  structcheck
  unconvert
  unparam
  unused
  varcheck
go get -u github.com/onsi/ginkgo/ginkgo
go generate  -v `glide novendor | xargs go list`
cmd/cmd_suite_test.go
cmd/dcos-upgrade.go
cmd/dcos-upgrade_test.go
cmd/deploy.go
cmd/deploy_test.go
cmd/generate.go
cmd/generate_test.go
cmd/orchestrators.go
cmd/orchestrators_test.go
cmd/root.go
cmd/scale.go
cmd/scale_test.go
cmd/upgrade.go
cmd/upgrade_test.go
cmd/version.go
cmd/version_test.go
pkg/acsengine/addons.go
pkg/acsengine/azureconst.go
pkg/acsengine/const.go
pkg/acsengine/customfiles.go
pkg/acsengine/customfiles_test.go
pkg/acsengine/defaults-apiserver.go
pkg/acsengine/defaults-apiserver_test.go
pkg/acsengine/defaults-cloud-controller-manager.go
pkg/acsengine/defaults-controller-manager.go
pkg/acsengine/defaults-kubelet.go
pkg/acsengine/defaults-kubelet_test.go
pkg/acsengine/defaults-scheduler.go
pkg/acsengine/defaults-scheduler_test.go
pkg/acsengine/defaults.go
pkg/acsengine/defaults_test.go
pkg/acsengine/doc.go
pkg/acsengine/engine.go
pkg/acsengine/engine_test.go
pkg/acsengine/fileloader.go
templates.go
pkg/acsengine/filesaver.go
pkg/acsengine/k8s_versions.go
pkg/acsengine/k8s_versions_test.go
pkg/acsengine/output.go
pkg/acsengine/params.go
pkg/acsengine/params_k8s.go
pkg/acsengine/params_k8s_test.go
pkg/acsengine/pki.go
pkg/acsengine/pki_test.go
pkg/acsengine/ssh.go
pkg/acsengine/ssh_test.go
pkg/acsengine/template_generator.go
pkg/acsengine/tenantid.go
pkg/acsengine/types.go
pkg/acsengine/transform/apimodel_merger.go
pkg/acsengine/transform/apimodel_merger_test.go
pkg/acsengine/transform/json.go
pkg/acsengine/transform/transform.go
pkg/acsengine/transform/transform_test.go
pkg/api/apiloader.go
pkg/api/apiloader_test.go
pkg/api/const.go
pkg/api/converterfromagentpoolonlyapi.go
pkg/api/converterfromagentpoolonlyapi_test.go
pkg/api/converterfromapi.go
pkg/api/convertertoagentpoolonlyapi.go
pkg/api/convertertoagentpoolonlyapi_test.go
pkg/api/convertertoapi.go
pkg/api/convertertoapi_test.go
pkg/api/doc.go
pkg/api/orchestrators.go
pkg/api/orchestrators_test.go
pkg/api/strictjson.go
pkg/api/strictjson_test.go
pkg/api/types.go
pkg/api/types_test.go
pkg/api/agentPoolOnlyApi/v20170831/const.go
pkg/api/agentPoolOnlyApi/v20170831/doc.go
pkg/api/agentPoolOnlyApi/v20170831/merge.go
pkg/api/agentPoolOnlyApi/v20170831/types.go
pkg/api/agentPoolOnlyApi/v20170831/types_test.go
pkg/api/agentPoolOnlyApi/v20170831/validate.go
pkg/api/agentPoolOnlyApi/v20180331/apiloader_test.go
pkg/api/agentPoolOnlyApi/v20180331/const.go
pkg/api/agentPoolOnlyApi/v20180331/doc.go
pkg/api/agentPoolOnlyApi/v20180331/errors.go
pkg/api/agentPoolOnlyApi/v20180331/merge.go
pkg/api/agentPoolOnlyApi/v20180331/merge_test.go
pkg/api/agentPoolOnlyApi/v20180331/types.go
pkg/api/agentPoolOnlyApi/v20180331/types_test.go
pkg/api/agentPoolOnlyApi/v20180331/validate.go
pkg/api/agentPoolOnlyApi/v20180331/validate_test.go
pkg/api/agentPoolOnlyApi/vlabs/const.go
pkg/api/agentPoolOnlyApi/vlabs/doc.go
pkg/api/agentPoolOnlyApi/vlabs/types.go
pkg/api/agentPoolOnlyApi/vlabs/types_test.go
pkg/api/agentPoolOnlyApi/vlabs/validate.go
pkg/api/common/const.go
pkg/api/common/helper.go
pkg/api/common/helper_test.go
pkg/api/common/net.go
pkg/api/common/net_test.go
pkg/api/common/versions.go
pkg/api/common/versions_test.go
pkg/api/v20160330/const.go
pkg/api/v20160330/doc.go
pkg/api/v20160330/merge.go
pkg/api/v20160330/merge_test.go
pkg/api/v20160330/types.go
pkg/api/v20160330/types_test.go
pkg/api/v20160330/validate.go
pkg/api/v20160930/const.go
pkg/api/v20160930/doc.go
pkg/api/v20160930/merge.go
pkg/api/v20160930/merge_test.go
pkg/api/v20160930/types.go
pkg/api/v20160930/types_test.go
pkg/api/v20160930/validate.go
pkg/api/v20170131/const.go
pkg/api/v20170131/doc.go
pkg/api/v20170131/merge.go
pkg/api/v20170131/merge_test.go
pkg/api/v20170131/types.go
pkg/api/v20170131/types_test.go
pkg/api/v20170131/validate.go
pkg/api/v20170131/validate_test.go
pkg/api/v20170701/const.go
pkg/api/v20170701/doc.go
pkg/api/v20170701/merge.go
pkg/api/v20170701/merge_test.go
pkg/api/v20170701/types.go
pkg/api/v20170701/types_test.go
pkg/api/v20170701/validate.go
pkg/api/v20170701/validate_test.go
pkg/api/v20170930/const.go
pkg/api/v20170930/types.go
pkg/api/v20170930/validate.go
pkg/api/vlabs/const.go
pkg/api/vlabs/doc.go
pkg/api/vlabs/merge.go
pkg/api/vlabs/merge_test.go
pkg/api/vlabs/orchestratorversiontypes.go
pkg/api/vlabs/types.go
pkg/api/vlabs/types_test.go
pkg/api/vlabs/validate.go
pkg/api/vlabs/validate_test.go
pkg/armhelpers/azureclient.go
pkg/armhelpers/compute.go
pkg/armhelpers/deploymentError.go
pkg/armhelpers/deploymentError_test.go
pkg/armhelpers/deploymentOperations.go
pkg/armhelpers/deployments.go
pkg/armhelpers/disk.go
pkg/armhelpers/graph.go
pkg/armhelpers/groupsclient.go
pkg/armhelpers/interfaces.go
pkg/armhelpers/kubeclient.go
pkg/armhelpers/mockclients.go
pkg/armhelpers/network.go
pkg/armhelpers/providers.go
pkg/armhelpers/storage.go
pkg/armhelpers/utils/util.go
pkg/armhelpers/utils/util_test.go
pkg/helpers/helpers.go
pkg/helpers/helpers_test.go
pkg/i18n/const.go
pkg/i18n/i18n.go
pkg/i18n/i18n_test.go
pkg/i18n/resourceloader.go
translations.go
pkg/openshift/doc.go
pkg/openshift/examples_test.go
pkg/openshift/certgen/certgen.go
pkg/openshift/certgen/release39/config.go
pkg/openshift/certgen/release39/config_test.go
pkg/openshift/certgen/release39/defaults.go
pkg/openshift/certgen/release39/doc.go
pkg/openshift/certgen/release39/files.go
pkg/openshift/certgen/release39/kubeconfig.go
pkg/openshift/certgen/release39/tls.go
pkg/openshift/certgen/release39/templates/bindata.go
pkg/openshift/certgen/release39/templates/templates.go
bindata.go
pkg/openshift/certgen/unstable/config.go
pkg/openshift/certgen/unstable/config_test.go
pkg/openshift/certgen/unstable/defaults.go
pkg/openshift/certgen/unstable/doc.go
pkg/openshift/certgen/unstable/files.go
pkg/openshift/certgen/unstable/kubeconfig.go
pkg/openshift/certgen/unstable/tls.go
pkg/openshift/certgen/unstable/templates/bindata.go
pkg/openshift/certgen/unstable/templates/templates.go
bindata.go
pkg/openshift/filesystem/filesystem.go
pkg/operations/cordondrainvm.go
pkg/operations/cordondrainvm_test.go
pkg/operations/deletevm.go
pkg/operations/remote_ssh.go
pkg/operations/scaledownagentpool.go
pkg/operations/scaledownagentpool_test.go
pkg/operations/dcosupgrade/upgradecluster.go
pkg/operations/dcosupgrade/upgrader.go
pkg/operations/kubernetesupgrade/upgradeagentnode.go
pkg/operations/kubernetesupgrade/upgradecluster.go
pkg/operations/kubernetesupgrade/upgradecluster_test.go
pkg/operations/kubernetesupgrade/upgrademasternode.go
pkg/operations/kubernetesupgrade/upgrader.go
pkg/operations/kubernetesupgrade/upgradeworkflow.go
pkg/operations/kubernetesupgrade/v16upgrader.go
pkg/operations/kubernetesupgrade/v17upgrader.go
pkg/operations/kubernetesupgrade/v18upgrader.go
pkg/test/util.go
test/acs-engine-test/main.go
test/acs-engine-test/config/config.go
test/acs-engine-test/config/config_test.go
test/acs-engine-test/metrics/metrics.go
test/acs-engine-test/metrics/metrics_test.go
test/acs-engine-test/promote/promote.go
test/acs-engine-test/report/report.go
test/acs-engine-test/report/report_test.go
test/e2e/runner.go
test/e2e/azure/cli.go
test/e2e/azure/cli_test.go
test/e2e/config/config.go
test/e2e/config/config_test.go
test/e2e/dcos/dcos.go
test/e2e/dcos/dcos_suite_test.go
test/e2e/dcos/dcos_test.go
test/e2e/engine/cli.go
test/e2e/engine/template.go
test/e2e/kubernetes/config.go
test/e2e/kubernetes/kubernetes_suite_test.go
test/e2e/kubernetes/kubernetes_test.go
test/e2e/kubernetes/deployment/deployment.go
test/e2e/kubernetes/job/job.go
test/e2e/kubernetes/namespace/namespace.go
test/e2e/kubernetes/networkpolicy/networkpolicy.go
test/e2e/kubernetes/node/node.go
test/e2e/kubernetes/persistentvolumeclaims/persistentvolumeclaims.go
test/e2e/kubernetes/pod/pod.go
test/e2e/kubernetes/service/service.go
test/e2e/kubernetes/storageclass/storageclass.go
test/e2e/kubernetes/util/util.go
test/e2e/metrics/metrics.go
test/e2e/openshift/openshift_suite_test.go
test/e2e/openshift/openshift_test.go
test/e2e/openshift/node/node.go
test/e2e/openshift/util/util.go
test/e2e/remote/ssh.go
test/e2e/runner/cli_provisioner.go
test/e2e/runner/ginkgo.go
test/i18n/i18ntestinput.go
main.go
GOBIN=/data/src/github.com/Azure/acs-engine/bin go install  -ldflags '-s -X main.version=b7dc0cac1907970426ecd900d772e88d3b7fec7e -X github.com/Azure/acs-engine/pkg/test.JUnitOutDir=/data/src/github.com/Azure/acs-engine/test/junit -X github.com/Azure/acs-engine/cmd.BuildSHA=b7dc0ca -X github.com/Azure/acs-engine/cmd.GitTreeState=clean -X github.com/Azure/acs-engine/cmd.BuildTag=canary'
cd test/acs-engine-test; go build 
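
To reproduce this build step outside CI, something like the following should work; the clone location only has to match the Go import path, the rest is driven by make build (GOPATH layout assumed, as acs-engine predates Go modules).
# Hedged sketch: build acs-engine locally.
export GOPATH=$HOME/go
mkdir -p $GOPATH/src/github.com/Azure
git clone https://github.com/Azure/acs-engine $GOPATH/src/github.com/Azure/acs-engine
cd $GOPATH/src/github.com/Azure/acs-engine
make build   # fetches the tools above, runs go generate, and installs ./bin/acs-engine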
+ set +o xtrace
Unable to find image 'registry.svc.ci.openshift.org/ci/acs-engine-tests:v3.9' locally
Trying to pull repository registry.svc.ci.openshift.org/ci/acs-engine-tests ... 
v3.9: Pulling from registry.svc.ci.openshift.org/ci/acs-engine-tests
c83208261473: Pulling fs layer
6e1a85c1d66a: Pulling fs layer
f1320ef45e20: Pulling fs layer
5a6ab6e6fbf6: Pulling fs layer
6fd240c27767: Pulling fs layer
50296308d825: Pulling fs layer
58f53a671a27: Pulling fs layer
55b681ddb972: Pulling fs layer
78c5557b857c: Pulling fs layer
50296308d825: Waiting
58f53a671a27: Waiting
55b681ddb972: Waiting
78c5557b857c: Waiting
5a6ab6e6fbf6: Waiting
6fd240c27767: Waiting
f1320ef45e20: Verifying Checksum
f1320ef45e20: Download complete
6e1a85c1d66a: Verifying Checksum
6e1a85c1d66a: Download complete
6fd240c27767: Download complete
5a6ab6e6fbf6: Verifying Checksum
5a6ab6e6fbf6: Download complete
c83208261473: Verifying Checksum
c83208261473: Download complete
50296308d825: Download complete
55b681ddb972: Download complete
78c5557b857c: Verifying Checksum
78c5557b857c: Download complete
c83208261473: Pull complete
6e1a85c1d66a: Pull complete
f1320ef45e20: Pull complete
5a6ab6e6fbf6: Pull complete
6fd240c27767: Pull complete
58f53a671a27: Verifying Checksum
58f53a671a27: Download complete
50296308d825: Pull complete
58f53a671a27: Pull complete
55b681ddb972: Pull complete
78c5557b857c: Pull complete
Digest: sha256:ed9251dc2827a4b5ed748388e3988fa2a274efa0d3d3422b2bc31fda204a3b20
Status: Downloaded newer image for registry.svc.ci.openshift.org/ci/acs-engine-tests:v3.9
2018/06/20 15:48:21 Picking Random Region from list [eastus]
2018/06/20 15:48:21 Picked Random Region:eastus

$ which timeout

$ timeout 60 az account set --subscription 225e02bc-43d0-43d1-a01a-17e584a4ef69

$ ssh-keygen -f /go/src/github.com/Azure/acs-engine/_output/openshift-eastus-90178-ssh -q -N  -b 2048 -t rsa

$ timeout 60 az group create --name openshift-eastus-90178 --location eastus --tags now=1529509703

$ ./bin/acs-engine generate /go/src/github.com/Azure/acs-engine/_output/openshift-eastus-90178.json --output-directory /go/src/github.com/Azure/acs-engine/_output/openshift-eastus-90178

$ az group deployment create --name openshift-eastus-90178 --resource-group openshift-eastus-90178 --template-file /go/src/github.com/Azure/acs-engine/_output/openshift-eastus-90178/azuredeploy.json --parameters /go/src/github.com/Azure/acs-engine/_output/openshift-eastus-90178/azuredeploy.parameters.json
2018/06/20 15:48:37 
Kubeconfig:/go/src/github.com/Azure/acs-engine/_output/openshift-eastus-90178/etc/origin/master/admin.kubeconfig
...............
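
Once the deployment settles, the kubeconfig printed above can be used directly; a small sketch (the path is the one logged, the commands are standard kubectl/oc usage):
# Point kubectl/oc at the cluster acs-engine just stood up.
export KUBECONFIG=/go/src/github.com/Azure/acs-engine/_output/openshift-eastus-90178/etc/origin/master/admin.kubeconfig
kubectl get nodes -o wide   # the same readiness check the harness loops on below
oc status                   # optional OpenShift-level sanity check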
$ timeout 60 az vm list -g openshift-eastus-90178

2018/06/20 16:03:56 Waiting on nodes to go into ready state...

$ kubectl get nodes -o json

$ oc version
2018/06/20 16:03:56 Testing a openshift v3.9.0 cluster...

$ ginkgo -slowSpecThreshold 180 -r -v test/e2e/openshift
Running Suite: OpenShift Suite
==============================
Random Seed: 1529510636
Will run 13 of 13 specs

Azure Container Cluster using the OpenShift Orchestrator 
  should have bootstrap autoapprover running
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:51

$ kubectl get pods -n openshift-infra -o json

$ kubectl get pods -n openshift-infra -o json

$ kubectl get pods -n openshift-infra -o json

$ kubectl get pods -n openshift-infra -o json
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should have have the appropriate node count
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:57

$ kubectl get nodes -o json
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should label nodes correctly
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:62

$ kubectl get nodes -o json
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should be running the expected version
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:88

$ oc version
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should have router running
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:118

$ kubectl get pods -n default -o json

$ kubectl get pods -n default -o json

$ kubectl get pods -n default -o json

$ kubectl get pods -n default -o json
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should have docker-registry running
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:124

$ kubectl get pods -n default -o json

$ kubectl get pods -n default -o json

$ kubectl get pods -n default -o json

$ kubectl get pods -n default -o json
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should have registry-console running
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:130

$ kubectl get pods -n default -o json

$ kubectl get pods -n default -o json

$ kubectl get pods -n default -o json

$ kubectl get pods -n default -o json
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should deploy a sample app and access it via a route
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:136

$ oc process nginx-example -n openshift

$ oc apply -n default -f nginx-example

$ oc rollout status dc/nginx-example -n default

$ oc get route/nginx-example -n default -o jsonpath={.spec.host}
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should have the openshift webconsole running
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:145

$ kubectl get pods -n openshift-web-console -o json

$ kubectl get pods -n openshift-web-console -o json

$ kubectl get pods -n openshift-web-console -o json

$ kubectl get pods -n openshift-web-console -o json
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should have prometheus running
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:151

$ kubectl get pods -n openshift-metrics -o json

$ kubectl get pods -n openshift-metrics -o json

$ kubectl get pods -n openshift-metrics -o json

$ kubectl get pods -n openshift-metrics -o json
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should have service catalog apiserver running
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:157

$ kubectl get pods -n kube-service-catalog -o json

$ kubectl get pods -n kube-service-catalog -o json

$ kubectl get pods -n kube-service-catalog -o json

$ kubectl get pods -n kube-service-catalog -o json
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should have service catalog controller-manager running
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:163

$ kubectl get pods -n kube-service-catalog -o json

$ kubectl get pods -n kube-service-catalog -o json

$ kubectl get pods -n kube-service-catalog -o json

$ kubectl get pods -n kube-service-catalog -o json
•
------------------------------
Azure Container Cluster using the OpenShift Orchestrator 
  should have template service broker running
  /go/src/github.com/Azure/acs-engine/test/e2e/openshift/openshift_test.go:169

$ kubectl get pods -n openshift-ansible-service-broker -o json

$ kubectl get pods -n openshift-ansible-service-broker -o json

$ kubectl get pods -n openshift-ansible-service-broker -o json

$ kubectl get pods -n openshift-ansible-service-broker -o json
•
Ran 13 of 13 Specs in 73.074 seconds
SUCCESS! -- 13 Passed | 0 Failed | 0 Pending | 0 Skipped PASS

Ginkgo ran 1 suite in 1m17.600825803s
Test Suite Passed
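
Re-running a single spec from this suite is straightforward with ginkgo's focus flag; a sketch, assuming the same checkout and KUBECONFIG as above:
# Focus on one spec instead of all 13 (the regexp matches the spec description).
cd /data/src/github.com/Azure/acs-engine
ginkgo -slowSpecThreshold 180 -r -v -focus "should have router running" test/e2e/openshift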
2018/06/20 16:05:17 Will not gather journal for the control plane because invalid OpenShift distro was specified: ""

$ oc logs deploymentconfig/router -n default

$ oc logs deploymentconfig/docker-registry -n default

$ oc logs deploymentconfig/registry-console -n default

$ oc logs statefulset/bootstrap-autoapprover -n openshift-infra

$ oc logs statefulset/prometheus -n openshift-metrics

$ oc logs daemonset/apiserver -n kube-service-catalog

$ oc logs daemonset/controller-manager -n kube-service-catalog

$ oc logs deploymentconfig/asb -n openshift-ansible-service-broker

$ oc logs deploymentconfig/asb-etcd -n openshift-ansible-service-broker

$ oc logs daemonset/apiserver -n openshift-template-service-broker

$ oc logs deployment/webconsole -n openshift-web-console

$ oc get pods --all-namespaces -o wide

$ oc get nodes -o wide

$ oc get --raw https://localhost:8443/metrics

$ oc get --raw https://localhost:8444/metrics

$ oc get --raw https://localhost:2380/metrics

$ az monitor activity-log list --resource-group openshift-eastus-90178 --status Failed

$ timeout 60 az group delete --name openshift-eastus-90178 --no-wait --yes
2018/06/20 16:05:24 Deleting Group:openshift-eastus-90178
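
Because the delete is issued with --no-wait, confirming teardown needs a separate check; a sketch using standard az commands (the group name is the one logged):
# Verify the asynchronous delete actually completed.
az group wait --name openshift-eastus-90178 --deleted --timeout 1800
az group exists --name openshift-eastus-90178   # prints "false" once the group is gone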
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: RUN THE ACS-ENGINE E2E TEST [00h 18m 55s] ##########
[workspace] $ /bin/bash /tmp/jenkins9060319776897482977.sh
########## STARTING STAGE: RUN TESTS ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ mktemp
+ script=/tmp/tmp.jyw8WXPXkT
+ cat
+ chmod +x /tmp/tmp.jyw8WXPXkT
+ scp -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.jyw8WXPXkT openshiftdevel:/tmp/tmp.jyw8WXPXkT
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 900 /tmp/tmp.jyw8WXPXkT"'
+ cd /home/origin
+ exit 0
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: RUN TESTS [00h 00m 01s] ##########
[PostBuildScript] - Executing post build scripts.
[workspace] $ /bin/bash /tmp/jenkins5406070425766953677.sh
########## STARTING STAGE: DOWNLOAD ARTIFACTS FROM THE REMOTE HOST ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
+ trap 'exit 0' EXIT
++ pwd
+ ARTIFACT_DIR=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/gathered
+ rm -rf /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/gathered
+ mkdir -p /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/gathered
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo stat /data/src/github.com/openshift/origin/_output/scripts
  File: '/data/src/github.com/openshift/origin/_output/scripts'
  Size: 64        	Blocks: 0          IO Block: 4096   directory
Device: ca02h/51714d	Inode: 4202140     Links: 5
Access: (2755/drwxr-sr-x)  Uid: ( 1001/  origin)   Gid: ( 1003/origin-git)
Context: unconfined_u:object_r:container_file_t:s0
Access: 2018-06-20 14:23:48.283234421 +0000
Modify: 2018-06-20 14:53:53.916603293 +0000
Change: 2018-06-20 14:53:53.916603293 +0000
 Birth: -
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo chmod -R o+rX /data/src/github.com/openshift/origin/_output/scripts
+ scp -r -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel:/data/src/github.com/openshift/origin/_output/scripts /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/gathered
+ tree /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/gathered
/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/gathered
└── scripts
    β”œβ”€β”€ build-base-images
    β”‚   β”œβ”€β”€ artifacts
    β”‚   β”œβ”€β”€ logs
    β”‚   └── openshift.local.home
    β”œβ”€β”€ push-release
    β”‚   β”œβ”€β”€ artifacts
    β”‚   β”œβ”€β”€ logs
    β”‚   β”‚   └── scripts.log
    β”‚   └── openshift.local.home
    └── shell
        β”œβ”€β”€ artifacts
        β”œβ”€β”€ logs
        β”‚   β”œβ”€β”€ b8406432b46bf04db977fc621eb1b81c85db3aa2fe858044b3169bef8be6c446.json
        β”‚   └── scripts.log
        └── openshift.local.home

13 directories, 3 files
+ exit 0
[workspace] $ /bin/bash /tmp/jenkins8220556669539470960.sh
########## STARTING STAGE: GENERATE ARTIFACTS FROM THE REMOTE HOST ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
+ trap 'exit 0' EXIT
++ pwd
+ ARTIFACT_DIR=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/generated
+ rm -rf /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/generated
+ mkdir /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/generated
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo docker version && sudo docker info && sudo docker images && sudo docker ps -a 2>&1'
  WARNING: You're not using the default seccomp profile
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo cat /etc/sysconfig/docker /etc/sysconfig/docker-network /etc/sysconfig/docker-storage /etc/sysconfig/docker-storage-setup /etc/systemd/system/docker.service 2>&1'
+ true
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo find /var/lib/docker/containers -name *.log | sudo xargs tail -vn +1 2>&1'
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'oc get --raw /metrics --server=https://$( uname --nodename ):10250 --config=/etc/origin/master/admin.kubeconfig 2>&1'
+ true
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo ausearch -m AVC -m SELINUX_ERR -m USER_AVC 2>&1'
+ true
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'oc get --raw /metrics --config=/etc/origin/master/admin.kubeconfig 2>&1'
+ true
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo df -T -h && sudo pvs && sudo vgs && sudo lvs && sudo findmnt --all 2>&1'
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo yum list installed 2>&1'
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo journalctl --dmesg --no-pager --all --lines=all 2>&1'
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo journalctl _PID=1 --no-pager --all --lines=all 2>&1'
+ tree /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/generated
/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/generated
├── avc_denials.log
├── containers.log
├── dmesg.log
├── docker.config
├── docker.info
├── filesystem.info
├── installed_packages.log
├── master-metrics.log
├── node-metrics.log
└── pid1.journal

0 directories, 10 files
+ exit 0
[workspace] $ /bin/bash /tmp/jenkins6976949867510448331.sh
########## STARTING STAGE: FETCH SYSTEMD JOURNALS FROM THE REMOTE HOST ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
+ trap 'exit 0' EXIT
++ pwd
+ ARTIFACT_DIR=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/journals
+ rm -rf /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/journals
+ mkdir /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/journals
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo journalctl --unit docker.service --no-pager --all --lines=all
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo journalctl --unit dnsmasq.service --no-pager --all --lines=all
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo journalctl --unit systemd-journald.service --no-pager --all --lines=all
+ tree /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/journals
/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/artifacts/journals
├── dnsmasq.service
├── docker.service
└── systemd-journald.service

0 directories, 3 files
+ exit 0
[workspace] $ /bin/bash /tmp/jenkins2061828344118286001.sh
########## STARTING STAGE: ASSEMBLE GCS OUTPUT ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
+ trap 'exit 0' EXIT
+ mkdir -p gcs/artifacts gcs/artifacts/generated gcs/artifacts/journals gcs/artifacts/gathered
++ python -c 'import json; import urllib; print json.load(urllib.urlopen('\''https://ci.openshift.redhat.com/jenkins/job/test_pull_request_origin_extended_conformance_azure_39_test/6/api/json'\''))['\''result'\'']'
+ result=SUCCESS
+ cat
++ date +%s
+ cat /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/builds/6/log
+ cp artifacts/generated/avc_denials.log artifacts/generated/containers.log artifacts/generated/dmesg.log artifacts/generated/docker.config artifacts/generated/docker.info artifacts/generated/filesystem.info artifacts/generated/installed_packages.log artifacts/generated/master-metrics.log artifacts/generated/node-metrics.log artifacts/generated/pid1.journal gcs/artifacts/generated/
+ cp artifacts/journals/dnsmasq.service artifacts/journals/docker.service artifacts/journals/systemd-journald.service gcs/artifacts/journals/
+ cp -r artifacts/gathered/scripts gcs/artifacts/
++ pwd
+ scp -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config -r /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/gcs openshiftdevel:/data
+ scp -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config /var/lib/jenkins/.config/gcloud/gcs-publisher-credentials.json openshiftdevel:/data/credentials.json
+ exit 0
[workspace] $ /bin/bash /tmp/jenkins2012025711322502853.sh
########## STARTING STAGE: PUSH THE ARTIFACTS AND METADATA ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ mktemp
+ script=/tmp/tmp.P7fny0xRjq
+ cat
+ chmod +x /tmp/tmp.P7fny0xRjq
+ scp -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.P7fny0xRjq openshiftdevel:/tmp/tmp.P7fny0xRjq
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 300 /tmp/tmp.P7fny0xRjq"'
+ cd /home/origin
+ trap 'exit 0' EXIT
+ [[ -n {"type":"presubmit","job":"test_pull_request_origin_extended_conformance_azure_39","buildid":"16a5b5b8-7088-11e8-8611-0a58ac100c6c","refs":{"org":"openshift","repo":"origin","base_ref":"release-3.9","base_sha":"561755cee420f2c17f79928f608932cd9ee08014","pulls":[{"number":19957,"author":"tnozicka","sha":"3a64894655994d37d782b64b13ca923ab6d89409"}]}} ]]
++ jq --compact-output .buildid
+ [[ "16a5b5b8-7088-11e8-8611-0a58ac100c6c" =~ ^"[0-9]+"$ ]]
+ echo 'Using BUILD_NUMBER'
Using BUILD_NUMBER
++ jq --compact-output '.buildid |= "6"'
+ JOB_SPEC='{"type":"presubmit","job":"test_pull_request_origin_extended_conformance_azure_39","buildid":"6","refs":{"org":"openshift","repo":"origin","base_ref":"release-3.9","base_sha":"561755cee420f2c17f79928f608932cd9ee08014","pulls":[{"number":19957,"author":"tnozicka","sha":"3a64894655994d37d782b64b13ca923ab6d89409"}]}}'
+ docker run -e 'JOB_SPEC={"type":"presubmit","job":"test_pull_request_origin_extended_conformance_azure_39","buildid":"6","refs":{"org":"openshift","repo":"origin","base_ref":"release-3.9","base_sha":"561755cee420f2c17f79928f608932cd9ee08014","pulls":[{"number":19957,"author":"tnozicka","sha":"3a64894655994d37d782b64b13ca923ab6d89409"}]}}' -v /data:/data:z registry.svc.ci.openshift.org/ci/gcsupload:latest --dry-run=false --gcs-path=gs://origin-ci-test --gcs-credentials-file=/data/credentials.json --path-strategy=single --default-org=openshift --default-repo=origin /data/gcs/artifacts /data/gcs/build-log.txt /data/gcs/finished.json
Unable to find image 'registry.svc.ci.openshift.org/ci/gcsupload:latest' locally
Trying to pull repository registry.svc.ci.openshift.org/ci/gcsupload ... 
latest: Pulling from registry.svc.ci.openshift.org/ci/gcsupload
605ce1bd3f31: Already exists
dc6346da9948: Already exists
714dbeb5426b: Pulling fs layer
714dbeb5426b: Verifying Checksum
714dbeb5426b: Download complete
714dbeb5426b: Pull complete
Digest: sha256:6bc55d4011bd84c4da4bd18aaa5ff6f45a1518439f4d30dd8f873bc2437f386c
Status: Downloaded newer image for registry.svc.ci.openshift.org/ci/gcsupload:latest
{"component":"gcsupload","level":"info","msg":"Gathering artifacts from artifact directory: /data/gcs/artifacts","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/avc_denials.log in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/avc_denials.log\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/containers.log in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/containers.log\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/dmesg.log in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/dmesg.log\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/docker.config in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/docker.config\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/docker.info in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/docker.info\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/filesystem.info in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/filesystem.info\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/installed_packages.log in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/installed_packages.log\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/master-metrics.log in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/master-metrics.log\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/node-metrics.log in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/node-metrics.log\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/pid1.journal in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/pid1.journal\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/journals/dnsmasq.service in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/journals/dnsmasq.service\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/journals/docker.service in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/journals/docker.service\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/journals/systemd-journald.service in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/journals/systemd-journald.service\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/scripts/push-release/logs/scripts.log in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/scripts/push-release/logs/scripts.log\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/scripts/shell/logs/b8406432b46bf04db977fc621eb1b81c85db3aa2fe858044b3169bef8be6c446.json in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/scripts/shell/logs/b8406432b46bf04db977fc621eb1b81c85db3aa2fe858044b3169bef8be6c446.json\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/scripts/shell/logs/scripts.log in artifact directory. Uploading as pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/scripts/shell/logs/scripts.log\n","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/journals/dnsmasq.service","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/scripts/push-release/logs/scripts.log","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/build-log.txt","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/finished.json","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/directory/test_pull_request_origin_extended_conformance_azure_39/latest-build.txt","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/avc_denials.log","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/filesystem.info","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/pid1.journal","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/journals/docker.service","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/journals/systemd-journald.service","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/scripts/shell/logs/scripts.log","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/latest-build.txt","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/dmesg.log","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/docker.config","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/master-metrics.log","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/node-metrics.log","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/scripts/shell/logs/b8406432b46bf04db977fc621eb1b81c85db3aa2fe858044b3169bef8be6c446.json","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/directory/test_pull_request_origin_extended_conformance_azure_39/6.txt","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/docker.info","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/installed_packages.log","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/containers.log","level":"info","msg":"Queued for upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/filesystem.info","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/docker.info","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/latest-build.txt","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/installed_packages.log","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/journals/systemd-journald.service","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/avc_denials.log","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/containers.log","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/docker.config","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/scripts/shell/logs/scripts.log","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/directory/test_pull_request_origin_extended_conformance_azure_39/latest-build.txt","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/scripts/push-release/logs/scripts.log","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/master-metrics.log","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/scripts/shell/logs/b8406432b46bf04db977fc621eb1b81c85db3aa2fe858044b3169bef8be6c446.json","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/pid1.journal","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/journals/dnsmasq.service","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/directory/test_pull_request_origin_extended_conformance_azure_39/6.txt","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/node-metrics.log","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/finished.json","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/generated/dmesg.log","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/build-log.txt","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","dest":"pr-logs/pull/19957/test_pull_request_origin_extended_conformance_azure_39/6/artifacts/journals/docker.service","level":"info","msg":"Finished upload","time":"2018-06-20T16:05:49Z"}
{"component":"gcsupload","level":"info","msg":"Finished upload to GCS","time":"2018-06-20T16:05:49Z"}
+ exit 0
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: PUSH THE ARTIFACTS AND METADATA [00h 00m 09s] ##########
[workspace] $ /bin/bash /tmp/jenkins1628665180655175360.sh
########## STARTING STAGE: HOLD FAILED TEST CLUSTER FOR DEBUG ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
+ trap 'exit 0' EXIT
+ [[ SUCCESS == \S\U\C\C\E\S\S ]]
+ exit 0
+ exit 0
[workspace] $ /bin/bash /tmp/jenkins5120351231767503425.sh
########## STARTING STAGE: DEPROVISION TEST CLUSTER ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ mktemp
+ script=/tmp/tmp.z8CNZd9GdP
+ cat
+ chmod +x /tmp/tmp.z8CNZd9GdP
+ scp -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.z8CNZd9GdP openshiftdevel:/tmp/tmp.z8CNZd9GdP
+ ssh -F /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 14400 /tmp/tmp.z8CNZd9GdP"'
+ cd /data/src/github.com/openshift/release
+ trap 'exit 0' EXIT
+ cd cluster/test-deploy/azure
+ TYPE=azure
+ ../../bin/ansible.sh ansible-playbook -e openshift_azure_resource_group_name=ci-test_pr_origin_extended_c_azure_39_test-6 playbooks/azure/openshift-cluster/deprovision.yml

PLAY [localhost] ***************************************************************

TASK [delete resource group] ***************************************************
Wednesday 20 June 2018  16:05:52 +0000 (0:00:00.070)       0:00:00.070 ******** 
changed: [localhost]

PLAY RECAP *********************************************************************
localhost                  : ok=1    changed=1    unreachable=0    failed=0   

Wednesday 20 June 2018  17:05:28 +0000 (0:59:35.255)       0:59:35.326 ******** 
=============================================================================== 
delete resource group ------------------------------------------------ 3575.26s
+ exit 0
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: DEPROVISION TEST CLUSTER [00h 59m 38s] ##########
[workspace] $ /bin/bash /tmp/jenkins8601921697093457091.sh
########## STARTING STAGE: DELETE PR IMAGES ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
+ trap 'exit 0' EXIT
+ source ./INSTANCE_PREFIX
++ INSTANCE_PREFIX=prtest-f19f428-6
++ OS_TAG=5e1685a
++ OS_PUSH_BASE_REPO=ci-pr-images/prtest-f19f428-6-
+ export KUBECONFIG=/var/lib/jenkins/secrets/image-pr-push.kubeconfig
+ KUBECONFIG=/var/lib/jenkins/secrets/image-pr-push.kubeconfig
+ oc get is -o name -n ci-pr-images
+ grep prtest-f19f428-6
+ xargs -r oc delete
imagestream "prtest-f19f428-6-hello-openshift" deleted
imagestream "prtest-f19f428-6-node" deleted
imagestream "prtest-f19f428-6-openvswitch" deleted
imagestream "prtest-f19f428-6-origin" deleted
imagestream "prtest-f19f428-6-origin-base" deleted
imagestream "prtest-f19f428-6-origin-cluster-capacity" deleted
imagestream "prtest-f19f428-6-origin-deployer" deleted
imagestream "prtest-f19f428-6-origin-docker-builder" deleted
imagestream "prtest-f19f428-6-origin-egress-http-proxy" deleted
imagestream "prtest-f19f428-6-origin-egress-router" deleted
imagestream "prtest-f19f428-6-origin-f5-router" deleted
imagestream "prtest-f19f428-6-origin-haproxy-router" deleted
imagestream "prtest-f19f428-6-origin-keepalived-ipfailover" deleted
imagestream "prtest-f19f428-6-origin-pod" deleted
imagestream "prtest-f19f428-6-origin-recycler" deleted
imagestream "prtest-f19f428-6-origin-service-catalog" deleted
imagestream "prtest-f19f428-6-origin-sti-builder" deleted
imagestream "prtest-f19f428-6-origin-template-service-broker" deleted
+ exit 0
[workspace] $ /bin/bash /tmp/jenkins8792099987338156172.sh
########## STARTING STAGE: DEPROVISION CLOUD RESOURCES ##########
+ [[ -s /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate ]]
+ source /var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e
++ export PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config
+ oct deprovision

PLAYBOOK: main.yml *************************************************************
4 plays in /var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml

PLAY [ensure we have the parameters necessary to deprovision virtual hosts] ****

TASK [ensure all required variables are set] ***********************************
task path: /var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml:9
skipping: [localhost] => (item=origin_ci_inventory_dir)  => {
    "changed": false, 
    "generated_timestamp": "2018-06-20 13:05:33.609249", 
    "item": "origin_ci_inventory_dir", 
    "skip_reason": "Conditional check failed", 
    "skipped": true
}
skipping: [localhost] => (item=origin_ci_aws_region)  => {
    "changed": false, 
    "generated_timestamp": "2018-06-20 13:05:33.612605", 
    "item": "origin_ci_aws_region", 
    "skip_reason": "Conditional check failed", 
    "skipped": true
}

PLAY [deprovision virtual hosts in EC2] ****************************************

TASK [Gathering Facts] *********************************************************
ok: [localhost]

TASK [deprovision a virtual EC2 host] ******************************************
task path: /var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml:28
included: /var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml for localhost

TASK [update the SSH configuration to remove AWS EC2 specifics] ****************
task path: /var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:2
ok: [localhost] => {
    "changed": false, 
    "generated_timestamp": "2018-06-20 13:05:34.382587", 
    "msg": ""
}

TASK [rename EC2 instance for termination reaper] ******************************
task path: /var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:8
changed: [localhost] => {
    "changed": true, 
    "generated_timestamp": "2018-06-20 13:05:34.942171", 
    "msg": "Tags {'Name': 'oct-terminate'} created for resource i-06ac109da33e79088."
}

TASK [tear down the EC2 instance] **********************************************
task path: /var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:15
changed: [localhost] => {
    "changed": true, 
    "generated_timestamp": "2018-06-20 13:05:35.874584", 
    "instance_ids": [
        "i-06ac109da33e79088"
    ], 
    "instances": [
        {
            "ami_launch_index": "0", 
            "architecture": "x86_64", 
            "block_device_mapping": {
                "/dev/sda1": {
                    "delete_on_termination": true, 
                    "status": "attached", 
                    "volume_id": "vol-0d89f18101ddd4658"
                }, 
                "/dev/sdb": {
                    "delete_on_termination": true, 
                    "status": "attached", 
                    "volume_id": "vol-09d6dbfcde0f364cf"
                }
            }, 
            "dns_name": "ec2-54-209-237-169.compute-1.amazonaws.com", 
            "ebs_optimized": false, 
            "groups": {
                "sg-7e73221a": "default"
            }, 
            "hypervisor": "xen", 
            "id": "i-06ac109da33e79088", 
            "image_id": "ami-0f2178e5f060dbf2d", 
            "instance_type": "m4.xlarge", 
            "kernel": null, 
            "key_name": "libra", 
            "launch_time": "2018-06-20T14:19:54.000Z", 
            "placement": "us-east-1d", 
            "private_dns_name": "ip-172-18-14-10.ec2.internal", 
            "private_ip": "172.18.14.10", 
            "public_dns_name": "ec2-54-209-237-169.compute-1.amazonaws.com", 
            "public_ip": "54.209.237.169", 
            "ramdisk": null, 
            "region": "us-east-1", 
            "root_device_name": "/dev/sda1", 
            "root_device_type": "ebs", 
            "state": "running", 
            "state_code": 16, 
            "tags": {
                "Name": "oct-terminate", 
                "openshift_etcd": "", 
                "openshift_master": "", 
                "openshift_node": ""
            }, 
            "tenancy": "default", 
            "virtualization_type": "hvm"
        }
    ], 
    "tagged_instances": []
}

TASK [remove the serialized host variables] ************************************
task path: /var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:22
changed: [localhost] => {
    "changed": true, 
    "generated_timestamp": "2018-06-20 13:05:36.124132", 
    "path": "/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory/host_vars/172.18.14.10.yml", 
    "state": "absent"
}

PLAY [deprovision virtual hosts locally managed by Vagrant] ********************

TASK [Gathering Facts] *********************************************************
ok: [localhost]

PLAY [clean up local configuration for deprovisioned instances] ****************

TASK [remove inventory configuration directory] ********************************
task path: /var/lib/jenkins/origin-ci-tool/4b405957477ba1b70cfacd1cf43c6d41a605fc8e/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml:61
changed: [localhost] => {
    "changed": true, 
    "generated_timestamp": "2018-06-20 13:05:36.586016", 
    "path": "/var/lib/jenkins/jobs/test_pull_request_origin_extended_conformance_azure_39_test/workspace/.config/origin-ci-tool/inventory", 
    "state": "absent"
}

PLAY RECAP *********************************************************************
localhost                  : ok=8    changed=4    unreachable=0    failed=0   

+ set +o xtrace
########## FINISHED STAGE: SUCCESS: DEPROVISION CLOUD RESOURCES [00h 00m 04s] ##########
Archiving artifacts
Recording test results
[WS-CLEANUP] Deleting project workspace...[WS-CLEANUP] done
Finished: SUCCESS