Started by user OpenShift CI Robot [EnvInject] - Loading node environment variables. Building in workspace /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace [WS-CLEANUP] Deleting project workspace... [workspace] $ /bin/bash /tmp/jenkins507226920005071888.sh ########## STARTING STAGE: INSTALL THE ORIGIN-CI-TOOL ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] ++ readlink /var/lib/jenkins/origin-ci-tool/latest + latest=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 + touch /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 + cp /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin/activate /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate + cat + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + mkdir -p /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + rm -rf /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool + oct configure ansible-client verbosity 2 Option verbosity updated to be 2. + oct configure aws-client keypair_name libra Option keypair_name updated to be libra. + oct configure aws-client private_key_path /var/lib/jenkins/.ssh/devenv.pem Option private_key_path updated to be /var/lib/jenkins/.ssh/devenv.pem. 
+ set +o xtrace ########## FINISHED STAGE: SUCCESS: INSTALL THE ORIGIN-CI-TOOL [00h 00m 01s] ########## [workspace] $ /bin/bash /tmp/jenkins1181506604012583492.sh ########## STARTING STAGE: PROVISION CLOUD RESOURCES ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + oct provision remote all-in-one --os rhel --stage build --provider aws --discrete-ssh-config --name test_branch_origin_web_console_server_e2e_68 PLAYBOOK: aws-up.yml *********************************************************** 2 plays in /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/aws-up.yml PLAY [ensure we have the parameters necessary to bring up the AWS EC2 instance] *** TASK [ensure all required variables are set] *********************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/aws-up.yml:9 skipping: [localhost] => (item=origin_ci_inventory_dir) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.354097", "item": "origin_ci_inventory_dir", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_keypair_name) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.356868", "item": "origin_ci_aws_keypair_name", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_private_key_path) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.360075", "item": "origin_ci_aws_private_key_path", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_region) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.363281", "item": "origin_ci_aws_region", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_ami_tags) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.368827", "item": "origin_ci_aws_ami_tags", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_instance_name) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.371975", "item": "origin_ci_aws_instance_name", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_master_instance_type) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.375445", "item": "origin_ci_aws_master_instance_type", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_identifying_tag_key) => { "changed": false, "generated_timestamp": "2018-06-02 
05:25:53.380224", "item": "origin_ci_aws_identifying_tag_key", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_hostname) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.383789", "item": "origin_ci_aws_hostname", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_ssh_config_strategy) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.388655", "item": "origin_ci_ssh_config_strategy", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=openshift_schedulable) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.392179", "item": "openshift_schedulable", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=openshift_node_labels) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.395807", "item": "openshift_node_labels", "skip_reason": "Conditional check failed", "skipped": true } TASK [ensure all required variables are set] *********************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/aws-up.yml:27 skipping: [localhost] => (item=origin_ci_aws_master_subnet) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.436431", "item": "origin_ci_aws_master_subnet", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_etcd_security_group) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.442654", "item": "origin_ci_aws_etcd_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_node_security_group) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.447180", "item": "origin_ci_aws_node_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_master_security_group) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.452662", "item": "origin_ci_aws_master_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_master_external_elb_security_group) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.458124", "item": "origin_ci_aws_master_external_elb_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_master_internal_elb_security_group) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.463606", "item": "origin_ci_aws_master_internal_elb_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_router_security_group) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.467877", "item": "origin_ci_aws_router_security_group", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_router_elb_security_group) => { "changed": false, "generated_timestamp": "2018-06-02 05:25:53.474115", "item": "origin_ci_aws_router_elb_security_group", "skip_reason": "Conditional check failed", "skipped": true } PLAY [provision an AWS EC2 instance] ******************************************* TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [inventory : initialize the inventory 
directory] ************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/inventory/tasks/main.yml:2 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-06-02 05:25:54.253889", "gid": 995, "group": "jenkins", "mode": "0755", "owner": "jenkins", "path": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory", "secontext": "system_u:object_r:var_lib_t:s0", "size": 6, "state": "directory", "uid": 997 } TASK [inventory : add the nested group mapping] ******************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/inventory/tasks/main.yml:7 changed: [localhost] => { "changed": true, "checksum": "18aaee00994df38cc3a63b635893175235331a9c", "dest": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/nested_group_mappings", "generated_timestamp": "2018-06-02 05:25:54.768561", "gid": 995, "group": "jenkins", "md5sum": "b30c3226ea63efa3ff9c5e346c14a16e", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 93, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1527931554.5-202353512857037/source", "state": "file", "uid": 997 } TASK [inventory : initialize the OSEv3 group variables directory] ************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/inventory/tasks/main.yml:12 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-06-02 05:25:54.940832", "gid": 995, "group": "jenkins", "mode": "0755", "owner": "jenkins", "path": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/group_vars/OSEv3", "secontext": "system_u:object_r:var_lib_t:s0", "size": 6, "state": "directory", "uid": 997 } TASK [inventory : initialize the host variables directory] ********************* task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/inventory/tasks/main.yml:17 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-06-02 05:25:55.113769", "gid": 995, "group": "jenkins", "mode": "0755", "owner": "jenkins", "path": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/host_vars", "secontext": "system_u:object_r:var_lib_t:s0", "size": 6, "state": "directory", "uid": 997 } TASK [inventory : add the default Origin installation configuration] *********** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/inventory/tasks/main.yml:22 changed: [localhost] => { "changed": true, "checksum": "4c06ba508f055c20f13426e8587342e8765a7b66", "dest": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/group_vars/OSEv3/general.yml", "generated_timestamp": "2018-06-02 05:25:55.417089", "gid": 995, "group": "jenkins", "md5sum": "8aec71c75f7d512b278ae7c6f2959b12", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 331, "src": 
"/var/lib/jenkins/.ansible/tmp/ansible-tmp-1527931555.28-137345449255967/source", "state": "file", "uid": 997 } TASK [aws-up : determine if we are inside AWS EC2] ***************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:2 changed: [localhost] => { "changed": true, "cmd": [ "curl", "-s", "http://instance-data.ec2.internal" ], "delta": "0:00:00.011988", "end": "2018-06-02 05:25:55.640610", "failed": false, "failed_when_result": false, "generated_timestamp": "2018-06-02 05:25:55.657791", "rc": 0, "start": "2018-06-02 05:25:55.628622", "stderr": [], "stdout": [ "1.0", "2007-01-19", "2007-03-01", "2007-08-29", "2007-10-10", "2007-12-15", "2008-02-01", "2008-09-01", "2009-04-04", "2011-01-01", "2011-05-01", "2012-01-12", "2014-02-25", "2014-11-05", "2015-10-20", "2016-04-19", "2016-06-30", "2016-09-02", "latest" ], "warnings": [ "Consider using get_url or uri module rather than running curl" ] } [WARNING]: Consider using get_url or uri module rather than running curl TASK [aws-up : configure EC2 parameters for inventory when controlling from inside EC2] *** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:7 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_destination_variable": "private_dns_name", "origin_ci_aws_host_address_variable": "private_ip", "origin_ci_aws_vpc_destination_variable": "private_ip_address" }, "changed": false, "generated_timestamp": "2018-06-02 05:25:55.701609" } TASK [aws-up : determine where to put the AWS API cache] *********************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:14 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_cache_dir": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ec2_cache" }, "changed": false, "generated_timestamp": "2018-06-02 05:25:55.742051" } TASK [aws-up : ensure we have a place to put the AWS API cache] **************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:18 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-06-02 05:25:55.912967", "gid": 995, "group": "jenkins", "mode": "0755", "owner": "jenkins", "path": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ec2_cache", "secontext": "system_u:object_r:var_lib_t:s0", "size": 6, "state": "directory", "uid": 997 } TASK [aws-up : place the EC2 dynamic inventory script] ************************* task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:23 changed: [localhost] => { "changed": true, "checksum": "625b8af723189db3b96ba0026d0f997a0025bc47", "dest": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/ec2.py", "generated_timestamp": "2018-06-02 05:25:56.219081", "gid": 995, "group": "jenkins", "md5sum": "cac06c14065dac74904232b89d4ba24c", "mode": "0755", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", 
"size": 63725, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1527931556.08-269314286795777/source", "state": "file", "uid": 997 } TASK [aws-up : place the EC2 dynamic inventory configuration] ****************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:29 changed: [localhost] => { "changed": true, "checksum": "63b72ac12180a9bc254b1fa4153e4d23fb752273", "dest": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/ec2.ini", "generated_timestamp": "2018-06-02 05:25:56.523169", "gid": 995, "group": "jenkins", "md5sum": "f54fee16ca25a4f00ac9f9212eb7d8f5", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 404, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1527931556.26-149485051765549/source", "state": "file", "uid": 997 } TASK [aws-up : place the EC2 tag to group mappings] **************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:34 changed: [localhost] => { "changed": true, "checksum": "b4205a33dc73f62bd4f77f35d045cf8e09ae62b0", "dest": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/tag_to_group_mappings", "generated_timestamp": "2018-06-02 05:25:56.824185", "gid": 995, "group": "jenkins", "md5sum": "bc3a567a1b6f342e1005182efc1b66be", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 287, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1527931556.69-28859068846073/source", "state": "file", "uid": 997 } TASK [aws-up : list available AMIs] ******************************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:40 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-06-02 05:26:02.753946", "results": [ { "ami_id": "ami-0a4a0f11fd88cae6d", "architecture": "x86_64", "block_device_mapping": { "/dev/sda1": { "delete_on_termination": true, "encrypted": false, "size": 75, "snapshot_id": "snap-029912e06739b18ce", "volume_type": "gp2" }, "/dev/sdb": { "delete_on_termination": true, "encrypted": false, "size": 50, "snapshot_id": "snap-00aa11eb0824f439e", "volume_type": "gp2" } }, "creationDate": "2018-05-30T22:57:26.000Z", "description": "OpenShift Origin development AMI on rhel at the build stage.", "hypervisor": "xen", "is_public": false, "location": "531415883065/ami_build_origin_int_rhel_build_2858", "name": "ami_build_origin_int_rhel_build_2858", "owner_id": "531415883065", "platform": null, "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "available", "tags": { "Name": "ami_build_origin_int_rhel_build_2858", "image_stage": "build", "operating_system": "rhel", "ready": "yes" }, "virtualization_type": "hvm" }, { "ami_id": "ami-02f7bf6134e0f2ab7", "architecture": "x86_64", "block_device_mapping": { "/dev/sda1": { "delete_on_termination": true, "encrypted": false, "size": 75, "snapshot_id": "snap-0849c4ad445460643", "volume_type": "gp2" }, "/dev/sdb": { "delete_on_termination": true, "encrypted": false, "size": 50, "snapshot_id": "snap-0d7c44b6dc5f1c9a9", "volume_type": "gp2" } }, "creationDate": "2018-05-31T04:34:07.000Z", "description": 
"OpenShift Origin development AMI on rhel at the build stage.", "hypervisor": "xen", "is_public": false, "location": "531415883065/ami_build_origin_int_rhel_build_2859", "name": "ami_build_origin_int_rhel_build_2859", "owner_id": "531415883065", "platform": null, "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "available", "tags": { "Name": "ami_build_origin_int_rhel_build_2859", "image_stage": "build", "operating_system": "rhel", "ready": "yes" }, "virtualization_type": "hvm" } ] } TASK [aws-up : choose appropriate AMIs for use] ******************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:49 ok: [localhost] => (item={u'ami_id': u'ami-0a4a0f11fd88cae6d', u'root_device_type': u'ebs', u'description': u'OpenShift Origin development AMI on rhel at the build stage.', u'tags': {u'ready': u'yes', u'image_stage': u'build', u'Name': u'ami_build_origin_int_rhel_build_2858', u'operating_system': u'rhel'}, u'hypervisor': u'xen', u'block_device_mapping': {u'/dev/sdb': {u'encrypted': False, u'snapshot_id': u'snap-00aa11eb0824f439e', u'delete_on_termination': True, u'volume_type': u'gp2', u'size': 50}, u'/dev/sda1': {u'encrypted': False, u'snapshot_id': u'snap-029912e06739b18ce', u'delete_on_termination': True, u'volume_type': u'gp2', u'size': 75}}, u'architecture': u'x86_64', u'owner_id': u'531415883065', u'platform': None, u'state': u'available', u'location': u'531415883065/ami_build_origin_int_rhel_build_2858', u'is_public': False, u'creationDate': u'2018-05-30T22:57:26.000Z', u'root_device_name': u'/dev/sda1', u'virtualization_type': u'hvm', u'name': u'ami_build_origin_int_rhel_build_2858'}) => { "ansible_facts": { "origin_ci_aws_ami_id_candidate": "ami-0a4a0f11fd88cae6d" }, "changed": false, "generated_timestamp": "2018-06-02 05:26:02.806918", "item": { "ami_id": "ami-0a4a0f11fd88cae6d", "architecture": "x86_64", "block_device_mapping": { "/dev/sda1": { "delete_on_termination": true, "encrypted": false, "size": 75, "snapshot_id": "snap-029912e06739b18ce", "volume_type": "gp2" }, "/dev/sdb": { "delete_on_termination": true, "encrypted": false, "size": 50, "snapshot_id": "snap-00aa11eb0824f439e", "volume_type": "gp2" } }, "creationDate": "2018-05-30T22:57:26.000Z", "description": "OpenShift Origin development AMI on rhel at the build stage.", "hypervisor": "xen", "is_public": false, "location": "531415883065/ami_build_origin_int_rhel_build_2858", "name": "ami_build_origin_int_rhel_build_2858", "owner_id": "531415883065", "platform": null, "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "available", "tags": { "Name": "ami_build_origin_int_rhel_build_2858", "image_stage": "build", "operating_system": "rhel", "ready": "yes" }, "virtualization_type": "hvm" } } ok: [localhost] => (item={u'ami_id': u'ami-02f7bf6134e0f2ab7', u'root_device_type': u'ebs', u'description': u'OpenShift Origin development AMI on rhel at the build stage.', u'tags': {u'ready': u'yes', u'image_stage': u'build', u'Name': u'ami_build_origin_int_rhel_build_2859', u'operating_system': u'rhel'}, u'hypervisor': u'xen', u'block_device_mapping': {u'/dev/sdb': {u'encrypted': False, u'snapshot_id': u'snap-0d7c44b6dc5f1c9a9', u'delete_on_termination': True, u'volume_type': u'gp2', u'size': 50}, u'/dev/sda1': {u'encrypted': False, u'snapshot_id': u'snap-0849c4ad445460643', u'delete_on_termination': True, u'volume_type': u'gp2', u'size': 75}}, u'architecture': 
u'x86_64', u'owner_id': u'531415883065', u'platform': None, u'state': u'available', u'location': u'531415883065/ami_build_origin_int_rhel_build_2859', u'is_public': False, u'creationDate': u'2018-05-31T04:34:07.000Z', u'root_device_name': u'/dev/sda1', u'virtualization_type': u'hvm', u'name': u'ami_build_origin_int_rhel_build_2859'}) => { "ansible_facts": { "origin_ci_aws_ami_id_candidate": "ami-02f7bf6134e0f2ab7" }, "changed": false, "generated_timestamp": "2018-06-02 05:26:02.814809", "item": { "ami_id": "ami-02f7bf6134e0f2ab7", "architecture": "x86_64", "block_device_mapping": { "/dev/sda1": { "delete_on_termination": true, "encrypted": false, "size": 75, "snapshot_id": "snap-0849c4ad445460643", "volume_type": "gp2" }, "/dev/sdb": { "delete_on_termination": true, "encrypted": false, "size": 50, "snapshot_id": "snap-0d7c44b6dc5f1c9a9", "volume_type": "gp2" } }, "creationDate": "2018-05-31T04:34:07.000Z", "description": "OpenShift Origin development AMI on rhel at the build stage.", "hypervisor": "xen", "is_public": false, "location": "531415883065/ami_build_origin_int_rhel_build_2859", "name": "ami_build_origin_int_rhel_build_2859", "owner_id": "531415883065", "platform": null, "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "available", "tags": { "Name": "ami_build_origin_int_rhel_build_2859", "image_stage": "build", "operating_system": "rhel", "ready": "yes" }, "virtualization_type": "hvm" } } TASK [aws-up : determine which AMI to use] ************************************* task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:55 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_ami_id": "ami-02f7bf6134e0f2ab7" }, "changed": false, "generated_timestamp": "2018-06-02 05:26:02.856909" } TASK [aws-up : determine which subnets are available] ************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:60 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-06-02 05:26:03.354172", "subnets": [ { "availability_zone": "us-east-1d", "available_ip_address_count": 4016, "cidr_block": "172.18.0.0/20", "default_for_az": "false", "id": "subnet-cf57c596", "map_public_ip_on_launch": "true", "state": "available", "tags": { "Name": "devenv-subnet-1", "origin_ci_aws_cluster_component": "master_subnet" }, "vpc_id": "vpc-69705d0c" }, { "availability_zone": "us-east-1c", "available_ip_address_count": 4082, "cidr_block": "172.18.16.0/20", "default_for_az": "false", "id": "subnet-8bdb5ac2", "map_public_ip_on_launch": "true", "state": "available", "tags": { "Name": "devenv-subnet-2", "origin_ci_aws_cluster_component": "master_subnet" }, "vpc_id": "vpc-69705d0c" } ] } TASK [aws-up : determine which subnets to use for the master] ****************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:67 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_master_subnet_ids": [ "subnet-cf57c596", "subnet-8bdb5ac2" ] }, "changed": false, "generated_timestamp": "2018-06-02 05:26:03.401853" } TASK [aws-up : determine which security groups are available] ****************** task path: 
/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:72 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-06-02 05:26:04.278175", "security_groups": [ { "description": "default VPC security group", "group_id": "sg-7e73221a", "group_name": "default", "ip_permissions": [ { "ip_protocol": "-1", "ip_ranges": [], "ipv6_ranges": [], "prefix_list_ids": [], "user_id_group_pairs": [ { "group_id": "sg-7e73221a", "user_id": "531415883065" } ] }, { "from_port": 53, "ip_protocol": "tcp", "ip_ranges": [ { "cidr_ip": "119.254.120.64/26" }, { "cidr_ip": "209.132.176.0/20" }, { "cidr_ip": "209.132.186.34/32" }, { "cidr_ip": "213.175.37.10/32" }, { "cidr_ip": "213.175.37.9/32" }, { "cidr_ip": "38.140.108.0/24" }, { "cidr_ip": "38.99.12.232/29" }, { "cidr_ip": "4.14.33.72/30" }, { "cidr_ip": "4.14.35.88/29" }, { "cidr_ip": "50.227.40.96/29" }, { "cidr_ip": "62.40.79.66/32" }, { "cidr_ip": "66.187.224.0/20" }, { "cidr_ip": "66.187.239.0/24" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 8444, "user_id_group_pairs": [] }, { "from_port": 80, "ip_protocol": "tcp", "ip_ranges": [ { "cidr_ip": "107.20.219.35/32" }, { "cidr_ip": "108.166.48.153/32" }, { "cidr_ip": "212.199.177.64/27" }, { "cidr_ip": "212.72.208.162/32" }, { "cidr_ip": "54.241.19.245/32" }, { "cidr_ip": "97.65.119.184/29" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 443, "user_id_group_pairs": [] }, { "from_port": 22, "ip_protocol": "tcp", "ip_ranges": [ { "cidr_ip": "0.0.0.0/0" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 22, "user_id_group_pairs": [] }, { "from_port": 53, "ip_protocol": "udp", "ip_ranges": [ { "cidr_ip": "209.132.176.0/20" }, { "cidr_ip": "66.187.224.0/20" }, { "cidr_ip": "66.187.239.0/24" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 53, "user_id_group_pairs": [] }, { "from_port": 0, "ip_protocol": "udp", "ip_ranges": [], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 65535, "user_id_group_pairs": [ { "group_id": "sg-0d1a5377", "user_id": "531415883065" }, { "group_id": "sg-5875023f", "user_id": "531415883065" }, { "group_id": "sg-7e73221a", "user_id": "531415883065" }, { "group_id": "sg-e1760186", "user_id": "531415883065" } ] }, { "from_port": 3389, "ip_protocol": "tcp", "ip_ranges": [ { "cidr_ip": "0.0.0.0/0" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": 3389, "user_id_group_pairs": [] }, { "from_port": -1, "ip_protocol": "icmp", "ip_ranges": [ { "cidr_ip": "0.0.0.0/0" } ], "ipv6_ranges": [], "prefix_list_ids": [], "to_port": -1, "user_id_group_pairs": [] } ], "ip_permissions_egress": [ { "ip_protocol": "-1", "ip_ranges": [ { "cidr_ip": "0.0.0.0/0" } ], "ipv6_ranges": [], "prefix_list_ids": [], "user_id_group_pairs": [] } ], "owner_id": "531415883065", "tags": { "Name": "devenv-vpc", "openshift_infra": "true", "origin_ci_aws_cluster_component": "master_security_group" }, "vpc_id": "vpc-69705d0c" } ] } TASK [aws-up : determine which security group to use] ************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:79 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_master_security_group_ids": [ "sg-7e73221a" ] }, "changed": false, "generated_timestamp": "2018-06-02 05:26:04.324291" } TASK [aws-up : provision an AWS EC2 instance] ********************************** task path: 
/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:84 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-06-02 05:26:21.419574", "instance_ids": [ "i-058cff891deed5ea1" ], "instances": [ { "ami_launch_index": "0", "architecture": "x86_64", "block_device_mapping": { "/dev/sda1": { "delete_on_termination": true, "status": "attached", "volume_id": "vol-00fc266584b1b74b5" }, "/dev/sdb": { "delete_on_termination": true, "status": "attached", "volume_id": "vol-0b7b843726a425a20" } }, "dns_name": "ec2-35-173-203-6.compute-1.amazonaws.com", "ebs_optimized": false, "groups": { "sg-7e73221a": "default" }, "hypervisor": "xen", "id": "i-058cff891deed5ea1", "image_id": "ami-02f7bf6134e0f2ab7", "instance_type": "m4.xlarge", "kernel": null, "key_name": "libra", "launch_time": "2018-06-02T09:26:05.000Z", "placement": "us-east-1d", "private_dns_name": "ip-172-18-2-81.ec2.internal", "private_ip": "172.18.2.81", "public_dns_name": "ec2-35-173-203-6.compute-1.amazonaws.com", "public_ip": "35.173.203.6", "ramdisk": null, "region": "us-east-1", "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "running", "state_code": 16, "tags": { "Name": "test_branch_origin_web_console_server_e2e_68", "openshift_etcd": "", "openshift_master": "", "openshift_node": "" }, "tenancy": "default", "virtualization_type": "hvm" } ], "tagged_instances": [] } TASK [aws-up : determine the host address] ************************************* task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:110 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_host": "172.18.2.81" }, "changed": false, "generated_timestamp": "2018-06-02 05:26:21.461648" } TASK [aws-up : determine the default user to use for SSH] ********************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:114 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-06-02 05:26:21.499981", "skip_reason": "Conditional check failed", "skipped": true } TASK [aws-up : determine the default user to use for SSH] ********************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:119 ok: [localhost] => { "ansible_facts": { "origin_ci_aws_ssh_user": "origin" }, "changed": false, "generated_timestamp": "2018-06-02 05:26:21.580271" } TASK [aws-up : update variables for the host] ********************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:124 changed: [localhost] => { "changed": true, "checksum": "2abb09ff67e33b50f30ce27a42fd0a39b6f80da7", "dest": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/host_vars/172.18.2.81.yml", "generated_timestamp": "2018-06-02 05:26:21.929163", "gid": 995, "group": "jenkins", "md5sum": "9610c56773677272018a635e340f3b68", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 759, "src": "/var/lib/jenkins/.ansible/tmp/ansible-tmp-1527931581.79-24461264196936/source", 
"state": "file", "uid": 997 } TASK [aws-up : determine where updated SSH configuration should go] ************ task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:141 ok: [localhost] => { "ansible_facts": { "origin_ci_ssh_config_files": [ "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config" ] }, "changed": false, "generated_timestamp": "2018-06-02 05:26:21.975952" } TASK [aws-up : determine where updated SSH configuration should go] ************ task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:146 skipping: [localhost] => { "changed": false, "generated_timestamp": "2018-06-02 05:26:22.013714", "skip_reason": "Conditional check failed", "skipped": true } TASK [aws-up : ensure the targeted SSH configuration file exists] ************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:151 changed: [localhost] => (item=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config) => { "changed": true, "dest": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config", "generated_timestamp": "2018-06-02 05:26:22.199441", "gid": 995, "group": "jenkins", "item": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config", "mode": "0644", "owner": "jenkins", "secontext": "system_u:object_r:var_lib_t:s0", "size": 0, "state": "file", "uid": 997 } TASK [aws-up : update the SSH configuration] *********************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:157 changed: [localhost] => (item=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config) => { "changed": true, "generated_timestamp": "2018-06-02 05:26:22.507370", "item": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config", "msg": "Block inserted" } TASK [aws-up : wait for SSH to be available] *********************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/provision/roles/aws-up/tasks/main.yml:175 ok: [localhost] => { "changed": false, "elapsed": 82, "generated_timestamp": "2018-06-02 05:27:44.901119", "path": null, "port": 22, "search_regex": null, "state": "started" } PLAY RECAP ********************************************************************* localhost : ok=28 changed=13 unreachable=0 failed=0 + set +o xtrace ########## FINISHED STAGE: SUCCESS: PROVISION CLOUD RESOURCES [00h 01m 52s] ########## [workspace] $ /bin/bash /tmp/jenkins1248233158091953128.sh ########## STARTING STAGE: FORWARD GCS CREDENTIALS TO REMOTE HOST ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export 
VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + (( i = 0 )) + (( i < 10 )) + scp -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config /var/lib/jenkins/.config/gcloud/gcs-publisher-credentials.json openshiftdevel:/data/credentials.json + break + set +o xtrace ########## FINISHED STAGE: SUCCESS: FORWARD GCS CREDENTIALS TO REMOTE HOST [00h 00m 04s] ########## [workspace] $ /bin/bash /tmp/jenkins5108440920403448450.sh ########## STARTING STAGE: FORWARD PARAMETERS TO THE REMOTE HOST ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo chmod o+rw /etc/environment + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''JOB_SPEC={"type":"postsubmit","job":"test_branch_origin_web_console_server_e2e","buildid":"e465f2b2-6646-11e8-92c9-0a58ac100eda","refs":{"org":"openshift","repo":"origin-web-console-server","base_ref":"master","base_sha":"14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67"}}'\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''buildId=e465f2b2-6646-11e8-92c9-0a58ac100eda'\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''BUILD_ID=e465f2b2-6646-11e8-92c9-0a58ac100eda'\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''REPO_OWNER=openshift'\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''REPO_NAME=origin-web-console-server'\'' >> /etc/environment' + ssh -F 
/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''PULL_BASE_REF=master'\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''PULL_BASE_SHA=14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67'\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''PULL_REFS=master:14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67'\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''PULL_NUMBER='\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''PULL_PULL_SHA='\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''JOB_SPEC={"type":"postsubmit","job":"test_branch_origin_web_console_server_e2e","buildid":"e465f2b2-6646-11e8-92c9-0a58ac100eda","refs":{"org":"openshift","repo":"origin-web-console-server","base_ref":"master","base_sha":"14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67"}}'\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''BUILD_NUMBER=68'\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''CLONEREFS_ARGS='\'' >> /etc/environment' + set +o xtrace ########## FINISHED STAGE: SUCCESS: FORWARD PARAMETERS TO THE REMOTE HOST [00h 00m 04s] ########## [workspace] $ /bin/bash /tmp/jenkins7280581552514815306.sh ########## STARTING STAGE: SYNC REPOSITORIES ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ mktemp + script=/tmp/tmp.R5aOx76yJq + cat + chmod +x /tmp/tmp.R5aOx76yJq + scp -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.R5aOx76yJq openshiftdevel:/tmp/tmp.R5aOx76yJq + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 14400 /tmp/tmp.R5aOx76yJq"' + cd /home/origin ++ jq --compact-output .buildid + [[ "e465f2b2-6646-11e8-92c9-0a58ac100eda" =~ ^\[0-9]\+$ ]] + echo 'Using BUILD_NUMBER' Using BUILD_NUMBER 
++ jq --compact-output '.buildid |= "68"' + JOB_SPEC='{"type":"postsubmit","job":"test_branch_origin_web_console_server_e2e","buildid":"68","refs":{"org":"openshift","repo":"origin-web-console-server","base_ref":"master","base_sha":"14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67"}}' + for image in ''\''registry.svc.ci.openshift.org/ci/clonerefs:latest'\''' ''\''registry.svc.ci.openshift.org/ci/initupload:latest'\''' + (( i = 0 )) + (( i < 5 )) + docker pull registry.svc.ci.openshift.org/ci/clonerefs:latest Trying to pull repository registry.svc.ci.openshift.org/ci/clonerefs ... latest: Pulling from registry.svc.ci.openshift.org/ci/clonerefs 1160f4abea84: Pulling fs layer be60dbe7622d: Pulling fs layer ef68ec3d2d55: Pulling fs layer 68aee63c4c28: Pulling fs layer 68aee63c4c28: Waiting 1160f4abea84: Verifying Checksum 1160f4abea84: Download complete be60dbe7622d: Verifying Checksum be60dbe7622d: Download complete ef68ec3d2d55: Verifying Checksum ef68ec3d2d55: Download complete 68aee63c4c28: Verifying Checksum 68aee63c4c28: Download complete 1160f4abea84: Pull complete be60dbe7622d: Pull complete ef68ec3d2d55: Pull complete 68aee63c4c28: Pull complete Digest: sha256:0431c6013d3dbc49e0a1f558a56e93c598e77eac4c981443f09095cc68083c67 Status: Downloaded newer image for registry.svc.ci.openshift.org/ci/clonerefs:latest + break + for image in ''\''registry.svc.ci.openshift.org/ci/clonerefs:latest'\''' ''\''registry.svc.ci.openshift.org/ci/initupload:latest'\''' + (( i = 0 )) + (( i < 5 )) + docker pull registry.svc.ci.openshift.org/ci/initupload:latest Trying to pull repository registry.svc.ci.openshift.org/ci/initupload ... latest: Pulling from registry.svc.ci.openshift.org/ci/initupload 1160f4abea84: Already exists be60dbe7622d: Already exists 62223bf3aa34: Pulling fs layer 62223bf3aa34: Verifying Checksum 62223bf3aa34: Download complete 62223bf3aa34: Pull complete Digest: sha256:3c5100fb0d2763713102e51c73d41900055ff4ebbda06e9c3c2293771279839c Status: Downloaded newer image for registry.svc.ci.openshift.org/ci/initupload:latest + break + clonerefs_args= + docker run -v /data:/data:z registry.svc.ci.openshift.org/ci/clonerefs:latest --src-root=/data --log=/data/clone.json --repo=openshift,origin-web-console-server=master:14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67 {"component":"clonerefs","level":"info","msg":"Cloning refs","refs":{"org":"openshift","repo":"origin-web-console-server","base_ref":"master","base_sha":"14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67"},"time":"2018-06-02T09:30:31Z"} {"command":"os.MkdirAll(/data/src/github.com/openshift/origin-web-console-server, 0755)","component":"clonerefs","error":null,"level":"info","msg":"Ran command","output":"","time":"2018-06-02T09:30:31Z"} {"command":"git init","component":"clonerefs","error":null,"level":"info","msg":"Ran command","output":"Reinitialized existing shared Git repository in /data/src/github.com/openshift/origin-web-console-server/.git/\n","time":"2018-06-02T09:30:31Z"} {"command":"git config user.name ci-robot","component":"clonerefs","error":null,"level":"info","msg":"Ran command","output":"","time":"2018-06-02T09:30:31Z"} {"command":"git config user.email ci-robot@k8s.io","component":"clonerefs","error":null,"level":"info","msg":"Ran command","output":"","time":"2018-06-02T09:30:31Z"} {"command":"git fetch https://github.com/openshift/origin-web-console-server.git --tags --prune","component":"clonerefs","error":null,"level":"info","msg":"Ran command","output":"From https://github.com/openshift/origin-web-console-server\n * branch HEAD -\u003e 
FETCH_HEAD\n * [new tag] atomic-openshift-web-console-3.10.0-0.39.0 -\u003e atomic-openshift-web-console-3.10.0-0.39.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.40.0 -\u003e atomic-openshift-web-console-3.10.0-0.40.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.41.0 -\u003e atomic-openshift-web-console-3.10.0-0.41.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.42.0 -\u003e atomic-openshift-web-console-3.10.0-0.42.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.43.0 -\u003e atomic-openshift-web-console-3.10.0-0.43.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.44.0 -\u003e atomic-openshift-web-console-3.10.0-0.44.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.46.0 -\u003e atomic-openshift-web-console-3.10.0-0.46.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.47.0 -\u003e atomic-openshift-web-console-3.10.0-0.47.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.50.0 -\u003e atomic-openshift-web-console-3.10.0-0.50.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.51.0 -\u003e atomic-openshift-web-console-3.10.0-0.51.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.52.0 -\u003e atomic-openshift-web-console-3.10.0-0.52.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.53.0 -\u003e atomic-openshift-web-console-3.10.0-0.53.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.54.0 -\u003e atomic-openshift-web-console-3.10.0-0.54.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.56.0 -\u003e atomic-openshift-web-console-3.10.0-0.56.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.57.0 -\u003e atomic-openshift-web-console-3.10.0-0.57.0\n * [new tag] atomic-openshift-web-console-3.10.0-0.58.0 -\u003e atomic-openshift-web-console-3.10.0-0.58.0\n * [new tag] atomic-openshift-web-console-3.9.30-1 -\u003e atomic-openshift-web-console-3.9.30-1\n","time":"2018-06-02T09:30:45Z"} {"command":"git fetch https://github.com/openshift/origin-web-console-server.git master","component":"clonerefs","error":null,"level":"info","msg":"Ran command","output":"From https://github.com/openshift/origin-web-console-server\n * branch master -\u003e FETCH_HEAD\n","time":"2018-06-02T09:30:45Z"} {"command":"git checkout 14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67","component":"clonerefs","error":null,"level":"info","msg":"Ran command","output":"Note: checking out '14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67'.\n\nYou are in 'detached HEAD' state. You can look around, make experimental\nchanges and commit them, and you can discard any commits you make in this\nstate without impacting any branches by performing another checkout.\n\nIf you want to create a new branch to retain commits you create, you may\ndo so (now or later) by using -b with the checkout command again. Example:\n\n git checkout -b \u003cnew-branch-name\u003e\n\nHEAD is now at 14f3f2f0... 
bump(github.com/openshift/origin-web-console): ff6bcd86af069728eeb52b986cb2a6444e42cc9e\n","time":"2018-06-02T09:31:16Z"} {"command":"git branch --force master 14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67","component":"clonerefs","error":null,"level":"info","msg":"Ran command","output":"","time":"2018-06-02T09:31:16Z"} {"command":"git checkout master","component":"clonerefs","error":null,"level":"info","msg":"Ran command","output":"Switched to branch 'master'\nYour branch is ahead of 'origin/master' by 5 commits.\n (use \"git push\" to publish your local commits)\n","time":"2018-06-02T09:31:17Z"} {"component":"clonerefs","level":"info","msg":"Finished cloning refs","time":"2018-06-02T09:31:17Z"} + docker run -e 'JOB_SPEC={"type":"postsubmit","job":"test_branch_origin_web_console_server_e2e","buildid":"68","refs":{"org":"openshift","repo":"origin-web-console-server","base_ref":"master","base_sha":"14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67"}}' -v /data:/data:z registry.svc.ci.openshift.org/ci/initupload:latest --clone-log=/data/clone.json --dry-run=false --gcs-path=gs://origin-ci-test --gcs-credentials-file=/data/credentials.json --path-strategy=single --default-org=openshift --default-repo=origin {"component":"initupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/clone-records.json","level":"info","msg":"Queued for upload","time":"2018-06-02T09:31:20Z"} {"component":"initupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/started.json","level":"info","msg":"Queued for upload","time":"2018-06-02T09:31:20Z"} {"component":"initupload","dest":"logs/test_branch_origin_web_console_server_e2e/latest-build.txt","level":"info","msg":"Queued for upload","time":"2018-06-02T09:31:20Z"} {"component":"initupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/clone-log.txt","level":"info","msg":"Queued for upload","time":"2018-06-02T09:31:20Z"} {"component":"initupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/clone-log.txt","level":"info","msg":"Finished upload","time":"2018-06-02T09:31:21Z"} {"component":"initupload","dest":"logs/test_branch_origin_web_console_server_e2e/latest-build.txt","level":"info","msg":"Finished upload","time":"2018-06-02T09:31:21Z"} {"component":"initupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/started.json","level":"info","msg":"Finished upload","time":"2018-06-02T09:31:21Z"} {"component":"initupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/clone-records.json","level":"info","msg":"Finished upload","time":"2018-06-02T09:31:21Z"} {"component":"initupload","level":"info","msg":"Finished upload to GCS","time":"2018-06-02T09:31:21Z"} + sudo chmod -R a+rwX /data + sudo chown -R origin:origin-git /data + set +o xtrace ########## FINISHED STAGE: SUCCESS: SYNC REPOSITORIES [00h 03m 33s] ########## [workspace] $ /bin/bash /tmp/jenkins682828401672657783.sh ########## STARTING STAGE: FORWARD PARAMETERS TO THE REMOTE HOST ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ 
PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo chmod o+rw /etc/environment + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''JOB_NAME=test_branch_origin_web_console_server_e2e'\'' >> /etc/environment' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'echo '\''BUILD_NUMBER=68'\'' >> /etc/environment' + set +o xtrace ########## FINISHED STAGE: SUCCESS: FORWARD PARAMETERS TO THE REMOTE HOST [00h 00m 00s] ########## [workspace] $ /bin/bash /tmp/jenkins851800407927240169.sh ########## STARTING STAGE: USE A RAMDISK FOR ETCD ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ mktemp + script=/tmp/tmp.Yqv0VRtkRZ + cat + chmod +x /tmp/tmp.Yqv0VRtkRZ + scp -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.Yqv0VRtkRZ openshiftdevel:/tmp/tmp.Yqv0VRtkRZ + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 300 /tmp/tmp.Yqv0VRtkRZ"' + cd /home/origin + sudo su root + set +o xtrace ########## FINISHED STAGE: SUCCESS: USE A RAMDISK FOR ETCD [00h 00m 01s] ########## [workspace] $ /bin/bash /tmp/jenkins5318116734558115228.sh ########## STARTING STAGE: TURN OFF UNNECESSARY RPM REPOS ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ 
OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ mktemp + script=/tmp/tmp.j3aeENokJ8 + cat + chmod +x /tmp/tmp.j3aeENokJ8 + scp -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.j3aeENokJ8 openshiftdevel:/tmp/tmp.j3aeENokJ8 + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 300 /tmp/tmp.j3aeENokJ8"' + cd /home/origin + sudo yum-config-manager --disable 'centos-paas-sig-openshift-origin*-rpms' Loaded plugins: amazon-id, rhui-lb + sudo yum-config-manager --disable 'origin-deps-rhel7*' Loaded plugins: amazon-id, rhui-lb + sudo yum-config-manager --disable 'rhel-7-server-ose*' Loaded plugins: amazon-id, rhui-lb ======================= repo: rhel-7-server-ose-3.1-rpms ======================= [rhel-7-server-ose-3.1-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://mirror.ops.rhcloud.com/enterprise/all/3.1/latest/x86_64/os, https://use-mirror1.ops.rhcloud.com/enterprise/all/3.1/latest/x86_64/os/, https://use-mirror2.ops.rhcloud.com/enterprise/all/3.1/latest/x86_64/os/, https://euw-mirror1.ops.rhcloud.com/enterprise/all/3.1/latest/x86_64/os/ cache = 0 cachedir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.1-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = False enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.1-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.1-rpms/gpgdir gpgkey = file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release, file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-beta, https://mirror.ops.rhcloud.com/libra/keys/RPM-GPG-KEY-redhat-openshifthosted hdrdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.1-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = A repository of dependencies for Atomic OpenShift 3.1 old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.1-rpms pkgdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.1-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = rhel-7-server-ose-3.1-rpms ui_repoid_vars = releasever, basearch username = ====================== repo: rhel-7-server-ose-3.11-rpms ======================= [rhel-7-server-ose-3.11-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://mirror.ops.rhcloud.com/enterprise/all/3.11/latest/x86_64/os, https://use-mirror1.ops.rhcloud.com/enterprise/all/3.11/latest/x86_64/os/, https://use-mirror2.ops.rhcloud.com/enterprise/all/3.11/latest/x86_64/os/, https://euw-mirror1.ops.rhcloud.com/enterprise/all/3.11/latest/x86_64/os/ cache = 0 cachedir = 
/var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.11-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = False enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.11-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.11-rpms/gpgdir gpgkey = file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release, file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-beta, https://mirror.ops.rhcloud.com/libra/keys/RPM-GPG-KEY-redhat-openshifthosted hdrdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.11-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = A repository of dependencies for Atomic OpenShift 3.11 old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.11-rpms pkgdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.11-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = rhel-7-server-ose-3.11-rpms ui_repoid_vars = releasever, basearch username = ====================== repo: rhel-7-server-ose-3.12-rpms ======================= [rhel-7-server-ose-3.12-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://mirror.ops.rhcloud.com/enterprise/all/3.12/latest/x86_64/os, https://use-mirror1.ops.rhcloud.com/enterprise/all/3.12/latest/x86_64/os/, https://use-mirror2.ops.rhcloud.com/enterprise/all/3.12/latest/x86_64/os/, https://euw-mirror1.ops.rhcloud.com/enterprise/all/3.12/latest/x86_64/os/ cache = 0 cachedir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.12-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = False enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.12-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.12-rpms/gpgdir gpgkey = file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release, file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-beta, https://mirror.ops.rhcloud.com/libra/keys/RPM-GPG-KEY-redhat-openshifthosted hdrdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.12-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = A repository of dependencies for Atomic OpenShift 3.12 old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.12-rpms pkgdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.12-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = 
True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = rhel-7-server-ose-3.12-rpms ui_repoid_vars = releasever, basearch username = ======================= repo: rhel-7-server-ose-3.6-rpms ======================= [rhel-7-server-ose-3.6-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://mirror.ops.rhcloud.com/enterprise/all/3.6/latest/x86_64/os, https://use-mirror1.ops.rhcloud.com/enterprise/all/3.6/latest/x86_64/os/, https://use-mirror2.ops.rhcloud.com/enterprise/all/3.6/latest/x86_64/os/, https://euw-mirror1.ops.rhcloud.com/enterprise/all/3.6/latest/x86_64/os/ cache = 0 cachedir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.6-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = False enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.6-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.6-rpms/gpgdir gpgkey = file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release, file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-beta, https://mirror.ops.rhcloud.com/libra/keys/RPM-GPG-KEY-redhat-openshifthosted hdrdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.6-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = A repository of dependencies for Atomic OpenShift 3.6 old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.6-rpms pkgdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.6-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = rhel-7-server-ose-3.6-rpms ui_repoid_vars = releasever, basearch username = ======================= repo: rhel-7-server-ose-3.7-rpms ======================= [rhel-7-server-ose-3.7-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://mirror.ops.rhcloud.com/enterprise/all/3.7/latest/x86_64/os, https://use-mirror1.ops.rhcloud.com/enterprise/all/3.7/latest/x86_64/os/, https://use-mirror2.ops.rhcloud.com/enterprise/all/3.7/latest/x86_64/os/, https://euw-mirror1.ops.rhcloud.com/enterprise/all/3.7/latest/x86_64/os/ cache = 0 cachedir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.7-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = False enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.7-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.7-rpms/gpgdir gpgkey = file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release, file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-beta, https://mirror.ops.rhcloud.com/libra/keys/RPM-GPG-KEY-redhat-openshifthosted hdrdir = 
/var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.7-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = A repository of dependencies for Atomic OpenShift 3.7 old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.7-rpms pkgdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.7-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = rhel-7-server-ose-3.7-rpms ui_repoid_vars = releasever, basearch username = ======================= repo: rhel-7-server-ose-3.8-rpms ======================= [rhel-7-server-ose-3.8-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://mirror.ops.rhcloud.com/enterprise/all/3.8/latest/x86_64/os, https://use-mirror1.ops.rhcloud.com/enterprise/all/3.8/latest/x86_64/os/, https://use-mirror2.ops.rhcloud.com/enterprise/all/3.8/latest/x86_64/os/, https://euw-mirror1.ops.rhcloud.com/enterprise/all/3.8/latest/x86_64/os/ cache = 0 cachedir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.8-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = False enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.8-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.8-rpms/gpgdir gpgkey = file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release, file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-beta, https://mirror.ops.rhcloud.com/libra/keys/RPM-GPG-KEY-redhat-openshifthosted hdrdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.8-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = A repository of dependencies for Atomic OpenShift 3.8 old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.8-rpms pkgdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.8-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = rhel-7-server-ose-3.8-rpms ui_repoid_vars = releasever, basearch username = ======================= repo: rhel-7-server-ose-3.9-rpms ======================= [rhel-7-server-ose-3.9-rpms] async = True bandwidth = 0 base_persistdir = /var/lib/yum/repos/x86_64/7Server baseurl = https://mirror.ops.rhcloud.com/enterprise/all/3.9/latest/x86_64/os, https://use-mirror1.ops.rhcloud.com/enterprise/all/3.9/latest/x86_64/os/, https://use-mirror2.ops.rhcloud.com/enterprise/all/3.9/latest/x86_64/os/, 
https://euw-mirror1.ops.rhcloud.com/enterprise/all/3.9/latest/x86_64/os/ cache = 0 cachedir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.9-rpms check_config_file_age = True compare_providers_priority = 80 cost = 1000 deltarpm_metadata_percentage = 100 deltarpm_percentage = enabled = False enablegroups = True exclude = failovermethod = priority ftp_disable_epsv = False gpgcadir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.9-rpms/gpgcadir gpgcakey = gpgcheck = False gpgdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.9-rpms/gpgdir gpgkey = file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-release, file:///etc/pki/rpm-gpg/RPM-GPG-KEY-redhat-beta, https://mirror.ops.rhcloud.com/libra/keys/RPM-GPG-KEY-redhat-openshifthosted hdrdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.9-rpms/headers http_caching = all includepkgs = ip_resolve = keepalive = True keepcache = False mddownloadpolicy = sqlite mdpolicy = group:small mediaid = metadata_expire = 21600 metadata_expire_filter = read-only:present metalink = minrate = 0 mirrorlist = mirrorlist_expire = 86400 name = A repository of dependencies for Atomic OpenShift 3.9 old_base_cache_dir = password = persistdir = /var/lib/yum/repos/x86_64/7Server/rhel-7-server-ose-3.9-rpms pkgdir = /var/cache/yum/x86_64/7Server/rhel-7-server-ose-3.9-rpms/packages proxy = False proxy_dict = proxy_password = proxy_username = repo_gpgcheck = False retries = 10 skip_if_unavailable = False ssl_check_cert_permissions = True sslcacert = sslclientcert = /var/lib/yum/client-cert.pem sslclientkey = /var/lib/yum/client-key.pem sslverify = False throttle = 0 timeout = 120.0 ui_id = rhel-7-server-ose-3.9-rpms ui_repoid_vars = releasever, basearch username = + [[ test_branch_origin_web_console_server_e2e == *update* ]] + set +o xtrace ########## FINISHED STAGE: SUCCESS: TURN OFF UNNECESSARY RPM REPOS [00h 00m 08s] ########## [workspace] $ /bin/bash /tmp/jenkins5707918714303904508.sh ########## STARTING STAGE: ENABLE DOCKER TESTED REPO ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ mktemp + script=/tmp/tmp.XMS7LvBHbL + cat + chmod +x /tmp/tmp.XMS7LvBHbL + scp -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.XMS7LvBHbL openshiftdevel:/tmp/tmp.XMS7LvBHbL + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 300 /tmp/tmp.XMS7LvBHbL"' + cd /home/origin + [[ master == \m\a\s\t\e\r ]] + sudo touch /etc/yum.repos.d/dockertested.repo + sudo chmod a+rw /etc/yum.repos.d/dockertested.repo + cat + set +o xtrace ########## FINISHED STAGE: SUCCESS: ENABLE DOCKER TESTED REPO [00h 00m 01s] 
########## [workspace] $ /bin/bash /tmp/jenkins7906435145296826852.sh ########## STARTING STAGE: BUILD THE WEB CONSOLE CONTAINER IMAGE ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ mktemp + script=/tmp/tmp.Jp5faSbgP2 + cat + chmod +x /tmp/tmp.Jp5faSbgP2 + scp -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.Jp5faSbgP2 openshiftdevel:/tmp/tmp.Jp5faSbgP2 + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 1200 /tmp/tmp.Jp5faSbgP2"' + cd /data/src/github.com/openshift/origin-web-console-server + sudo yum '--disablerepo=*' --enablerepo=origin-local-release,oso-rhui-rhel-server-releases install -y origin-clients Loaded plugins: amazon-id, rhui-lb, search-disabled-repos Resolving Dependencies --> Running transaction check ---> Package origin-clients.x86_64 0:3.10.0-0.alpha.0.1354.a861408 will be installed --> Processing Dependency: bash-completion for package: origin-clients-3.10.0-0.alpha.0.1354.a861408.x86_64 --> Running transaction check ---> Package bash-completion.noarch 1:2.1-6.el7 will be installed --> Finished Dependency Resolution Dependencies Resolved ================================================================================ Package Arch Version Repository Size ================================================================================ Installing: origin-clients x86_64 3.10.0-0.alpha.0.1354.a861408 origin-local-release 19 M Installing for dependencies: bash-completion noarch 1:2.1-6.el7 oso-rhui-rhel-server-releases 85 k Transaction Summary ================================================================================ Install 1 Package (+1 Dependent package) Total download size: 20 M Installed size: 109 M Downloading packages: -------------------------------------------------------------------------------- Total 16 MB/s | 20 MB 00:01 Running transaction check Running transaction test Transaction test succeeded Running transaction Installing : 1:bash-completion-2.1-6.el7.noarch 1/2 Installing : origin-clients-3.10.0-0.alpha.0.1354.a861408.x86_64 2/2 Verifying : 1:bash-completion-2.1-6.el7.noarch 1/2 Verifying : origin-clients-3.10.0-0.alpha.0.1354.a861408.x86_64 2/2 Installed: origin-clients.x86_64 0:3.10.0-0.alpha.0.1354.a861408 Dependency Installed: bash-completion.noarch 1:2.1-6.el7 Complete! + make build-images OS_ONLY_BUILD_PLATFORMS='linux/amd64' hack/build-rpms.sh [INFO] Building release RPMs for /data/src/github.com/openshift/origin-web-console-server/origin-web-console-server.spec ... 
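A condensed sketch of what the "BUILD THE WEB CONSOLE CONTAINER IMAGE" stage runs on the remote host, reconstructed only from the xtrace echoed above; the real stage wraps these commands in a generated /tmp script executed over ssh with a 1200-second timeout.

# Sketch: install the origin client tools from the local release repo, then let the
# make target drive hack/build-rpms.sh (origin-web-console RPM) and hack/build-images.sh
# (openshift/origin-web-console container image), as seen in the output that follows.
cd /data/src/github.com/openshift/origin-web-console-server
sudo yum '--disablerepo=*' \
    --enablerepo=origin-local-release,oso-rhui-rhel-server-releases \
    install -y origin-clients
make build-images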
[WARNING] Repository is not clean, performing fast build and reusing _output Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.BUDp3h + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + exit 0 Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.PwePSk + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + BUILD_PLATFORM=linux/amd64 + OS_ONLY_BUILD_PLATFORMS=linux/amd64 + OS_GIT_COMMIT=14f3f2f + OS_GIT_TREE_STATE=dirty + OS_GIT_VERSION=v3.10.0-alpha.0+14f3f2f-40-dirty + OS_GIT_MAJOR=3 + OS_GIT_MINOR=10+ + OS_GIT_PATCH=0 + make build-cross make[1]: Entering directory `/data/src/github.com/openshift/origin-web-console-server' hack/build-cross.sh ++ Building go targets for linux/amd64: cmd/origin-web-console hack/build-cross.sh took 55 seconds make[1]: Leaving directory `/data/src/github.com/openshift/origin-web-console-server' + exit 0 Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.h2R8rA + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + '[' /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64 '!=' / ']' + rm -rf /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64 ++ dirname /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64 + mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT + mkdir /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64 ++ go env GOHOSTOS ++ go env GOHOSTARCH + PLATFORM=linux/amd64 + install -d /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64/usr/bin + for bin in origin-web-console +++ INSTALLING origin-web-console + echo '+++ INSTALLING origin-web-console' + install -p -m 755 _output/local/bin/linux/amd64/origin-web-console /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64/usr/bin/origin-web-console + /usr/lib/rpm/check-buildroot + /usr/lib/rpm/brp-compress Processing files: origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.el7.x86_64 Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.Hy3htQ + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + DOCDIR=/tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64/usr/share/doc/origin-web-console-3.10.0 + export DOCDIR + /usr/bin/mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64/usr/share/doc/origin-web-console-3.10.0 + cp -pr README.md /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64/usr/share/doc/origin-web-console-3.10.0 + exit 0 Executing(%license): /bin/sh -e /var/tmp/rpm-tmp.r2Gwv6 + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + LICENSEDIR=/tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64/usr/share/licenses/origin-web-console-3.10.0 + export LICENSEDIR + /usr/bin/mkdir -p /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64/usr/share/licenses/origin-web-console-3.10.0 + cp -pr LICENSE /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64/usr/share/licenses/origin-web-console-3.10.0 + exit 0 Provides: origin-web-console = 3.10.0-0.alpha.0.40.14f3f2f.el7 origin-web-console(x86-64) = 3.10.0-0.alpha.0.40.14f3f2f.el7 Requires(interp): /bin/sh Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1 Requires(pre): /bin/sh Requires: libc.so.6()(64bit) 
libc.so.6(GLIBC_2.2.5)(64bit) libpthread.so.0()(64bit) libpthread.so.0(GLIBC_2.2.5)(64bit) libpthread.so.0(GLIBC_2.3.2)(64bit) Checking for unpackaged file(s): /usr/lib/rpm/check-files /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64 Wrote: /tmp/openshift/build-rpms/rpm/RPMS/x86_64/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.el7.x86_64.rpm Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.54BGDY + umask 022 + cd /tmp/openshift/build-rpms/rpm/BUILD + /usr/bin/rm -rf /tmp/openshift/build-rpms/rpm/BUILDROOT/origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.x86_64 + exit 0 Spawning worker 0 with 1 pkgs Spawning worker 1 with 1 pkgs Spawning worker 2 with 0 pkgs Spawning worker 3 with 0 pkgs Workers Finished Saving Primary metadata Saving file lists metadata Saving other metadata Generating sqlite DBs Sqlite DBs complete [INFO] Repository file for `yum` or `dnf` placed at /data/src/github.com/openshift/origin-web-console-server/_output/local/releases/rpms/local-release.repo [INFO] Install it with: [INFO] $ mv '/data/src/github.com/openshift/origin-web-console-server/_output/local/releases/rpms/local-release.repo' '/etc/yum.repos.d [INFO] hack/build-rpms.sh exited with code 0 after 00h 01m 08s hack/build-images.sh [openshift/origin-web-console] --> FROM openshift/origin-base as 0 [openshift/origin-web-console] --> RUN INSTALL_PKGS="origin-web-console" && yum --enablerepo=origin-local-release install -y ${INSTALL_PKGS} && rpm -V ${INSTALL_PKGS} && yum clean all [openshift/origin-web-console] Loaded plugins: fastestmirror, ovl [openshift/origin-web-console] Determining fastest mirrors [openshift/origin-web-console] * base: distro.ibiblio.org [openshift/origin-web-console] * extras: mirror.umd.edu [openshift/origin-web-console] * updates: mirrors.advancedhosters.com [openshift/origin-web-console] Resolving Dependencies [openshift/origin-web-console] --> Running transaction check [openshift/origin-web-console] ---> Package origin-web-console.x86_64 0:3.10.0-0.alpha.0.40.14f3f2f.el7 will be installed [openshift/origin-web-console] --> Finished Dependency Resolution [openshift/origin-web-console] Dependencies Resolved [openshift/origin-web-console] ================================================================================ [openshift/origin-web-console] Package Arch Version Repository Size [openshift/origin-web-console] ================================================================================ [openshift/origin-web-console] Installing: [openshift/origin-web-console] origin-web-console [openshift/origin-web-console] x86_64 3.10.0-0.alpha.0.40.14f3f2f.el7 origin-local-release 16 M [openshift/origin-web-console] Transaction Summary [openshift/origin-web-console] ================================================================================ [openshift/origin-web-console] Install 1 Package [openshift/origin-web-console] Total download size: 16 M [openshift/origin-web-console] Installed size: 82 M [openshift/origin-web-console] Downloading packages: [openshift/origin-web-console] Running transaction check [openshift/origin-web-console] Running transaction test [openshift/origin-web-console] Transaction test succeeded [openshift/origin-web-console] Running transaction [openshift/origin-web-console] Installing : origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.el7.x86_64 1/1 [openshift/origin-web-console] Verifying : origin-web-console-3.10.0-0.alpha.0.40.14f3f2f.el7.x86_64 1/1 [openshift/origin-web-console] Installed: [openshift/origin-web-console] 
origin-web-console.x86_64 0:3.10.0-0.alpha.0.40.14f3f2f.el7 [openshift/origin-web-console] Complete! [openshift/origin-web-console] Loaded plugins: fastestmirror, ovl [openshift/origin-web-console] Cleaning repos: base cbs-paas7-openshift-multiarch-el7-build extras updates [openshift/origin-web-console] Cleaning up everything [openshift/origin-web-console] Maybe you want: rm -rf /var/cache/yum, to also free up space taken by orphaned data from disabled or removed repos [openshift/origin-web-console] Cleaning up list of fastest mirrors [openshift/origin-web-console] --> LABEL io.k8s.display-name="OpenShift Web Console" io.k8s.description="This is a component of OpenShift Container Platform and provides a web console." io.openshift.tags="openshift" [openshift/origin-web-console] --> USER 1001 [openshift/origin-web-console] --> EXPOSE 5000 [openshift/origin-web-console] --> CMD [ "/usr/bin/origin-web-console" ] [openshift/origin-web-console] --> Committing changes to openshift/origin-web-console:14f3f2f ... [openshift/origin-web-console] --> Tagged as openshift/origin-web-console:latest [openshift/origin-web-console] --> Done [INFO] hack/build-images.sh exited with code 0 after 00h 00m 46s + set +o xtrace ########## FINISHED STAGE: SUCCESS: BUILD THE WEB CONSOLE CONTAINER IMAGE [00h 02m 40s] ########## [workspace] $ /bin/bash /tmp/jenkins3512313976794554973.sh ########## STARTING STAGE: VALIDATE CONSOLE STARTS ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ mktemp + script=/tmp/tmp.AY3LcsKgIT + cat + chmod +x /tmp/tmp.AY3LcsKgIT + scp -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.AY3LcsKgIT openshiftdevel:/tmp/tmp.AY3LcsKgIT + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 600 /tmp/tmp.AY3LcsKgIT"' + cd /data/src/github.com/openshift/origin-web-console-server + oc cluster up --tag=latest --public-hostname=localhost --loglevel=5 Getting a Docker client ... I0602 09:34:16.108421 3285 up.go:238] Using "IfNotPresent" as default image pull policy I0602 09:34:16.108657 3285 client.go:309] No Docker environment variables found. Will attempt default socket. I0602 09:34:16.108664 3285 client.go:314] No Docker host (DOCKER_HOST) configured. Will attempt default socket. Checking if image openshift/origin-control-plane:latest is available ... 
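The "VALIDATE CONSOLE STARTS" stage reduces to bringing up a local all-in-one cluster from the images built above; a minimal sketch of the remote-side commands, assuming nothing beyond what the xtrace shows (the 600-second timeout comes from the ssh wrapper around the generated /tmp script).

cd /data/src/github.com/openshift/origin-web-console-server
# Start a local cluster from the :latest images (which now include the freshly built
# openshift/origin-web-console:latest) and log verbosely so the checks below are visible.
oc cluster up --tag=latest --public-hostname=localhost --loglevel=5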
I0602 09:34:16.114794 3285 helper.go:137] Inspecting Docker image "openshift/origin-control-plane:latest" I0602 09:34:16.118366 3285 helper.go:140] Image "openshift/origin-control-plane:latest" found: &types.ImageInspect{ID:"sha256:869eff902877d2e1f81a34c977482dfb0cc1449dcd52d09396e2cad6c393bdae", RepoTags:[]string{"openshift/origin-control-plane:a861408", "openshift/origin-control-plane:latest"}, RepoDigests:[]string{}, Parent:"sha256:e444a450c1946106e861789c9146977f2e9d8069d97a4a99a0bd9d4f3da5bce0", Comment:"", Created:"2018-05-31T04:15:45.692442681Z", Container:"319f70920bfb163363579fe40998483823bfba6dde5d0d2547b8678b7bdc4b11", ContainerConfig:(*container.Config)(0xc42093e000), DockerVersion:"1.13.1", Author:"", Config:(*container.Config)(0xc42093e140), Architecture:"amd64", Os:"linux", OsVersion:"", Size:672313070, VirtualSize:672313070, GraphDriver:types.GraphDriverData{Data:map[string]string{"LowerDir":"/var/lib/docker/overlay2/fca9104430a3e1d315d856e88681760cec34ff15050d8a55e5b3e2b644e28a1c/diff:/var/lib/docker/overlay2/9919b1eb69a39fe5a4ed5f8516305e9b257a0bc7a583e2b1aec8141a850fa5d2/diff:/var/lib/docker/overlay2/3dddfd819cd05bbd52003bd251336b24a46e81d5761981d478b6c9096fb4a0c3/diff:/var/lib/docker/overlay2/85db87c2e17ecad3f0afc16c8c7f9dcc8474abfb5f5a516414f55b40641d8221/diff", "MergedDir":"/var/lib/docker/overlay2/06b8878f76c267389d3ae48528393ad065ff31810f085bf2eb0d30385d943df7/merged", "UpperDir":"/var/lib/docker/overlay2/06b8878f76c267389d3ae48528393ad065ff31810f085bf2eb0d30385d943df7/diff", "WorkDir":"/var/lib/docker/overlay2/06b8878f76c267389d3ae48528393ad065ff31810f085bf2eb0d30385d943df7/work"}, Name:"overlay2"}, RootFS:types.RootFS{Type:"layers", Layers:[]string{"sha256:43e653f84b79ba52711b0f726ff5a7fd1162ae9df4be76ca1de8370b8bbf9bb0", "sha256:b21b3e3988f36613b6e2984ea2fac595b1603db13d6511b71db97cb2a958bec7", "sha256:2c4c6c13c2cc1c4f4286f64bf3e8ead3773de078c49fb022a5c433e2d92ae847", "sha256:fa7c079d2181bf097a4ac062d8d1649bc31439d6d3ba4faf9b444f52451fe9d2", "sha256:b0f4dfd7ee0c205f685291983cd216fd1996648ff499008e25bdbf03b804192f"}, BaseLayer:""}, Metadata:types.ImageMetadata{LastTagTime:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}} I0602 09:34:16.118481 3285 helper.go:137] Inspecting Docker image "openshift/origin-cli:latest" I0602 09:34:16.120802 3285 helper.go:140] Image "openshift/origin-cli:latest" found: &types.ImageInspect{ID:"sha256:e444a450c1946106e861789c9146977f2e9d8069d97a4a99a0bd9d4f3da5bce0", RepoTags:[]string{"openshift/origin-cli:a861408", "openshift/origin-cli:latest"}, RepoDigests:[]string{}, Parent:"sha256:29ca45e875008789445c683583f3c965eb89b6cbbb5bf24531789f356e8a294d", Comment:"", Created:"2018-05-31T04:14:38.570355399Z", Container:"197c668b245f9c7af22bff7b4dbc5c0aa1644fc7bcda916c25d8e8b232658031", ContainerConfig:(*container.Config)(0xc4207ae000), DockerVersion:"1.13.1", Author:"", Config:(*container.Config)(0xc4207ae140), Architecture:"amd64", Os:"linux", OsVersion:"", Size:364331746, VirtualSize:364331746, GraphDriver:types.GraphDriverData{Data:map[string]string{"LowerDir":"/var/lib/docker/overlay2/9919b1eb69a39fe5a4ed5f8516305e9b257a0bc7a583e2b1aec8141a850fa5d2/diff:/var/lib/docker/overlay2/3dddfd819cd05bbd52003bd251336b24a46e81d5761981d478b6c9096fb4a0c3/diff:/var/lib/docker/overlay2/85db87c2e17ecad3f0afc16c8c7f9dcc8474abfb5f5a516414f55b40641d8221/diff", "MergedDir":"/var/lib/docker/overlay2/fca9104430a3e1d315d856e88681760cec34ff15050d8a55e5b3e2b644e28a1c/merged", 
"UpperDir":"/var/lib/docker/overlay2/fca9104430a3e1d315d856e88681760cec34ff15050d8a55e5b3e2b644e28a1c/diff", "WorkDir":"/var/lib/docker/overlay2/fca9104430a3e1d315d856e88681760cec34ff15050d8a55e5b3e2b644e28a1c/work"}, Name:"overlay2"}, RootFS:types.RootFS{Type:"layers", Layers:[]string{"sha256:43e653f84b79ba52711b0f726ff5a7fd1162ae9df4be76ca1de8370b8bbf9bb0", "sha256:b21b3e3988f36613b6e2984ea2fac595b1603db13d6511b71db97cb2a958bec7", "sha256:2c4c6c13c2cc1c4f4286f64bf3e8ead3773de078c49fb022a5c433e2d92ae847", "sha256:fa7c079d2181bf097a4ac062d8d1649bc31439d6d3ba4faf9b444f52451fe9d2"}, BaseLayer:""}, Metadata:types.ImageMetadata{LastTagTime:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}} I0602 09:34:16.120877 3285 helper.go:137] Inspecting Docker image "openshift/origin-node:latest" I0602 09:34:16.123784 3285 helper.go:140] Image "openshift/origin-node:latest" found: &types.ImageInspect{ID:"sha256:20cf292a36f0cc3b636dd00ec071fc685129a0697fa33d3dd848c7202cff58b3", RepoTags:[]string{"openshift/origin-node:a861408", "openshift/origin-node:latest"}, RepoDigests:[]string{}, Parent:"sha256:869eff902877d2e1f81a34c977482dfb0cc1449dcd52d09396e2cad6c393bdae", Comment:"", Created:"2018-05-31T04:21:02.014599772Z", Container:"dac3edf40b05fc69768b91d8657381778cf30e2548058a7b0b67033789716435", ContainerConfig:(*container.Config)(0xc42093e280), DockerVersion:"1.13.1", Author:"", Config:(*container.Config)(0xc42093e3c0), Architecture:"amd64", Os:"linux", OsVersion:"", Size:1310847647, VirtualSize:1310847647, GraphDriver:types.GraphDriverData{Data:map[string]string{"WorkDir":"/var/lib/docker/overlay2/86fdbead0312bf846fc30f1ffb7e6ed0d5fd07d7f1f26a61c99d9f595cc0a2c0/work", "LowerDir":"/var/lib/docker/overlay2/06b8878f76c267389d3ae48528393ad065ff31810f085bf2eb0d30385d943df7/diff:/var/lib/docker/overlay2/fca9104430a3e1d315d856e88681760cec34ff15050d8a55e5b3e2b644e28a1c/diff:/var/lib/docker/overlay2/9919b1eb69a39fe5a4ed5f8516305e9b257a0bc7a583e2b1aec8141a850fa5d2/diff:/var/lib/docker/overlay2/3dddfd819cd05bbd52003bd251336b24a46e81d5761981d478b6c9096fb4a0c3/diff:/var/lib/docker/overlay2/85db87c2e17ecad3f0afc16c8c7f9dcc8474abfb5f5a516414f55b40641d8221/diff", "MergedDir":"/var/lib/docker/overlay2/86fdbead0312bf846fc30f1ffb7e6ed0d5fd07d7f1f26a61c99d9f595cc0a2c0/merged", "UpperDir":"/var/lib/docker/overlay2/86fdbead0312bf846fc30f1ffb7e6ed0d5fd07d7f1f26a61c99d9f595cc0a2c0/diff"}, Name:"overlay2"}, RootFS:types.RootFS{Type:"layers", Layers:[]string{"sha256:43e653f84b79ba52711b0f726ff5a7fd1162ae9df4be76ca1de8370b8bbf9bb0", "sha256:b21b3e3988f36613b6e2984ea2fac595b1603db13d6511b71db97cb2a958bec7", "sha256:2c4c6c13c2cc1c4f4286f64bf3e8ead3773de078c49fb022a5c433e2d92ae847", "sha256:fa7c079d2181bf097a4ac062d8d1649bc31439d6d3ba4faf9b444f52451fe9d2", "sha256:b0f4dfd7ee0c205f685291983cd216fd1996648ff499008e25bdbf03b804192f", "sha256:a87257ecfee445ce2304583b1830623e818e9fc6775f8d2fc6d865229e8e2794"}, BaseLayer:""}, Metadata:types.ImageMetadata{LastTagTime:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}} I0602 09:34:16.123861 3285 helper.go:99] Retrieving Docker version Checking type of volume mount ... 
I0602 09:34:16.125415 3285 helper.go:105] Docker version results: &types.Version{Version:"1.13.1", APIVersion:"1.26", MinAPIVersion:"1.12", GitCommit:"87f2fab/1.13.1", GoVersion:"go1.9.2", Os:"linux", Arch:"amd64", KernelVersion:"3.10.0-862.2.3.el7.x86_64", Experimental:false, BuildTime:"2018-03-19T18:55:01.568114340+00:00"} I0602 09:34:16.125527 3285 helper.go:46] Retrieving Docker daemon info I0602 09:34:16.143654 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:2, ContainersRunning:0, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:16, OomKillDisable:true, NGoroutines:24, SystemTime:"2018-06-02T09:34:16.1355816Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc42095c620), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc42093e500)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:34:16.143778 3285 run.go:195] Creating container named "" config: image: openshift/origin-control-plane:latest entry point: /bin/bash command: -c nsenter --mount=/rootfs/proc/1/ns/mnt findmnt host config: pid mode: user mode: network mode: volume binds: /:/rootfs:ro I0602 09:34:16.618887 3285 run.go:200] Container created with id "c8cd2a6afa05877273074f6a1288a05574046cc2fc9aa430e746b4401bf1ec11" I0602 09:34:16.618915 3285 run.go:304] Starting container "c8cd2a6afa05877273074f6a1288a05574046cc2fc9aa430e746b4401bf1ec11" I0602 09:34:17.143592 3285 run.go:311] Waiting for container "c8cd2a6afa05877273074f6a1288a05574046cc2fc9aa430e746b4401bf1ec11" I0602 09:34:17.268993 
3285 run.go:317] Done waiting for container "c8cd2a6afa05877273074f6a1288a05574046cc2fc9aa430e746b4401bf1ec11", rc=0 I0602 09:34:17.269020 3285 run.go:322] Reading logs from container "c8cd2a6afa05877273074f6a1288a05574046cc2fc9aa430e746b4401bf1ec11" I0602 09:34:17.270998 3285 run.go:330] Done reading logs from container "c8cd2a6afa05877273074f6a1288a05574046cc2fc9aa430e746b4401bf1ec11" I0602 09:34:17.271031 3285 run.go:337] Stdout: TARGET SOURCE FSTYPE OPTIONS / /dev/xvda2 xfs rw,relatime,seclabel,attr2,inode64,noquota |-/sys sysfs sysfs rw,nosuid,nodev,noexec,relatime,seclabel | |-/sys/kernel/security securityfs securityfs rw,nosuid,nodev,noexec,relatime | |-/sys/fs/cgroup tmpfs tmpfs ro,nosuid,nodev,noexec,seclabel,mode=755 | | |-/sys/fs/cgroup/systemd cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd | | |-/sys/fs/cgroup/net_cls,net_prio cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,net_prio,net_cls | | |-/sys/fs/cgroup/devices cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,devices | | |-/sys/fs/cgroup/hugetlb cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,hugetlb | | |-/sys/fs/cgroup/memory cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,memory | | |-/sys/fs/cgroup/freezer cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,freezer | | |-/sys/fs/cgroup/blkio cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,blkio | | |-/sys/fs/cgroup/cpu,cpuacct cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,cpuacct,cpu | | |-/sys/fs/cgroup/cpuset cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,cpuset | | |-/sys/fs/cgroup/pids cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,pids | | `-/sys/fs/cgroup/perf_event cgroup cgroup rw,nosuid,nodev,noexec,relatime,seclabel,perf_event | |-/sys/fs/pstore pstore pstore rw,nosuid,nodev,noexec,relatime | |-/sys/fs/selinux selinuxfs selinuxfs rw,relatime | |-/sys/kernel/debug debugfs debugfs rw,relatime | `-/sys/kernel/config configfs configfs rw,relatime |-/proc proc proc rw,nosuid,nodev,noexec,relatime | `-/proc/sys/fs/binfmt_misc systemd-1 autofs rw,relatime,fd=26,pgrp=0,timeout=0,minproto=5,maxproto=5,direct,pipe_ino=2570 |-/dev devtmpfs devtmpfs rw,nosuid,seclabel,size=8103708k,nr_inodes=2025927,mode=755 | |-/dev/shm tmpfs tmpfs rw,nosuid,nodev,seclabel | |-/dev/pts devpts devpts rw,nosuid,noexec,relatime,seclabel,gid=5,mode=620,ptmxmode=000 | |-/dev/mqueue mqueue mqueue rw,relatime,seclabel | `-/dev/hugepages hugetlbfs hugetlbfs rw,relatime,seclabel |-/run tmpfs tmpfs rw,nosuid,nodev,seclabel,mode=755 | |-/run/user/1001 tmpfs tmpfs rw,nosuid,nodev,relatime,seclabel,size=1626592k,mode=700,uid=1001,gid=1002 | `-/run/docker/netns/c01529c1cd26 proc proc rw,nosuid,nodev,noexec,relatime |-/mnt/openshift-xfs-vol-dir /dev/mapper/docker-openshift--xfs--vol--dir xfs rw,relatime,seclabel,attr2,inode64,grpquota |-/tmp tmpfs tmpfs rw,relatime,seclabel,size=4194304k |-/var/lib/docker/containers /dev/xvda2[/var/lib/docker/containers] xfs rw,relatime,seclabel,attr2,inode64,noquota | `-/var/lib/docker/containers/c8cd2a6afa05877273074f6a1288a05574046cc2fc9aa430e746b4401bf1ec11/shm shm tmpfs rw,nosuid,nodev,noexec,relatime,seclabel,size=65536k `-/var/lib/docker/overlay2 /dev/xvda2[/var/lib/docker/overlay2] xfs rw,relatime,seclabel,attr2,inode64,noquota `-/var/lib/docker/overlay2/6377e24052143258b2504815bf0719bb66fcc55d63b358d0cdd2bf3b0c790867/merged overlay overlay 
rw,relatime,seclabel,lowerdir=/var/lib/docker/overlay2/l/JWXS5HGNLOSOP54DNOQRBAAWWY:/var/lib/docker/overlay2/l/UG33HPWJD5AEHZIQPLKJRUZ3OG:/var/lib/docker/overlay2/l/WEAKS6AOAOLMJVBADZZRZXHUQB:/var/lib/docker/overlay2/l/4FSFURXV5A2DOBK7XQ5QK7ZQZR:/var/lib/docker/overlay2/l/SZNBBIVDHJTBVSUX2XU7ZE2XNM:/var/lib/docker/overlay2/l/AHOCS47HO5ZANYNRPOYUJWPIIF,upperdir=/var/lib/docker/overlay2/6377e24052143258b2504815bf0719bb66fcc55d63b358d0cdd2bf3b0c790867/diff,workdir=/var/lib/docker/overlay2/6377e24052143258b2504815bf0719bb66fcc55d63b358d0cdd2bf3b0c790867/work I0602 09:34:17.271057 3285 run.go:338] Stderr: I0602 09:34:17.271069 3285 run.go:342] Container run successful I0602 09:34:17.271085 3285 run.go:293] Deleting container "c8cd2a6afa05877273074f6a1288a05574046cc2fc9aa430e746b4401bf1ec11" Determining server IP ... I0602 09:34:17.279414 3285 helper.go:223] Cannot use Docker endpoint (unix:///var/run/docker.sock) because it is not using one of the following protocols: tcp, http, https I0602 09:34:17.279431 3285 up.go:973] Cannot use the Docker host IP(): <nil> I0602 09:34:17.279469 3285 run.go:195] Creating container named "" config: image: openshift/origin-control-plane:latest entry point: socat command: TCP-LISTEN:8443,crlf,reuseaddr,fork SYSTEM:"echo 'hello world'" host config: pid mode: user mode: network mode: host I0602 09:34:17.322144 3285 run.go:200] Container created with id "618e60dcb1623a1467cd869ec7b2db03766c71c7a04c0d7ed07eb580044be824" I0602 09:34:17.702656 3285 helper.go:81] Attempting to dial 127.0.0.1:8443 I0602 09:34:17.703850 3285 net.go:89] Got error &net.OpError{Op:"dial", Net:"tcp", Source:net.Addr(nil), Addr:(*net.TCPAddr)(0xc421065b60), Err:(*os.SyscallError)(0xc420464760)}, trying again: "127.0.0.1:8443" I0602 09:34:18.704380 3285 helper.go:86] Successfully dialed 127.0.0.1:8443 I0602 09:34:18.771687 3285 helper.go:202] Removing container "618e60dcb1623a1467cd869ec7b2db03766c71c7a04c0d7ed07eb580044be824" I0602 09:34:18.966963 3285 helper.go:207] Removed container "618e60dcb1623a1467cd869ec7b2db03766c71c7a04c0d7ed07eb580044be824" I0602 09:34:18.967012 3285 run.go:195] Creating container named "" config: image: openshift/origin-control-plane:latest entry point: hostname command: -I host config: pid mode: user mode: network mode: host I0602 09:34:18.993341 3285 run.go:200] Container created with id "de4cfb72034d9b4c36b6dcd875f01a03621a6d48b5f5e809c69ee859b23b8e2e" I0602 09:34:18.993362 3285 run.go:304] Starting container "de4cfb72034d9b4c36b6dcd875f01a03621a6d48b5f5e809c69ee859b23b8e2e" I0602 09:34:19.248639 3285 run.go:311] Waiting for container "de4cfb72034d9b4c36b6dcd875f01a03621a6d48b5f5e809c69ee859b23b8e2e" I0602 09:34:19.313143 3285 run.go:317] Done waiting for container "de4cfb72034d9b4c36b6dcd875f01a03621a6d48b5f5e809c69ee859b23b8e2e", rc=0 I0602 09:34:19.313167 3285 run.go:322] Reading logs from container "de4cfb72034d9b4c36b6dcd875f01a03621a6d48b5f5e809c69ee859b23b8e2e" I0602 09:34:19.314530 3285 run.go:330] Done reading logs from container "de4cfb72034d9b4c36b6dcd875f01a03621a6d48b5f5e809c69ee859b23b8e2e" I0602 09:34:19.314552 3285 run.go:337] Stdout: 172.18.2.81 172.17.0.1 I0602 09:34:19.314561 3285 run.go:338] Stderr: I0602 09:34:19.314571 3285 run.go:342] Container run successful I0602 09:34:19.314579 3285 run.go:293] Deleting container "de4cfb72034d9b4c36b6dcd875f01a03621a6d48b5f5e809c69ee859b23b8e2e" I0602 09:34:19.321996 3285 up.go:379] Using "127.0.0.1" as primary server IP and "172.17.0.1,172.18.2.81" as additional IPs Checking if OpenShift is already 
running ... I0602 09:34:19.322031 3285 helper.go:183] Inspecting docker container "origin" Checking for supported Docker version (=>1.22) ... I0602 09:34:19.323065 3285 helper.go:187] Container "origin" was not found I0602 09:34:19.323087 3285 helper.go:99] Retrieving Docker version Checking if insecured registry is configured properly in Docker ... I0602 09:34:19.324063 3285 helper.go:105] Docker version results: &types.Version{Version:"1.13.1", APIVersion:"1.26", MinAPIVersion:"1.12", GitCommit:"87f2fab/1.13.1", GoVersion:"go1.9.2", Os:"linux", Arch:"amd64", KernelVersion:"3.10.0-862.2.3.el7.x86_64", Experimental:false, BuildTime:"2018-03-19T18:55:01.568114340+00:00"} I0602 09:34:19.324103 3285 registryconfig.go:30] Contains 2 --insecure-registry entries Checking if required ports are available ... I0602 09:34:19.324114 3285 registryconfig.go:38] Looking if any dockerhelper.IPV4RangeList{dockerhelper.IPV4Range{from:0xac1e0000, to:0xac1effff}, dockerhelper.IPV4Range{from:0x7f000000, to:0x7fffffff}} contains CIDR "172.30.0.0/16" I0602 09:34:19.324165 3285 run.go:195] Creating container named "" config: image: openshift/origin-control-plane:latest entry point: /bin/bash command: -c cat /proc/net/tcp && ( [ -e /proc/net/tcp6 ] && cat /proc/net/tcp6 || true) host config: pid mode: host user mode: network mode: host I0602 09:34:19.433314 3285 run.go:200] Container created with id "13406d25ce07fa451e09d608901e5238c612a4d5b288e30df3f3da6dca58d820" I0602 09:34:19.433341 3285 run.go:304] Starting container "13406d25ce07fa451e09d608901e5238c612a4d5b288e30df3f3da6dca58d820" I0602 09:34:19.522161 3285 run.go:311] Waiting for container "13406d25ce07fa451e09d608901e5238c612a4d5b288e30df3f3da6dca58d820" I0602 09:34:19.696223 3285 run.go:317] Done waiting for container "13406d25ce07fa451e09d608901e5238c612a4d5b288e30df3f3da6dca58d820", rc=0 I0602 09:34:19.696248 3285 run.go:322] Reading logs from container "13406d25ce07fa451e09d608901e5238c612a4d5b288e30df3f3da6dca58d820" I0602 09:34:19.697696 3285 run.go:330] Done reading logs from container "13406d25ce07fa451e09d608901e5238c612a4d5b288e30df3f3da6dca58d820" I0602 09:34:19.697722 3285 run.go:337] Stdout: sl local_address rem_address st tx_queue rx_queue tr tm->when retrnsmt uid timeout inode 0: 00000000:0016 00000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 22513 1 ffff8bf5abb70000 100 0 0 10 0 1: 0100007F:0019 00000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 24633 1 ffff8bf1f33007c0 100 0 0 10 0 2: 0100007F:EBD6 0100007F:20FB 06 00000000:00000000 03:00001715 00000000 0 0 0 3 ffff8bf581623000 3: 510212AC:0016 400812AC:BE40 01 00000000:00000000 02:000B011A 00000000 0 0 44480 3 ffff8bf5acf81740 20 4 1 10 -1 sl local_address remote_address st tx_queue rx_queue tr tm->when retrnsmt uid timeout inode 0: 00000000000000000000000000000000:0016 00000000000000000000000000000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 22515 1 ffff8bf5a9698000 100 0 0 10 0 1: 00000000000000000000000001000000:0019 00000000000000000000000000000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 24634 1 ffff8bf1f3380000 100 0 0 10 0 I0602 09:34:19.697742 3285 run.go:338] Stderr: I0602 09:34:19.697753 3285 run.go:342] Container run successful I0602 09:34:19.697762 3285 run.go:293] Deleting container "13406d25ce07fa451e09d608901e5238c612a4d5b288e30df3f3da6dca58d820" Checking if OpenShift client is configured properly ... Checking if image openshift/origin-control-plane:latest is available ... 
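The port-availability probe above dumps /proc/net/tcp and /proc/net/tcp6 from a host-PID, host-network container; the hex port in each local_address column is what gets decoded into the "Used ports in container" set reported just below (0x0016 = 22, 0x0019 = 25). A small sketch of that decoding, assuming GNU awk (strtonum) and that only LISTEN-state sockets (st == 0A) are counted, which is consistent with the {22, 25} result that follows.

# Decode listening ports from the /proc/net/tcp{,6} dump (sketch):
cat /proc/net/tcp /proc/net/tcp6 2>/dev/null \
  | awk '$4 == "0A" { split($2, a, ":"); printf "%d\n", strtonum("0x" a[2]) }' \
  | sort -nu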
I0602 09:34:19.815111 3285 helper.go:267] Determining port in use from: 0: 00000000:0016 00000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 22513 1 ffff8bf5abb70000 100 0 0 10 0 I0602 09:34:19.815140 3285 helper.go:267] Determining port in use from: 1: 0100007F:0019 00000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 24633 1 ffff8bf1f33007c0 100 0 0 10 0 I0602 09:34:19.815148 3285 helper.go:267] Determining port in use from: 2: 0100007F:EBD6 0100007F:20FB 06 00000000:00000000 03:00001715 00000000 0 0 0 3 ffff8bf581623000 I0602 09:34:19.815155 3285 helper.go:267] Determining port in use from: 3: 510212AC:0016 400812AC:BE40 01 00000000:00000000 02:000B011A 00000000 0 0 44480 3 ffff8bf5acf81740 20 4 1 10 -1 I0602 09:34:19.815163 3285 helper.go:267] Determining port in use from: 0: 00000000000000000000000000000000:0016 00000000000000000000000000000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 22515 1 ffff8bf5a9698000 100 0 0 10 0 I0602 09:34:19.815170 3285 helper.go:267] Determining port in use from: 1: 00000000000000000000000001000000:0019 00000000000000000000000000000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 24634 1 ffff8bf1f3380000 100 0 0 10 0 I0602 09:34:19.815176 3285 helper.go:281] Used ports in container: map[int]struct {}{22:struct {}{}, 25:struct {}{}} I0602 09:34:19.815220 3285 helper.go:137] Inspecting Docker image "openshift/origin-control-plane:latest" I0602 09:34:19.817139 3285 helper.go:140] Image "openshift/origin-control-plane:latest" found: &types.ImageInspect{ID:"sha256:869eff902877d2e1f81a34c977482dfb0cc1449dcd52d09396e2cad6c393bdae", RepoTags:[]string{"openshift/origin-control-plane:a861408", "openshift/origin-control-plane:latest"}, RepoDigests:[]string{}, Parent:"sha256:e444a450c1946106e861789c9146977f2e9d8069d97a4a99a0bd9d4f3da5bce0", Comment:"", Created:"2018-05-31T04:15:45.692442681Z", Container:"319f70920bfb163363579fe40998483823bfba6dde5d0d2547b8678b7bdc4b11", ContainerConfig:(*container.Config)(0xc42093e8c0), DockerVersion:"1.13.1", Author:"", Config:(*container.Config)(0xc42093ec80), Architecture:"amd64", Os:"linux", OsVersion:"", Size:672313070, VirtualSize:672313070, GraphDriver:types.GraphDriverData{Data:map[string]string{"LowerDir":"/var/lib/docker/overlay2/fca9104430a3e1d315d856e88681760cec34ff15050d8a55e5b3e2b644e28a1c/diff:/var/lib/docker/overlay2/9919b1eb69a39fe5a4ed5f8516305e9b257a0bc7a583e2b1aec8141a850fa5d2/diff:/var/lib/docker/overlay2/3dddfd819cd05bbd52003bd251336b24a46e81d5761981d478b6c9096fb4a0c3/diff:/var/lib/docker/overlay2/85db87c2e17ecad3f0afc16c8c7f9dcc8474abfb5f5a516414f55b40641d8221/diff", "MergedDir":"/var/lib/docker/overlay2/06b8878f76c267389d3ae48528393ad065ff31810f085bf2eb0d30385d943df7/merged", "UpperDir":"/var/lib/docker/overlay2/06b8878f76c267389d3ae48528393ad065ff31810f085bf2eb0d30385d943df7/diff", "WorkDir":"/var/lib/docker/overlay2/06b8878f76c267389d3ae48528393ad065ff31810f085bf2eb0d30385d943df7/work"}, Name:"overlay2"}, RootFS:types.RootFS{Type:"layers", Layers:[]string{"sha256:43e653f84b79ba52711b0f726ff5a7fd1162ae9df4be76ca1de8370b8bbf9bb0", "sha256:b21b3e3988f36613b6e2984ea2fac595b1603db13d6511b71db97cb2a958bec7", "sha256:2c4c6c13c2cc1c4f4286f64bf3e8ead3773de078c49fb022a5c433e2d92ae847", "sha256:fa7c079d2181bf097a4ac062d8d1649bc31439d6d3ba4faf9b444f52451fe9d2", "sha256:b0f4dfd7ee0c205f685291983cd216fd1996648ff499008e25bdbf03b804192f"}, BaseLayer:""}, Metadata:types.ImageMetadata{LastTagTime:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}} I0602 09:34:19.817228 3285 helper.go:137] 
Inspecting Docker image "openshift/origin-cli:latest" I0602 09:34:19.818945 3285 helper.go:140] Image "openshift/origin-cli:latest" found: &types.ImageInspect{ID:"sha256:e444a450c1946106e861789c9146977f2e9d8069d97a4a99a0bd9d4f3da5bce0", RepoTags:[]string{"openshift/origin-cli:a861408", "openshift/origin-cli:latest"}, RepoDigests:[]string{}, Parent:"sha256:29ca45e875008789445c683583f3c965eb89b6cbbb5bf24531789f356e8a294d", Comment:"", Created:"2018-05-31T04:14:38.570355399Z", Container:"197c668b245f9c7af22bff7b4dbc5c0aa1644fc7bcda916c25d8e8b232658031", ContainerConfig:(*container.Config)(0xc4207ae500), DockerVersion:"1.13.1", Author:"", Config:(*container.Config)(0xc4207ae640), Architecture:"amd64", Os:"linux", OsVersion:"", Size:364331746, VirtualSize:364331746, GraphDriver:types.GraphDriverData{Data:map[string]string{"WorkDir":"/var/lib/docker/overlay2/fca9104430a3e1d315d856e88681760cec34ff15050d8a55e5b3e2b644e28a1c/work", "LowerDir":"/var/lib/docker/overlay2/9919b1eb69a39fe5a4ed5f8516305e9b257a0bc7a583e2b1aec8141a850fa5d2/diff:/var/lib/docker/overlay2/3dddfd819cd05bbd52003bd251336b24a46e81d5761981d478b6c9096fb4a0c3/diff:/var/lib/docker/overlay2/85db87c2e17ecad3f0afc16c8c7f9dcc8474abfb5f5a516414f55b40641d8221/diff", "MergedDir":"/var/lib/docker/overlay2/fca9104430a3e1d315d856e88681760cec34ff15050d8a55e5b3e2b644e28a1c/merged", "UpperDir":"/var/lib/docker/overlay2/fca9104430a3e1d315d856e88681760cec34ff15050d8a55e5b3e2b644e28a1c/diff"}, Name:"overlay2"}, RootFS:types.RootFS{Type:"layers", Layers:[]string{"sha256:43e653f84b79ba52711b0f726ff5a7fd1162ae9df4be76ca1de8370b8bbf9bb0", "sha256:b21b3e3988f36613b6e2984ea2fac595b1603db13d6511b71db97cb2a958bec7", "sha256:2c4c6c13c2cc1c4f4286f64bf3e8ead3773de078c49fb022a5c433e2d92ae847", "sha256:fa7c079d2181bf097a4ac062d8d1649bc31439d6d3ba4faf9b444f52451fe9d2"}, BaseLayer:""}, Metadata:types.ImageMetadata{LastTagTime:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}} I0602 09:34:19.819030 3285 helper.go:137] Inspecting Docker image "openshift/origin-node:latest" Starting OpenShift using openshift/origin-control-plane:latest ... 
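The "create-master-config" run recorded below executes `start master ...` inside openshift/origin-control-plane:latest with host networking and host PID, then copies the written configuration out into openshift.local.clusterup/kube-apiserver. oc cluster up does this through the Docker API; the docker CLI sequence here is only an illustrative equivalent and assumes the image's default entrypoint is the openshift binary.

# Illustrative docker-CLI equivalent of the create-master-config step (sketch):
cid=$(docker create --net=host --pid=host openshift/origin-control-plane:latest \
  start master \
    --write-config=/var/lib/origin/openshift.local.config \
    --master=127.0.0.1 \
    --images='openshift/origin-${component}:latest' \
    --dns=0.0.0.0:8053 \
    --public-master=https://localhost:8443 \
    --etcd-dir=/var/lib/etcd)
docker start -a "$cid"
mkdir -p openshift.local.clusterup/kube-apiserver
# Pull the generated config (master-config.yaml, certificates, ...) out of the stopped container.
docker cp "$cid":/var/lib/origin/openshift.local.config/. openshift.local.clusterup/kube-apiserver/
docker rm "$cid"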
I0602 09:34:19.821027 3285 helper.go:140] Image "openshift/origin-node:latest" found: &types.ImageInspect{ID:"sha256:20cf292a36f0cc3b636dd00ec071fc685129a0697fa33d3dd848c7202cff58b3", RepoTags:[]string{"openshift/origin-node:a861408", "openshift/origin-node:latest"}, RepoDigests:[]string{}, Parent:"sha256:869eff902877d2e1f81a34c977482dfb0cc1449dcd52d09396e2cad6c393bdae", Comment:"", Created:"2018-05-31T04:21:02.014599772Z", Container:"dac3edf40b05fc69768b91d8657381778cf30e2548058a7b0b67033789716435", ContainerConfig:(*container.Config)(0xc42093edc0), DockerVersion:"1.13.1", Author:"", Config:(*container.Config)(0xc42093ef00), Architecture:"amd64", Os:"linux", OsVersion:"", Size:1310847647, VirtualSize:1310847647, GraphDriver:types.GraphDriverData{Data:map[string]string{"LowerDir":"/var/lib/docker/overlay2/06b8878f76c267389d3ae48528393ad065ff31810f085bf2eb0d30385d943df7/diff:/var/lib/docker/overlay2/fca9104430a3e1d315d856e88681760cec34ff15050d8a55e5b3e2b644e28a1c/diff:/var/lib/docker/overlay2/9919b1eb69a39fe5a4ed5f8516305e9b257a0bc7a583e2b1aec8141a850fa5d2/diff:/var/lib/docker/overlay2/3dddfd819cd05bbd52003bd251336b24a46e81d5761981d478b6c9096fb4a0c3/diff:/var/lib/docker/overlay2/85db87c2e17ecad3f0afc16c8c7f9dcc8474abfb5f5a516414f55b40641d8221/diff", "MergedDir":"/var/lib/docker/overlay2/86fdbead0312bf846fc30f1ffb7e6ed0d5fd07d7f1f26a61c99d9f595cc0a2c0/merged", "UpperDir":"/var/lib/docker/overlay2/86fdbead0312bf846fc30f1ffb7e6ed0d5fd07d7f1f26a61c99d9f595cc0a2c0/diff", "WorkDir":"/var/lib/docker/overlay2/86fdbead0312bf846fc30f1ffb7e6ed0d5fd07d7f1f26a61c99d9f595cc0a2c0/work"}, Name:"overlay2"}, RootFS:types.RootFS{Type:"layers", Layers:[]string{"sha256:43e653f84b79ba52711b0f726ff5a7fd1162ae9df4be76ca1de8370b8bbf9bb0", "sha256:b21b3e3988f36613b6e2984ea2fac595b1603db13d6511b71db97cb2a958bec7", "sha256:2c4c6c13c2cc1c4f4286f64bf3e8ead3773de078c49fb022a5c433e2d92ae847", "sha256:fa7c079d2181bf097a4ac062d8d1649bc31439d6d3ba4faf9b444f52451fe9d2", "sha256:b0f4dfd7ee0c205f685291983cd216fd1996648ff499008e25bdbf03b804192f", "sha256:a87257ecfee445ce2304583b1830623e818e9fc6775f8d2fc6d865229e8e2794"}, BaseLayer:""}, Metadata:types.ImageMetadata{LastTagTime:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}} I0602 09:34:19.821486 3285 config.go:42] Running "create-master-config" I0602 09:34:19.821517 3285 helper.go:46] Retrieving Docker daemon info I0602 09:34:19.836051 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:2, ContainersRunning:0, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:20, OomKillDisable:true, NGoroutines:24, SystemTime:"2018-06-02T09:34:19.829382513Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", 
RegistryConfig:(*registry.ServiceConfig)(0xc4206eae00), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc4207ae780)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:34:19.836164 3285 run.go:195] Creating container named "" config: image: openshift/origin-control-plane:latest command: start master --write-config=/var/lib/origin/openshift.local.config --master=127.0.0.1 --images=openshift/origin-${component}:latest --dns=0.0.0.0:8053 --public-master=https://localhost:8443 --etcd-dir=/var/lib/etcd host config: pid mode: host user mode: network mode: host I0602 09:34:19.865330 3285 run.go:200] Container created with id "a7c04bc0974d9508b84587f07bb09224d84c0b5558b7d63253801c57061c5616" I0602 09:34:19.865355 3285 run.go:304] Starting container "a7c04bc0974d9508b84587f07bb09224d84c0b5558b7d63253801c57061c5616" I0602 09:34:20.318146 3285 run.go:311] Waiting for container "a7c04bc0974d9508b84587f07bb09224d84c0b5558b7d63253801c57061c5616" I0602 09:34:29.782588 3285 run.go:317] Done waiting for container "a7c04bc0974d9508b84587f07bb09224d84c0b5558b7d63253801c57061c5616", rc=0 I0602 09:34:29.782615 3285 run.go:322] Reading logs from container "a7c04bc0974d9508b84587f07bb09224d84c0b5558b7d63253801c57061c5616" I0602 09:34:29.784043 3285 run.go:330] Done reading logs from container "a7c04bc0974d9508b84587f07bb09224d84c0b5558b7d63253801c57061c5616" I0602 09:34:29.784634 3285 run.go:337] Stdout: Wrote master config to: /var/lib/origin/openshift.local.config/master-config.yaml I0602 09:34:29.784652 3285 run.go:338] Stderr: I0602 09:34:29.784662 3285 run.go:342] Container run successful I0602 09:34:29.784747 3285 config.go:72] Copying OpenShift config to local directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.792949 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.793000 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/admin.crt I0602 09:34:29.793131 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/admin.crt I0602 09:34:29.793230 3285 tar.go:376] Creating directory 
/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.793276 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/admin.key I0602 09:34:29.793349 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/admin.key I0602 09:34:29.793447 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.793483 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/admin.kubeconfig I0602 09:34:29.793553 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/admin.kubeconfig I0602 09:34:29.793650 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.793679 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/ca-bundle.crt I0602 09:34:29.793752 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/ca-bundle.crt I0602 09:34:29.793828 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.793854 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/ca.crt I0602 09:34:29.793924 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/ca.crt I0602 09:34:29.794001 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.794031 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/ca.key I0602 09:34:29.794103 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/ca.key I0602 09:34:29.794174 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.794188 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/ca.serial.txt I0602 09:34:29.794240 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/ca.serial.txt I0602 09:34:29.794319 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.794330 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/etcd.server.crt I0602 09:34:29.794382 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/etcd.server.crt I0602 09:34:29.794424 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.794436 3285 tar.go:444] Creating 
/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/etcd.server.key I0602 09:34:29.794481 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/etcd.server.key I0602 09:34:29.794523 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.794532 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/frontproxy-ca.crt I0602 09:34:29.794578 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/frontproxy-ca.crt I0602 09:34:29.794621 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.794632 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/frontproxy-ca.key I0602 09:34:29.794678 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/frontproxy-ca.key I0602 09:34:29.794725 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.794735 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/frontproxy-ca.serial.txt I0602 09:34:29.794777 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/frontproxy-ca.serial.txt I0602 09:34:29.794816 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.794826 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master-config.yaml I0602 09:34:29.794898 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master-config.yaml I0602 09:34:29.794965 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.794975 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.etcd-client.crt I0602 09:34:29.795026 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.etcd-client.crt I0602 09:34:29.795077 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.795088 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.etcd-client.key I0602 09:34:29.795138 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.etcd-client.key I0602 09:34:29.795641 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.795667 3285 tar.go:444] Creating 
/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.kubelet-client.crt I0602 09:34:29.795753 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.kubelet-client.crt I0602 09:34:29.796256 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.796308 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.kubelet-client.key I0602 09:34:29.796394 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.kubelet-client.key I0602 09:34:29.796753 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.796782 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.proxy-client.crt I0602 09:34:29.796865 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.proxy-client.crt I0602 09:34:29.797180 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.797210 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.proxy-client.key I0602 09:34:29.797302 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.proxy-client.key I0602 09:34:29.797751 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.797781 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.server.crt I0602 09:34:29.797861 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.server.crt I0602 09:34:29.798134 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.798164 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.server.key I0602 09:34:29.798238 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/master.server.key I0602 09:34:29.798511 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.798540 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/openshift-aggregator.crt I0602 09:34:29.798637 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/openshift-aggregator.crt I0602 09:34:29.798939 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.798968 3285 tar.go:444] Creating 
/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/openshift-aggregator.key I0602 09:34:29.799043 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/openshift-aggregator.key I0602 09:34:29.799303 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.799332 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/openshift-master.crt I0602 09:34:29.799407 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/openshift-master.crt I0602 09:34:29.799676 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.799706 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/openshift-master.key I0602 09:34:29.799778 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/openshift-master.key I0602 09:34:29.799990 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.800017 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/openshift-master.kubeconfig I0602 09:34:29.800089 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/openshift-master.kubeconfig I0602 09:34:29.800222 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.800251 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/service-signer.crt I0602 09:34:29.800350 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/service-signer.crt I0602 09:34:29.800617 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.800651 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/service-signer.key I0602 09:34:29.800725 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/service-signer.key I0602 09:34:29.800929 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.800964 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/serviceaccounts.private.key I0602 09:34:29.801041 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/serviceaccounts.private.key I0602 09:34:29.801221 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:34:29.801250 3285 tar.go:444] Creating 
/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/serviceaccounts.public.key I0602 09:34:29.801351 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/serviceaccounts.public.key I0602 09:34:29.801559 3285 tar.go:400] Done extracting tar stream I0602 09:34:29.921879 3285 openshift_apiserver.go:18] Copying kube-apiserver config to local directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift-apiserver I0602 09:34:29.927797 3285 openshift_controller.go:18] Copying kube-apiserver config to local directory openshift-controller-manager I0602 09:34:29.932808 3285 config.go:46] Running "create-node-config" I0602 09:34:29.932829 3285 helper.go:46] Retrieving Docker daemon info I0602 09:34:29.947181 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:2, ContainersRunning:0, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:24, OomKillDisable:true, NGoroutines:24, SystemTime:"2018-06-02T09:34:29.94080949Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc42095cee0), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc4207ae140)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:34:29.947313 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: oc command: adm create-node-config --node-dir=/var/lib/origin/openshift.local.config 
--certificate-authority=/var/lib/origin/openshift.local.masterconfig/ca.crt --dns-bind-address=0.0.0.0:8053 --hostnames=localhost --hostnames=127.0.0.1 --images=openshift/origin-${component}:latest --node=localhost --node-client-certificate-authority=/var/lib/origin/openshift.local.masterconfig/ca.crt --signer-cert=/var/lib/origin/openshift.local.masterconfig/ca.crt --signer-key=/var/lib/origin/openshift.local.masterconfig/ca.key --signer-serial=/var/lib/origin/openshift.local.masterconfig/ca.serial.txt --volume-dir=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.volumes host config: pid mode: host user mode: network mode: host volume binds: /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver:/var/lib/origin/openshift.local.masterconfig:z I0602 09:34:30.063534 3285 run.go:200] Container created with id "f9c77806ad542a3c743292019de9a15457b334a32910ea6ea153d4a56fe4e647" I0602 09:34:30.063562 3285 run.go:304] Starting container "f9c77806ad542a3c743292019de9a15457b334a32910ea6ea153d4a56fe4e647" I0602 09:34:30.826504 3285 run.go:311] Waiting for container "f9c77806ad542a3c743292019de9a15457b334a32910ea6ea153d4a56fe4e647" I0602 09:34:33.799203 3285 run.go:317] Done waiting for container "f9c77806ad542a3c743292019de9a15457b334a32910ea6ea153d4a56fe4e647", rc=0 I0602 09:34:33.799230 3285 run.go:322] Reading logs from container "f9c77806ad542a3c743292019de9a15457b334a32910ea6ea153d4a56fe4e647" I0602 09:34:33.800818 3285 run.go:330] Done reading logs from container "f9c77806ad542a3c743292019de9a15457b334a32910ea6ea153d4a56fe4e647" I0602 09:34:33.801119 3285 run.go:337] Stdout: Generating node credentials ... Created node config for localhost in /var/lib/origin/openshift.local.config I0602 09:34:33.801131 3285 run.go:338] Stderr: I0602 09:34:33.801142 3285 run.go:342] Container run successful I0602 09:34:33.801154 3285 config.go:75] Copying OpenShift node config to local directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node I0602 09:34:33.810928 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node I0602 09:34:33.811072 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/ca.crt I0602 09:34:33.811155 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/ca.crt I0602 09:34:33.811211 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node I0602 09:34:33.811222 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/master-client.crt I0602 09:34:33.811314 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/master-client.crt I0602 09:34:33.811370 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node I0602 09:34:33.811380 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/master-client.key I0602 09:34:33.811438 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/master-client.key I0602 09:34:33.811504 3285 tar.go:376] Creating directory 
/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node I0602 09:34:33.811515 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/node-client-ca.crt I0602 09:34:33.811572 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/node-client-ca.crt I0602 09:34:33.811618 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node I0602 09:34:33.811628 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/node-config.yaml I0602 09:34:33.811697 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/node-config.yaml I0602 09:34:33.811743 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node I0602 09:34:33.811752 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/node-registration.json I0602 09:34:33.811805 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/node-registration.json I0602 09:34:33.811871 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node I0602 09:34:33.811891 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/node.kubeconfig I0602 09:34:33.811965 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/node.kubeconfig I0602 09:34:33.812043 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node I0602 09:34:33.812058 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/server.crt I0602 09:34:33.812128 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/server.crt I0602 09:34:33.812203 3285 tar.go:376] Creating directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node I0602 09:34:33.812220 3285 tar.go:444] Creating /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/server.key I0602 09:34:33.812309 3285 tar.go:454] Extracting/writing /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node/server.key I0602 09:34:33.812393 3285 tar.go:400] Done extracting tar stream I0602 09:34:33.850676 3285 dns.go:19] Copying kubelet config to local directory /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kubedns I0602 09:34:33.853752 3285 run_self_hosted.go:332] Creating static pod definitions in "/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/static-pod-manifests" I0602 09:34:33.853781 3285 run_self_hosted.go:339] Substitutions: map[string]string{"ETCD_VOLUME":"hostPath:\n path: /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/etcd\n", "/path/to/master/config-dir":"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver", 
"/path/to/openshift-apiserver/config-dir":"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift-apiserver", "OPENSHIFT_PULL_POLICY":"IfNotPresent", "IMAGE":"openshift/origin-control-plane:latest"} I0602 09:34:33.853912 3285 run_self_hosted.go:339] Substitutions: map[string]string{"ETCD_VOLUME":"hostPath:\n path: /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/etcd\n", "/path/to/master/config-dir":"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver", "/path/to/openshift-apiserver/config-dir":"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift-apiserver", "OPENSHIFT_PULL_POLICY":"IfNotPresent", "IMAGE":"openshift/origin-hypershift:latest"} I0602 09:34:33.854027 3285 run_self_hosted.go:339] Substitutions: map[string]string{"OPENSHIFT_PULL_POLICY":"IfNotPresent", "IMAGE":"openshift/origin-hyperkube:latest", "ETCD_VOLUME":"hostPath:\n path: /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/etcd\n", "/path/to/master/config-dir":"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver", "/path/to/openshift-apiserver/config-dir":"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift-apiserver"} I0602 09:34:33.854149 3285 run_self_hosted.go:339] Substitutions: map[string]string{"/path/to/openshift-apiserver/config-dir":"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift-apiserver", "OPENSHIFT_PULL_POLICY":"IfNotPresent", "IMAGE":"openshift/origin-hyperkube:latest", "ETCD_VOLUME":"hostPath:\n path: /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/etcd\n", "/path/to/master/config-dir":"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver"} I0602 09:34:33.854272 3285 run_self_hosted.go:345] configLocations = clusterup.configDirs{masterConfigDir:"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver", openshiftAPIServerConfigDir:"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift-apiserver", openshiftControllerConfigDir:"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift-controller-manager", nodeConfigDir:"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node", kubeDNSConfigDir:"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kubedns", podManifestDir:"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/static-pod-manifests", baseDir:"", err:error(nil)} I0602 09:34:33.854323 3285 flags.go:30] Running "create-kubelet-flags" I0602 09:34:33.854353 3285 run.go:195] Creating container named "" config: image: openshift/origin-node:latest entry point: openshift command: start node --write-flags --config=/var/lib/origin/openshift.local.config/node/node-config.yaml host config: pid mode: user mode: network mode: volume binds: /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node:/var/lib/origin/openshift.local.config/node:z I0602 09:34:34.092562 3285 run.go:200] Container created with id "bce87b75800ba278e21544a9b763002d6a65bf021cdf955c5d5d498644f5d874" I0602 09:34:34.092594 3285 run.go:304] Starting container 
"bce87b75800ba278e21544a9b763002d6a65bf021cdf955c5d5d498644f5d874" I0602 09:34:34.714754 3285 run.go:311] Waiting for container "bce87b75800ba278e21544a9b763002d6a65bf021cdf955c5d5d498644f5d874" I0602 09:34:34.994070 3285 run.go:317] Done waiting for container "bce87b75800ba278e21544a9b763002d6a65bf021cdf955c5d5d498644f5d874", rc=0 I0602 09:34:34.994094 3285 run.go:322] Reading logs from container "bce87b75800ba278e21544a9b763002d6a65bf021cdf955c5d5d498644f5d874" I0602 09:34:34.996227 3285 run.go:330] Done reading logs from container "bce87b75800ba278e21544a9b763002d6a65bf021cdf955c5d5d498644f5d874" I0602 09:34:34.996498 3285 run.go:337] Stdout: --address=0.0.0.0 --allow-privileged=true --anonymous-auth=true --authentication-token-webhook=true --authentication-token-webhook-cache-ttl=5m --authorization-mode=Webhook --authorization-webhook-cache-authorized-ttl=5m --authorization-webhook-cache-unauthorized-ttl=5m --cadvisor-port=0 --cgroup-driver=systemd --client-ca-file=/var/lib/origin/openshift.local.config/node/node-client-ca.crt --cluster-dns= --cluster-domain=cluster.local --container-runtime-endpoint=unix:///var/run/dockershim.sock --containerized=true --experimental-dockershim-root-directory=/var/lib/dockershim --fail-swap-on=false --file-check-frequency=0s --healthz-bind-address= --healthz-port=0 --host-ipc-sources=api --host-ipc-sources=file --host-network-sources=api --host-network-sources=file --host-pid-sources=api --host-pid-sources=file --hostname-override=localhost --http-check-frequency=0s --image-service-endpoint=unix:///var/run/dockershim.sock --iptables-masquerade-bit=0 --kubeconfig=/var/lib/origin/openshift.local.config/node/node.kubeconfig --max-pods=250 --network-plugin= --node-ip= --pod-infra-container-image=openshift/origin-pod:latest --pod-manifest-path= --port=10250 --read-only-port=0 --register-node=true --root-dir=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.volumes --tls-cert-file=/var/lib/origin/openshift.local.config/node/server.crt --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA --tls-cipher-suites=TLS_RSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_RSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_RSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_RSA_WITH_AES_256_CBC_SHA --tls-min-version=VersionTLS12 --tls-private-key-file=/var/lib/origin/openshift.local.config/node/server.key I0602 09:34:34.996517 3285 run.go:338] Stderr: I0602 09:34:34.996527 3285 run.go:342] Container run successful I0602 09:34:34.996535 3285 run.go:293] Deleting container "bce87b75800ba278e21544a9b763002d6a65bf021cdf955c5d5d498644f5d874" I0602 09:34:35.005557 3285 run_self_hosted.go:141] kubeletflags := [--address=0.0.0.0 --allow-privileged=true --anonymous-auth=true --authentication-token-webhook=true --authentication-token-webhook-cache-ttl=5m --authorization-mode=Webhook 
--authorization-webhook-cache-authorized-ttl=5m --authorization-webhook-cache-unauthorized-ttl=5m --cadvisor-port=0 --cgroup-driver=systemd --client-ca-file=/var/lib/origin/openshift.local.config/node/node-client-ca.crt --cluster-dns= --cluster-domain=cluster.local --container-runtime-endpoint=unix:///var/run/dockershim.sock --containerized=true --experimental-dockershim-root-directory=/var/lib/dockershim --fail-swap-on=false --file-check-frequency=0s --healthz-bind-address= --healthz-port=0 --host-ipc-sources=api --host-ipc-sources=file --host-network-sources=api --host-network-sources=file --host-pid-sources=api --host-pid-sources=file --hostname-override=localhost --http-check-frequency=0s --image-service-endpoint=unix:///var/run/dockershim.sock --iptables-masquerade-bit=0 --kubeconfig=/var/lib/origin/openshift.local.config/node/node.kubeconfig --max-pods=250 --network-plugin= --node-ip= --pod-infra-container-image=openshift/origin-pod:latest --pod-manifest-path= --port=10250 --read-only-port=0 --register-node=true --root-dir=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.volumes --tls-cert-file=/var/lib/origin/openshift.local.config/node/server.crt --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA --tls-cipher-suites=TLS_RSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_RSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_RSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_RSA_WITH_AES_256_CBC_SHA --tls-min-version=VersionTLS12 --tls-private-key-file=/var/lib/origin/openshift.local.config/node/server.key] I0602 09:34:35.005661 3285 run_self_hosted.go:505] --address=0.0.0.0 --allow-privileged=true --anonymous-auth=true --authentication-token-webhook=true --authentication-token-webhook-cache-ttl=5m --authorization-mode=Webhook --authorization-webhook-cache-authorized-ttl=5m --authorization-webhook-cache-unauthorized-ttl=5m --cadvisor-port=0 --cgroup-driver=systemd --client-ca-file=/var/lib/origin/openshift.local.config/node/node-client-ca.crt --cluster-domain=cluster.local --container-runtime-endpoint=unix:///var/run/dockershim.sock --containerized=true --experimental-dockershim-root-directory=/var/lib/dockershim --fail-swap-on=false --file-check-frequency=0s --healthz-bind-address= --healthz-port=0 --host-ipc-sources=api --host-ipc-sources=file --host-network-sources=api --host-network-sources=file --host-pid-sources=api --host-pid-sources=file --hostname-override=localhost --http-check-frequency=0s --image-service-endpoint=unix:///var/run/dockershim.sock --iptables-masquerade-bit=0 --kubeconfig=/var/lib/origin/openshift.local.config/node/node.kubeconfig --max-pods=250 --network-plugin= --node-ip= --pod-infra-container-image=openshift/origin-pod:latest --port=10250 --read-only-port=0 --register-node=true 
--root-dir=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.volumes --tls-cert-file=/var/lib/origin/openshift.local.config/node/server.crt --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA --tls-cipher-suites=TLS_RSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_RSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_RSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_RSA_WITH_AES_256_CBC_SHA --tls-min-version=VersionTLS12 --tls-private-key-file=/var/lib/origin/openshift.local.config/node/server.key --pod-manifest-path=/var/lib/origin/pod-manifests --cluster-dns=172.30.0.2 --v=0 I0602 09:34:35.005675 3285 run_kubelet.go:48] Running "start-kubelet" I0602 09:34:35.005689 3285 helper.go:46] Retrieving Docker daemon info I0602 09:34:35.020196 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:2, ContainersRunning:0, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:24, OomKillDisable:true, NGoroutines:24, SystemTime:"2018-06-02T09:34:35.014203041Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc42007b960), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc420c8e640)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, 
RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:34:35.020344 3285 run.go:195] Creating container named "origin" config: image: openshift/origin-node:latest entry point: hyperkube command: kubelet --address=0.0.0.0 --allow-privileged=true --anonymous-auth=true --authentication-token-webhook=true --authentication-token-webhook-cache-ttl=5m --authorization-mode=Webhook --authorization-webhook-cache-authorized-ttl=5m --authorization-webhook-cache-unauthorized-ttl=5m --cadvisor-port=0 --cgroup-driver=systemd --client-ca-file=/var/lib/origin/openshift.local.config/node/node-client-ca.crt --cluster-domain=cluster.local --container-runtime-endpoint=unix:///var/run/dockershim.sock --containerized=true --experimental-dockershim-root-directory=/var/lib/dockershim --fail-swap-on=false --file-check-frequency=0s --healthz-bind-address= --healthz-port=0 --host-ipc-sources=api --host-ipc-sources=file --host-network-sources=api --host-network-sources=file --host-pid-sources=api --host-pid-sources=file --hostname-override=localhost --http-check-frequency=0s --image-service-endpoint=unix:///var/run/dockershim.sock --iptables-masquerade-bit=0 --kubeconfig=/var/lib/origin/openshift.local.config/node/node.kubeconfig --max-pods=250 --network-plugin= --node-ip= --pod-infra-container-image=openshift/origin-pod:latest --port=10250 --read-only-port=0 --register-node=true --root-dir=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.volumes --tls-cert-file=/var/lib/origin/openshift.local.config/node/server.crt --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA --tls-cipher-suites=TLS_RSA_WITH_AES_128_GCM_SHA256 --tls-cipher-suites=TLS_RSA_WITH_AES_256_GCM_SHA384 --tls-cipher-suites=TLS_RSA_WITH_AES_128_CBC_SHA --tls-cipher-suites=TLS_RSA_WITH_AES_256_CBC_SHA --tls-min-version=VersionTLS12 --tls-private-key-file=/var/lib/origin/openshift.local.config/node/server.key --pod-manifest-path=/var/lib/origin/pod-manifests --cluster-dns=172.30.0.2 --v=0 environment: OPENSHIFT_PV_DIR=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.pv host config: pid mode: host user mode: network mode: host volume binds: /var/log:/var/log:rw /var/run:/var/run:rw /sys:/sys:rw /sys/fs/cgroup:/sys/fs/cgroup:rw /dev:/dev /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/node:/var/lib/origin/openshift.local.config/node:z /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver:/var/lib/origin/openshift.local.config/master:z 
/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/static-pod-manifests:/var/lib/origin/pod-manifests:z /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/etcd:/var/lib/etcd:z /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.pv:/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.pv /:/rootfs:ro /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.volumes:/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.volumes:rslave /var/lib/docker:/var/lib/docker /sys/devices/virtual/net:/sys/devices/virtual/net:rw I0602 09:34:35.047116 3285 run.go:200] Container created with id "9a09ee284ed01953bbf0ba3b5bda34453ec310e4b8bac263dec7733af16f9a0f" I0602 09:34:36.154659 3285 run_self_hosted.go:147] started kubelet in container "9a09ee284ed01953bbf0ba3b5bda34453ec310e4b8bac263dec7733af16f9a0f" I0602 09:34:36.155824 3285 run_self_hosted.go:172] Waiting for the kube-apiserver to be ready ... I0602 09:34:36.156774 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:37.157449 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:38.157472 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:39.157436 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:40.157406 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:41.157417 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:42.157438 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:43.157462 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:44.157401 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:45.157375 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:46.157430 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:47.157390 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. 
Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:48.158093 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:49.157392 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:50.157427 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:51.157412 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:52.157322 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:53.157408 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:54.157587 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:55.157389 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:56.157342 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:57.157402 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:58.157396 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:34:59.157435 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:35:00.157417 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. Get https://127.0.0.1:8443/healthz?timeout=32s: dial tcp 127.0.0.1:8443: getsockopt: connection refused I0602 09:35:06.210717 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:06.210788 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:06.210800 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. 
an error on the server ("[+]ping ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/start-apiextensions-informers ok\n[+]poststarthook/start-apiextensions-controllers ok\n[-]poststarthook/bootstrap-controller failed: reason withheld\n[-]poststarthook/ca-registration failed: reason withheld\n[+]poststarthook/start-kube-aggregator-informers ok\n[+]poststarthook/apiservice-registration-controller ok\n[+]poststarthook/apiservice-status-available-controller ok\n[+]poststarthook/apiservice-openapi-controller ok\n[+]poststarthook/kube-apiserver-autoregistration ok\n[-]autoregister-completion failed: reason withheld\n[-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld\n[-]poststarthook/authorization.openshift.io-ensureopenshift-infra failed: reason withheld\n[+]poststarthook/quota.openshift.io-clusterquotamapping ok\n[-]poststarthook/openshift.io-AdmissionInit failed: reason withheld\n[+]poststarthook/openshift.io-StartInformers ok\n[+]poststarthook/oauth.openshift.io-StartOAuthClientsBootstrapping ok\nhealthz check failed") has prevented the request from succeeding I0602 09:35:06.210856 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:07.162366 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:07.162446 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:07.162457 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. an error on the server ("[+]ping ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/start-apiextensions-informers ok\n[+]poststarthook/start-apiextensions-controllers ok\n[+]poststarthook/bootstrap-controller ok\n[+]poststarthook/ca-registration ok\n[+]poststarthook/start-kube-aggregator-informers ok\n[+]poststarthook/apiservice-registration-controller ok\n[+]poststarthook/apiservice-status-available-controller ok\n[+]poststarthook/apiservice-openapi-controller ok\n[+]poststarthook/kube-apiserver-autoregistration ok\n[+]autoregister-completion ok\n[-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld\n[+]poststarthook/authorization.openshift.io-ensureopenshift-infra ok\n[+]poststarthook/quota.openshift.io-clusterquotamapping ok\n[+]poststarthook/openshift.io-AdmissionInit ok\n[+]poststarthook/openshift.io-StartInformers ok\n[+]poststarthook/oauth.openshift.io-StartOAuthClientsBootstrapping ok\nhealthz check failed") has prevented the request from succeeding I0602 09:35:07.162523 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:08.159573 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:08.159646 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:08.159656 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. 
an error on the server ("[+]ping ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/start-apiextensions-informers ok\n[+]poststarthook/start-apiextensions-controllers ok\n[+]poststarthook/bootstrap-controller ok\n[+]poststarthook/ca-registration ok\n[+]poststarthook/start-kube-aggregator-informers ok\n[+]poststarthook/apiservice-registration-controller ok\n[+]poststarthook/apiservice-status-available-controller ok\n[+]poststarthook/apiservice-openapi-controller ok\n[+]poststarthook/kube-apiserver-autoregistration ok\n[+]autoregister-completion ok\n[-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld\n[+]poststarthook/authorization.openshift.io-ensureopenshift-infra ok\n[+]poststarthook/quota.openshift.io-clusterquotamapping ok\n[+]poststarthook/openshift.io-AdmissionInit ok\n[+]poststarthook/openshift.io-StartInformers ok\n[+]poststarthook/oauth.openshift.io-StartOAuthClientsBootstrapping ok\nhealthz check failed") has prevented the request from succeeding I0602 09:35:08.159723 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:09.159771 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:09.159845 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:09.159859 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. an error on the server ("[+]ping ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/start-apiextensions-informers ok\n[+]poststarthook/start-apiextensions-controllers ok\n[+]poststarthook/bootstrap-controller ok\n[+]poststarthook/ca-registration ok\n[+]poststarthook/start-kube-aggregator-informers ok\n[+]poststarthook/apiservice-registration-controller ok\n[+]poststarthook/apiservice-status-available-controller ok\n[+]poststarthook/apiservice-openapi-controller ok\n[+]poststarthook/kube-apiserver-autoregistration ok\n[+]autoregister-completion ok\n[-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld\n[+]poststarthook/authorization.openshift.io-ensureopenshift-infra ok\n[+]poststarthook/quota.openshift.io-clusterquotamapping ok\n[+]poststarthook/openshift.io-AdmissionInit ok\n[+]poststarthook/openshift.io-StartInformers ok\n[+]poststarthook/oauth.openshift.io-StartOAuthClientsBootstrapping ok\nhealthz check failed") has prevented the request from succeeding I0602 09:35:09.159900 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:10.159406 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:10.159498 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:10.159508 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. 
an error on the server ("[+]ping ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/start-apiextensions-informers ok\n[+]poststarthook/start-apiextensions-controllers ok\n[+]poststarthook/bootstrap-controller ok\n[+]poststarthook/ca-registration ok\n[+]poststarthook/start-kube-aggregator-informers ok\n[+]poststarthook/apiservice-registration-controller ok\n[+]poststarthook/apiservice-status-available-controller ok\n[+]poststarthook/apiservice-openapi-controller ok\n[+]poststarthook/kube-apiserver-autoregistration ok\n[+]autoregister-completion ok\n[-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld\n[+]poststarthook/authorization.openshift.io-ensureopenshift-infra ok\n[+]poststarthook/quota.openshift.io-clusterquotamapping ok\n[+]poststarthook/openshift.io-AdmissionInit ok\n[+]poststarthook/openshift.io-StartInformers ok\n[+]poststarthook/oauth.openshift.io-StartOAuthClientsBootstrapping ok\nhealthz check failed") has prevented the request from succeeding I0602 09:35:10.159600 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:11.159644 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:11.159703 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:11.159710 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. an error on the server ("[+]ping ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/start-apiextensions-informers ok\n[+]poststarthook/start-apiextensions-controllers ok\n[+]poststarthook/bootstrap-controller ok\n[+]poststarthook/ca-registration ok\n[+]poststarthook/start-kube-aggregator-informers ok\n[+]poststarthook/apiservice-registration-controller ok\n[+]poststarthook/apiservice-status-available-controller ok\n[+]poststarthook/apiservice-openapi-controller ok\n[+]poststarthook/kube-apiserver-autoregistration ok\n[+]autoregister-completion ok\n[-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld\n[+]poststarthook/authorization.openshift.io-ensureopenshift-infra ok\n[+]poststarthook/quota.openshift.io-clusterquotamapping ok\n[+]poststarthook/openshift.io-AdmissionInit ok\n[+]poststarthook/openshift.io-StartInformers ok\n[+]poststarthook/oauth.openshift.io-StartOAuthClientsBootstrapping ok\nhealthz check failed") has prevented the request from succeeding I0602 09:35:11.159774 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:12.159844 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:12.159903 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:12.159910 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. 
an error on the server ("[+]ping ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/start-apiextensions-informers ok\n[+]poststarthook/start-apiextensions-controllers ok\n[+]poststarthook/bootstrap-controller ok\n[+]poststarthook/ca-registration ok\n[+]poststarthook/start-kube-aggregator-informers ok\n[+]poststarthook/apiservice-registration-controller ok\n[+]poststarthook/apiservice-status-available-controller ok\n[+]poststarthook/apiservice-openapi-controller ok\n[+]poststarthook/kube-apiserver-autoregistration ok\n[+]autoregister-completion ok\n[-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld\n[+]poststarthook/authorization.openshift.io-ensureopenshift-infra ok\n[+]poststarthook/quota.openshift.io-clusterquotamapping ok\n[+]poststarthook/openshift.io-AdmissionInit ok\n[+]poststarthook/openshift.io-StartInformers ok\n[+]poststarthook/oauth.openshift.io-StartOAuthClientsBootstrapping ok\nhealthz check failed") has prevented the request from succeeding I0602 09:35:12.159947 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:13.159800 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:13.159864 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:13.159872 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. an error on the server ("[+]ping ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/start-apiextensions-informers ok\n[+]poststarthook/start-apiextensions-controllers ok\n[+]poststarthook/bootstrap-controller ok\n[+]poststarthook/ca-registration ok\n[+]poststarthook/start-kube-aggregator-informers ok\n[+]poststarthook/apiservice-registration-controller ok\n[+]poststarthook/apiservice-status-available-controller ok\n[+]poststarthook/apiservice-openapi-controller ok\n[+]poststarthook/kube-apiserver-autoregistration ok\n[+]autoregister-completion ok\n[-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld\n[+]poststarthook/authorization.openshift.io-ensureopenshift-infra ok\n[+]poststarthook/quota.openshift.io-clusterquotamapping ok\n[+]poststarthook/openshift.io-AdmissionInit ok\n[+]poststarthook/openshift.io-StartInformers ok\n[+]poststarthook/oauth.openshift.io-StartOAuthClientsBootstrapping ok\nhealthz check failed") has prevented the request from succeeding I0602 09:35:13.159917 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:14.159512 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:14.159592 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:14.159602 3285 run_self_hosted.go:528] Server isn't healthy yet. Waiting a little while. 
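The quoted check list in the errors above ("[+]ping ok", "[-]poststarthook/bootstrap-controller failed: reason withheld", ...) is the body of the /healthz response itself: the server is now reachable, but individual post-start hooks have not yet finished. Once the server is up, the same breakdown can be fetched directly; a small sketch, assuming the healthz endpoints behave as in a stock kube-apiserver (the per-check subpath is an assumption here, not something shown in this log):

  # Full per-check health report, the same text embedded in the errors above.
  curl -k 'https://127.0.0.1:8443/healthz?verbose'

  # A single check can also be queried on its own, e.g. the bootstrap-controller hook.
  curl -k 'https://127.0.0.1:8443/healthz/poststarthook/bootstrap-controller'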
an error on the server ("[+]ping ok\n[+]etcd ok\n[+]poststarthook/generic-apiserver-start-informers ok\n[+]poststarthook/start-apiextensions-informers ok\n[+]poststarthook/start-apiextensions-controllers ok\n[+]poststarthook/bootstrap-controller ok\n[+]poststarthook/ca-registration ok\n[+]poststarthook/start-kube-aggregator-informers ok\n[+]poststarthook/apiservice-registration-controller ok\n[+]poststarthook/apiservice-status-available-controller ok\n[+]poststarthook/apiservice-openapi-controller ok\n[+]poststarthook/kube-apiserver-autoregistration ok\n[+]autoregister-completion ok\n[-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld\n[+]poststarthook/authorization.openshift.io-ensureopenshift-infra ok\n[+]poststarthook/quota.openshift.io-clusterquotamapping ok\n[+]poststarthook/openshift.io-AdmissionInit ok\n[+]poststarthook/openshift.io-StartInformers ok\n[+]poststarthook/oauth.openshift.io-StartOAuthClientsBootstrapping ok\nhealthz check failed") has prevented the request from succeeding I0602 09:35:14.159679 3285 request.go:1099] body was not decodable (unable to check for Status): yaml: did not find expected <document start> I0602 09:35:15.159818 3285 interface.go:26] Installing "kube-proxy" ... I0602 09:35:15.159853 3285 interface.go:26] Installing "kube-dns" ... I0602 09:35:15.159863 3285 interface.go:26] Installing "openshift-apiserver" ... I0602 09:35:15.159906 3285 apply_template.go:83] Installing "openshift-apiserver" I0602 09:35:15.160244 3285 apply_template.go:83] Installing "kube-proxy" I0602 09:35:15.160527 3285 helper.go:46] Retrieving Docker daemon info I0602 09:35:15.160532 3285 helper.go:46] Retrieving Docker daemon info I0602 09:35:15.160674 3285 apply_template.go:83] Installing "kube-dns" I0602 09:35:15.160776 3285 helper.go:46] Retrieving Docker daemon info I0602 09:35:15.180238 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:11, ContainersRunning:9, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:68, OomKillDisable:true, NGoroutines:70, SystemTime:"2018-06-02T09:35:15.172755886Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420583b90), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", 
Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc4207af2c0)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:35:15.180385 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c echo 'kube-proxy' && chmod 755 /install.sh && /install.sh host config: pid mode: host user mode: network mode: host I0602 09:35:15.188849 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:11, ContainersRunning:9, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:68, OomKillDisable:true, NGoroutines:70, SystemTime:"2018-06-02T09:35:15.180938053Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc4205ba1c0), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc420b09540)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:35:15.188988 3285 run.go:195] 
Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c echo 'openshift-apiserver' && chmod 755 /install.sh && /install.sh host config: pid mode: host user mode: network mode: host I0602 09:35:15.209008 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:11, ContainersRunning:9, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:69, OomKillDisable:true, NGoroutines:70, SystemTime:"2018-06-02T09:35:15.19791273Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420583dc0), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc4207af400)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:35:15.209156 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c echo 'kube-dns' && chmod 755 /install.sh && /install.sh host config: pid mode: host user mode: network mode: host I0602 09:35:15.215324 3285 run.go:200] Container created with id "5642240c17fca63e6d051e74b8c14158086c8aaa83ae2021bd42e65e3bc1a20b" I0602 09:35:15.223778 3285 run.go:200] Container created with id "a56a8510e4b1d542c60d88c9ca54ca20e0c9db3c112b88632f36aaa810509343" I0602 09:35:15.242157 3285 run.go:200] Container created with id "4e8db5d8ddf4681efad9bab272b52571f29882ae7ecd2a5dfe181e7c89740b25" I0602 09:35:15.278750 3285 run.go:304] Starting container "5642240c17fca63e6d051e74b8c14158086c8aaa83ae2021bd42e65e3bc1a20b" I0602 09:35:15.280896 3285 run.go:304] Starting container 
"a56a8510e4b1d542c60d88c9ca54ca20e0c9db3c112b88632f36aaa810509343" I0602 09:35:15.286950 3285 run.go:304] Starting container "4e8db5d8ddf4681efad9bab272b52571f29882ae7ecd2a5dfe181e7c89740b25" I0602 09:35:15.426477 3285 run.go:311] Waiting for container "5642240c17fca63e6d051e74b8c14158086c8aaa83ae2021bd42e65e3bc1a20b" I0602 09:35:15.438759 3285 run.go:311] Waiting for container "4e8db5d8ddf4681efad9bab272b52571f29882ae7ecd2a5dfe181e7c89740b25" I0602 09:35:15.449233 3285 run.go:311] Waiting for container "a56a8510e4b1d542c60d88c9ca54ca20e0c9db3c112b88632f36aaa810509343" I0602 09:35:17.063933 3285 run.go:317] Done waiting for container "a56a8510e4b1d542c60d88c9ca54ca20e0c9db3c112b88632f36aaa810509343", rc=0 I0602 09:35:17.063962 3285 run.go:322] Reading logs from container "a56a8510e4b1d542c60d88c9ca54ca20e0c9db3c112b88632f36aaa810509343" I0602 09:35:17.067885 3285 run.go:330] Done reading logs from container "a56a8510e4b1d542c60d88c9ca54ca20e0c9db3c112b88632f36aaa810509343" I0602 09:35:17.068610 3285 run.go:337] Stdout: openshift-apiserver total 44K drwxr-xr-x. 1 root root 186 Jun 2 09:35 . drwxr-xr-x. 1 root root 186 Jun 2 09:35 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:35 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:35 dev drwxr-xr-x. 1 root root 66 Jun 2 09:35 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rwxr-xr-x. 1 root root 857 Jan 1 1970 install.sh -rw-r--r--. 1 root root 6.3K Jan 1 1970 install.yaml -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 19 Jan 1 1970 namespace-file -rw-r--r--. 1 root root 168 Jan 1 1970 namespace.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 opt -rw-r--r--. 1 root root 749 Jan 1 1970 param-file.txt -rw-r--r--. 1 root root 0 Jan 1 1970 privileged-sa-list.txt dr-xr-xr-x. 166 root root 0 Jun 2 09:26 proc -rw-r--r--. 1 root root 0 Jan 1 1970 rbac.yaml dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 
1 root root 41 Apr 2 18:38 var namespace "openshift-apiserver" created daemonset.apps "openshift-apiserver" created serviceaccount "openshift-apiserver" created service "api" created apiservice.apiregistration.k8s.io "v1.apps.openshift.io" created apiservice.apiregistration.k8s.io "v1.authorization.openshift.io" created apiservice.apiregistration.k8s.io "v1.build.openshift.io" created apiservice.apiregistration.k8s.io "v1.image.openshift.io" created apiservice.apiregistration.k8s.io "v1.network.openshift.io" created apiservice.apiregistration.k8s.io "v1.oauth.openshift.io" created apiservice.apiregistration.k8s.io "v1.project.openshift.io" created apiservice.apiregistration.k8s.io "v1.quota.openshift.io" created apiservice.apiregistration.k8s.io "v1.route.openshift.io" created apiservice.apiregistration.k8s.io "v1.security.openshift.io" created apiservice.apiregistration.k8s.io "v1.template.openshift.io" created apiservice.apiregistration.k8s.io "v1.user.openshift.io" created I0602 09:35:17.068627 3285 run.go:338] Stderr: + ls -alh / + read p + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift-apiserver ' + '[' -s /namespace.yaml ']' + oc apply --config=/kubeconfig.kubeconfig -f /namespace.yaml + '[' -s /rbac.yaml ']' + oc process --local -o yaml --ignore-unknown-parameters --param-file=/param-file.txt -f /install.yaml + oc apply --namespace=openshift-apiserver --config=/kubeconfig.kubeconfig -f - I0602 09:35:17.068640 3285 run.go:342] Container run successful I0602 09:35:17.068654 3285 run.go:293] Deleting container "a56a8510e4b1d542c60d88c9ca54ca20e0c9db3c112b88632f36aaa810509343" I0602 09:35:17.491439 3285 run.go:317] Done waiting for container "4e8db5d8ddf4681efad9bab272b52571f29882ae7ecd2a5dfe181e7c89740b25", rc=0 I0602 09:35:17.491468 3285 run.go:322] Reading logs from container "4e8db5d8ddf4681efad9bab272b52571f29882ae7ecd2a5dfe181e7c89740b25" I0602 09:35:17.496497 3285 run.go:330] Done reading logs from container "4e8db5d8ddf4681efad9bab272b52571f29882ae7ecd2a5dfe181e7c89740b25" I0602 09:35:17.496887 3285 run.go:337] Stdout: kube-dns total 40K drwxr-xr-x. 1 root root 186 Jun 2 09:35 . drwxr-xr-x. 1 root root 186 Jun 2 09:35 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:35 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:35 dev drwxr-xr-x. 1 root root 66 Jun 2 09:35 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rwxr-xr-x. 1 root root 857 Jan 1 1970 install.sh -rw-r--r--. 1 root root 2.1K Jan 1 1970 install.yaml -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 8 Jan 1 1970 namespace-file -rw-r--r--. 1 root root 157 Jan 1 1970 namespace.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 opt -rw-r--r--. 1 root root 752 Jan 1 1970 param-file.txt -rw-r--r--. 1 root root 0 Jan 1 1970 privileged-sa-list.txt dr-xr-xr-x. 166 root root 0 Jun 2 09:26 proc -rw-r--r--. 1 root root 0 Jan 1 1970 rbac.yaml dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 
1 root root 41 Apr 2 18:38 var namespace "kube-dns" created daemonset.extensions "kube-dns" created serviceaccount "kube-dns" created service "kube-dns" created I0602 09:35:17.496902 3285 run.go:338] Stderr: + ls -alh / + read p + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=kube-dns ' + '[' -s /namespace.yaml ']' + oc apply --config=/kubeconfig.kubeconfig -f /namespace.yaml + '[' -s /rbac.yaml ']' + oc process --local -o yaml --ignore-unknown-parameters --param-file=/param-file.txt -f /install.yaml + oc apply --namespace=kube-dns --config=/kubeconfig.kubeconfig -f - I0602 09:35:17.496914 3285 run.go:342] Container run successful I0602 09:35:17.496929 3285 run.go:293] Deleting container "4e8db5d8ddf4681efad9bab272b52571f29882ae7ecd2a5dfe181e7c89740b25" I0602 09:35:17.620790 3285 run.go:317] Done waiting for container "5642240c17fca63e6d051e74b8c14158086c8aaa83ae2021bd42e65e3bc1a20b", rc=0 I0602 09:35:17.620814 3285 run.go:322] Reading logs from container "5642240c17fca63e6d051e74b8c14158086c8aaa83ae2021bd42e65e3bc1a20b" I0602 09:35:17.625146 3285 run.go:330] Done reading logs from container "5642240c17fca63e6d051e74b8c14158086c8aaa83ae2021bd42e65e3bc1a20b" I0602 09:35:17.625471 3285 run.go:337] Stdout: kube-proxy total 40K drwxr-xr-x. 1 root root 186 Jun 2 09:35 . drwxr-xr-x. 1 root root 186 Jun 2 09:35 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:35 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:35 dev drwxr-xr-x. 1 root root 66 Jun 2 09:35 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rwxr-xr-x. 1 root root 857 Jan 1 1970 install.sh -rw-r--r--. 1 root root 1.8K Jan 1 1970 install.yaml -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 10 Jan 1 1970 namespace-file -rw-r--r--. 1 root root 159 Jan 1 1970 namespace.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 opt -rw-r--r--. 1 root root 752 Jan 1 1970 param-file.txt -rw-r--r--. 1 root root 0 Jan 1 1970 privileged-sa-list.txt dr-xr-xr-x. 166 root root 0 Jun 2 09:26 proc -rw-r--r--. 1 root root 0 Jan 1 1970 rbac.yaml dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 
1 root root 41 Apr 2 18:38 var namespace "kube-proxy" created serviceaccount "kube-proxy" created clusterrolebinding.rbac.authorization.k8s.io "system:kube-proxy" created daemonset.extensions "kube-proxy" created I0602 09:35:17.625498 3285 run.go:338] Stderr: + ls -alh / + read p + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=kube-proxy ' + '[' -s /namespace.yaml ']' + oc apply --config=/kubeconfig.kubeconfig -f /namespace.yaml + '[' -s /rbac.yaml ']' + oc process --local -o yaml --ignore-unknown-parameters --param-file=/param-file.txt -f /install.yaml + oc apply --namespace=kube-proxy --config=/kubeconfig.kubeconfig -f - I0602 09:35:17.625511 3285 run.go:342] Container run successful I0602 09:35:17.625526 3285 run.go:293] Deleting container "5642240c17fca63e6d051e74b8c14158086c8aaa83ae2021bd42e65e3bc1a20b" I0602 09:35:17.650350 3285 interface.go:41] Finished installing "kube-proxy" "kube-dns" "openshift-apiserver" I0602 09:35:17.656765 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:17.656785 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.656826 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:17.656840 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.656874 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:17.656894 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.656931 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:17.656939 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.656982 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:17.656989 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.656999 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:17.657008 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, 
loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.657017 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:17.657022 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.657031 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:17.657035 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.657045 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:17.657049 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.657082 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:17.657088 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.657119 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:17.657127 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.657144 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:17.657166 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:17.657181 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:18.663076 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:18.663104 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, 
ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663134 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:18.663143 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663161 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:18.663169 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663192 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:18.663200 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663216 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:18.663224 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663240 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:18.663248 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663408 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:18.663418 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663436 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:18.663444 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663461 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:18.663469 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io 
v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663489 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:18.663498 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663514 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:18.663522 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663537 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:18.663545 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"EndpointsNotFound", Message:"cannot find endpoints for service/api in \"openshift-apiserver\""} I0602 09:35:18.663565 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:19.662169 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:19.662195 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662224 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:19.662232 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662249 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:19.662267 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662287 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:19.662294 3285 readiness_apigroup.go:44] waiting 
for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662317 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:19.662325 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662340 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:19.662348 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662363 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:19.662371 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662385 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:19.662393 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662409 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:19.662416 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662431 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:19.662439 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662454 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:19.662462 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662477 3285 
readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:19.662485 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:19.662507 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:20.681396 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:20.681421 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.681472 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:20.681959 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.681999 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:20.682013 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.682043 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:20.682056 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.682102 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:20.682116 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.682146 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:20.682177 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" 
have no addresses"} I0602 09:35:20.682209 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:20.682221 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.682255 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:20.682294 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.682326 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:20.682338 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.682367 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:20.682380 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.682408 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:20.682420 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.682447 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:20.682459 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:20.682489 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:21.661561 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:21.661592 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints 
for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661623 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:21.661631 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661648 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:21.661656 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661671 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:21.661680 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661696 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:21.661703 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661720 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:21.661728 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661743 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:21.661751 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661766 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:21.661774 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661789 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:21.661797 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", 
LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661812 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:21.661820 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661836 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:21.661844 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661859 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:21.661867 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:21.661886 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:22.663063 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:22.663100 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663133 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:22.663143 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663162 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:22.663170 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663186 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:22.663194 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io 
v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663211 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:22.663219 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663238 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:22.663247 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663280 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:22.663289 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663307 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:22.663315 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663333 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:22.663342 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663365 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:22.663375 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663393 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:22.663401 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663417 3285 readiness_apigroup.go:40] found: 
v1.user.openshift.io I0602 09:35:22.663425 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:22.663444 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:23.661401 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:23.661427 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.661457 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:23.661465 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.661482 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:23.661489 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.661535 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:23.661570 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.661613 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:23.661622 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.661661 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:23.661670 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 
09:35:23.661708 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:23.661717 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.661755 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:23.661764 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.661795 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:23.661862 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.661898 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:23.661907 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.661980 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:23.661991 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.662047 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:23.662072 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:23.662097 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:24.663047 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:24.663077 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in 
\"openshift-apiserver\" have no addresses"} I0602 09:35:24.663109 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:24.663119 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663138 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:24.663146 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663164 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:24.663172 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663190 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:24.663199 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663216 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:24.663224 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663239 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:24.663246 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663286 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:24.663295 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663313 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:24.663322 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, 
ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663341 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:24.663350 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663367 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:24.663376 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663393 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:24.663401 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:24.663422 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:25.662361 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:25.662391 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662421 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:25.662430 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662447 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:25.662455 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662471 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:25.662479 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", 
Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662495 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:25.662503 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662520 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:25.662528 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662547 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:25.662555 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662571 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:25.662580 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662596 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:25.662604 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662619 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:25.662627 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662642 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:25.662650 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662665 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:25.662673 3285 
readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:25.662691 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:26.661384 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:26.661409 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661430 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:26.661435 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661446 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:26.661451 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661461 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:26.661466 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661476 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:26.661481 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661490 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:26.661498 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661509 3285 readiness_apigroup.go:40] found: 
v1.project.openshift.io I0602 09:35:26.661516 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661532 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:26.661539 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661556 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:26.661565 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661582 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:26.661587 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661602 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:26.661610 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661620 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:26.661625 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:26.661641 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:27.662187 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:27.662213 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 
09:35:27.662237 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:27.662245 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662277 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:27.662283 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662293 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:27.662298 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662310 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:27.662317 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662332 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:27.662340 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662356 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:27.662367 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662380 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:27.662389 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662402 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:27.662409 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, 
Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662425 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:27.662433 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662450 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:27.662458 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662475 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:27.662483 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:27.662502 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:28.662215 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:28.662249 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662296 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:28.662305 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662325 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:28.662333 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662350 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:28.662358 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", 
LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662385 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:28.662394 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662411 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:28.662419 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662444 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:28.662453 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662471 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:28.662479 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662495 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:28.662503 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662520 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:28.662528 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662552 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:28.662560 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662585 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:28.662593 3285 readiness_apigroup.go:44] 
waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:28.662620 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:29.661911 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:29.661939 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.661967 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:29.661976 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.661993 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:29.662004 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.662024 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:29.662032 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.662048 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:29.662056 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.662072 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:29.662080 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.662095 3285 readiness_apigroup.go:40] found: v1.project.openshift.io 
I0602 09:35:29.662103 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.662118 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:29.662126 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.662142 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:29.662150 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.662165 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:29.662173 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.662188 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:29.662196 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.662212 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:29.662219 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:29.662238 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:30.661545 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:30.661569 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661594 3285 
readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:30.661600 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661611 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:30.661615 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661625 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:30.661630 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661639 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:30.661643 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661653 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:30.661658 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661669 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:30.661674 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661682 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:30.661687 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661696 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:30.661700 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", 
Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661709 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:30.661714 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661723 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:30.661727 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661736 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:30.661741 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:30.661752 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:31.662249 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:31.662298 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662330 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:31.662339 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662357 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:31.662366 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662388 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:31.662396 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, 
loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662414 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:31.662423 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662440 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:31.662448 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662464 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:31.662472 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662488 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:31.662500 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662517 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:31.662526 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662542 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:31.662550 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662566 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:31.662574 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662599 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:31.662607 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io 
v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:31.662627 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:32.661726 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:32.661749 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661780 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:32.661787 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661799 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:32.661804 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661814 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:32.661818 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661828 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:32.661833 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661843 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:32.661848 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661858 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:32.661866 3285 
readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661875 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:32.661880 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661889 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:32.661894 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661906 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:32.661911 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661920 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:32.661925 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661942 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:32.661949 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:32.661966 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:33.662100 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:33.662126 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662155 3285 readiness_apigroup.go:40] found: 
v1.authorization.openshift.io I0602 09:35:33.662163 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662181 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:33.662189 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662204 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:33.662230 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662249 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:33.662289 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662321 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:33.662330 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662347 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:33.662355 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662370 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:33.662377 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662438 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:33.662448 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for 
service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662531 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:33.662548 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662566 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:33.662573 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662626 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:33.662651 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:33.662691 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:34.661504 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:34.661531 3285 readiness_apigroup.go:44] waiting for readiness: v1.apps.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661560 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:34.661568 3285 readiness_apigroup.go:44] waiting for readiness: v1.authorization.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661584 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:34.661592 3285 readiness_apigroup.go:44] waiting for readiness: v1.build.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661611 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:34.661619 3285 readiness_apigroup.go:44] waiting for readiness: v1.image.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, 
loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661634 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:34.661642 3285 readiness_apigroup.go:44] waiting for readiness: v1.network.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661659 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:34.661668 3285 readiness_apigroup.go:44] waiting for readiness: v1.oauth.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661684 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:34.661692 3285 readiness_apigroup.go:44] waiting for readiness: v1.project.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661708 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:34.661717 3285 readiness_apigroup.go:44] waiting for readiness: v1.quota.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661734 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:34.661742 3285 readiness_apigroup.go:44] waiting for readiness: v1.route.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661759 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:34.661768 3285 readiness_apigroup.go:44] waiting for readiness: v1.security.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661784 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:34.661793 3285 readiness_apigroup.go:44] waiting for readiness: v1.template.openshift.io v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661809 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:34.661818 3285 readiness_apigroup.go:44] waiting for readiness: v1.user.openshift.io 
v1beta1.APIServiceCondition{Type:"Available", Status:"False", LastTransitionTime:v1.Time{Time:time.Time{wall:0x0, ext:63663528916, loc:(*time.Location)(0x46c8fe0)}}, Reason:"MissingEndpoints", Message:"endpoints for service/api in \"openshift-apiserver\" have no addresses"} I0602 09:35:34.661837 3285 readiness_apigroup.go:53] waiting for readiness: []string{"v1.apps.openshift.io", "v1.authorization.openshift.io", "v1.build.openshift.io", "v1.image.openshift.io", "v1.network.openshift.io", "v1.oauth.openshift.io", "v1.project.openshift.io", "v1.quota.openshift.io", "v1.route.openshift.io", "v1.security.openshift.io", "v1.template.openshift.io", "v1.user.openshift.io"} I0602 09:35:35.661489 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:35.661517 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:35.661527 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:35.661534 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:35.661541 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:35.661547 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:35.661553 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:35.661559 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:35.661566 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:35.661573 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:35.661580 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:35.661586 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:35.662424 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:35.663083 3285 readiness_apigroup.go:67] waiting for url: "/apis/authorization.openshift.io/v1" 503 I0602 09:35:35.663739 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:35.664442 3285 readiness_apigroup.go:67] waiting for url: "/apis/image.openshift.io/v1" 503 I0602 09:35:35.665156 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:35.665854 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:35.666601 3285 readiness_apigroup.go:67] waiting for url: "/apis/project.openshift.io/v1" 503 I0602 09:35:35.667340 3285 readiness_apigroup.go:67] waiting for url: "/apis/quota.openshift.io/v1" 503 I0602 09:35:35.668120 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:35.668760 3285 readiness_apigroup.go:67] waiting for url: "/apis/security.openshift.io/v1" 503 I0602 09:35:35.669483 3285 readiness_apigroup.go:67] waiting for url: "/apis/template.openshift.io/v1" 503 I0602 09:35:35.670227 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:35.670246 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/authorization.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/image.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/project.openshift.io/v1", "/apis/quota.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/security.openshift.io/v1", "/apis/template.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:36.661942 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:36.661970 3285 readiness_apigroup.go:40] found: 
v1.authorization.openshift.io I0602 09:35:36.661980 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:36.661987 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:36.661994 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:36.662001 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:36.662007 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:36.662013 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:36.662019 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:36.662026 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:36.662033 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:36.662039 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:36.662894 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:36.676657 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:36.677457 3285 readiness_apigroup.go:67] waiting for url: "/apis/image.openshift.io/v1" 503 I0602 09:35:36.678116 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:36.678937 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:36.679619 3285 readiness_apigroup.go:67] waiting for url: "/apis/project.openshift.io/v1" 503 I0602 09:35:36.680323 3285 readiness_apigroup.go:67] waiting for url: "/apis/quota.openshift.io/v1" 503 I0602 09:35:36.681098 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:36.681772 3285 readiness_apigroup.go:67] waiting for url: "/apis/security.openshift.io/v1" 503 I0602 09:35:36.682484 3285 readiness_apigroup.go:67] waiting for url: "/apis/template.openshift.io/v1" 503 I0602 09:35:36.683149 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:36.683169 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/image.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/project.openshift.io/v1", "/apis/quota.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/security.openshift.io/v1", "/apis/template.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:37.662397 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:37.662424 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:37.662433 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:37.662440 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:37.662446 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:37.662452 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:37.662458 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:37.662465 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:37.662472 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:37.662479 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:37.662485 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:37.662492 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:37.663357 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 
09:35:37.665567 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:37.666294 3285 readiness_apigroup.go:67] waiting for url: "/apis/image.openshift.io/v1" 503 I0602 09:35:37.666996 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:37.667695 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:37.668359 3285 readiness_apigroup.go:67] waiting for url: "/apis/project.openshift.io/v1" 503 I0602 09:35:37.669061 3285 readiness_apigroup.go:67] waiting for url: "/apis/quota.openshift.io/v1" 503 I0602 09:35:37.669805 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:37.670524 3285 readiness_apigroup.go:67] waiting for url: "/apis/security.openshift.io/v1" 503 I0602 09:35:37.671175 3285 readiness_apigroup.go:67] waiting for url: "/apis/template.openshift.io/v1" 503 I0602 09:35:37.671985 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:37.672004 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/image.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/project.openshift.io/v1", "/apis/quota.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/security.openshift.io/v1", "/apis/template.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:38.661627 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:38.661656 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:38.661665 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:38.661673 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:38.661679 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:38.661686 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:38.661692 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:38.661698 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:38.661705 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:38.661712 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:38.661719 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:38.661725 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:38.662553 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:38.664847 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:38.665536 3285 readiness_apigroup.go:67] waiting for url: "/apis/image.openshift.io/v1" 503 I0602 09:35:38.666229 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:38.666972 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:38.667648 3285 readiness_apigroup.go:67] waiting for url: "/apis/project.openshift.io/v1" 503 I0602 09:35:38.669632 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:38.670317 3285 readiness_apigroup.go:67] waiting for url: "/apis/security.openshift.io/v1" 503 I0602 09:35:38.671057 3285 readiness_apigroup.go:67] waiting for url: "/apis/template.openshift.io/v1" 503 I0602 09:35:38.671918 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:38.671947 3285 
readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/image.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/project.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/security.openshift.io/v1", "/apis/template.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:39.662658 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:39.662683 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:39.662690 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:39.662694 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:39.662699 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:39.662703 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:39.662709 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:39.662713 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:39.662717 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:39.662722 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:39.662726 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:39.662730 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:39.663677 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:39.665936 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:39.666603 3285 readiness_apigroup.go:67] waiting for url: "/apis/image.openshift.io/v1" 503 I0602 09:35:39.667194 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:39.667827 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:39.668561 3285 readiness_apigroup.go:67] waiting for url: "/apis/project.openshift.io/v1" 503 I0602 09:35:39.670580 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:39.672544 3285 readiness_apigroup.go:67] waiting for url: "/apis/template.openshift.io/v1" 503 I0602 09:35:39.673163 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:39.673184 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/image.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/project.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/template.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:40.662192 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:40.662232 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:40.662243 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:40.662250 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:40.662277 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:40.662287 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:40.662294 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:40.662302 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:40.662310 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:40.662317 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:40.662324 3285 
readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:40.662332 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:40.663117 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:40.665312 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:40.667335 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:40.668035 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:40.668707 3285 readiness_apigroup.go:67] waiting for url: "/apis/project.openshift.io/v1" 503 I0602 09:35:40.670532 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:40.672385 3285 readiness_apigroup.go:67] waiting for url: "/apis/template.openshift.io/v1" 503 I0602 09:35:40.673029 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:40.673050 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/project.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/template.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:41.662207 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:41.662240 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:41.662250 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:41.662278 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:41.662287 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:41.662295 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:41.662302 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:41.662309 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:41.662317 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:41.662325 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:41.662332 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:41.662339 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:41.663128 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:41.665470 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:41.667604 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:41.668417 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:41.669054 3285 readiness_apigroup.go:67] waiting for url: "/apis/project.openshift.io/v1" 503 I0602 09:35:41.670963 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:41.672944 3285 readiness_apigroup.go:67] waiting for url: "/apis/template.openshift.io/v1" 503 I0602 09:35:41.673674 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:41.673694 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/project.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/template.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:42.662485 3285 readiness_apigroup.go:40] found: 
v1.apps.openshift.io I0602 09:35:42.662521 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:42.662530 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:42.662538 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:42.662545 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:42.662552 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:42.662565 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:42.662570 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:42.662577 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:42.662584 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:42.662591 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:42.662597 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:42.663459 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:42.665643 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:42.667806 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:42.668501 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:42.671911 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:42.674152 3285 readiness_apigroup.go:67] waiting for url: "/apis/template.openshift.io/v1" 503 I0602 09:35:42.674771 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:42.674792 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/template.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:43.663480 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:43.663510 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:43.663520 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:43.663529 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:43.663544 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:43.663553 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:43.663561 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:43.663568 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:43.663575 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:43.663591 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:43.663599 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:43.663606 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:43.664585 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:43.668177 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:43.670565 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:43.671373 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:43.675305 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:43.677827 3285 readiness_apigroup.go:67] waiting 
for url: "/apis/template.openshift.io/v1" 503 I0602 09:35:43.678826 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:43.678853 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/template.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:44.661548 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:44.661577 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:44.661586 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:44.661592 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:44.661599 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:44.661605 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:44.661611 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:44.661617 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:44.661624 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:44.661631 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:44.661638 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:44.661644 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:44.662485 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:44.664626 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:44.666634 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:44.667498 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:44.670958 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:44.672953 3285 readiness_apigroup.go:67] waiting for url: "/apis/template.openshift.io/v1" 503 I0602 09:35:44.673728 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:44.673747 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/template.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:45.662083 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:45.662110 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:45.662120 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:45.662128 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:45.662136 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:45.662143 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:45.662150 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:45.662156 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:45.662171 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:45.662179 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:45.662185 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:45.662192 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:45.662948 3285 readiness_apigroup.go:67] waiting for url: 
"/apis/apps.openshift.io/v1" 503 I0602 09:35:45.665306 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:45.667472 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:45.668161 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:45.671332 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:45.674533 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:45.674553 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:46.662480 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:46.662515 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:46.662524 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:46.662532 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:46.662540 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:46.662548 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:46.662555 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:46.662562 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:46.662600 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:46.662626 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:46.662634 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:46.662641 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:46.663464 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:46.665606 3285 readiness_apigroup.go:67] waiting for url: "/apis/build.openshift.io/v1" 503 I0602 09:35:46.667693 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:46.668336 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:46.671635 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:46.674997 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:46.675016 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/build.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:47.662373 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:47.662407 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:47.662417 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:47.662424 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:47.662432 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:47.662439 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:47.662446 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:47.662453 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:47.662461 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:47.662469 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 
09:35:47.662476 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:47.662483 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:47.663312 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:47.672849 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:47.673684 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:47.676819 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:47.679930 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:47.679947 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:48.661447 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:48.661475 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:48.661485 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:48.661491 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:48.661497 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:48.661504 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:48.661510 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:48.661516 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:48.661523 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:48.661530 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:48.661537 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:48.661543 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:48.662276 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:48.667076 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:48.667787 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:48.670866 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:48.674176 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:48.674197 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:49.675187 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:49.675214 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:49.675223 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:49.675230 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:49.675237 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:49.675245 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:49.675252 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:49.675380 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:49.675393 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:49.675401 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:49.675408 3285 
readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:49.675414 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:49.691475 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:49.704797 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:49.705499 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:49.708765 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:49.713413 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:49.713432 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:50.662117 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:50.662144 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:50.662153 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:50.662159 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:50.662167 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:50.662173 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:50.662179 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:50.662185 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:50.662193 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:50.662200 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:50.662206 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:50.662213 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:50.663127 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:50.668471 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:50.669298 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:50.672616 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:50.676886 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:50.676906 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:51.662160 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:51.662196 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:51.662206 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:51.662214 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:51.662221 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:51.662229 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:51.662237 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:51.662243 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:51.662250 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:51.662272 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:51.662289 3285 readiness_apigroup.go:40] found: 
v1.template.openshift.io I0602 09:35:51.662296 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:51.662996 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:51.668341 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:51.669097 3285 readiness_apigroup.go:67] waiting for url: "/apis/oauth.openshift.io/v1" 503 I0602 09:35:51.672230 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:51.675643 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:51.675665 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/oauth.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:52.679220 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:52.679247 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:52.679272 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:52.679282 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:52.679290 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:52.679297 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:52.679305 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:52.679312 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:52.679319 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:52.679327 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:52.679335 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:52.679342 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:52.684432 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:52.705815 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:52.711493 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:52.715241 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:52.715282 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:53.661402 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:53.661426 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:53.661432 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:53.661437 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:53.661442 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:53.661446 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:53.661451 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:53.661456 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:53.661463 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:53.661468 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:53.661474 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:53.661480 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:53.662327 3285 readiness_apigroup.go:67] waiting for url: 
"/apis/apps.openshift.io/v1" 503 I0602 09:35:53.667327 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:53.671828 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:53.675287 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:53.675307 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:54.667086 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:54.667113 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:54.667123 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:54.667130 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:54.667137 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:54.667145 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:54.667152 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:54.667158 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:54.667166 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:54.667173 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:54.667180 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:54.667187 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:54.682461 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:54.709018 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:54.714409 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:54.722494 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:54.722516 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:55.662075 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:55.662108 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:55.662118 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:55.662125 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:55.662133 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:55.662141 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:55.662148 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:55.662155 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:55.662163 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:55.662174 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:55.662181 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:55.662188 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:55.663048 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:55.667868 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:55.672320 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:55.675550 3285 readiness_apigroup.go:67] waiting for url: 
"/apis/user.openshift.io/v1" 503 I0602 09:35:55.675570 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:56.662062 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:56.662095 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:56.662104 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:56.662112 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:56.662119 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:56.662128 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:56.662135 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:56.662142 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:56.662150 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:56.662158 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:56.662165 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:56.662171 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:56.662973 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:56.667699 3285 readiness_apigroup.go:67] waiting for url: "/apis/network.openshift.io/v1" 503 I0602 09:35:56.672183 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:56.675041 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:56.675062 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/network.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:57.684303 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:57.684331 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:57.684341 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:57.684349 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:57.684356 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:57.684364 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:57.684371 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:57.684379 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:57.684387 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:57.684395 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:57.684402 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:57.684410 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:57.685314 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:57.698105 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:57.701529 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:57.701555 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:58.662091 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:58.662117 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:58.662126 3285 
readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:58.662134 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:58.662141 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:58.662147 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:58.662153 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:58.662160 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:58.662167 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:58.662174 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:58.662181 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:58.662187 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:58.662968 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:58.673039 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:58.676167 3285 readiness_apigroup.go:67] waiting for url: "/apis/user.openshift.io/v1" 503 I0602 09:35:58.676188 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/route.openshift.io/v1", "/apis/user.openshift.io/v1"} I0602 09:35:59.661461 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:35:59.661488 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:35:59.661496 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:35:59.661503 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:35:59.661510 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:35:59.661517 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:35:59.661523 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:35:59.661529 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:35:59.661537 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:35:59.661544 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:35:59.661551 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:35:59.661557 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:35:59.662370 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:35:59.672494 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:35:59.676344 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/route.openshift.io/v1"} I0602 09:36:00.671058 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:36:00.671085 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:36:00.671094 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:36:00.671101 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:36:00.671108 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:36:00.671115 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:36:00.671121 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:36:00.671128 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:36:00.671135 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:36:00.671142 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:36:00.671149 3285 readiness_apigroup.go:40] found: v1.template.openshift.io 
I0602 09:36:00.671156 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:36:00.674406 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:36:00.718655 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:36:00.723160 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/route.openshift.io/v1"} I0602 09:36:01.661910 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:36:01.661946 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:36:01.661956 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:36:01.661964 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:36:01.661972 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:36:01.661979 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:36:01.661987 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:36:01.661994 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:36:01.662001 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:36:01.662042 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:36:01.662074 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:36:01.662081 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:36:01.662828 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:36:01.672317 3285 readiness_apigroup.go:67] waiting for url: "/apis/route.openshift.io/v1" 503 I0602 09:36:01.676083 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1", "/apis/route.openshift.io/v1"} I0602 09:36:02.661808 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:36:02.661832 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:36:02.661838 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:36:02.661842 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:36:02.661847 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:36:02.661852 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:36:02.661856 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:36:02.661860 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:36:02.661865 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:36:02.661869 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:36:02.661874 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:36:02.661878 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:36:02.662626 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:36:02.676833 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1"} I0602 09:36:03.662138 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:36:03.662165 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:36:03.662174 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:36:03.662179 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:36:03.662183 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:36:03.662189 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:36:03.662193 3285 readiness_apigroup.go:40] found: 
v1.project.openshift.io I0602 09:36:03.662198 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:36:03.662203 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:36:03.662208 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:36:03.662213 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:36:03.662217 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:36:03.662982 3285 readiness_apigroup.go:67] waiting for url: "/apis/apps.openshift.io/v1" 503 I0602 09:36:03.677297 3285 readiness_apigroup.go:72] waiting for urls: []string{"/apis/apps.openshift.io/v1"} I0602 09:36:04.662419 3285 readiness_apigroup.go:40] found: v1.apps.openshift.io I0602 09:36:04.662452 3285 readiness_apigroup.go:40] found: v1.authorization.openshift.io I0602 09:36:04.662462 3285 readiness_apigroup.go:40] found: v1.build.openshift.io I0602 09:36:04.662470 3285 readiness_apigroup.go:40] found: v1.image.openshift.io I0602 09:36:04.662477 3285 readiness_apigroup.go:40] found: v1.network.openshift.io I0602 09:36:04.662485 3285 readiness_apigroup.go:40] found: v1.oauth.openshift.io I0602 09:36:04.662493 3285 readiness_apigroup.go:40] found: v1.project.openshift.io I0602 09:36:04.662500 3285 readiness_apigroup.go:40] found: v1.quota.openshift.io I0602 09:36:04.662539 3285 readiness_apigroup.go:40] found: v1.route.openshift.io I0602 09:36:04.662566 3285 readiness_apigroup.go:40] found: v1.security.openshift.io I0602 09:36:04.662575 3285 readiness_apigroup.go:40] found: v1.template.openshift.io I0602 09:36:04.662583 3285 readiness_apigroup.go:40] found: v1.user.openshift.io I0602 09:36:04.678459 3285 run_self_hosted.go:224] openshift-apiserver available I0602 09:36:04.678505 3285 interface.go:26] Installing "openshift-controller-manager" ... 
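The block above is the cluster-up readiness loop from readiness_apigroup.go: it first confirms each OpenShift API group appears in discovery ("found: v1.<group>"), then polls every aggregated endpoint until it stops answering 503, and only then logs "openshift-apiserver available". A minimal stand-alone sketch of the same wait, assuming a reachable cluster and a logged-in oc client; the group list is the one printed in the log above, not an exhaustive set.
# Sketch: wait for the aggregated OpenShift API groups to become ready.
# Assumptions: `oc` is logged in with sufficient rights; group names are
# the ones the readiness check prints above.
for group in apps.openshift.io network.openshift.io oauth.openshift.io \
             route.openshift.io user.openshift.io; do
  until oc get --raw "/apis/${group}/v1" >/dev/null 2>&1; do
    echo "waiting for url: /apis/${group}/v1"   # server answers 503 until the group is served
    sleep 1
  done
  echo "found: v1.${group}"
done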
I0602 09:36:04.678533 3285 apply_template.go:83] Installing "openshift-controller-manager" I0602 09:36:04.679079 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:04.681523 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1.apps resourceVersion: "18" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.apps uid: 3befd881-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: apps groupPriorityMinimum: 17800 service: null version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.681904 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1.authorization.k8s.io resourceVersion: "22" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.authorization.k8s.io uid: 3bf095d8-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: authorization.k8s.io groupPriorityMinimum: 17600 service: null version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.682429 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.authorization.k8s.io resourceVersion: "23" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.authorization.k8s.io uid: 3bf0a9c7-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: authorization.k8s.io groupPriorityMinimum: 17600 service: null version: v1beta1 versionPriority: 9 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.682721 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1.batch resourceVersion: "28" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.batch uid: 3bf17b74-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: batch groupPriorityMinimum: 17400 service: null version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.683286 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.rbac.authorization.k8s.io resourceVersion: "37" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.rbac.authorization.k8s.io uid: 3bf38006-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: rbac.authorization.k8s.io groupPriorityMinimum: 17000 service: null version: v1beta1 versionPriority: 12 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.683678 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | 
{"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.security.openshift.io","namespace":""},"spec":{"group":"security.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.security.openshift.io resourceVersion: "543" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.security.openshift.io uid: 4232a27c-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: security.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:34Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.684107 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1. resourceVersion: "17" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1. uid: 3befcd87-6648-11e8-8005-0ed86b46e68a spec: caBundle: null groupPriorityMinimum: 18000 service: null version: v1 versionPriority: 1 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.684409 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1.rbac.authorization.k8s.io resourceVersion: "35" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.rbac.authorization.k8s.io uid: 3bf311c9-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: rbac.authorization.k8s.io groupPriorityMinimum: 17000 service: null version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.684832 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.policy resourceVersion: "36" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.policy uid: 3bf3040f-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: policy groupPriorityMinimum: 17100 service: null version: v1beta1 versionPriority: 9 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.685096 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.admissionregistration.k8s.io resourceVersion: "16" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.admissionregistration.k8s.io uid: 3befbbe2-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: admissionregistration.k8s.io groupPriorityMinimum: 16700 service: null version: v1beta1 versionPriority: 12 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.685652 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: 
kube-aggregator.kubernetes.io/automanaged: onstart name: v1.authentication.k8s.io resourceVersion: "25" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.authentication.k8s.io uid: 3bf07c03-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: authentication.k8s.io groupPriorityMinimum: 17700 service: null version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.685915 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.extensions resourceVersion: "34" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.extensions uid: 3bf2ff05-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: extensions groupPriorityMinimum: 17900 service: null version: v1beta1 versionPriority: 1 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.686492 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | {"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.build.openshift.io","namespace":""},"spec":{"group":"build.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.build.openshift.io resourceVersion: "547" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.build.openshift.io uid: 422626f1-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: build.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:35Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.686810 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.authentication.k8s.io resourceVersion: "24" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.authentication.k8s.io uid: 3bf08fae-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: authentication.k8s.io groupPriorityMinimum: 17700 service: null version: v1beta1 versionPriority: 9 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.688437 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.certificates.k8s.io resourceVersion: "31" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.certificates.k8s.io uid: 3bf21510-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: certificates.k8s.io groupPriorityMinimum: 17300 service: null version: v1beta1 versionPriority: 9 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.690478 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 
2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v2alpha1.batch resourceVersion: "30" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v2alpha1.batch uid: 3bf1b87d-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: batch groupPriorityMinimum: 17400 service: null version: v2alpha1 versionPriority: 9 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.691632 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1.autoscaling resourceVersion: "26" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.autoscaling uid: 3bf15a65-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: autoscaling groupPriorityMinimum: 17500 service: null version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.693954 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | {"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.authorization.openshift.io","namespace":""},"spec":{"group":"authorization.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.authorization.openshift.io resourceVersion: "540" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.authorization.openshift.io uid: 422446d7-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: authorization.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:34Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.695588 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | {"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.image.openshift.io","namespace":""},"spec":{"group":"image.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.image.openshift.io resourceVersion: "544" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.image.openshift.io uid: 422757b8-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: image.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:34Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.697487 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | 
{"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.route.openshift.io","namespace":""},"spec":{"group":"route.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.route.openshift.io resourceVersion: "551" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.route.openshift.io uid: 42303295-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: route.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:35Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.699028 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | {"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.user.openshift.io","namespace":""},"spec":{"group":"user.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.user.openshift.io resourceVersion: "550" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.user.openshift.io uid: 4235fed5-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: user.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:35Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.699411 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.apps resourceVersion: "19" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.apps uid: 3befcf1a-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: apps groupPriorityMinimum: 17800 service: null version: v1beta1 versionPriority: 1 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.700456 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.batch resourceVersion: "29" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.batch uid: 3bf1947b-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: batch groupPriorityMinimum: 17400 service: null version: v1beta1 versionPriority: 9 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.700707 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1.storage.k8s.io resourceVersion: "38" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.storage.k8s.io uid: 3bf40b22-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: storage.k8s.io groupPriorityMinimum: 16800 service: null version: v1 versionPriority: 15 
status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.701373 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.storage.k8s.io resourceVersion: "39" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.storage.k8s.io uid: 3bf4208d-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: storage.k8s.io groupPriorityMinimum: 16800 service: null version: v1beta1 versionPriority: 9 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.701792 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | {"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.quota.openshift.io","namespace":""},"spec":{"group":"quota.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.quota.openshift.io resourceVersion: "541" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.quota.openshift.io uid: 422d592a-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: quota.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:34Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.702569 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta2.apps resourceVersion: "21" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta2.apps uid: 3bf028a3-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: apps groupPriorityMinimum: 17800 service: null version: v1beta2 versionPriority: 9 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.702900 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.events.k8s.io resourceVersion: "32" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.events.k8s.io uid: 3bf2dd06-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: events.k8s.io groupPriorityMinimum: 17750 service: null version: v1beta1 versionPriority: 5 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.703919 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1.networking.k8s.io resourceVersion: "33" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.networking.k8s.io uid: 3bf2f341-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: networking.k8s.io groupPriorityMinimum: 17200 service: null version: v1 versionPriority: 15 status: conditions: - 
lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.704413 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | {"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.apps.openshift.io","namespace":""},"spec":{"group":"apps.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.apps.openshift.io resourceVersion: "542" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.apps.openshift.io uid: 4221f7b3-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: apps.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:34Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.705320 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | {"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.network.openshift.io","namespace":""},"spec":{"group":"network.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.network.openshift.io resourceVersion: "549" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.network.openshift.io uid: 4228b0f9-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: network.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:35Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.705748 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | {"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.oauth.openshift.io","namespace":""},"spec":{"group":"oauth.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.oauth.openshift.io resourceVersion: "548" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.oauth.openshift.io uid: 422a5e81-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: oauth.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:35Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.706521 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | 
{"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.template.openshift.io","namespace":""},"spec":{"group":"template.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.template.openshift.io resourceVersion: "546" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.template.openshift.io uid: 4234512c-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: template.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:35Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.707049 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v2beta1.autoscaling resourceVersion: "27" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v2beta1.autoscaling uid: 3bf173fe-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: autoscaling groupPriorityMinimum: 17500 service: null version: v2beta1 versionPriority: 9 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.707816 3285 run_self_hosted.go:576] observed "ADDED" with metadata: annotations: kubectl.kubernetes.io/last-applied-configuration: | {"apiVersion":"apiregistration.k8s.io/v1beta1","kind":"APIService","metadata":{"annotations":{},"name":"v1.project.openshift.io","namespace":""},"spec":{"group":"project.openshift.io","groupPriorityMinimum":9900,"insecureSkipTLSVerify":true,"service":{"name":"api","namespace":"openshift-apiserver"},"version":"v1","versionPriority":15}} creationTimestamp: 2018-06-02T09:35:16Z name: v1.project.openshift.io resourceVersion: "545" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1.project.openshift.io uid: 422bcc55-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: project.openshift.io groupPriorityMinimum: 9900 insecureSkipTLSVerify: true service: name: api namespace: openshift-apiserver version: v1 versionPriority: 15 status: conditions: - lastTransitionTime: 2018-06-02T09:35:35Z message: all checks passed reason: Passed status: "True" type: Available I0602 09:36:04.708141 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:35:06Z labels: kube-aggregator.kubernetes.io/automanaged: onstart name: v1beta1.apiextensions.k8s.io resourceVersion: "20" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1beta1.apiextensions.k8s.io uid: 3befd020-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: apiextensions.k8s.io groupPriorityMinimum: 16700 service: null version: v1beta1 versionPriority: 9 status: conditions: - lastTransitionTime: 2018-06-02T09:35:06Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:04.709740 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native 
Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:98, OomKillDisable:true, NGoroutines:91, SystemTime:"2018-06-02T09:36:04.698789113Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc4205af2d0), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc4207ae000)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:04.709835 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c echo 'openshift-controller-manager' && chmod 755 /install.sh && /install.sh host config: pid mode: host user mode: network mode: host I0602 09:36:04.748363 3285 run.go:200] Container created with id "597a428e3c194c4ae84b3947f7d409714a04bff94faacf0870892cea1debce7d" I0602 09:36:04.797182 3285 run.go:304] Starting container "597a428e3c194c4ae84b3947f7d409714a04bff94faacf0870892cea1debce7d" I0602 09:36:04.900355 3285 run.go:311] Waiting for container "597a428e3c194c4ae84b3947f7d409714a04bff94faacf0870892cea1debce7d" I0602 09:36:07.291344 3285 run.go:317] Done waiting for container "597a428e3c194c4ae84b3947f7d409714a04bff94faacf0870892cea1debce7d", rc=0 I0602 09:36:07.291382 3285 run.go:322] Reading logs from container "597a428e3c194c4ae84b3947f7d409714a04bff94faacf0870892cea1debce7d" I0602 09:36:07.293726 3285 run.go:330] Done reading logs from container "597a428e3c194c4ae84b3947f7d409714a04bff94faacf0870892cea1debce7d" I0602 09:36:07.294098 3285 run.go:337] Stdout: openshift-controller-manager total 48K drwxr-xr-x. 1 root root 186 Jun 2 09:36 . drwxr-xr-x. 1 root root 186 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 
14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rwxr-xr-x. 1 root root 857 Jan 1 1970 install.sh -rw-r--r--. 1 root root 2.3K Jan 1 1970 install.yaml -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 28 Jan 1 1970 namespace-file -rw-r--r--. 1 root root 137 Jan 1 1970 namespace.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 opt -rw-r--r--. 1 root root 749 Jan 1 1970 param-file.txt -rw-r--r--. 1 root root 80 Jan 1 1970 privileged-sa-list.txt dr-xr-xr-x. 170 root root 0 Jun 2 09:26 proc -rw-r--r--. 1 root root 2.7K Jan 1 1970 rbac.yaml dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 1 root root 41 Apr 2 18:38 var scc "privileged" added to: ["system:serviceaccount:openshift-controller-manager:openshift-controller-manager"] namespace "openshift-controller-manager" created clusterrole.rbac.authorization.k8s.io "system:openshift:openshift-controller-manager" reconciled clusterrolebinding.rbac.authorization.k8s.io "system:openshift:openshift-controller-manager" reconciled role.rbac.authorization.k8s.io "system:openshift:leader-locking-openshift-controller-manager" reconciled rolebinding.rbac.authorization.k8s.io "system:openshift:leader-locking-openshift-controller-manager" reconciled role.rbac.authorization.k8s.io "system:openshift:sa-creating-openshift-controller-manager" reconciled rolebinding.rbac.authorization.k8s.io "system:openshift:sa-creating-openshift-controller-manager" reconciled daemonset.apps "openshift-controller-manager" created serviceaccount "openshift-controller-manager" created I0602 09:36:07.294113 3285 run.go:338] Stderr: + ls -alh / + read p + oc adm policy add-scc-to-user --config=/kubeconfig.kubeconfig privileged system:serviceaccount:openshift-controller-manager:openshift-controller-manager + read p + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift-controller-manager ' + '[' -s /namespace.yaml ']' + oc apply --config=/kubeconfig.kubeconfig -f /namespace.yaml + '[' -s /rbac.yaml ']' + oc process --local -o yaml --ignore-unknown-parameters --param-file=/param-file.txt -f /rbac.yaml + oc auth reconcile --config=/kubeconfig.kubeconfig -f - + oc process --local -o yaml --ignore-unknown-parameters --param-file=/param-file.txt -f /install.yaml + oc apply --namespace=openshift-controller-manager --config=/kubeconfig.kubeconfig -f - I0602 09:36:07.294129 3285 run.go:342] Container run successful I0602 09:36:07.294142 3285 run.go:293] Deleting container "597a428e3c194c4ae84b3947f7d409714a04bff94faacf0870892cea1debce7d" I0602 09:36:07.406143 3285 interface.go:41] Finished installing "openshift-controller-manager" Adding default OAuthClient redirect URIs ... 
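The container output above shows the pattern cluster-up uses for each component: a throwaway openshift/origin-cli container runs its baked-in install.sh, which grants the component's service account the privileged SCC, creates the namespace, reconciles RBAC from a template, and applies the install template. A hedged sketch of the same sequence run directly against the cluster; it assumes an admin kubeconfig is already active and that namespace.yaml, rbac.yaml, install.yaml and param-file.txt are local copies of the files the image ships at /.
# Sketch of the component install sequence traced in the container's stderr.
# Assumptions: current kubeconfig has cluster-admin; template and param
# files have been copied locally from the origin-cli image.
oc adm policy add-scc-to-user privileged \
  system:serviceaccount:openshift-controller-manager:openshift-controller-manager
oc apply -f namespace.yaml
oc process --local -o yaml --ignore-unknown-parameters \
  --param-file=param-file.txt -f rbac.yaml | oc auth reconcile -f -
oc process --local -o yaml --ignore-unknown-parameters \
  --param-file=param-file.txt -f install.yaml \
  | oc apply --namespace=openshift-controller-manager -f -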
I0602 09:36:07.420183 3285 up.go:507] oc cluster add [--image=openshift/origin-${component}:latest --base-dir=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup --tag=latest registry sample-templates persistent-volumes centos-imagestreams router web-console] I0602 09:36:07.420244 3285 client.go:309] No Docker environment variables found. Will attempt default socket. I0602 09:36:07.420276 3285 client.go:314] No Docker host (DOCKER_HOST) configured. Will attempt default socket. Adding registry ... Adding sample-templates ... Adding persistent-volumes ... Adding centos-imagestreams ... Adding router ... Adding web-console ... I0602 09:36:07.423955 3285 interface.go:26] Installing "openshift-image-registry" ... I0602 09:36:07.423968 3285 interface.go:26] Installing "sample-templates" ... I0602 09:36:07.423977 3285 interface.go:26] Installing "persistent-volumes" ... I0602 09:36:07.423986 3285 interface.go:26] Installing "centos-imagestreams" ... I0602 09:36:07.423995 3285 interface.go:26] Installing "openshift-router" ... I0602 09:36:07.424004 3285 interface.go:26] Installing "openshift-web-console-operator" ... I0602 09:36:07.424480 3285 web_console_operator.go:58] instantiating webconsole-operator template with parameters map[COMPONENT_IMAGE:openshift/origin-web-console:latest COMPONENT_LOGLEVEL:0 OPENSHIFT_PULL_POLICY:IfNotPresent NAMESPACE:openshift-core-operators IMAGE:openshift/origin-hypershift:latest LOGLEVEL:0] I0602 09:36:07.424514 3285 apply_template.go:83] Installing "openshift-web-console-operator" I0602 09:36:07.424576 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.425217 3285 interface.go:26] Installing "sample-templates/mariadb" ... I0602 09:36:07.425237 3285 interface.go:26] Installing "sample-templates/cakephp quickstart" ... I0602 09:36:07.425249 3285 interface.go:26] Installing "sample-templates/jenkins pipeline ephemeral" ... I0602 09:36:07.426222 3285 apply_list.go:68] Installing "centos-imagestreams" I0602 09:36:07.426294 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.426536 3285 apply_list.go:68] Installing "sample-templates/mariadb" I0602 09:36:07.426568 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.426773 3285 apply_list.go:68] Installing "sample-templates/cakephp quickstart" I0602 09:36:07.426802 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.427037 3285 interface.go:26] Installing "sample-templates/sample pipeline" ... I0602 09:36:07.427059 3285 interface.go:26] Installing "sample-templates/mongodb" ... I0602 09:36:07.427070 3285 interface.go:26] Installing "sample-templates/mysql" ... I0602 09:36:07.427079 3285 interface.go:26] Installing "sample-templates/postgresql" ... I0602 09:36:07.427088 3285 interface.go:26] Installing "sample-templates/dancer quickstart" ... I0602 09:36:07.427098 3285 interface.go:26] Installing "sample-templates/django quickstart" ... I0602 09:36:07.427109 3285 interface.go:26] Installing "sample-templates/nodejs quickstart" ... I0602 09:36:07.427119 3285 interface.go:26] Installing "sample-templates/rails quickstart" ... 
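Just above, the web-console-operator component is installed by instantiating its template with the parameters printed at web_console_operator.go:58. A rough manual equivalent is sketched below; the file name webconsole-operator-template.yaml is an assumption (a local copy of the template cluster-up embeds), while the parameter names and values are taken from the log entry above.
# Sketch: instantiate the web-console operator template by hand.
# Assumption: webconsole-operator-template.yaml is a local copy of the
# embedded template; parameters mirror the logged map.
oc process --local -o yaml --ignore-unknown-parameters \
  -f webconsole-operator-template.yaml \
  -p IMAGE=openshift/origin-hypershift:latest \
  -p COMPONENT_IMAGE=openshift/origin-web-console:latest \
  -p COMPONENT_LOGLEVEL=0 \
  -p LOGLEVEL=0 \
  -p NAMESPACE=openshift-core-operators \
  -p OPENSHIFT_PULL_POLICY=IfNotPresent \
  | oc apply -n openshift-core-operators -f -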
I0602 09:36:07.427187 3285 apply_list.go:68] Installing "sample-templates/rails quickstart" I0602 09:36:07.427216 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.427406 3285 apply_list.go:68] Installing "sample-templates/jenkins pipeline ephemeral" I0602 09:36:07.427450 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.427624 3285 apply_list.go:68] Installing "sample-templates/sample pipeline" I0602 09:36:07.427653 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.427834 3285 apply_list.go:68] Installing "sample-templates/mongodb" I0602 09:36:07.427862 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.428034 3285 apply_list.go:68] Installing "sample-templates/mysql" I0602 09:36:07.428062 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.428225 3285 apply_list.go:68] Installing "sample-templates/postgresql" I0602 09:36:07.428252 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.428444 3285 apply_list.go:68] Installing "sample-templates/dancer quickstart" I0602 09:36:07.428473 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.428641 3285 apply_list.go:68] Installing "sample-templates/django quickstart" I0602 09:36:07.428669 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.428836 3285 apply_list.go:68] Installing "sample-templates/nodejs quickstart" I0602 09:36:07.428866 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.498924 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:07.527245 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:118, OomKillDisable:true, NGoroutines:120, SystemTime:"2018-06-02T09:36:07.511148406Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc4204729a0), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc42049f040)}, LiveRestoreEnabled:false, Isolation:"", 
InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:07.527405 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:07.540063 3285 create_servercert.go:119] Creating a server cert with: admin.CreateServerCertOptions{SignerCertOptions:(*admin.SignerCertOptions)(0xc421412ec0), CertFile:"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/router.crt", KeyFile:"/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/router.key", ExpireDays:0, Hostnames:[]string{"router.default.svc.cluster.local", "*.router.default.svc.cluster.local"}, Overwrite:true, Output:(*bytes.Buffer)(0xc42056fab0)} I0602 09:36:07.541206 3285 crypto.go:491] Generating server certificate in /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/router.crt, key in /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/router.key I0602 09:36:07.583603 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:115, OomKillDisable:true, NGoroutines:117, SystemTime:"2018-06-02T09:36:07.496802839Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420472fc0), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc42049f180)}, 
LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:07.583745 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:07.586036 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:118, OomKillDisable:true, NGoroutines:120, SystemTime:"2018-06-02T09:36:07.528369534Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420440930), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc420530640)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:07.586173 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:07.632595 3285 helper.go:52] Docker daemon info: 
&types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:128, OomKillDisable:true, NGoroutines:118, SystemTime:"2018-06-02T09:36:07.567540271Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420473650), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc42049f2c0)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:07.632731 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:07.640567 3285 run.go:200] Container created with id "03c99a8567cf91381d60b11e0d047b3971d2d7ab5d82a81df0f4a6a1765ebc5a" I0602 09:36:07.702285 3285 run.go:200] Container created with id "a6ac2905aec75b17796674bb00c1d367947d7dee8118101c4f5eebd1042f757a" I0602 09:36:07.702311 3285 run.go:200] Container created with id "6692d757e2da5644786bb7cfcda4e551cbd0a7d2319ddc37e9074422d85f0a6e" I0602 09:36:07.726689 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", 
"macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:113, OomKillDisable:true, NGoroutines:118, SystemTime:"2018-06-02T09:36:07.63413602Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc4205828c0), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc42093edc0)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:07.726846 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:07.777425 3285 run.go:200] Container created with id "17760450f60c871fe70b6e953d38f0353293a19b5bd993492646b1f4d5596d23" I0602 09:36:07.806423 3285 run.go:200] Container created with id "9df29a90da910b5a8e0f6b27aa66c52530b4bc67c34d7e430342bbb72ff00516" I0602 09:36:07.827379 3285 run.go:304] Starting container "03c99a8567cf91381d60b11e0d047b3971d2d7ab5d82a81df0f4a6a1765ebc5a" I0602 09:36:07.913074 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:123, OomKillDisable:true, NGoroutines:119, SystemTime:"2018-06-02T09:36:07.655344071Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, 
KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420582e00), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}, "docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc42093ef00)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:07.913217 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:07.964743 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:127, OomKillDisable:true, NGoroutines:119, SystemTime:"2018-06-02T09:36:07.66882681Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420440d90), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", 
ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc4205308c0)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:07.964891 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:07.972356 3285 run.go:304] Starting container "a6ac2905aec75b17796674bb00c1d367947d7dee8118101c4f5eebd1042f757a" I0602 09:36:08.002988 3285 run.go:304] Starting container "6692d757e2da5644786bb7cfcda4e551cbd0a7d2319ddc37e9074422d85f0a6e" I0602 09:36:08.017852 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:149, OomKillDisable:true, NGoroutines:119, SystemTime:"2018-06-02T09:36:07.758713286Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420440230), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}, "docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc4205303c0)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, 
SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:08.018003 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:08.019933 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:129, OomKillDisable:true, NGoroutines:121, SystemTime:"2018-06-02T09:36:07.693537934Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420440850), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc420530a00)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:08.020049 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c echo 'openshift-web-console-operator' && chmod 755 /install.sh && /install.sh host config: pid mode: host user mode: network mode: host I0602 09:36:08.025687 3285 create_servercert.go:134] Generated new server certificate as /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/router.crt, key as /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/router.key I0602 09:36:08.025867 3285 helper.go:46] Retrieving Docker daemon info I0602 09:36:08.077670 3285 run.go:200] Container created with id 
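# create_servercert.go above reports the router serving cert/key being regenerated under
# openshift.local.clusterup/kube-apiserver/. To confirm the hostnames and validity period
# it was issued with, the certificate can be inspected on the host (paths copied from the
# log; openssl is assumed to be available on the node):
openssl x509 -noout -text \
  -in /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/router.crt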
"7ef7a0448894a32d9eae9dea89ffcc69d6e9d335b2041208c1262f8b562ed833" I0602 09:36:08.107414 3285 run.go:304] Starting container "17760450f60c871fe70b6e953d38f0353293a19b5bd993492646b1f4d5596d23" I0602 09:36:08.123845 3285 run.go:200] Container created with id "36769d03d8b546254d262deed279330664046743a810177a6b5f74143409af53" I0602 09:36:08.124219 3285 run.go:200] Container created with id "06ed71ef7e3fd114180690121ddb7958d8a08d380a43fb797dcb18cd2f8c2f2a" I0602 09:36:08.144978 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:146, OomKillDisable:true, NGoroutines:119, SystemTime:"2018-06-02T09:36:07.721210567Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420440b60), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc420530b40)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:08.145115 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:08.145302 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, 
SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:149, OomKillDisable:true, NGoroutines:117, SystemTime:"2018-06-02T09:36:07.744244568Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc4204c77a0), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc420471cc0)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:08.145423 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:08.145578 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:160, OomKillDisable:true, NGoroutines:116, SystemTime:"2018-06-02T09:36:07.806047245Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420440e00), NCPU:4, 
MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc420530c80)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:08.145678 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:08.177442 3285 run.go:200] Container created with id "12b9014c70ac070d73de79f285ca9f21bd3eeca873ae41591862695a225b340a" I0602 09:36:08.182476 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:159, OomKillDisable:true, NGoroutines:121, SystemTime:"2018-06-02T09:36:07.932058431Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420abc310), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc420b08140)}, 
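# Each "Creating container named ''" entry above is cluster up driving the Docker API
# directly: a throw-away openshift/origin-cli container that shares the host's network
# and PID namespaces, with apply.sh, list.yaml and kubeconfig.kubeconfig presumably
# copied in before start (no volume binds are logged for these containers). A rough
# hand-run equivalent of one template-apply container would be the following sketch;
# it will not find /apply.sh unless the payload files are placed in the container first:
docker run --rm --net=host --pid=host --entrypoint sh openshift/origin-cli:latest \
  -c 'chmod 755 /apply.sh && /apply.sh'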
LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:08.182613 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: sh command: -c chmod 755 /apply.sh && /apply.sh host config: pid mode: host user mode: network mode: host I0602 09:36:08.198727 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:17, ContainersRunning:15, ContainersPaused:0, ContainersStopped:2, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:159, OomKillDisable:true, NGoroutines:121, SystemTime:"2018-06-02T09:36:07.933423425Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc42056e770), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc4207ae000)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:08.198877 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: oc command: adm registry --loglevel=0 --cluster-ip=172.30.1.1 --config=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/admin.kubeconfig 
--images=openshift/origin-${component}:latest --mount-host=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.pv/registry host config: pid mode: host user mode: network mode: host volume binds: /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver:/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:36:08.319320 3285 run.go:304] Starting container "9df29a90da910b5a8e0f6b27aa66c52530b4bc67c34d7e430342bbb72ff00516" I0602 09:36:08.339400 3285 run.go:304] Starting container "7ef7a0448894a32d9eae9dea89ffcc69d6e9d335b2041208c1262f8b562ed833" I0602 09:36:08.339585 3285 run.go:200] Container created with id "b8fb52601fdf5e78cd00bbaaf4c4526a52a30b2555bf9b71c7ef807d4b31dd1c" I0602 09:36:08.360675 3285 helper.go:52] Docker daemon info: &types.Info{ID:"6BJV:73LF:IA7K:ALFR:AZ7W:LB7R:QWV6:M3QJ:L6HJ:QBVN:R7RW:ZVMN", Containers:23, ContainersRunning:15, ContainersPaused:0, ContainersStopped:8, Images:38, Driver:"overlay2", DriverStatus:[][2]string{[2]string{"Backing Filesystem", "xfs"}, [2]string{"Supports d_type", "true"}, [2]string{"Native Overlay Diff", "true"}}, SystemStatus:[][2]string(nil), Plugins:types.PluginsInfo{Volume:[]string{"local"}, Network:[]string{"bridge", "host", "macvlan", "null", "overlay"}, Authorization:[]string{"rhel-push-plugin"}, Log:[]string(nil)}, MemoryLimit:true, SwapLimit:true, KernelMemory:true, CPUCfsPeriod:true, CPUCfsQuota:true, CPUShares:true, CPUSet:true, IPv4Forwarding:true, BridgeNfIptables:true, BridgeNfIP6tables:true, Debug:true, NFd:152, OomKillDisable:true, NGoroutines:130, SystemTime:"2018-06-02T09:36:08.091934142Z", LoggingDriver:"json-file", CgroupDriver:"systemd", NEventsListener:0, KernelVersion:"3.10.0-862.2.3.el7.x86_64", OperatingSystem:"Red Hat Enterprise Linux Server 7.5 (Maipo)", OSType:"linux", Architecture:"x86_64", IndexServerAddress:"https://registry.access.redhat.com/v1/", RegistryConfig:(*registry.ServiceConfig)(0xc420abc700), NCPU:4, MemTotal:16656269312, GenericResources:[]swarm.GenericResource(nil), DockerRootDir:"/var/lib/docker", HTTPProxy:"", HTTPSProxy:"", NoProxy:"", Name:"ip-172-18-2-81.ec2.internal", Labels:[]string(nil), ExperimentalBuild:false, ServerVersion:"1.13.1", ClusterStore:"", ClusterAdvertise:"", Runtimes:map[string]types.Runtime{"docker-runc":types.Runtime{Path:"/usr/libexec/docker/docker-runc-current", Args:[]string(nil)}, "runc":types.Runtime{Path:"docker-runc", Args:[]string(nil)}}, DefaultRuntime:"docker-runc", Swarm:swarm.Info{NodeID:"", NodeAddr:"", LocalNodeState:"inactive", ControlAvailable:false, Error:"", RemoteManagers:[]swarm.Peer(nil), Nodes:0, Managers:0, Cluster:(*swarm.ClusterInfo)(0xc420b08640)}, LiveRestoreEnabled:false, Isolation:"", InitBinary:"/usr/libexec/docker/docker-init-current", ContainerdCommit:types.Commit{ID:"", Expected:"aa8187dbd3b7ad67d8e5e3a15115d3eef43a7ed1"}, RuncCommit:types.Commit{ID:"e9c345b3f906d5dc5e8100b05ce37073a811c74a", Expected:"9df8b306d01f59d3a8029be411de015b7304dd8f"}, InitCommit:types.Commit{ID:"5b117de7f824f3d3825737cf09581645abbe35d4", Expected:"949e6facb77383876aeff8a6944dde66b3089574"}, SecurityOptions:[]string{"name=seccomp,profile=/etc/docker/seccomp.json", "name=selinux"}} I0602 09:36:08.361490 3285 run.go:195] Creating container named "" config: image: openshift/origin-cli:latest entry point: oc command: adm router --host-ports=true --loglevel=0 
--config=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/admin.kubeconfig --host-network=true --images=openshift/origin-${component}:latest --default-cert=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/router.pem host config: pid mode: user mode: network mode: host volume binds: /data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver:/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver I0602 09:36:08.394484 3285 run.go:200] Container created with id "aa5336736f90271f4d1465863c80ae092cec4b5477cdf4205f3c14b2a878b727" I0602 09:36:08.411270 3285 run.go:304] Starting container "36769d03d8b546254d262deed279330664046743a810177a6b5f74143409af53" I0602 09:36:08.484442 3285 run.go:200] Container created with id "666a27692885f13e64e3e4d58c85bb5c851c57c26d658a017c1d7a01b375c75b" I0602 09:36:08.523027 3285 run.go:311] Waiting for container "03c99a8567cf91381d60b11e0d047b3971d2d7ab5d82a81df0f4a6a1765ebc5a" I0602 09:36:08.533246 3285 run.go:304] Starting container "06ed71ef7e3fd114180690121ddb7958d8a08d380a43fb797dcb18cd2f8c2f2a" I0602 09:36:08.566327 3285 run.go:304] Starting container "12b9014c70ac070d73de79f285ca9f21bd3eeca873ae41591862695a225b340a" I0602 09:36:08.587801 3285 run.go:200] Container created with id "4424c0781ca81c44293648cf4418f314a00b7be8637f5f2bc2c2eb3d72b7c238" I0602 09:36:08.665798 3285 run.go:200] Container created with id "69262893cc6b2a39f6997c57ad8e641062af2e5c73407b158ccd0f377fbb3a1d" I0602 09:36:08.665825 3285 run.go:304] Starting container "69262893cc6b2a39f6997c57ad8e641062af2e5c73407b158ccd0f377fbb3a1d" I0602 09:36:08.683379 3285 run.go:304] Starting container "b8fb52601fdf5e78cd00bbaaf4c4526a52a30b2555bf9b71c7ef807d4b31dd1c" I0602 09:36:08.707444 3285 run.go:200] Container created with id "55efb3ae65fdad7abe52293a12b13a57e63a07ca5a2a8f408d41d2ee907e6c57" I0602 09:36:08.707470 3285 run.go:304] Starting container "55efb3ae65fdad7abe52293a12b13a57e63a07ca5a2a8f408d41d2ee907e6c57" I0602 09:36:08.811391 3285 run.go:304] Starting container "aa5336736f90271f4d1465863c80ae092cec4b5477cdf4205f3c14b2a878b727" I0602 09:36:08.938389 3285 run.go:304] Starting container "4424c0781ca81c44293648cf4418f314a00b7be8637f5f2bc2c2eb3d72b7c238" I0602 09:36:08.960408 3285 run.go:311] Waiting for container "a6ac2905aec75b17796674bb00c1d367947d7dee8118101c4f5eebd1042f757a" I0602 09:36:08.980386 3285 run.go:304] Starting container "666a27692885f13e64e3e4d58c85bb5c851c57c26d658a017c1d7a01b375c75b" I0602 09:36:09.214403 3285 run.go:311] Waiting for container "6692d757e2da5644786bb7cfcda4e551cbd0a7d2319ddc37e9074422d85f0a6e" I0602 09:36:09.747638 3285 run.go:311] Waiting for container "7ef7a0448894a32d9eae9dea89ffcc69d6e9d335b2041208c1262f8b562ed833" I0602 09:36:09.791407 3285 run.go:311] Waiting for container "9df29a90da910b5a8e0f6b27aa66c52530b4bc67c34d7e430342bbb72ff00516" I0602 09:36:10.041887 3285 run.go:311] Waiting for container "17760450f60c871fe70b6e953d38f0353293a19b5bd993492646b1f4d5596d23" I0602 09:36:10.180534 3285 run.go:311] Waiting for container "06ed71ef7e3fd114180690121ddb7958d8a08d380a43fb797dcb18cd2f8c2f2a" I0602 09:36:10.591847 3285 run.go:317] Done waiting for container "03c99a8567cf91381d60b11e0d047b3971d2d7ab5d82a81df0f4a6a1765ebc5a", rc=0 I0602 09:36:10.591876 3285 run.go:322] Reading logs from container "03c99a8567cf91381d60b11e0d047b3971d2d7ab5d82a81df0f4a6a1765ebc5a" I0602 09:36:10.602443 
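# For readability, the two oc invocations that cluster up just wrapped in origin-cli
# containers (registry and router, logged above with their full argument lists) amount
# to roughly the following, run against the same admin.kubeconfig:
oc adm registry \
  --loglevel=0 \
  --cluster-ip=172.30.1.1 \
  --config=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/admin.kubeconfig \
  --images='openshift/origin-${component}:latest' \
  --mount-host=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/openshift.local.pv/registry
oc adm router \
  --host-ports=true \
  --loglevel=0 \
  --config=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/admin.kubeconfig \
  --host-network=true \
  --images='openshift/origin-${component}:latest' \
  --default-cert=/data/src/github.com/openshift/origin-web-console-server/openshift.local.clusterup/kube-apiserver/router.pem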
3285 run.go:330] Done reading logs from container "03c99a8567cf91381d60b11e0d047b3971d2d7ab5d82a81df0f4a6a1765ebc5a" I0602 09:36:10.602838 3285 run.go:337] Stdout: total 52K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 22K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 185 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 1 root root 41 Apr 2 18:38 var template.template.openshift.io "nodejs-mongo-persistent" created I0602 09:36:10.602855 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:10.602871 3285 run.go:342] Container run successful I0602 09:36:10.602883 3285 run.go:293] Deleting container "03c99a8567cf91381d60b11e0d047b3971d2d7ab5d82a81df0f4a6a1765ebc5a" I0602 09:36:10.928396 3285 run.go:311] Waiting for container "69262893cc6b2a39f6997c57ad8e641062af2e5c73407b158ccd0f377fbb3a1d" I0602 09:36:10.932341 3285 run.go:311] Waiting for container "55efb3ae65fdad7abe52293a12b13a57e63a07ca5a2a8f408d41d2ee907e6c57" I0602 09:36:10.932470 3285 run.go:311] Waiting for container "36769d03d8b546254d262deed279330664046743a810177a6b5f74143409af53" I0602 09:36:11.006430 3285 run.go:311] Waiting for container "12b9014c70ac070d73de79f285ca9f21bd3eeca873ae41591862695a225b340a" I0602 09:36:11.258407 3285 run.go:311] Waiting for container "666a27692885f13e64e3e4d58c85bb5c851c57c26d658a017c1d7a01b375c75b" I0602 09:36:11.416020 3285 run.go:311] Waiting for container "b8fb52601fdf5e78cd00bbaaf4c4526a52a30b2555bf9b71c7ef807d4b31dd1c" I0602 09:36:11.833438 3285 run.go:311] Waiting for container "aa5336736f90271f4d1465863c80ae092cec4b5477cdf4205f3c14b2a878b727" I0602 09:36:12.028406 3285 run.go:311] Waiting for container "4424c0781ca81c44293648cf4418f314a00b7be8637f5f2bc2c2eb3d72b7c238" I0602 09:36:12.122086 3285 run.go:317] Done waiting for container "9df29a90da910b5a8e0f6b27aa66c52530b4bc67c34d7e430342bbb72ff00516", rc=0 I0602 09:36:12.122113 3285 run.go:322] Reading logs from container "9df29a90da910b5a8e0f6b27aa66c52530b4bc67c34d7e430342bbb72ff00516" I0602 09:36:12.294765 3285 run.go:330] Done reading logs from container "9df29a90da910b5a8e0f6b27aa66c52530b4bc67c34d7e430342bbb72ff00516" I0602 09:36:12.295117 3285 run.go:337] Stdout: total 40K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 
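# The "Stderr:" xtrace above ("+ ls -alh / ... + oc apply --namespace=openshift ...")
# shows what /apply.sh does inside each origin-cli container. Reconstructed from that
# trace, the script is roughly the following (a sketch, not the literal file shipped
# in the image):
ls -alh /
ns=""
if [ -s /namespace-file ]; then
  ns="--namespace=$(cat /namespace-file) "
fi
oc apply ${ns} --config=/kubeconfig.kubeconfig -f /list.yaml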
1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 9.6K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 214 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 1 root root 41 Apr 2 18:38 var template.template.openshift.io "jenkins-pipeline-example" created I0602 09:36:12.295134 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:12.295147 3285 run.go:342] Container run successful I0602 09:36:12.295156 3285 run.go:293] Deleting container "9df29a90da910b5a8e0f6b27aa66c52530b4bc67c34d7e430342bbb72ff00516" I0602 09:36:13.034020 3285 run.go:317] Done waiting for container "a6ac2905aec75b17796674bb00c1d367947d7dee8118101c4f5eebd1042f757a", rc=0 I0602 09:36:13.034048 3285 run.go:322] Reading logs from container "a6ac2905aec75b17796674bb00c1d367947d7dee8118101c4f5eebd1042f757a" I0602 09:36:13.050346 3285 run.go:330] Done reading logs from container "a6ac2905aec75b17796674bb00c1d367947d7dee8118101c4f5eebd1042f757a" I0602 09:36:13.050940 3285 run.go:337] Stdout: total 56K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 25K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 200 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 
1 root root 41 Apr 2 18:38 var template.template.openshift.io "rails-pgsql-persistent" created I0602 09:36:13.050957 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:13.050969 3285 run.go:342] Container run successful I0602 09:36:13.050978 3285 run.go:293] Deleting container "a6ac2905aec75b17796674bb00c1d367947d7dee8118101c4f5eebd1042f757a" I0602 09:36:13.095456 3285 run.go:317] Done waiting for container "6692d757e2da5644786bb7cfcda4e551cbd0a7d2319ddc37e9074422d85f0a6e", rc=0 I0602 09:36:13.095485 3285 run.go:322] Reading logs from container "6692d757e2da5644786bb7cfcda4e551cbd0a7d2319ddc37e9074422d85f0a6e" I0602 09:36:13.122357 3285 run.go:330] Done reading logs from container "6692d757e2da5644786bb7cfcda4e551cbd0a7d2319ddc37e9074422d85f0a6e" I0602 09:36:13.122756 3285 run.go:337] Stdout: total 44K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 13K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 208 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 1 root root 41 Apr 2 18:38 var template.template.openshift.io "jenkins-ephemeral" created I0602 09:36:13.122771 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:13.122784 3285 run.go:342] Container run successful I0602 09:36:13.122793 3285 run.go:293] Deleting container "6692d757e2da5644786bb7cfcda4e551cbd0a7d2319ddc37e9074422d85f0a6e" I0602 09:36:13.139751 3285 run.go:317] Done waiting for container "69262893cc6b2a39f6997c57ad8e641062af2e5c73407b158ccd0f377fbb3a1d", rc=0 I0602 09:36:13.139780 3285 run.go:322] Reading logs from container "69262893cc6b2a39f6997c57ad8e641062af2e5c73407b158ccd0f377fbb3a1d" I0602 09:36:13.145712 3285 run.go:330] Done reading logs from container "69262893cc6b2a39f6997c57ad8e641062af2e5c73407b158ccd0f377fbb3a1d" I0602 09:36:13.146030 3285 run.go:337] Stdout: --> Creating registry registry ... 
serviceaccount "registry" created clusterrolebinding "registry-registry-role" created deploymentconfig "docker-registry" created service "docker-registry" created --> Success I0602 09:36:13.146064 3285 run.go:338] Stderr: I0602 09:36:13.146079 3285 run.go:342] Container run successful I0602 09:36:13.146089 3285 run.go:293] Deleting container "69262893cc6b2a39f6997c57ad8e641062af2e5c73407b158ccd0f377fbb3a1d" I0602 09:36:13.202633 3285 run.go:317] Done waiting for container "7ef7a0448894a32d9eae9dea89ffcc69d6e9d335b2041208c1262f8b562ed833", rc=0 I0602 09:36:13.202663 3285 run.go:322] Reading logs from container "7ef7a0448894a32d9eae9dea89ffcc69d6e9d335b2041208c1262f8b562ed833" I0602 09:36:13.254399 3285 run.go:330] Done reading logs from container "7ef7a0448894a32d9eae9dea89ffcc69d6e9d335b2041208c1262f8b562ed833" I0602 09:36:13.254694 3285 run.go:337] Stdout: total 40K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 12K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 224 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 1 root root 41 Apr 2 18:38 var template.template.openshift.io "postgresql-persistent" created I0602 09:36:13.254710 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:13.254722 3285 run.go:342] Container run successful I0602 09:36:13.254731 3285 run.go:293] Deleting container "7ef7a0448894a32d9eae9dea89ffcc69d6e9d335b2041208c1262f8b562ed833" I0602 09:36:13.304626 3285 run.go:317] Done waiting for container "55efb3ae65fdad7abe52293a12b13a57e63a07ca5a2a8f408d41d2ee907e6c57", rc=0 I0602 09:36:13.304654 3285 run.go:322] Reading logs from container "55efb3ae65fdad7abe52293a12b13a57e63a07ca5a2a8f408d41d2ee907e6c57" I0602 09:36:13.318330 3285 run.go:330] Done reading logs from container "55efb3ae65fdad7abe52293a12b13a57e63a07ca5a2a8f408d41d2ee907e6c57" I0602 09:36:13.318605 3285 run.go:337] Stdout: --> Creating router router ... 
secret "router-certs" created clusterrolebinding "router-router-role" created deploymentconfig "router" created service "router" created --> Success I0602 09:36:13.318618 3285 run.go:338] Stderr: info: password for stats user admin has been set to ayYg90NsDI warning: serviceaccounts "router" already exists I0602 09:36:13.318627 3285 run.go:342] Container run successful I0602 09:36:13.318635 3285 run.go:293] Deleting container "55efb3ae65fdad7abe52293a12b13a57e63a07ca5a2a8f408d41d2ee907e6c57" I0602 09:36:13.369451 3285 run.go:317] Done waiting for container "17760450f60c871fe70b6e953d38f0353293a19b5bd993492646b1f4d5596d23", rc=0 I0602 09:36:13.369479 3285 run.go:322] Reading logs from container "17760450f60c871fe70b6e953d38f0353293a19b5bd993492646b1f4d5596d23" I0602 09:36:13.374952 3285 run.go:330] Done reading logs from container "17760450f60c871fe70b6e953d38f0353293a19b5bd993492646b1f4d5596d23" I0602 09:36:13.375371 3285 run.go:337] Stdout: total 88K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 60K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 217 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 
1 root root 41 Apr 2 18:38 var imagestream.image.openshift.io "dotnet" created imagestream.image.openshift.io "httpd" created imagestream.image.openshift.io "jenkins" created imagestream.image.openshift.io "mariadb" created imagestream.image.openshift.io "mongodb" created imagestream.image.openshift.io "mysql" created imagestream.image.openshift.io "nginx" created imagestream.image.openshift.io "nodejs" created imagestream.image.openshift.io "perl" created imagestream.image.openshift.io "php" created imagestream.image.openshift.io "postgresql" created imagestream.image.openshift.io "python" created imagestream.image.openshift.io "redis" created imagestream.image.openshift.io "ruby" created imagestream.image.openshift.io "wildfly" created I0602 09:36:13.375395 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:13.375409 3285 run.go:342] Container run successful I0602 09:36:13.375417 3285 run.go:293] Deleting container "17760450f60c871fe70b6e953d38f0353293a19b5bd993492646b1f4d5596d23" I0602 09:36:13.418782 3285 run.go:317] Done waiting for container "06ed71ef7e3fd114180690121ddb7958d8a08d380a43fb797dcb18cd2f8c2f2a", rc=0 I0602 09:36:13.418809 3285 run.go:322] Reading logs from container "06ed71ef7e3fd114180690121ddb7958d8a08d380a43fb797dcb18cd2f8c2f2a" I0602 09:36:13.451389 3285 run.go:330] Done reading logs from container "06ed71ef7e3fd114180690121ddb7958d8a08d380a43fb797dcb18cd2f8c2f2a" I0602 09:36:13.451794 3285 run.go:337] Stdout: total 52K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 22K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 226 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 
1 root root 41 Apr 2 18:38 var template.template.openshift.io "django-psql-persistent" created I0602 09:36:13.451810 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:13.451823 3285 run.go:342] Container run successful I0602 09:36:13.451836 3285 run.go:293] Deleting container "06ed71ef7e3fd114180690121ddb7958d8a08d380a43fb797dcb18cd2f8c2f2a" I0602 09:36:13.799447 3285 run.go:317] Done waiting for container "aa5336736f90271f4d1465863c80ae092cec4b5477cdf4205f3c14b2a878b727", rc=0 I0602 09:36:13.799476 3285 run.go:322] Reading logs from container "aa5336736f90271f4d1465863c80ae092cec4b5477cdf4205f3c14b2a878b727" I0602 09:36:13.804333 3285 run.go:330] Done reading logs from container "aa5336736f90271f4d1465863c80ae092cec4b5477cdf4205f3c14b2a878b727" I0602 09:36:13.804617 3285 run.go:337] Stdout: total 44K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 13K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 225 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 1 root root 41 Apr 2 18:38 var template.template.openshift.io "mysql-persistent" created I0602 09:36:13.804631 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:13.804642 3285 run.go:342] Container run successful I0602 09:36:13.804654 3285 run.go:293] Deleting container "aa5336736f90271f4d1465863c80ae092cec4b5477cdf4205f3c14b2a878b727" I0602 09:36:13.928443 3285 run.go:317] Done waiting for container "12b9014c70ac070d73de79f285ca9f21bd3eeca873ae41591862695a225b340a", rc=0 I0602 09:36:13.928473 3285 run.go:322] Reading logs from container "12b9014c70ac070d73de79f285ca9f21bd3eeca873ae41591862695a225b340a" I0602 09:36:13.955910 3285 run.go:330] Done reading logs from container "12b9014c70ac070d73de79f285ca9f21bd3eeca873ae41591862695a225b340a" I0602 09:36:13.956628 3285 run.go:337] Stdout: total 52K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 
2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 23K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 228 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 1 root root 41 Apr 2 18:38 var template.template.openshift.io "cakephp-mysql-persistent" created I0602 09:36:13.956644 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:13.956658 3285 run.go:342] Container run successful I0602 09:36:13.956666 3285 run.go:293] Deleting container "12b9014c70ac070d73de79f285ca9f21bd3eeca873ae41591862695a225b340a" I0602 09:36:13.987431 3285 run.go:317] Done waiting for container "b8fb52601fdf5e78cd00bbaaf4c4526a52a30b2555bf9b71c7ef807d4b31dd1c", rc=0 I0602 09:36:13.987457 3285 run.go:322] Reading logs from container "b8fb52601fdf5e78cd00bbaaf4c4526a52a30b2555bf9b71c7ef807d4b31dd1c" I0602 09:36:13.994324 3285 run.go:330] Done reading logs from container "b8fb52601fdf5e78cd00bbaaf4c4526a52a30b2555bf9b71c7ef807d4b31dd1c" I0602 09:36:13.994665 3285 run.go:337] Stdout: total 44K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 13K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 225 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 
1 root root 41 Apr 2 18:38 var template.template.openshift.io "mariadb-persistent" created I0602 09:36:13.994686 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:13.994699 3285 run.go:342] Container run successful I0602 09:36:13.994707 3285 run.go:293] Deleting container "b8fb52601fdf5e78cd00bbaaf4c4526a52a30b2555bf9b71c7ef807d4b31dd1c" I0602 09:36:14.149238 3285 run.go:317] Done waiting for container "666a27692885f13e64e3e4d58c85bb5c851c57c26d658a017c1d7a01b375c75b", rc=0 I0602 09:36:14.149284 3285 run.go:322] Reading logs from container "666a27692885f13e64e3e4d58c85bb5c851c57c26d658a017c1d7a01b375c75b" I0602 09:36:14.156951 3285 run.go:330] Done reading logs from container "666a27692885f13e64e3e4d58c85bb5c851c57c26d658a017c1d7a01b375c75b" I0602 09:36:14.157282 3285 run.go:337] Stdout: total 52K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 21K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 221 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 1 root root 41 Apr 2 18:38 var template.template.openshift.io "dancer-mysql-persistent" created I0602 09:36:14.157302 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:14.157314 3285 run.go:342] Container run successful I0602 09:36:14.157323 3285 run.go:293] Deleting container "666a27692885f13e64e3e4d58c85bb5c851c57c26d658a017c1d7a01b375c75b" I0602 09:36:14.310650 3285 run.go:317] Done waiting for container "4424c0781ca81c44293648cf4418f314a00b7be8637f5f2bc2c2eb3d72b7c238", rc=0 I0602 09:36:14.310677 3285 run.go:322] Reading logs from container "4424c0781ca81c44293648cf4418f314a00b7be8637f5f2bc2c2eb3d72b7c238" I0602 09:36:14.318836 3285 run.go:330] Done reading logs from container "4424c0781ca81c44293648cf4418f314a00b7be8637f5f2bc2c2eb3d72b7c238" I0602 09:36:14.319131 3285 run.go:337] Stdout: total 44K drwxr-xr-x. 1 root root 90 Jun 2 09:36 . drwxr-xr-x. 1 root root 90 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log -rwxr-xr-x. 1 root root 182 Jan 1 1970 apply.sh lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 
2 root root 6 Nov 5 2016 home -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 -rw-r--r--. 1 root root 13K Jan 1 1970 list.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 9 Jan 1 1970 namespace-file drwxr-xr-x. 2 root root 6 Nov 5 2016 opt dr-xr-xr-x. 222 root root 0 Jun 2 09:26 proc dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 1 root root 41 Apr 2 18:38 var template.template.openshift.io "mongodb-persistent" created I0602 09:36:14.319144 3285 run.go:338] Stderr: + ls -alh / + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift ' + oc apply --namespace=openshift --config=/kubeconfig.kubeconfig -f /list.yaml I0602 09:36:14.319156 3285 run.go:342] Container run successful I0602 09:36:14.319165 3285 run.go:293] Deleting container "4424c0781ca81c44293648cf4418f314a00b7be8637f5f2bc2c2eb3d72b7c238" I0602 09:36:14.820823 3285 run_self_hosted.go:576] observed "ADDED" with metadata: creationTimestamp: 2018-06-02T09:36:14Z labels: kube-aggregator.kubernetes.io/automanaged: "true" name: v1alpha1.webconsole.operator.openshift.io resourceVersion: "728" selfLink: /apis/apiregistration.k8s.io/v1beta1/apiservices/v1alpha1.webconsole.operator.openshift.io uid: 64b616cf-6648-11e8-8005-0ed86b46e68a spec: caBundle: null group: webconsole.operator.openshift.io groupPriorityMinimum: 1000 service: null version: v1alpha1 versionPriority: 100 status: conditions: - lastTransitionTime: 2018-06-02T09:36:14Z message: Local APIServices are always available reason: Local status: "True" type: Available I0602 09:36:15.799453 3285 run.go:317] Done waiting for container "36769d03d8b546254d262deed279330664046743a810177a6b5f74143409af53", rc=0 I0602 09:36:15.799485 3285 run.go:322] Reading logs from container "36769d03d8b546254d262deed279330664046743a810177a6b5f74143409af53" I0602 09:36:15.814384 3285 run.go:330] Done reading logs from container "36769d03d8b546254d262deed279330664046743a810177a6b5f74143409af53" I0602 09:36:15.814718 3285 run.go:337] Stdout: openshift-web-console-operator total 40K drwxr-xr-x. 1 root root 186 Jun 2 09:36 . drwxr-xr-x. 1 root root 186 Jun 2 09:36 .. -rwxr-xr-x. 1 root root 0 Jun 2 09:36 .dockerenv -rw-r--r--. 1 root root 12K Apr 2 18:39 anaconda-post.log lrwxrwxrwx. 1 root root 7 Apr 2 18:38 bin -> usr/bin drwxr-xr-x. 14 root root 2.6K Jun 2 09:36 dev drwxr-xr-x. 1 root root 66 Jun 2 09:36 etc drwxr-xr-x. 2 root root 6 Nov 5 2016 home -rwxr-xr-x. 1 root root 857 Jan 1 1970 install.sh -rw-r--r--. 1 root root 2.8K Jan 1 1970 install.yaml -rw-r--r--. 1 root root 7.2K Jan 1 1970 kubeconfig.kubeconfig lrwxrwxrwx. 1 root root 7 Apr 2 18:38 lib -> usr/lib lrwxrwxrwx. 1 root root 9 Apr 2 18:38 lib64 -> usr/lib64 drwxr-xr-x. 2 root root 6 Nov 5 2016 media drwxr-xr-x. 2 root root 6 Nov 5 2016 mnt -rw-r--r--. 1 root root 24 Jan 1 1970 namespace-file -rw-r--r--. 1 root root 0 Jan 1 1970 namespace.yaml drwxr-xr-x. 2 root root 6 Nov 5 2016 opt -rw-r--r--. 1 root root 207 Jan 1 1970 param-file.txt -rw-r--r--. 1 root root 0 Jan 1 1970 privileged-sa-list.txt dr-xr-xr-x. 226 root root 0 Jun 2 09:26 proc -rw-r--r--. 
1 root root 460 Jan 1 1970 rbac.yaml dr-xr-x---. 2 root root 114 Apr 2 18:39 root drwxr-xr-x. 1 root root 6 May 31 04:14 run lrwxrwxrwx. 1 root root 8 Apr 2 18:38 sbin -> usr/sbin drwxr-xr-x. 1 root root 34 May 31 04:13 srv dr-xr-xr-x. 13 root root 0 Jun 2 09:26 sys drwxrwxrwt. 1 root root 6 May 31 04:14 tmp drwxr-xr-x. 1 root root 30 Apr 2 18:38 usr drwxr-xr-x. 1 root root 41 Apr 2 18:38 var namespace "openshift-core-operators" created clusterrolebinding.rbac.authorization.k8s.io "system:openshift:operator:web-console" reconciled customresourcedefinition.apiextensions.k8s.io "openshiftwebconsoleconfigs.webconsole.operator.openshift.io" created configmap "openshift-web-console-operator-config" created deployment.apps "openshift-web-console-operator" created serviceaccount "openshift-web-console-operator" created openshiftwebconsoleconfig.webconsole.operator.openshift.io "instance" created I0602 09:36:15.814732 3285 run.go:338] Stderr: + ls -alh / + read p + ns= + '[' -s /namespace-file ']' ++ cat /namespace-file + ns='--namespace=openshift-core-operators ' + '[' -s /namespace.yaml ']' + '[' -s /namespace-file ']' + oc apply --config=/kubeconfig.kubeconfig -f - ++ cat /namespace-file + oc create ns openshift-core-operators --config=/kubeconfig.kubeconfig --dry-run -o yaml + '[' -s /rbac.yaml ']' + oc process --local -o yaml --ignore-unknown-parameters --param-file=/param-file.txt -f /rbac.yaml + oc auth reconcile --config=/kubeconfig.kubeconfig -f - + oc apply --namespace=openshift-core-operators --config=/kubeconfig.kubeconfig -f - + oc process --local -o yaml --ignore-unknown-parameters --param-file=/param-file.txt -f /install.yaml I0602 09:36:15.814743 3285 run.go:342] Container run successful I0602 09:36:15.814753 3285 run.go:293] Deleting container "36769d03d8b546254d262deed279330664046743a810177a6b5f74143409af53" I0602 09:36:15.881381 3285 interface.go:41] Finished installing "sample-templates/mariadb" "sample-templates/cakephp quickstart" "sample-templates/jenkins pipeline ephemeral" "sample-templates/sample pipeline" "sample-templates/mongodb" "sample-templates/mysql" "sample-templates/postgresql" "sample-templates/dancer quickstart" "sample-templates/django quickstart" "sample-templates/nodejs quickstart" "sample-templates/rails quickstart" I0602 09:36:15.931409 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:16.948355 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:17.955722 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:18.951340 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:19.948054 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:20.950035 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:21.948411 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:22.947689 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:23.947715 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:24.950365 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:25.947729 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:26.948406 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:27.947718 3285 web_console_operator.go:68] polling for web-console availability ... 
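The repeated messages above come from the installer's availability poll in web_console_operator.go. A minimal bash sketch of the same retry-until-healthy idea, assuming a hypothetical console health URL on the local master and a one-second interval (the real check runs inside the Go installer, not in the job's shell scripts):

  # Poll a (hypothetical) web-console endpoint until it answers HTTP 200,
  # giving up after 60 one-second attempts.
  CONSOLE_URL="https://localhost:8443/console/"   # assumption: console reachable via the master
  for attempt in $(seq 1 60); do
    code=$(curl -ks -o /dev/null -w '%{http_code}' "${CONSOLE_URL}")
    if [ "${code}" = "200" ]; then
      echo "web console is available"
      break
    fi
    echo "polling for web-console availability ..."
    sleep 1
  done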
I0602 09:36:28.948369 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:29.950289 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:30.947728 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:31.947721 3285 web_console_operator.go:68] polling for web-console availability ... I0602 09:36:31.960324 3285 interface.go:41] Finished installing "openshift-image-registry" "sample-templates" "persistent-volumes" "centos-imagestreams" "openshift-router" "openshift-web-console-operator" Login to server ... Login successful. You don't have any projects. You can try to create a new project, by running new-project <projectname> Welcome! See ' help' to get started. Creating initial project "myproject" ... Server Information ... OpenShift server started. The server is accessible via web console at: https://localhost:8443 You are logged in as: User: developer Password: <any value> To login as administrator: oc login -u system:admin + set +o xtrace ########## FINISHED STAGE: SUCCESS: VALIDATE CONSOLE STARTS [00h 02m 17s] ########## [PostBuildScript] - Executing post build scripts. [workspace] $ /bin/bash /tmp/jenkins6950715912539546932.sh ########## STARTING STAGE: DOWNLOAD ARTIFACTS FROM THE REMOTE HOST ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + trap 'exit 0' EXIT ++ pwd + ARTIFACT_DIR=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/gathered + rm -rf /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/gathered + mkdir -p /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/gathered + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo stat /data/src/github.com/openshift/origin/_output/scripts File: ‘/data/src/github.com/openshift/origin/_output/scripts’ Size: 44 Blocks: 0 IO Block: 4096 directory Device: ca02h/51714d Inode: 25602895 Links: 4 Access: (2777/drwxrwsrwx) Uid: ( 1001/ origin) Gid: ( 1003/origin-git) Context: system_u:object_r:container_file_t:s0 Access: 2018-06-02 09:31:24.480612373 +0000 Modify: 2018-05-31 04:02:28.651515752 +0000 Change: 2018-06-02 09:31:24.480612373 +0000 Birth: - + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo chmod -R o+rX /data/src/github.com/openshift/origin/_output/scripts + scp -r -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel:/data/src/github.com/openshift/origin/_output/scripts 
/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/gathered + tree /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/gathered /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/gathered └── scripts ├── build-base-images │   ├── artifacts │   ├── logs │   └── openshift.local.home └── shell ├── artifacts ├── logs │   ├── ca9276dd9cc9b03f983cc2d6b459700d3ffa52953a3b6f7cf99a4738262d8529.json │   └── scripts.log └── openshift.local.home 9 directories, 2 files + exit 0 [workspace] $ /bin/bash /tmp/jenkins5780939118356233731.sh ########## STARTING STAGE: GENERATE ARTIFACTS FROM THE REMOTE HOST ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + trap 'exit 0' EXIT ++ pwd + ARTIFACT_DIR=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/generated + rm -rf /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/generated + mkdir /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/generated + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo docker version && sudo docker info && sudo docker images && sudo docker ps -a 2>&1' WARNING: You're not using the default seccomp profile + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo cat /etc/sysconfig/docker /etc/sysconfig/docker-network /etc/sysconfig/docker-storage /etc/sysconfig/docker-storage-setup /etc/systemd/system/docker.service 2>&1' + true + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo find /var/lib/docker/containers -name *.log | sudo xargs tail -vn +1 2>&1' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'oc get --raw /metrics --server=https://$( uname --nodename ):10250 --config=/etc/origin/master/admin.kubeconfig 2>&1' + true + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo ausearch -m AVC -m SELINUX_ERR -m USER_AVC 2>&1' + true + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'oc get --raw /metrics --config=/etc/origin/master/admin.kubeconfig 2>&1' + true + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 
'sudo df -T -h && sudo pvs && sudo vgs && sudo lvs && sudo findmnt --all 2>&1' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo yum list installed 2>&1' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo journalctl --dmesg --no-pager --all --lines=all 2>&1' + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo journalctl _PID=1 --no-pager --all --lines=all 2>&1' + tree /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/generated /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/generated ├── avc_denials.log ├── containers.log ├── dmesg.log ├── docker.config ├── docker.info ├── filesystem.info ├── installed_packages.log ├── master-metrics.log ├── node-metrics.log └── pid1.journal 0 directories, 10 files + exit 0 [workspace] $ /bin/bash /tmp/jenkins2081558283672783033.sh ########## STARTING STAGE: FETCH SYSTEMD JOURNALS FROM THE REMOTE HOST ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + trap 'exit 0' EXIT ++ pwd + ARTIFACT_DIR=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/journals + rm -rf /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/journals + mkdir /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/journals + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo journalctl --unit docker.service --no-pager --all --lines=all + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo journalctl --unit dnsmasq.service --no-pager --all --lines=all + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo journalctl --unit systemd-journald.service --no-pager --all --lines=all + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo journalctl --unit systemd-journald.service --no-pager --all --lines=all + tree /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/journals /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/artifacts/journals ├── dnsmasq.service ├── docker.service └── systemd-journald.service 0 directories, 3 files + exit 0 
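The journal-collection stage above follows the same gather pattern as the earlier artifact stages: run a command on the remote host over the generated ssh_config and keep the output under the workspace artifacts directory. A condensed bash sketch, assuming the per-unit output redirection that the trace itself does not show:

  # Gather unit journals from the remote host into the artifacts directory.
  SSH_CONFIG="${OCT_CONFIG_HOME}/origin-ci-tool/inventory/.ssh_config"
  ARTIFACT_DIR="${WORKSPACE}/artifacts/journals"   # assumption: WORKSPACE points at the job workspace
  mkdir -p "${ARTIFACT_DIR}"
  for unit in docker.service dnsmasq.service systemd-journald.service; do
    ssh -F "${SSH_CONFIG}" openshiftdevel \
      sudo journalctl --unit "${unit}" --no-pager --all --lines=all \
      > "${ARTIFACT_DIR}/${unit}"
  done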
[workspace] $ /bin/bash /tmp/jenkins1230618150866066261.sh ########## STARTING STAGE: ASSEMBLE GCS OUTPUT ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + trap 'exit 0' EXIT + mkdir -p gcs/artifacts gcs/artifacts/generated gcs/artifacts/journals gcs/artifacts/gathered ++ python -c 'import json; import urllib; print json.load(urllib.urlopen('\''https://ci.openshift.redhat.com/jenkins/job/test_branch_origin_web_console_server_e2e/68/api/json'\''))['\''result'\'']' + result=SUCCESS + cat ++ date +%s + cat /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/builds/68/log + cp artifacts/generated/avc_denials.log artifacts/generated/containers.log artifacts/generated/dmesg.log artifacts/generated/docker.config artifacts/generated/docker.info artifacts/generated/filesystem.info artifacts/generated/installed_packages.log artifacts/generated/master-metrics.log artifacts/generated/node-metrics.log artifacts/generated/pid1.journal gcs/artifacts/generated/ + cp artifacts/journals/dnsmasq.service artifacts/journals/docker.service artifacts/journals/systemd-journald.service gcs/artifacts/journals/ + cp -r artifacts/gathered/scripts gcs/artifacts/ ++ pwd + scp -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config -r /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/gcs openshiftdevel:/data + scp -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config /var/lib/jenkins/.config/gcloud/gcs-publisher-credentials.json openshiftdevel:/data/credentials.json + exit 0 [workspace] $ /bin/bash /tmp/jenkins1185093023757309424.sh ########## STARTING STAGE: PUSH THE ARTIFACTS AND METADATA ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ mktemp + script=/tmp/tmp.shcv2f0Rq8 + cat + chmod +x /tmp/tmp.shcv2f0Rq8 + scp -F 
/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config /tmp/tmp.shcv2f0Rq8 openshiftdevel:/tmp/tmp.shcv2f0Rq8 + ssh -F /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/.ssh_config -t openshiftdevel 'bash -l -c "timeout 300 /tmp/tmp.shcv2f0Rq8"' + cd /home/origin + trap 'exit 0' EXIT + [[ -n {"type":"postsubmit","job":"test_branch_origin_web_console_server_e2e","buildid":"e465f2b2-6646-11e8-92c9-0a58ac100eda","refs":{"org":"openshift","repo":"origin-web-console-server","base_ref":"master","base_sha":"14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67"}} ]] ++ jq --compact-output .buildid + [[ "e465f2b2-6646-11e8-92c9-0a58ac100eda" =~ ^"[0-9]+"$ ]] Using BUILD_NUMBER + echo 'Using BUILD_NUMBER' ++ jq --compact-output '.buildid |= "68"' + JOB_SPEC='{"type":"postsubmit","job":"test_branch_origin_web_console_server_e2e","buildid":"68","refs":{"org":"openshift","repo":"origin-web-console-server","base_ref":"master","base_sha":"14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67"}}' + docker run -e 'JOB_SPEC={"type":"postsubmit","job":"test_branch_origin_web_console_server_e2e","buildid":"68","refs":{"org":"openshift","repo":"origin-web-console-server","base_ref":"master","base_sha":"14f3f2f0d75d20f7bbe037ee6a08ec893cb1dc67"}}' -v /data:/data:z registry.svc.ci.openshift.org/ci/gcsupload:latest --dry-run=false --gcs-path=gs://origin-ci-test --gcs-credentials-file=/data/credentials.json --path-strategy=single --default-org=openshift --default-repo=origin /data/gcs/artifacts /data/gcs/build-log.txt /data/gcs/finished.json Unable to find image 'registry.svc.ci.openshift.org/ci/gcsupload:latest' locally Trying to pull repository registry.svc.ci.openshift.org/ci/gcsupload ... latest: Pulling from registry.svc.ci.openshift.org/ci/gcsupload 1160f4abea84: Already exists be60dbe7622d: Already exists cafcba51f636: Pulling fs layer cafcba51f636: Verifying Checksum cafcba51f636: Download complete cafcba51f636: Pull complete Digest: sha256:04c70f24b5e0e0f937f03bc8b686a8c922abb4b24de196a64f9fc16845a1e67e Status: Downloaded newer image for registry.svc.ci.openshift.org/ci/gcsupload:latest {"component":"gcsupload","level":"info","msg":"Gathering artifacts from artifact directory: /data/gcs/artifacts","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/avc_denials.log in artifact directory. Uploading as artifacts/generated/avc_denials.log\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/containers.log in artifact directory. Uploading as artifacts/generated/containers.log\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/dmesg.log in artifact directory. Uploading as artifacts/generated/dmesg.log\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/docker.config in artifact directory. Uploading as artifacts/generated/docker.config\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/docker.info in artifact directory. Uploading as artifacts/generated/docker.info\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/filesystem.info in artifact directory. 
Uploading as artifacts/generated/filesystem.info\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/installed_packages.log in artifact directory. Uploading as artifacts/generated/installed_packages.log\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/master-metrics.log in artifact directory. Uploading as artifacts/generated/master-metrics.log\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/node-metrics.log in artifact directory. Uploading as artifacts/generated/node-metrics.log\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/generated/pid1.journal in artifact directory. Uploading as artifacts/generated/pid1.journal\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/journals/dnsmasq.service in artifact directory. Uploading as artifacts/journals/dnsmasq.service\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/journals/docker.service in artifact directory. Uploading as artifacts/journals/docker.service\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/journals/systemd-journald.service in artifact directory. Uploading as artifacts/journals/systemd-journald.service\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/scripts/shell/logs/ca9276dd9cc9b03f983cc2d6b459700d3ffa52953a3b6f7cf99a4738262d8529.json in artifact directory. Uploading as artifacts/scripts/shell/logs/ca9276dd9cc9b03f983cc2d6b459700d3ffa52953a3b6f7cf99a4738262d8529.json\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","level":"info","msg":"Found /data/gcs/artifacts/scripts/shell/logs/scripts.log in artifact directory. 
Uploading as artifacts/scripts/shell/logs/scripts.log\n","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/journals/docker.service","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/containers.log","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/filesystem.info","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/installed_packages.log","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/master-metrics.log","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/node-metrics.log","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/journals/dnsmasq.service","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/finished.json","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/pid1.journal","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/scripts/shell/logs/ca9276dd9cc9b03f983cc2d6b459700d3ffa52953a3b6f7cf99a4738262d8529.json","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/scripts/shell/logs/scripts.log","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/latest-build.txt","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/avc_denials.log","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/dmesg.log","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/docker.config","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/docker.info","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/journals/systemd-journald.service","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/build-log.txt","level":"info","msg":"Queued for upload","time":"2018-06-02T09:37:01Z"} 
{"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/latest-build.txt","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:01Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/dmesg.log","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/pid1.journal","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/journals/dnsmasq.service","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/scripts/shell/logs/scripts.log","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/docker.config","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/build-log.txt","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/containers.log","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/filesystem.info","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/avc_denials.log","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/docker.info","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/scripts/shell/logs/ca9276dd9cc9b03f983cc2d6b459700d3ffa52953a3b6f7cf99a4738262d8529.json","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/master-metrics.log","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/journals/docker.service","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/finished.json","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/node-metrics.log","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/generated/installed_packages.log","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","dest":"logs/test_branch_origin_web_console_server_e2e/68/artifacts/journals/systemd-journald.service","level":"info","msg":"Finished upload","time":"2018-06-02T09:37:02Z"} {"component":"gcsupload","level":"info","msg":"Finished upload to GCS","time":"2018-06-02T09:37:02Z"} + exit 0 + set +o xtrace 
########## FINISHED STAGE: SUCCESS: PUSH THE ARTIFACTS AND METADATA [00h 00m 07s] ########## [workspace] $ /bin/bash /tmp/jenkins4644727543977924387.sh ########## STARTING STAGE: DEPROVISION CLOUD RESOURCES ########## + [[ -s /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ]] + source /var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/activate ++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740 ++ export PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ PATH=/var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/bin:/sbin:/usr/sbin:/bin:/usr/bin ++ unset PYTHON_HOME ++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config ++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config + oct deprovision PLAYBOOK: main.yml ************************************************************* 4 plays in /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml PLAY [ensure we have the parameters necessary to deprovision virtual hosts] **** TASK [ensure all required variables are set] *********************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml:9 skipping: [localhost] => (item=origin_ci_inventory_dir) => { "changed": false, "generated_timestamp": "2018-06-02 05:37:03.867314", "item": "origin_ci_inventory_dir", "skip_reason": "Conditional check failed", "skipped": true } skipping: [localhost] => (item=origin_ci_aws_region) => { "changed": false, "generated_timestamp": "2018-06-02 05:37:03.870019", "item": "origin_ci_aws_region", "skip_reason": "Conditional check failed", "skipped": true } PLAY [deprovision virtual hosts in EC2] **************************************** TASK [Gathering Facts] ********************************************************* ok: [localhost] TASK [deprovision a virtual EC2 host] ****************************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml:28 included: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml for localhost TASK [update the SSH configuration to remove AWS EC2 specifics] **************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:2 ok: [localhost] => { "changed": false, "generated_timestamp": "2018-06-02 05:37:04.641601", "msg": "" } TASK [rename EC2 instance for termination reaper] ****************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:8 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-06-02 05:37:05.200730", "msg": "Tags {'Name': 'oct-terminate'} created for resource i-058cff891deed5ea1." 
} TASK [tear down the EC2 instance] ********************************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:15 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-06-02 05:37:06.051528", "instance_ids": [ "i-058cff891deed5ea1" ], "instances": [ { "ami_launch_index": "0", "architecture": "x86_64", "block_device_mapping": { "/dev/sda1": { "delete_on_termination": true, "status": "attached", "volume_id": "vol-00fc266584b1b74b5" }, "/dev/sdb": { "delete_on_termination": true, "status": "attached", "volume_id": "vol-0b7b843726a425a20" } }, "dns_name": "ec2-35-173-203-6.compute-1.amazonaws.com", "ebs_optimized": false, "groups": { "sg-7e73221a": "default" }, "hypervisor": "xen", "id": "i-058cff891deed5ea1", "image_id": "ami-02f7bf6134e0f2ab7", "instance_type": "m4.xlarge", "kernel": null, "key_name": "libra", "launch_time": "2018-06-02T09:26:05.000Z", "placement": "us-east-1d", "private_dns_name": "ip-172-18-2-81.ec2.internal", "private_ip": "172.18.2.81", "public_dns_name": "ec2-35-173-203-6.compute-1.amazonaws.com", "public_ip": "35.173.203.6", "ramdisk": null, "region": "us-east-1", "root_device_name": "/dev/sda1", "root_device_type": "ebs", "state": "running", "state_code": 16, "tags": { "Name": "oct-terminate", "openshift_etcd": "", "openshift_master": "", "openshift_node": "" }, "tenancy": "default", "virtualization_type": "hvm" } ], "tagged_instances": [] } TASK [remove the serialized host variables] ************************************ task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:22 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-06-02 05:37:06.296699", "path": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory/host_vars/172.18.2.81.yml", "state": "absent" } PLAY [deprovision virtual hosts locally manged by Vagrant] ********************* TASK [Gathering Facts] ********************************************************* ok: [localhost] PLAY [clean up local configuration for deprovisioned instances] **************** TASK [remove inventory configuration directory] ******************************** task path: /var/lib/jenkins/origin-ci-tool/2a23bc6c6919b630ea10b71c36a29547b29a6740/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml:61 changed: [localhost] => { "changed": true, "generated_timestamp": "2018-06-02 05:37:06.749076", "path": "/var/lib/jenkins/jobs/test_branch_origin_web_console_server_e2e/workspace/.config/origin-ci-tool/inventory", "state": "absent" } PLAY RECAP ********************************************************************* localhost : ok=8 changed=4 unreachable=0 failed=0 + set +o xtrace ########## FINISHED STAGE: SUCCESS: DEPROVISION CLOUD RESOURCES [00h 00m 04s] ########## Archiving artifacts Recording test results [WS-CLEANUP] Deleting project workspace...[WS-CLEANUP] done Finished: SUCCESS
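For reference, the deprovision play above first renames the instance for the termination reaper and then tears it down through the oct aws-down Ansible role. A rough bash equivalent of those two steps, assuming the aws CLI is configured for the same account and region (the job does this via the Ansible ec2 modules, not the CLI):

  # Tag the instance for the reaper, then terminate it.
  INSTANCE_ID="i-058cff891deed5ea1"
  aws ec2 create-tags --region us-east-1 --resources "${INSTANCE_ID}" \
    --tags Key=Name,Value=oct-terminate
  aws ec2 terminate-instances --region us-east-1 --instance-ids "${INSTANCE_ID}"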