Console Output (build result: SUCCESS)

[Skipping 18,191 KB of earlier log output]
I0816 13:34:04.044469    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/template.openshift.io/v1/serverresources.json
I0816 13:34:04.044519    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/user.openshift.io/v1/serverresources.json
I0816 13:34:04.045017    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/v1/serverresources.json
I0816 13:34:04.058128    2232 round_trippers.go:405] GET https://127.0.0.1:8443/oapi/v1/namespaces/test/buildconfigs/ruby-sample-build 200 OK in 9 milliseconds
I0816 13:34:04.059777    2232 cached_discovery.go:119] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/servergroups.json
I0816 13:34:04.059840    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/apiregistration.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.059968    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/extensions/v1beta1/serverresources.json
I0816 13:34:04.060037    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/apps/v1beta1/serverresources.json
I0816 13:34:04.060076    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/authentication.k8s.io/v1/serverresources.json
I0816 13:34:04.060129    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/authentication.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.060192    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/authorization.k8s.io/v1/serverresources.json
I0816 13:34:04.060236    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/authorization.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.060302    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/autoscaling/v1/serverresources.json
I0816 13:34:04.060360    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/batch/v1/serverresources.json
I0816 13:34:04.060425    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/batch/v2alpha1/serverresources.json
I0816 13:34:04.060472    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/certificates.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.060509    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/networking.k8s.io/v1/serverresources.json
I0816 13:34:04.060550    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/policy/v1beta1/serverresources.json
I0816 13:34:04.060660    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/authorization.openshift.io/v1/serverresources.json
I0816 13:34:04.060732    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/rbac.authorization.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.060778    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/storage.k8s.io/v1/serverresources.json
I0816 13:34:04.060816    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/storage.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.060863    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/apiextensions.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.060915    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/apps.openshift.io/v1/serverresources.json
I0816 13:34:04.060995    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/build.openshift.io/v1/serverresources.json
I0816 13:34:04.061066    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/image.openshift.io/v1/serverresources.json
I0816 13:34:04.061123    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/network.openshift.io/v1/serverresources.json
I0816 13:34:04.061175    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/oauth.openshift.io/v1/serverresources.json
I0816 13:34:04.061218    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/project.openshift.io/v1/serverresources.json
I0816 13:34:04.061309    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/quota.openshift.io/v1/serverresources.json
I0816 13:34:04.061361    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/route.openshift.io/v1/serverresources.json
I0816 13:34:04.061409    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/security.openshift.io/v1/serverresources.json
I0816 13:34:04.061469    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/template.openshift.io/v1/serverresources.json
I0816 13:34:04.061521    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/user.openshift.io/v1/serverresources.json
I0816 13:34:04.062128    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/v1/serverresources.json
I0816 13:34:04.062398    2232 cached_discovery.go:119] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/servergroups.json
I0816 13:34:04.062474    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/apiregistration.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.062600    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/extensions/v1beta1/serverresources.json
I0816 13:34:04.062689    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/apps/v1beta1/serverresources.json
I0816 13:34:04.062747    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/authentication.k8s.io/v1/serverresources.json
I0816 13:34:04.062784    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/authentication.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.062831    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/authorization.k8s.io/v1/serverresources.json
I0816 13:34:04.062878    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/authorization.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.062927    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/autoscaling/v1/serverresources.json
I0816 13:34:04.062974    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/batch/v1/serverresources.json
I0816 13:34:04.063044    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/batch/v2alpha1/serverresources.json
I0816 13:34:04.063097    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/certificates.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.063142    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/networking.k8s.io/v1/serverresources.json
I0816 13:34:04.063182    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/policy/v1beta1/serverresources.json
I0816 13:34:04.063333    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/authorization.openshift.io/v1/serverresources.json
I0816 13:34:04.063397    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/rbac.authorization.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.063441    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/storage.k8s.io/v1/serverresources.json
I0816 13:34:04.063480    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/storage.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.063523    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/apiextensions.k8s.io/v1beta1/serverresources.json
I0816 13:34:04.063589    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/apps.openshift.io/v1/serverresources.json
I0816 13:34:04.063656    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/build.openshift.io/v1/serverresources.json
I0816 13:34:04.063752    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/image.openshift.io/v1/serverresources.json
I0816 13:34:04.063808    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/network.openshift.io/v1/serverresources.json
I0816 13:34:04.063867    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/oauth.openshift.io/v1/serverresources.json
I0816 13:34:04.063929    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/project.openshift.io/v1/serverresources.json
I0816 13:34:04.063978    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/quota.openshift.io/v1/serverresources.json
I0816 13:34:04.064022    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/route.openshift.io/v1/serverresources.json
I0816 13:34:04.064089    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/security.openshift.io/v1/serverresources.json
I0816 13:34:04.064159    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/template.openshift.io/v1/serverresources.json
I0816 13:34:04.064214    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/user.openshift.io/v1/serverresources.json
I0816 13:34:04.064761    2232 cached_discovery.go:72] returning cached discovery info from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/openshift.local.home/.kube/127.0.0.1_8443/v1/serverresources.json
I0816 13:34:04.070106    2232 round_trippers.go:405] GET https://127.0.0.1:8443/oapi/v1/namespaces/test/builds 200 OK in 2 milliseconds
I0816 13:34:04.084839    2232 round_trippers.go:405] GET https://127.0.0.1:8443/oapi/v1/namespaces/test/builds/ruby-sample-build-1/log 200 OK in 14 milliseconds

logs: ok
[INFO] Starting build from /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/artifacts/stiAppConfig.json with a nonexistent commit...
Running test/end-to-end/core.sh:462: executing 'oc start-build test --commit=fffffff --wait' expecting failure...
SUCCESS after 0.176s: test/end-to-end/core.sh:462: executing 'oc start-build test --commit=fffffff --wait' expecting failure
There was no output from the command.
Standard error from the command:
Error from server (Forbidden): buildconfigs "test" is forbidden: buildconfigs.build.openshift.io "test" not found

[INFO] Validating exec
Running test/end-to-end/core.sh:469: executing 'oc exec frontend-1-mh575 id' expecting success and text '1000'...
SUCCESS after 0.267s: test/end-to-end/core.sh:469: executing 'oc exec frontend-1-mh575 id' expecting success and text '1000'
Standard output from the command:
uid=1000060000 gid=0(root) groups=0(root),1000060000

There was no error output from the command.
Running test/end-to-end/core.sh:470: executing 'oc rsh pod/frontend-1-mh575 id -u' expecting success and text '1000'...
SUCCESS after 0.294s: test/end-to-end/core.sh:470: executing 'oc rsh pod/frontend-1-mh575 id -u' expecting success and text '1000'
Standard output from the command:
1000060000

There was no error output from the command.
Running test/end-to-end/core.sh:471: executing 'oc rsh -T frontend-1-mh575 id -u' expecting success and text '1000'...
SUCCESS after 0.341s: test/end-to-end/core.sh:471: executing 'oc rsh -T frontend-1-mh575 id -u' expecting success and text '1000'
Standard output from the command:
1000060000

There was no error output from the command.
Running test/end-to-end/core.sh:476: executing 'echo 'echo $TERM' | TERM=test_terminal oc rsh frontend-1-mh575' expecting success and text 'test_terminal'...
SUCCESS after 0.347s: test/end-to-end/core.sh:476: executing 'echo 'echo $TERM' | TERM=test_terminal oc rsh frontend-1-mh575' expecting success and text 'test_terminal'
Standard output from the command:
test_terminal

There was no error output from the command.
Running test/end-to-end/core.sh:478: executing 'TERM=test_terminal oc rsh frontend-1-mh575 echo '$TERM'' expecting success and not text 'test_terminal'...
SUCCESS after 0.257s: test/end-to-end/core.sh:478: executing 'TERM=test_terminal oc rsh frontend-1-mh575 echo '$TERM'' expecting success and not text 'test_terminal'
Standard output from the command:
$TERM

There was no error output from the command.
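
The two TERM checks above differ only in where quoting stops expansion. A minimal sketch of the distinction, reusing the pod name from this run:

# Single quotes keep $TERM unexpanded locally; the remote shell that
# 'oc rsh' starts reads the line from stdin and expands it there, so the
# session's TERM (propagated from the client environment) is printed:
echo 'echo $TERM' | TERM=test_terminal oc rsh frontend-1-mh575    # prints: test_terminal

# Here '$TERM' is a literal argument to the remote echo; the command is
# exec'd directly and no shell expands it, so the literal string comes back:
TERM=test_terminal oc rsh frontend-1-mh575 echo '$TERM'           # prints: $TERM
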
Running test/end-to-end/core.sh:481: executing 'oc rollout status dc/frontend --revision=1' expecting success...
SUCCESS after 0.184s: test/end-to-end/core.sh:481: executing 'oc rollout status dc/frontend --revision=1' expecting success
Standard output from the command:
replication controller "frontend-1" successfully rolled out

There was no error output from the command.
Running test/end-to-end/core.sh:483: executing 'oc logs dc/frontend' expecting success and text 'Connecting to production database'...
SUCCESS after 0.232s: test/end-to-end/core.sh:483: executing 'oc logs dc/frontend' expecting success and text 'Connecting to production database'
Standard output from the command:
You might consider adding 'puma' into your Gemfile.
Run app...
Connecting to production database (user6LX@172.30.117.68:5434)...
Connected to database
Create database...
root already exists
Run migrations...
rake aborted!
StandardError: An error has occurred, all later migrations canceled:
Mysql2::Error: Table 'key_pairs' already exists: CREATE TABLE `key_pairs` (`key` int(11) auto_increment PRIMARY KEY, `value` varchar(255)) ENGINE=InnoDB
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:303:in `query'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:303:in `block in execute'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_adapter.rb:373:in `block in log'
/opt/app-root/src/bundle/ruby/gems/activesupport-4.1.7/lib/active_support/notifications/instrumenter.rb:20:in `instrument'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_adapter.rb:367:in `log'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:303:in `execute'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/mysql2_adapter.rb:228:in `execute'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract/schema_statements.rb:205:in `create_table'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:460:in `create_table'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:649:in `block in method_missing'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:621:in `block in say_with_time'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:621:in `say_with_time'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:641:in `method_missing'
/opt/app-root/src/db/migrate/20141102191902_create_key_pair.rb:3:in `up'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:598:in `exec_migration'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:579:in `block (2 levels) in migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:578:in `block in migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract/connection_pool.rb:294:in `with_connection'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:577:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:752:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:991:in `block in execute_migration_in_transaction'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:1039:in `ddl_transaction'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:990:in `execute_migration_in_transaction'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:952:in `block in migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:948:in `each'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:948:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:807:in `up'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:785:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/railties/databases.rake:34:in `block (2 levels) in <top (required)>'
ActiveRecord::StatementInvalid: Mysql2::Error: Table 'key_pairs' already exists: CREATE TABLE `key_pairs` (`key` int(11) auto_increment PRIMARY KEY, `value` varchar(255)) ENGINE=InnoDB
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:303:in `query'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:303:in `block in execute'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_adapter.rb:373:in `block in log'
/opt/app-root/src/bundle/ruby/gems/activesupport-4.1.7/lib/active_support/notifications/instrumenter.rb:20:in `instrument'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_adapter.rb:367:in `log'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:303:in `execute'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/mysql2_adapter.rb:228:in `execute'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract/schema_statements.rb:205:in `create_table'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:460:in `create_table'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:649:in `block in method_missing'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:621:in `block in say_with_time'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:621:in `say_with_time'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:641:in `method_missing'
/opt/app-root/src/db/migrate/20141102191902_create_key_pair.rb:3:in `up'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:598:in `exec_migration'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:579:in `block (2 levels) in migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:578:in `block in migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract/connection_pool.rb:294:in `with_connection'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:577:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:752:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:991:in `block in execute_migration_in_transaction'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:1039:in `ddl_transaction'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:990:in `execute_migration_in_transaction'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:952:in `block in migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:948:in `each'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:948:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:807:in `up'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:785:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/railties/databases.rake:34:in `block (2 levels) in <top (required)>'
Mysql2::Error: Table 'key_pairs' already exists
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:303:in `query'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:303:in `block in execute'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_adapter.rb:373:in `block in log'
/opt/app-root/src/bundle/ruby/gems/activesupport-4.1.7/lib/active_support/notifications/instrumenter.rb:20:in `instrument'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_adapter.rb:367:in `log'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:303:in `execute'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/mysql2_adapter.rb:228:in `execute'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract/schema_statements.rb:205:in `create_table'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract_mysql_adapter.rb:460:in `create_table'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:649:in `block in method_missing'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:621:in `block in say_with_time'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:621:in `say_with_time'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:641:in `method_missing'
/opt/app-root/src/db/migrate/20141102191902_create_key_pair.rb:3:in `up'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:598:in `exec_migration'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:579:in `block (2 levels) in migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:578:in `block in migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/connection_adapters/abstract/connection_pool.rb:294:in `with_connection'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:577:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:752:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:991:in `block in execute_migration_in_transaction'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:1039:in `ddl_transaction'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:990:in `execute_migration_in_transaction'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:952:in `block in migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:948:in `each'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:948:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:807:in `up'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/migration.rb:785:in `migrate'
/opt/app-root/src/bundle/ruby/gems/activerecord-4.1.7/lib/active_record/railties/databases.rake:34:in `block (2 levels) in <top (required)>'
Tasks: TOP => db:migrate
(See full trace by running task with --trace)
[2017-08-16 17:34:01] INFO  WEBrick 1.3.1
[2017-08-16 17:34:01] INFO  ruby 2.2.2 (2015-04-13) [x86_64-linux]
[2017-08-16 17:34:01] INFO  WEBrick::HTTPServer#start: pid=1 port=8080
Updating key foo to 1337
D, [2017-08-16T17:34:01.672434 #1] DEBUG -- :   KeyPair Exists (0.3ms)  SELECT  1 AS one FROM `key_pairs`  WHERE `key_pairs`.`key` = 'foo' LIMIT 1
D, [2017-08-16T17:34:01.674702 #1] DEBUG -- :    (0.1ms)  BEGIN
D, [2017-08-16T17:34:01.677250 #1] DEBUG -- :   SQL (0.2ms)  INSERT INTO `key_pairs` (`key`, `value`) VALUES ('foo', '1337')
D, [2017-08-16T17:34:01.677615 #1] DEBUG -- :    (0.2ms)  COMMIT
D, [2017-08-16T17:34:01.677861 #1] DEBUG -- :    (0.1ms)  BEGIN
D, [2017-08-16T17:34:01.678313 #1] DEBUG -- :    (0.2ms)  COMMIT
172.18.1.94 - - [16/Aug/2017 17:34:01] "POST /keys/foo HTTP/1.1" 200 11 0.0233
Retrieving key foo
D, [2017-08-16T17:34:01.742882 #1] DEBUG -- :   KeyPair Exists (0.4ms)  SELECT  1 AS one FROM `key_pairs`  WHERE `key_pairs`.`key` = 'foo' LIMIT 1
D, [2017-08-16T17:34:01.743499 #1] DEBUG -- :   KeyPair Load (0.3ms)  SELECT  `key_pairs`.* FROM `key_pairs`  WHERE `key_pairs`.`key` = 'foo' LIMIT 1
172.18.1.94 - - [16/Aug/2017 17:34:01] "GET /keys/foo HTTP/1.1" 200 4 0.0036

There was no error output from the command.
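
An aside on the rake trace captured above: the failure means the 'key_pairs' table already exists while 'schema_migrations' does not record migration 20141102191902. That state can arise when two replicas race the same startup migration (this deployment runs 2 pods, per the 'oc deploy frontend' output just below). A hedged way to confirm from inside a pod, assuming the image exposes bundler and the app's Rakefile at the default workdir as the trace suggests:

# Ask Rails which migrations it believes have run (hypothetical invocation;
# adjust if the image's entrypoint or workdir differs):
oc rsh frontend-1-mh575 bundle exec rake db:migrate:status
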
Running test/end-to-end/core.sh:484: executing 'oc deploy frontend' expecting success and text 'deployed'...
SUCCESS after 0.194s: test/end-to-end/core.sh:484: executing 'oc deploy frontend' expecting success and text 'deployed'
Standard output from the command:
frontend deployment #1 deployed 18 seconds ago - 2 pods

There was no error output from the command.
[INFO] Validating port-forward
Running test/end-to-end/core.sh:488: executing 'oc port-forward -p frontend-1-mh575 10080:8080  &> '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/port-forward.log' &' expecting success...
SUCCESS after 0.013s: test/end-to-end/core.sh:488: executing 'oc port-forward -p frontend-1-mh575 10080:8080  &> '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/port-forward.log' &' expecting success
There was no output from the command.
There was no error output from the command.
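
The check that follows polls the tunnel the harness just backgrounded. A standalone sketch of that idiom, with the pod name, port, and timings taken from this run:

# Forward local port 10080 to pod port 8080 in the background (positional
# pod name; the -p flag used above is the older spelling):
oc port-forward frontend-1-mh575 10080:8080 &> port-forward.log &
forward_pid=$!

# Poll every 0.2s for up to 10s, mirroring the harness retry loop:
for _ in $(seq 1 50); do
  curl --max-time 2 --fail --silent 'http://localhost:10080' && break
  sleep 0.2
done

kill "${forward_pid}"   # tear down the tunnel when finished
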
Running test/end-to-end/core.sh:489: executing 'curl --max-time 2 --fail --silent 'http://localhost:10080'' expecting success; re-trying every 0.2s until completion or 10.000s...
SUCCESS after 0.304s: test/end-to-end/core.sh:489: executing 'curl --max-time 2 --fail --silent 'http://localhost:10080'' expecting success; re-trying every 0.2s until completion or 10.000s
Standard output from the command:
<!DOCTYPE html>
<html>
<head>
  <!-- Latest compiled and minified CSS -->
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/css/bootstrap.min.css">

  <!-- Optional theme -->
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/css/bootstrap-theme.min.css">

  <!-- Latest compiled and minified JavaScript -->
  <script src="//code.jquery.com/jquery-1.11.0.min.js"></script>
  <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/js/bootstrap.min.js"></script>

  <title>Hello from OpenShift v3!</title>
</head>
<body>
  <div class="page-header" align=center>
    <h1> Welcome to an OpenShift v3 Demo App! </h1>
  </div>

  
  <div class="container">
    <h3> Get, edit or delete key-value pairs, for fun. </h3>
    </br>
    <form role="form" name="myForm" onSubmit="return handleSubmit()">
      <div type="text" class="alert alert-warning" id="response" role="alert" style="display:none;"> </div>
      <div class="form-group">
        <label for="key"> Key </label>
        <input type="text" class="form-control" id="key" name="key" value="" placeholder="Example: FirstName">
      </div>

      <div class="form-group">
        <label for="value">Value</label>
        <input type="text" id="value" class="form-control" name="value" value="" placeholder="Example: Dan"/>
      </div>

      <div class="input-group-btn">
        <button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">Action type <span class="caret"></span></button>
        <ul class="dropdown-menu" role="menu">
          <li><a href="#" id="get">Get</a></li>
          <li><a href="#" id="put">Put</a></li>
          <li><a href="#" id="delete">Delete</a></li>
        </ul>
      </div>
    </form>
  </div>
  
</body>

<script type="text/javascript">
function handleSubmit()
      {
        return false;
      }

document.getElementById('get').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "GET",window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    xmlHttp.send( null );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = xmlHttp.responseText;
}

document.getElementById('put').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "POST", window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    var params = "value="+document.getElementById("value").value;

    xmlHttp.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
    xmlHttp.setRequestHeader("Content-length", params.length);
    xmlHttp.setRequestHeader("Connection", "close");

    xmlHttp.send( params );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = xmlHttp.responseText;
}

document.getElementById('delete').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "DELETE", window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    xmlHttp.send( null );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = "Key deleted.";
}

</script>
</html>
Standard error from the command:
[INFO] Validating rsync
Running test/end-to-end/core.sh:493: executing 'oc rsync examples/sample-app frontend-1-mh575:/tmp' expecting success...
SUCCESS after 0.458s: test/end-to-end/core.sh:493: executing 'oc rsync examples/sample-app frontend-1-mh575:/tmp' expecting success
Standard output from the command:
sending incremental file list
sample-app/
sample-app/OWNERS
sample-app/README.md
sample-app/application-template-custombuild.json
sample-app/application-template-dockerbuild.json
sample-app/application-template-pullspecbuild.json
sample-app/application-template-stibuild.json
sample-app/cleanup.sh
sample-app/container-setup.md
sample-app/github-webhook-example.json
sample-app/pullimages.sh
sample-app/logs/
sample-app/logs/.gitkeep

sent 84204 bytes  received 229 bytes  168866.00 bytes/sec
total size is 83388  speedup is 0.99

There was no error output from the command.
Running test/end-to-end/core.sh:494: executing 'oc rsh frontend-1-mh575 ls /tmp/sample-app' expecting success and text 'application-template-stibuild'...
SUCCESS after 0.257s: test/end-to-end/core.sh:494: executing 'oc rsh frontend-1-mh575 ls /tmp/sample-app' expecting success and text 'application-template-stibuild'
Standard output from the command:
OWNERS
README.md
application-template-custombuild.json
application-template-dockerbuild.json
application-template-pullspecbuild.json
application-template-stibuild.json
cleanup.sh
container-setup.md
github-webhook-example.json
logs
pullimages.sh

There was no error output from the command.
[INFO] Back to 'default' project with 'admin' user...
Running test/end-to-end/core.sh:515: executing 'oc project default/127-0-0-1:8443/system:admin' expecting success...
SUCCESS after 0.166s: test/end-to-end/core.sh:515: executing 'oc project default/127-0-0-1:8443/system:admin' expecting success
Standard output from the command:
Now using project "default" from context named "default/127-0-0-1:8443/system:admin" on server "https://127.0.0.1:8443".

There was no error output from the command.
Running test/end-to-end/core.sh:519: executing 'oc get endpoints router --output-version=v1 --template='{{ if .subsets }}{{ len .subsets }}{{ else }}0{{ end }}'' expecting any result and text '[1-9]+'; re-trying every 0.2s until completion or 300.000s...
SUCCESS after 0.196s: test/end-to-end/core.sh:519: executing 'oc get endpoints router --output-version=v1 --template='{{ if .subsets }}{{ len .subsets }}{{ else }}0{{ end }}'' expecting any result and text '[1-9]+'; re-trying every 0.2s until completion or 300.000s
Standard output from the command:
1
Standard error from the command:
Flag --output-version has been deprecated, the resource is used exactly as fetched from the API. To get a specific API version, fully-qualify the resource, version, and group (for example: 'jobs.v1.batch/myjob').
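
Given that warning, the flag can simply be dropped: per the message, the template already runs against the object exactly as the API returns it.

# Equivalent check without the deprecated --output-version flag:
oc get endpoints router --template='{{ if .subsets }}{{ len .subsets }}{{ else }}0{{ end }}'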

[INFO] Waiting for router to start...
Running test/end-to-end/core.sh:523: executing 'curl --max-time 2 --fail --silent 'http://172.18.1.94:1936/healthz'' expecting success; re-trying every 0.2s until completion or 300.000s...
SUCCESS after 0.016s: test/end-to-end/core.sh:523: executing 'curl --max-time 2 --fail --silent 'http://172.18.1.94:1936/healthz'' expecting success; re-trying every 0.2s until completion or 300.000s
Standard output from the command:
ok
There was no error output from the command.
[INFO] Validating privileged pod exec
Running test/end-to-end/core.sh:527: executing 'oc policy add-role-to-user admin e2e-default-admin' expecting success...
SUCCESS after 0.200s: test/end-to-end/core.sh:527: executing 'oc policy add-role-to-user admin e2e-default-admin' expecting success
Standard output from the command:
role "admin" added: "e2e-default-admin"

There was no error output from the command.
Running test/end-to-end/core.sh:529: executing 'oc project default/127-0-0-1:8443/system:admin' expecting success...
SUCCESS after 0.163s: test/end-to-end/core.sh:529: executing 'oc project default/127-0-0-1:8443/system:admin' expecting success
Standard output from the command:
Already on project "default" on server "https://127.0.0.1:8443".

There was no error output from the command.
Running test/end-to-end/core.sh:530: executing 'oc exec -n default -tip router-1-b8w0v ls' expecting success...
SUCCESS after 0.267s: test/end-to-end/core.sh:530: executing 'oc exec -n default -tip router-1-b8w0v ls' expecting success
Standard output from the command:
cert_config.map
default_pub_keys.pem
error-page-503.http
haproxy-config.template
haproxy.config
os_edge_http_be.map
os_http_be.map
os_reencrypt.map
os_route_http_expose.map
os_route_http_redirect.map
os_sni_passthrough.map
os_tcp_be.map
os_wildcard_domain.map

Standard error from the command:
W0816 13:34:09.402135    3825 cmd.go:403] -p POD_NAME is DEPRECATED and will be removed in a future version. Use exec POD_NAME instead.
Unable to use a TTY - input is not a terminal or the right kind of file
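
Per the deprecation warning above, the pod name moves out of -p. A sketch of the replacement invocation for this run's router pod:

# Replacement for the deprecated '-p POD_NAME' form; omitting -t also avoids
# the "Unable to use a TTY" warning when stdin is not a terminal:
oc exec -n default router-1-b8w0v -- ls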

[INFO] Validating routed app response...
Running test/end-to-end/core.sh:539: executing 'curl -s -k --resolve 'www.example.com:443:172.18.1.94' https://www.example.com' expecting any result and text 'Hello from OpenShift'; re-trying every 0.2s until completion or 10.000s...
SUCCESS after 0.151s: test/end-to-end/core.sh:539: executing 'curl -s -k --resolve 'www.example.com:443:172.18.1.94' https://www.example.com' expecting any result and text 'Hello from OpenShift'; re-trying every 0.2s until completion or 10.000s
Standard output from the command:
<!DOCTYPE html>
<html>
<head>
  <!-- Latest compiled and minified CSS -->
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/css/bootstrap.min.css">

  <!-- Optional theme -->
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/css/bootstrap-theme.min.css">

  <!-- Latest compiled and minified JavaScript -->
  <script src="//code.jquery.com/jquery-1.11.0.min.js"></script>
  <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/js/bootstrap.min.js"></script>

  <title>Hello from OpenShift v3!</title>
</head>
<body>
  <div class="page-header" align=center>
    <h1> Welcome to an OpenShift v3 Demo App! </h1>
  </div>

  
  <div class="container">
    <h3> Get, edit or delete key-value pairs, for fun. </h3>
    </br>
    <form role="form" name="myForm" onSubmit="return handleSubmit()">
      <div type="text" class="alert alert-warning" id="response" role="alert" style="display:none;"> </div>
      <div class="form-group">
        <label for="key"> Key </label>
        <input type="text" class="form-control" id="key" name="key" value="" placeholder="Example: FirstName">
      </div>

      <div class="form-group">
        <label for="value">Value</label>
        <input type="text" id="value" class="form-control" name="value" value="" placeholder="Example: Dan"/>
      </div>

      <div class="input-group-btn">
        <button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">Action type <span class="caret"></span></button>
        <ul class="dropdown-menu" role="menu">
          <li><a href="#" id="get">Get</a></li>
          <li><a href="#" id="put">Put</a></li>
          <li><a href="#" id="delete">Delete</a></li>
        </ul>
      </div>
    </form>
  </div>
  
</body>

<script type="text/javascript">
function handleSubmit()
      {
        return false;
      }

document.getElementById('get').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "GET",window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    xmlHttp.send( null );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = xmlHttp.responseText;
}

document.getElementById('put').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "POST", window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    var params = "value="+document.getElementById("value").value;

    xmlHttp.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
    xmlHttp.setRequestHeader("Content-length", params.length);
    xmlHttp.setRequestHeader("Connection", "close");

    xmlHttp.send( params );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = xmlHttp.responseText;
}

document.getElementById('delete').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "DELETE", window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    xmlHttp.send( null );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = "Key deleted.";
}

</script>
</html>

There was no error output from the command.
Running test/end-to-end/core.sh:541: executing 'oc delete route/route-edge -n test' expecting success...
SUCCESS after 0.213s: test/end-to-end/core.sh:541: executing 'oc delete route/route-edge -n test' expecting success
Standard output from the command:
route "route-edge" deleted

There was no error output from the command.
Running test/end-to-end/core.sh:545: executing 'oc create route edge --service=frontend --cert=/tmp/openshift/test-end-to-end/openshift.local.config/master/ca.crt --key=/tmp/openshift/test-end-to-end/openshift.local.config/master/ca.key --ca-cert=/tmp/openshift/test-end-to-end/openshift.local.config/master/ca.crt --hostname=www.example.com -n test' expecting success...
SUCCESS after 0.193s: test/end-to-end/core.sh:545: executing 'oc create route edge --service=frontend --cert=/tmp/openshift/test-end-to-end/openshift.local.config/master/ca.crt --key=/tmp/openshift/test-end-to-end/openshift.local.config/master/ca.key --ca-cert=/tmp/openshift/test-end-to-end/openshift.local.config/master/ca.crt --hostname=www.example.com -n test' expecting success
Standard output from the command:
route "frontend" created

There was no error output from the command.
Running test/end-to-end/core.sh:546: executing 'curl -s -k --resolve 'www.example.com:443:172.18.1.94' https://www.example.com' expecting any result and text 'Hello from OpenShift'; re-trying every 0.2s until completion or 10.000s...
SUCCESS after 0.092s: test/end-to-end/core.sh:546: executing 'curl -s -k --resolve 'www.example.com:443:172.18.1.94' https://www.example.com' expecting any result and text 'Hello from OpenShift'; re-trying every 0.2s until completion or 10.000s
Standard output from the command:
<!DOCTYPE html>
<html>
<head>
  <!-- Latest compiled and minified CSS -->
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/css/bootstrap.min.css">

  <!-- Optional theme -->
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/css/bootstrap-theme.min.css">

  <!-- Latest compiled and minified JavaScript -->
  <script src="//code.jquery.com/jquery-1.11.0.min.js"></script>
  <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/js/bootstrap.min.js"></script>

  <title>Hello from OpenShift v3!</title>
</head>
<body>
  <div class="page-header" align=center>
    <h1> Welcome to an OpenShift v3 Demo App! </h1>
  </div>

  
  <div class="container">
    <h3> Get, edit or delete key-value pairs, for fun. </h3>
    </br>
    <form role="form" name="myForm" onSubmit="return handleSubmit()">
      <div type="text" class="alert alert-warning" id="response" role="alert" style="display:none;"> </div>
      <div class="form-group">
        <label for="key"> Key </label>
        <input type="text" class="form-control" id="key" name="key" value="" placeholder="Example: FirstName">
      </div>

      <div class="form-group">
        <label for="value">Value</label>
        <input type="text" id="value" class="form-control" name="value" value="" placeholder="Example: Dan"/>
      </div>

      <div class="input-group-btn">
        <button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">Action type <span class="caret"></span></button>
        <ul class="dropdown-menu" role="menu">
          <li><a href="#" id="get">Get</a></li>
          <li><a href="#" id="put">Put</a></li>
          <li><a href="#" id="delete">Delete</a></li>
        </ul>
      </div>
    </form>
  </div>
  
</body>

<script type="text/javascript">
function handleSubmit()
      {
        return false;
      }

document.getElementById('get').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "GET",window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    xmlHttp.send( null );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = xmlHttp.responseText;
}

document.getElementById('put').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "POST", window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    var params = "value="+document.getElementById("value").value;

    xmlHttp.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
    xmlHttp.setRequestHeader("Content-length", params.length);
    xmlHttp.setRequestHeader("Connection", "close");

    xmlHttp.send( params );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = xmlHttp.responseText;
}

document.getElementById('delete').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "DELETE", window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    xmlHttp.send( null );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = "Key deleted.";
}

</script>
</html>

There was no error output from the command.
Running test/end-to-end/core.sh:548: executing 'curl -s -k --resolve 'www.example.com:443:172.18.1.94' https://wWw.ExAmPlE.cOm' expecting any result and text 'Hello from OpenShift'; re-trying every 0.2s until completion or 10.000s...
SUCCESS after 0.088s: test/end-to-end/core.sh:548: executing 'curl -s -k --resolve 'www.example.com:443:172.18.1.94' https://wWw.ExAmPlE.cOm' expecting any result and text 'Hello from OpenShift'; re-trying every 0.2s until completion or 10.000s
Standard output from the command:
<!DOCTYPE html>
<html>
<head>
  <!-- Latest compiled and minified CSS -->
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/css/bootstrap.min.css">

  <!-- Optional theme -->
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/css/bootstrap-theme.min.css">

  <!-- Latest compiled and minified JavaScript -->
  <script src="//code.jquery.com/jquery-1.11.0.min.js"></script>
  <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.0/js/bootstrap.min.js"></script>

  <title>Hello from OpenShift v3!</title>
</head>
<body>
  <div class="page-header" align=center>
    <h1> Welcome to an OpenShift v3 Demo App! </h1>
  </div>

  
  <div class="container">
    <h3> Get, edit or delete key-value pairs, for fun. </h3>
    </br>
    <form role="form" name="myForm" onSubmit="return handleSubmit()">
      <div type="text" class="alert alert-warning" id="response" role="alert" style="display:none;"> </div>
      <div class="form-group">
        <label for="key"> Key </label>
        <input type="text" class="form-control" id="key" name="key" value="" placeholder="Example: FirstName">
      </div>

      <div class="form-group">
        <label for="value">Value</label>
        <input type="text" id="value" class="form-control" name="value" value="" placeholder="Example: Dan"/>
      </div>

      <div class="input-group-btn">
        <button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">Action type <span class="caret"></span></button>
        <ul class="dropdown-menu" role="menu">
          <li><a href="#" id="get">Get</a></li>
          <li><a href="#" id="put">Put</a></li>
          <li><a href="#" id="delete">Delete</a></li>
        </ul>
      </div>
    </form>
  </div>
  
</body>

<script type="text/javascript">
function handleSubmit()
      {
        return false;
      }

document.getElementById('get').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "GET",window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    xmlHttp.send( null );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = xmlHttp.responseText;
}

document.getElementById('put').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "POST", window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    var params = "value="+document.getElementById("value").value;

    xmlHttp.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
    xmlHttp.setRequestHeader("Content-length", params.length);
    xmlHttp.setRequestHeader("Connection", "close");

    xmlHttp.send( params );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = xmlHttp.responseText;
}

document.getElementById('delete').onclick = function() {
    xmlHttp = new XMLHttpRequest();
    xmlHttp.open( "DELETE", window.location.protocol+"//"+window.location.hostname+":"+window.location.port+"/keys/"+document.getElementById("key").value, false );
    xmlHttp.send( null );
    document.getElementById("response").style.display = 'block';
    document.getElementById("response").innerHTML = "Key deleted.";
}

</script>
</html>

There was no error output from the command.
Running test/end-to-end/core.sh:550: executing 'curl -s -k -H 'Host: wWw.ExAmPlE.cOm' https://172.18.1.94' expecting any result and text 'Hello from OpenShift'; re-trying every 0.2s until completion or 10.000s...
SUCCESS after 0.093s: test/end-to-end/core.sh:550: executing 'curl -s -k -H 'Host: wWw.ExAmPlE.cOm' https://172.18.1.94' expecting any result and text 'Hello from OpenShift'; re-trying every 0.2s until completion or 10.000s
Standard output from the command:
[identical "Hello from OpenShift v3!" demo page as in the previous response]

There was no error output from the command.
[INFO] Validating pod.spec.nodeSelector rejections
Running test/end-to-end/core.sh:556: executing 'openshift admin new-project node-selector --description='This is an example project to test node selection prevents deployment' --admin='e2e-user' --node-selector='impossible-label=true'' expecting success...
SUCCESS after 1.034s: test/end-to-end/core.sh:556: executing 'openshift admin new-project node-selector --description='This is an example project to test node selection prevents deployment' --admin='e2e-user' --node-selector='impossible-label=true'' expecting success
Standard output from the command:
Created project node-selector

There was no error output from the command.
Running test/end-to-end/core.sh:557: executing 'oc process -n node-selector -v NODE_NAME='localhost' -f test/testdata/node-selector/pods.json | oc create -n node-selector -f -' expecting success...
SUCCESS after 0.337s: test/end-to-end/core.sh:557: executing 'oc process -n node-selector -v NODE_NAME='localhost' -f test/testdata/node-selector/pods.json | oc create -n node-selector -f -' expecting success
Standard output from the command:
pod "pod-without-node-name" created
pod "pod-with-node-name" created

Standard error from the command:
Flag --value has been deprecated, Use -p, --param instead.
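Per the warning, the non-deprecated spelling of the pipeline above would be (sketch):

  oc process -n node-selector -p NODE_NAME='localhost' -f test/testdata/node-selector/pods.json | oc create -n node-selector -f -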

Running test/end-to-end/core.sh:559: executing 'oc get events -n node-selector' expecting any result and text 'pod-without-node-name.+FailedScheduling'; re-trying every 0.2s until completion or 20.000s...
SUCCESS after 0.235s: test/end-to-end/core.sh:559: executing 'oc get events -n node-selector' expecting any result and text 'pod-without-node-name.+FailedScheduling'; re-trying every 0.2s until completion or 20.000s
Standard output from the command:
LASTSEEN   FIRSTSEEN   COUNT     NAME                    KIND      SUBOBJECT   TYPE      REASON              SOURCE               MESSAGE
0s         0s          1         pod-with-node-name      Pod                   Warning   MatchNodeSelector   kubelet, localhost   Predicate MatchNodeSelector failed
0s         0s          2         pod-without-node-name   Pod                   Warning   FailedScheduling    default-scheduler    No nodes are available that match all of the following predicates:: MatchNodeSelector (1).

There was no error output from the command.
Running test/end-to-end/core.sh:561: executing 'oc get events -n node-selector' expecting any result and text 'pod-with-node-name.+MatchNodeSelector'; re-trying every 0.2s until completion or 20.000s...
SUCCESS after 0.229s: test/end-to-end/core.sh:561: executing 'oc get events -n node-selector' expecting any result and text 'pod-with-node-name.+MatchNodeSelector'; re-trying every 0.2s until completion or 20.000s
Standard output from the command:
LASTSEEN   FIRSTSEEN   COUNT     NAME                    KIND      SUBOBJECT   TYPE      REASON              SOURCE               MESSAGE
0s         0s          1         pod-with-node-name      Pod                   Warning   MatchNodeSelector   kubelet, localhost   Predicate MatchNodeSelector failed
0s         0s          2         pod-without-node-name   Pod                   Warning   FailedScheduling    default-scheduler    No nodes are available that match all of the following predicates:: MatchNodeSelector (1).

There was no error output from the command.
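For reference, the impossible selector above is imposed at the project level: the --node-selector='impossible-label=true' flag on 'openshift admin new-project' is stored in the project's openshift.io/node-selector annotation and applied to every pod created in the namespace. A minimal sketch of a pod that would hit the same FailedScheduling path (the pod name and image here are illustrative, not taken from test/testdata/node-selector/pods.json):

  oc create -n node-selector -f - <<'EOF'
  {
    "apiVersion": "v1",
    "kind": "Pod",
    "metadata": {"name": "demo-pod"},
    "spec": {
      "containers": [
        {"name": "demo", "image": "openshift/hello-openshift"}
      ]
    }
  }
  EOF
  # No node carries impossible-label=true, so the scheduler records
  # FailedScheduling events and the pod stays Pending, as in the events above.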
[INFO] Validating image pruning
Running test/end-to-end/core.sh:567: executing 'docker login -u e2e-user -p eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJjYWNoZSIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VjcmV0Lm5hbWUiOiJidWlsZGVyLXRva2VuLXQzenZiIiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZXJ2aWNlLWFjY291bnQubmFtZSI6ImJ1aWxkZXIiLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlcnZpY2UtYWNjb3VudC51aWQiOiI2ZTE2NTM2OS04MmE4LTExZTctODMxMS0wZTk2OTA5YTI2MjYiLCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6Y2FjaGU6YnVpbGRlciJ9.kXVVGP1y21uLu3kdr36AnWA7Fi36u_VbZcMgCn40ITIgqbVK6wRCGICrWS6HSBl3sSyX_St8PL2NuEAqViitbRDU-X_o1NboSRRJ2TIdZHmVHz6uD3mX_1hZhbEMifG6GAY3Cocr3YR0vOe2_W0MCPHDE52ICUq6lq65ozf0uJDHg28DSTX0A8jMvJes-qG1ztgeiMAHeWV0kAi2b2J4f0WYanqMtPKLEffm_U0hA-Ue-DXL6FAVCVZLcfMbmVpRSXohsu-7jMFJhaR8S2__HKdHvkF_BDuiGaEyRJmUnSiyr15q4f_BRvwlR1j1SkoDJZ75cVq5Z7hfGTm1uwhtMA -e builder@openshift.com 172.30.1.1:5000' expecting success...
SUCCESS after 0.075s: test/end-to-end/core.sh:567: executing 'docker login -u e2e-user -p eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJjYWNoZSIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VjcmV0Lm5hbWUiOiJidWlsZGVyLXRva2VuLXQzenZiIiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZXJ2aWNlLWFjY291bnQubmFtZSI6ImJ1aWxkZXIiLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlcnZpY2UtYWNjb3VudC51aWQiOiI2ZTE2NTM2OS04MmE4LTExZTctODMxMS0wZTk2OTA5YTI2MjYiLCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6Y2FjaGU6YnVpbGRlciJ9.kXVVGP1y21uLu3kdr36AnWA7Fi36u_VbZcMgCn40ITIgqbVK6wRCGICrWS6HSBl3sSyX_St8PL2NuEAqViitbRDU-X_o1NboSRRJ2TIdZHmVHz6uD3mX_1hZhbEMifG6GAY3Cocr3YR0vOe2_W0MCPHDE52ICUq6lq65ozf0uJDHg28DSTX0A8jMvJes-qG1ztgeiMAHeWV0kAi2b2J4f0WYanqMtPKLEffm_U0hA-Ue-DXL6FAVCVZLcfMbmVpRSXohsu-7jMFJhaR8S2__HKdHvkF_BDuiGaEyRJmUnSiyr15q4f_BRvwlR1j1SkoDJZ75cVq5Z7hfGTm1uwhtMA -e builder@openshift.com 172.30.1.1:5000' expecting success
Standard output from the command:
Login Succeeded

Standard error from the command:
Flag --email has been deprecated, will be removed in 1.13.

Running test/end-to-end/core.sh:568: executing 'docker pull busybox' expecting success...
SUCCESS after 0.770s: test/end-to-end/core.sh:568: executing 'docker pull busybox' expecting success
Standard output from the command:
Using default tag: latest
Trying to pull repository registry.access.redhat.com/busybox ... 
Trying to pull repository docker.io/library/busybox ... 
latest: Pulling from docker.io/library/busybox
Digest: sha256:2605a2c4875ce5eb27a9f7403263190cd1af31e48a2044d400320548356251c4

There was no error output from the command.
Running test/end-to-end/core.sh:570: executing 'docker pull gcr.io/google_containers/pause:3.0' expecting success...
SUCCESS after 1.594s: test/end-to-end/core.sh:570: executing 'docker pull gcr.io/google_containers/pause:3.0' expecting success
Standard output from the command:
Trying to pull repository gcr.io/google_containers/pause ... 
3.0: Pulling from gcr.io/google_containers/pause
a3ed95caeb02: Pulling fs layer
f11233434377: Pulling fs layer
a3ed95caeb02: Download complete
f11233434377: Verifying Checksum
f11233434377: Download complete
a3ed95caeb02: Pull complete
f11233434377: Pull complete
Digest: sha256:0d093c962a6c2dd8bb8727b661e2b5f13e9df884af9945b4cc7088d9350cd3ee

There was no error output from the command.
Running test/end-to-end/core.sh:571: executing 'docker pull openshift/hello-openshift' expecting success...
SUCCESS after 1.499s: test/end-to-end/core.sh:571: executing 'docker pull openshift/hello-openshift' expecting success
Standard output from the command:
Using default tag: latest
Trying to pull repository registry.access.redhat.com/openshift/hello-openshift ... 
Trying to pull repository docker.io/openshift/hello-openshift ... 
latest: Pulling from docker.io/openshift/hello-openshift
4f4fb700ef54: Pulling fs layer
0b92a4867baa: Pulling fs layer
4f4fb700ef54: Verifying Checksum
4f4fb700ef54: Download complete
0b92a4867baa: Verifying Checksum
0b92a4867baa: Download complete
4f4fb700ef54: Pull complete
0b92a4867baa: Pull complete
Digest: sha256:d44288338b3e12f29126f5b3ee0bc27022e46f9dcef80638953981b5c4df753a

There was no error output from the command.
Running test/end-to-end/core.sh:574: executing 'docker tag busybox 172.30.1.1:5000/cache/prune' expecting success...
SUCCESS after 0.040s: test/end-to-end/core.sh:574: executing 'docker tag busybox 172.30.1.1:5000/cache/prune' expecting success
There was no output from the command.
There was no error output from the command.
Running test/end-to-end/core.sh:575: executing 'docker push 172.30.1.1:5000/cache/prune' expecting success...
SUCCESS after 0.637s: test/end-to-end/core.sh:575: executing 'docker push 172.30.1.1:5000/cache/prune' expecting success
Standard output from the command:
The push refers to a repository [172.30.1.1:5000/cache/prune]
08c2295a7fa5: Preparing
08c2295a7fa5: Pushed
latest: digest: sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac size: 527

There was no error output from the command.
Running test/end-to-end/core.sh:578: executing 'docker tag openshift/hello-openshift 172.30.1.1:5000/cache/prune' expecting success...
SUCCESS after 0.029s: test/end-to-end/core.sh:578: executing 'docker tag openshift/hello-openshift 172.30.1.1:5000/cache/prune' expecting success
There was no output from the command.
There was no error output from the command.
Running test/end-to-end/core.sh:579: executing 'docker push 172.30.1.1:5000/cache/prune' expecting success...
SUCCESS after 1.006s: test/end-to-end/core.sh:579: executing 'docker push 172.30.1.1:5000/cache/prune' expecting success
Standard output from the command:
The push refers to a repository [172.30.1.1:5000/cache/prune]
b29214cbca2a: Preparing
5f70bf18a086: Preparing
5f70bf18a086: Pushed
b29214cbca2a: Pushed
latest: digest: sha256:b83c31e0e11cfdc4e0fb9887e0b2217b8d6645bf65a281e16491c2c53222c263 size: 734

There was no error output from the command.
Running test/end-to-end/core.sh:582: executing 'docker tag gcr.io/google_containers/pause:3.0 172.30.1.1:5000/cache/prune' expecting success...
SUCCESS after 0.061s: test/end-to-end/core.sh:582: executing 'docker tag gcr.io/google_containers/pause:3.0 172.30.1.1:5000/cache/prune' expecting success
There was no output from the command.
There was no error output from the command.
Running test/end-to-end/core.sh:583: executing 'docker push 172.30.1.1:5000/cache/prune' expecting success...
SUCCESS after 0.614s: test/end-to-end/core.sh:583: executing 'docker push 172.30.1.1:5000/cache/prune' expecting success
Standard output from the command:
The push refers to a repository [172.30.1.1:5000/cache/prune]
5f70bf18a086: Preparing
41ff149e94f2: Preparing
5f70bf18a086: Preparing
5f70bf18a086: Layer already exists
41ff149e94f2: Pushed
latest: digest: sha256:f04288efc7e65a84be74d4fc63e235ac3c6c603cf832e442e0bd3f240b10a91b size: 939

There was no error output from the command.
Running test/end-to-end/core.sh:587: executing 'oc exec -p docker-registry-3-55bh3 du /registry > '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.before.txt'' expecting success...
SUCCESS after 0.265s: test/end-to-end/core.sh:587: executing 'oc exec -p docker-registry-3-55bh3 du /registry > '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.before.txt'' expecting success
There was no output from the command.
Standard error from the command:
W0816 13:34:20.105373    5707 cmd.go:403] -p POD_NAME is DEPRECATED and will be removed in a future version. Use exec POD_NAME instead.
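The -p warning is cosmetic; the same snapshot with the pod name passed positionally, as the warning suggests (sketch, output path shortened):

  oc exec docker-registry-3-55bh3 du /registry > prune-images.before.txt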

Running test/end-to-end/core.sh:590: executing 'oadm policy add-cluster-role-to-user system:image-pruner system:serviceaccount:cache:builder' expecting success...
SUCCESS after 0.233s: test/end-to-end/core.sh:590: executing 'oadm policy add-cluster-role-to-user system:image-pruner system:serviceaccount:cache:builder' expecting success
Standard output from the command:
cluster role "system:image-pruner" added: "system:serviceaccount:cache:builder"

There was no error output from the command.
Running test/end-to-end/core.sh:591: executing 'oadm policy who-can list images' expecting any result and text 'system:serviceaccount:cache:builder'; re-trying every 0.2s until completion or 60.000s...
SUCCESS after 0.231s: test/end-to-end/core.sh:591: executing 'oadm policy who-can list images' expecting any result and text 'system:serviceaccount:cache:builder'; re-trying every 0.2s until completion or 60.000s
Standard output from the command:
Namespace: default
Verb:      list
Resource:  images

Users:  system:admin
        system:kube-controller-manager
        system:serviceaccount:cache:builder
        system:serviceaccount:default:pvinstaller
        system:serviceaccount:kube-system:generic-garbage-collector
        system:serviceaccount:kube-system:namespace-controller
        system:serviceaccount:kube-system:resourcequota-controller
        system:serviceaccount:openshift-infra:image-import-controller

Groups: system:cluster-admins
        system:cluster-readers
        system:masters


There was no error output from the command.
Running test/end-to-end/core.sh:594: executing 'oadm prune images --token='eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJjYWNoZSIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VjcmV0Lm5hbWUiOiJidWlsZGVyLXRva2VuLXQzenZiIiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZXJ2aWNlLWFjY291bnQubmFtZSI6ImJ1aWxkZXIiLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlcnZpY2UtYWNjb3VudC51aWQiOiI2ZTE2NTM2OS04MmE4LTExZTctODMxMS0wZTk2OTA5YTI2MjYiLCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6Y2FjaGU6YnVpbGRlciJ9.kXVVGP1y21uLu3kdr36AnWA7Fi36u_VbZcMgCn40ITIgqbVK6wRCGICrWS6HSBl3sSyX_St8PL2NuEAqViitbRDU-X_o1NboSRRJ2TIdZHmVHz6uD3mX_1hZhbEMifG6GAY3Cocr3YR0vOe2_W0MCPHDE52ICUq6lq65ozf0uJDHg28DSTX0A8jMvJes-qG1ztgeiMAHeWV0kAi2b2J4f0WYanqMtPKLEffm_U0hA-Ue-DXL6FAVCVZLcfMbmVpRSXohsu-7jMFJhaR8S2__HKdHvkF_BDuiGaEyRJmUnSiyr15q4f_BRvwlR1j1SkoDJZ75cVq5Z7hfGTm1uwhtMA' --keep-younger-than=0 --keep-tag-revisions=1 --confirm' expecting success and not text 'error'...
SUCCESS after 0.599s: test/end-to-end/core.sh:594: executing 'oadm prune images --token='eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJjYWNoZSIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VjcmV0Lm5hbWUiOiJidWlsZGVyLXRva2VuLXQzenZiIiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZXJ2aWNlLWFjY291bnQubmFtZSI6ImJ1aWxkZXIiLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlcnZpY2UtYWNjb3VudC51aWQiOiI2ZTE2NTM2OS04MmE4LTExZTctODMxMS0wZTk2OTA5YTI2MjYiLCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6Y2FjaGU6YnVpbGRlciJ9.kXVVGP1y21uLu3kdr36AnWA7Fi36u_VbZcMgCn40ITIgqbVK6wRCGICrWS6HSBl3sSyX_St8PL2NuEAqViitbRDU-X_o1NboSRRJ2TIdZHmVHz6uD3mX_1hZhbEMifG6GAY3Cocr3YR0vOe2_W0MCPHDE52ICUq6lq65ozf0uJDHg28DSTX0A8jMvJes-qG1ztgeiMAHeWV0kAi2b2J4f0WYanqMtPKLEffm_U0hA-Ue-DXL6FAVCVZLcfMbmVpRSXohsu-7jMFJhaR8S2__HKdHvkF_BDuiGaEyRJmUnSiyr15q4f_BRvwlR1j1SkoDJZ75cVq5Z7hfGTm1uwhtMA' --keep-younger-than=0 --keep-tag-revisions=1 --confirm' expecting success and not text 'error'
Standard output from the command:
Deleting references from image streams to images ...
STREAM        IMAGE                                                                     TAGS
cache/prune   sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac   latest
cache/prune   sha256:b83c31e0e11cfdc4e0fb9887e0b2217b8d6645bf65a281e16491c2c53222c263   latest

Deleting registry repository layer links ...
REPO          LAYER LINK
cache/prune   sha256:efe10ee6727fe52d2db2eb5045518fe98d8e31fdad1cbdd5e1f737018c349ebb
cache/prune   sha256:daacdb2949be2c830e78b878ff144e38132faee2c2dcc20a544a5e0460fbdbf3
cache/prune   sha256:fd8f0c02e747b724f87c92d007becea08eaf13888b07385c94f4380b45bae68d
cache/prune   sha256:d0ac388b84364c2392fbd877488a258dc4b32234a677b4674b4a0485bb91cfba

Deleting registry layer blobs ...
BLOB
sha256:efe10ee6727fe52d2db2eb5045518fe98d8e31fdad1cbdd5e1f737018c349ebb
sha256:daacdb2949be2c830e78b878ff144e38132faee2c2dcc20a544a5e0460fbdbf3
sha256:fd8f0c02e747b724f87c92d007becea08eaf13888b07385c94f4380b45bae68d
sha256:d0ac388b84364c2392fbd877488a258dc4b32234a677b4674b4a0485bb91cfba

Deleting registry repository manifest data ...
REPO          IMAGE
cache/prune   sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac
cache/prune   sha256:b83c31e0e11cfdc4e0fb9887e0b2217b8d6645bf65a281e16491c2c53222c263

Deleting images from server ...
IMAGE
sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac
sha256:b83c31e0e11cfdc4e0fb9887e0b2217b8d6645bf65a281e16491c2c53222c263

There was no error output from the command.
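Note that --confirm is what makes the invocation above delete anything: without it, oadm prune images performs a dry run and only lists the streams, layer links, blobs and images it would remove. A sketch of the same invocation as a dry run (token elided):

  oadm prune images --token="${token}" --keep-younger-than=0 --keep-tag-revisions=1
  # no --confirm: prints the prune candidates and leaves the registry untouched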
Running test/end-to-end/core.sh:597: executing 'oc exec -p docker-registry-3-55bh3 du /registry > '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.after.txt'' expecting success...
SUCCESS after 0.267s: test/end-to-end/core.sh:597: executing 'oc exec -p docker-registry-3-55bh3 du /registry > '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.after.txt'' expecting success
There was no output from the command.
Standard error from the command:
W0816 13:34:21.706097    6021 cmd.go:403] -p POD_NAME is DEPRECATED and will be removed in a future version. Use exec POD_NAME instead.

Running test/end-to-end/core.sh:600: executing 'diff /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.before.txt /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.after.txt' expecting exit code 1...
SUCCESS after 0.021s: test/end-to-end/core.sh:600: executing 'diff /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.before.txt /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.after.txt' expecting exit code 1
Standard output from the command:
57,58c57,58
< 4	/registry/docker/registry/v2/repositories/cache/prune/_layers/sha256/daacdb2949be2c830e78b878ff144e38132faee2c2dcc20a544a5e0460fbdbf3
< 4	/registry/docker/registry/v2/repositories/cache/prune/_layers/sha256/efe10ee6727fe52d2db2eb5045518fe98d8e31fdad1cbdd5e1f737018c349ebb
---
> 0	/registry/docker/registry/v2/repositories/cache/prune/_layers/sha256/daacdb2949be2c830e78b878ff144e38132faee2c2dcc20a544a5e0460fbdbf3
> 0	/registry/docker/registry/v2/repositories/cache/prune/_layers/sha256/efe10ee6727fe52d2db2eb5045518fe98d8e31fdad1cbdd5e1f737018c349ebb
60,61c60,61
< 4	/registry/docker/registry/v2/repositories/cache/prune/_layers/sha256/d0ac388b84364c2392fbd877488a258dc4b32234a677b4674b4a0485bb91cfba
< 4	/registry/docker/registry/v2/repositories/cache/prune/_layers/sha256/fd8f0c02e747b724f87c92d007becea08eaf13888b07385c94f4380b45bae68d
---
> 0	/registry/docker/registry/v2/repositories/cache/prune/_layers/sha256/d0ac388b84364c2392fbd877488a258dc4b32234a677b4674b4a0485bb91cfba
> 0	/registry/docker/registry/v2/repositories/cache/prune/_layers/sha256/fd8f0c02e747b724f87c92d007becea08eaf13888b07385c94f4380b45bae68d
64,67c64,67
< 32	/registry/docker/registry/v2/repositories/cache/prune/_layers/sha256
< 32	/registry/docker/registry/v2/repositories/cache/prune/_layers
< 4	/registry/docker/registry/v2/repositories/cache/prune/_manifests/revisions/sha256/8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac
< 4	/registry/docker/registry/v2/repositories/cache/prune/_manifests/revisions/sha256/b83c31e0e11cfdc4e0fb9887e0b2217b8d6645bf65a281e16491c2c53222c263
---
> 16	/registry/docker/registry/v2/repositories/cache/prune/_layers/sha256
> 16	/registry/docker/registry/v2/repositories/cache/prune/_layers
> 0	/registry/docker/registry/v2/repositories/cache/prune/_manifests/revisions/sha256/8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac
> 0	/registry/docker/registry/v2/repositories/cache/prune/_manifests/revisions/sha256/b83c31e0e11cfdc4e0fb9887e0b2217b8d6645bf65a281e16491c2c53222c263
69,73c69,73
< 16	/registry/docker/registry/v2/repositories/cache/prune/_manifests/revisions/sha256
< 16	/registry/docker/registry/v2/repositories/cache/prune/_manifests/revisions
< 16	/registry/docker/registry/v2/repositories/cache/prune/_manifests
< 48	/registry/docker/registry/v2/repositories/cache/prune
< 164	/registry/docker/registry/v2/repositories/cache
---
> 8	/registry/docker/registry/v2/repositories/cache/prune/_manifests/revisions/sha256
> 8	/registry/docker/registry/v2/repositories/cache/prune/_manifests/revisions
> 8	/registry/docker/registry/v2/repositories/cache/prune/_manifests
> 24	/registry/docker/registry/v2/repositories/cache/prune
> 140	/registry/docker/registry/v2/repositories/cache
117c117
< 264	/registry/docker/registry/v2/repositories
---
> 240	/registry/docker/registry/v2/repositories
163,164c163
< 4	/registry/docker/registry/v2/blobs/sha256/fd/fd8f0c02e747b724f87c92d007becea08eaf13888b07385c94f4380b45bae68d
< 12	/registry/docker/registry/v2/blobs/sha256/fd
---
> 4	/registry/docker/registry/v2/blobs/sha256/fd
181,184c180,181
< 700	/registry/docker/registry/v2/blobs/sha256/da/daacdb2949be2c830e78b878ff144e38132faee2c2dcc20a544a5e0460fbdbf3
< 700	/registry/docker/registry/v2/blobs/sha256/da
< 4	/registry/docker/registry/v2/blobs/sha256/ef/efe10ee6727fe52d2db2eb5045518fe98d8e31fdad1cbdd5e1f737018c349ebb
< 4	/registry/docker/registry/v2/blobs/sha256/ef
---
> 0	/registry/docker/registry/v2/blobs/sha256/da
> 0	/registry/docker/registry/v2/blobs/sha256/ef
189,190c186
< 2004	/registry/docker/registry/v2/blobs/sha256/d0/d0ac388b84364c2392fbd877488a258dc4b32234a677b4674b4a0485bb91cfba
< 2004	/registry/docker/registry/v2/blobs/sha256/d0
---
> 0	/registry/docker/registry/v2/blobs/sha256/d0
199,204c195,200
< 322796	/registry/docker/registry/v2/blobs/sha256
< 322796	/registry/docker/registry/v2/blobs
< 323060	/registry/docker/registry/v2
< 323060	/registry/docker/registry
< 323060	/registry/docker
< 323060	/registry
---
> 320080	/registry/docker/registry/v2/blobs/sha256
> 320080	/registry/docker/registry/v2/blobs
> 320320	/registry/docker/registry/v2
> 320320	/registry/docker/registry
> 320320	/registry/docker
> 320320	/registry

There was no error output from the command.
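The diff is easier to read knowing that du reports 1 KiB blocks per directory (the GNU default): each pruned layer link drops from 4 to 0, and the registry as a whole shrinks from 323060 to 320320 KiB, roughly the 2.7 MB of deleted blobs. A quick way to compare just the totals (sketch, using the two files written above):

  grep '/registry$' /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.before.txt \
       /data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.after.txt
  # before: 323060  /registry
  # after:  320320  /registry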
Running test/end-to-end/core.sh:603: executing 'oc import-image nginx --confirm -n cache' expecting success...
SUCCESS after 0.377s: test/end-to-end/core.sh:603: executing 'oc import-image nginx --confirm -n cache' expecting success
Standard output from the command:
The import completed successfully.

Name:			nginx
Namespace:		cache
Created:		Less than a second ago
Labels:			<none>
Annotations:		openshift.io/image.dockerRepositoryCheck=2017-08-16T17:34:22Z
Docker Pull Spec:	172.30.1.1:5000/cache/nginx
Image Lookup:		local=false
Unique Images:		1
Tags:			1

latest
  tagged from nginx

  * nginx@sha256:788fa27763db6d69ad3444e8ba72f947df9e7e163bad7c1f5614f8fd27a311c3
      Less than a second ago

Image Name:	nginx:latest
Docker Image:	nginx@sha256:788fa27763db6d69ad3444e8ba72f947df9e7e163bad7c1f5614f8fd27a311c3
Name:		sha256:788fa27763db6d69ad3444e8ba72f947df9e7e163bad7c1f5614f8fd27a311c3
Created:	Less than a second ago
Image Size:	44.04 MB (first layer 194 B, last binary layer 22.49 MB)
Image Created:	3 weeks ago
Author:		NGINX Docker Maintainers "docker-maint@nginx.com"
Arch:		amd64
Command:	nginx -g daemon off;
Working Dir:	<none>
User:		<none>
Exposes Ports:	80/tcp
Docker Labels:	<none>
Environment:	PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
		NGINX_VERSION=1.13.3-1~stretch
		NJS_VERSION=1.13.3.0.1.11-1~stretch


There was no error output from the command.
Running test/end-to-end/core.sh:606: executing 'curl -H 'Authorization: bearer eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJjYWNoZSIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VjcmV0Lm5hbWUiOiJidWlsZGVyLXRva2VuLXQzenZiIiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZXJ2aWNlLWFjY291bnQubmFtZSI6ImJ1aWxkZXIiLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlcnZpY2UtYWNjb3VudC51aWQiOiI2ZTE2NTM2OS04MmE4LTExZTctODMxMS0wZTk2OTA5YTI2MjYiLCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6Y2FjaGU6YnVpbGRlciJ9.kXVVGP1y21uLu3kdr36AnWA7Fi36u_VbZcMgCn40ITIgqbVK6wRCGICrWS6HSBl3sSyX_St8PL2NuEAqViitbRDU-X_o1NboSRRJ2TIdZHmVHz6uD3mX_1hZhbEMifG6GAY3Cocr3YR0vOe2_W0MCPHDE52ICUq6lq65ozf0uJDHg28DSTX0A8jMvJes-qG1ztgeiMAHeWV0kAi2b2J4f0WYanqMtPKLEffm_U0hA-Ue-DXL6FAVCVZLcfMbmVpRSXohsu-7jMFJhaR8S2__HKdHvkF_BDuiGaEyRJmUnSiyr15q4f_BRvwlR1j1SkoDJZ75cVq5Z7hfGTm1uwhtMA' 'http://172.30.1.1:5000/v2/cache/nginx/blobs/sha256:9406c100a1c33ea3f04818a99e6c65b594ac0c571b2042fa07b0ad0043b64dce' 1>/dev/null' expecting success...
SUCCESS after 0.890s: test/end-to-end/core.sh:606: executing 'curl -H 'Authorization: bearer eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJjYWNoZSIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VjcmV0Lm5hbWUiOiJidWlsZGVyLXRva2VuLXQzenZiIiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZXJ2aWNlLWFjY291bnQubmFtZSI6ImJ1aWxkZXIiLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlcnZpY2UtYWNjb3VudC51aWQiOiI2ZTE2NTM2OS04MmE4LTExZTctODMxMS0wZTk2OTA5YTI2MjYiLCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6Y2FjaGU6YnVpbGRlciJ9.kXVVGP1y21uLu3kdr36AnWA7Fi36u_VbZcMgCn40ITIgqbVK6wRCGICrWS6HSBl3sSyX_St8PL2NuEAqViitbRDU-X_o1NboSRRJ2TIdZHmVHz6uD3mX_1hZhbEMifG6GAY3Cocr3YR0vOe2_W0MCPHDE52ICUq6lq65ozf0uJDHg28DSTX0A8jMvJes-qG1ztgeiMAHeWV0kAi2b2J4f0WYanqMtPKLEffm_U0hA-Ue-DXL6FAVCVZLcfMbmVpRSXohsu-7jMFJhaR8S2__HKdHvkF_BDuiGaEyRJmUnSiyr15q4f_BRvwlR1j1SkoDJZ75cVq5Z7hfGTm1uwhtMA' 'http://172.30.1.1:5000/v2/cache/nginx/blobs/sha256:9406c100a1c33ea3f04818a99e6c65b594ac0c571b2042fa07b0ad0043b64dce' 1>/dev/null' expecting success
There was no output from the command.
Standard error from the command:
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
  0 20.5M    0 15874    0     0  44548      0  0:08:03 --:--:--  0:08:03 44464
100 20.5M  100 20.5M    0     0  23.5M      0 --:--:-- --:--:-- --:--:-- 23.5M

Running test/end-to-end/core.sh:608: executing 'oc exec --context='default/127-0-0-1:8443/system:admin' -n default -p 'docker-registry-3-55bh3' du /registry | tee '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/registry-images.txt' | grep '9406c100a1c33ea3f04818a99e6c65b594ac0c571b2042fa07b0ad0043b64dce' | grep blobs' expecting success; re-trying every 0.2s until completion or 60.000s...
SUCCESS after 1.205s: test/end-to-end/core.sh:608: executing 'oc exec --context='default/127-0-0-1:8443/system:admin' -n default -p 'docker-registry-3-55bh3' du /registry | tee '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/registry-images.txt' | grep '9406c100a1c33ea3f04818a99e6c65b594ac0c571b2042fa07b0ad0043b64dce' | grep blobs' expecting success; re-trying every 0.2s until completion or 60.000s
Standard output from the command:
21044	/registry/docker/registry/v2/blobs/sha256/94/9406c100a1c33ea3f04818a99e6c65b594ac0c571b2042fa07b0ad0043b64dce
Standard error from the command:
W0816 13:34:23.725669    6324 cmd.go:403] -p POD_NAME is DEPRECATED and will be removed in a future version. Use exec POD_NAME instead.
W0816 13:34:24.197143    6365 cmd.go:403] -p POD_NAME is DEPRECATED and will be removed in a future version. Use exec POD_NAME instead.
W0816 13:34:24.658616    6405 cmd.go:403] -p POD_NAME is DEPRECATED and will be removed in a future version. Use exec POD_NAME instead.
Running test/end-to-end/core.sh:609: executing 'oc delete is nginx -n cache' expecting success...
SUCCESS after 0.204s: test/end-to-end/core.sh:609: executing 'oc delete is nginx -n cache' expecting success
Standard output from the command:
imagestream "nginx" deleted

There was no error output from the command.
Running test/end-to-end/core.sh:610: executing 'oc exec -p docker-registry-3-55bh3 du /registry > '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.before.txt'' expecting success...
SUCCESS after 0.261s: test/end-to-end/core.sh:610: executing 'oc exec -p docker-registry-3-55bh3 du /registry > '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.before.txt'' expecting success
There was no output from the command.
Standard error from the command:
W0816 13:34:25.181176    6561 cmd.go:403] -p POD_NAME is DEPRECATED and will be removed in a future version. Use exec POD_NAME instead.

Running test/end-to-end/core.sh:611: executing 'oadm prune images --token='eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJjYWNoZSIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VjcmV0Lm5hbWUiOiJidWlsZGVyLXRva2VuLXQzenZiIiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZXJ2aWNlLWFjY291bnQubmFtZSI6ImJ1aWxkZXIiLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlcnZpY2UtYWNjb3VudC51aWQiOiI2ZTE2NTM2OS04MmE4LTExZTctODMxMS0wZTk2OTA5YTI2MjYiLCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6Y2FjaGU6YnVpbGRlciJ9.kXVVGP1y21uLu3kdr36AnWA7Fi36u_VbZcMgCn40ITIgqbVK6wRCGICrWS6HSBl3sSyX_St8PL2NuEAqViitbRDU-X_o1NboSRRJ2TIdZHmVHz6uD3mX_1hZhbEMifG6GAY3Cocr3YR0vOe2_W0MCPHDE52ICUq6lq65ozf0uJDHg28DSTX0A8jMvJes-qG1ztgeiMAHeWV0kAi2b2J4f0WYanqMtPKLEffm_U0hA-Ue-DXL6FAVCVZLcfMbmVpRSXohsu-7jMFJhaR8S2__HKdHvkF_BDuiGaEyRJmUnSiyr15q4f_BRvwlR1j1SkoDJZ75cVq5Z7hfGTm1uwhtMA' --keep-younger-than=0 --confirm --all --registry-url='172.30.1.1:5000'' expecting success and not text 'error'...
SUCCESS after 0.394s: test/end-to-end/core.sh:611: executing 'oadm prune images --token='eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJrdWJlcm5ldGVzL3NlcnZpY2VhY2NvdW50Iiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9uYW1lc3BhY2UiOiJjYWNoZSIsImt1YmVybmV0ZXMuaW8vc2VydmljZWFjY291bnQvc2VjcmV0Lm5hbWUiOiJidWlsZGVyLXRva2VuLXQzenZiIiwia3ViZXJuZXRlcy5pby9zZXJ2aWNlYWNjb3VudC9zZXJ2aWNlLWFjY291bnQubmFtZSI6ImJ1aWxkZXIiLCJrdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L3NlcnZpY2UtYWNjb3VudC51aWQiOiI2ZTE2NTM2OS04MmE4LTExZTctODMxMS0wZTk2OTA5YTI2MjYiLCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6Y2FjaGU6YnVpbGRlciJ9.kXVVGP1y21uLu3kdr36AnWA7Fi36u_VbZcMgCn40ITIgqbVK6wRCGICrWS6HSBl3sSyX_St8PL2NuEAqViitbRDU-X_o1NboSRRJ2TIdZHmVHz6uD3mX_1hZhbEMifG6GAY3Cocr3YR0vOe2_W0MCPHDE52ICUq6lq65ozf0uJDHg28DSTX0A8jMvJes-qG1ztgeiMAHeWV0kAi2b2J4f0WYanqMtPKLEffm_U0hA-Ue-DXL6FAVCVZLcfMbmVpRSXohsu-7jMFJhaR8S2__HKdHvkF_BDuiGaEyRJmUnSiyr15q4f_BRvwlR1j1SkoDJZ75cVq5Z7hfGTm1uwhtMA' --keep-younger-than=0 --confirm --all --registry-url='172.30.1.1:5000'' expecting success and not text 'error'
Standard output from the command:

Deleting registry layer blobs ...
BLOB
sha256:94ed0c431eb58b1c824715ac158d102bc78b5eb9d690579da5d8bc96b190eb67
sha256:b8efb18f159bd948486f18bd8940b56fd2298b438229f5bd2bcf4cedcf037448
sha256:aa74daafd50caca8cb6af306686b6f40ddf0392f5f06ffc13205305c3b73102a
sha256:9406c100a1c33ea3f04818a99e6c65b594ac0c571b2042fa07b0ad0043b64dce

Deleting images from server ...
IMAGE
sha256:788fa27763db6d69ad3444e8ba72f947df9e7e163bad7c1f5614f8fd27a311c3

There was no error output from the command.
Running test/end-to-end/core.sh:612: executing 'oc exec --context='default/127-0-0-1:8443/system:admin' -n default -p 'docker-registry-3-55bh3' du /registry | tee '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/registry-images.txt' | grep '9406c100a1c33ea3f04818a99e6c65b594ac0c571b2042fa07b0ad0043b64dce' | grep blobs' expecting failure...
SUCCESS after 0.261s: test/end-to-end/core.sh:612: executing 'oc exec --context='default/127-0-0-1:8443/system:admin' -n default -p 'docker-registry-3-55bh3' du /registry | tee '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/registry-images.txt' | grep '9406c100a1c33ea3f04818a99e6c65b594ac0c571b2042fa07b0ad0043b64dce' | grep blobs' expecting failure
There was no output from the command.
Standard error from the command:
W0816 13:34:26.055863    6730 cmd.go:403] -p POD_NAME is DEPRECATED and will be removed in a future version. Use exec POD_NAME instead.

Running test/end-to-end/core.sh:613: executing 'oc exec -p docker-registry-3-55bh3 du /registry > '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.after.txt'' expecting success...
SUCCESS after 0.259s: test/end-to-end/core.sh:613: executing 'oc exec -p docker-registry-3-55bh3 du /registry > '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.after.txt'' expecting success
There was no output from the command.
Standard error from the command:
W0816 13:34:26.344805    6823 cmd.go:403] -p POD_NAME is DEPRECATED and will be removed in a future version. Use exec POD_NAME instead.

Running test/end-to-end/core.sh:614: executing 'diff '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.before.txt' '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.after.txt'' expecting exit code 1...
SUCCESS after 0.014s: test/end-to-end/core.sh:614: executing 'diff '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.before.txt' '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/logs/prune-images.after.txt'' expecting exit code 1
Standard output from the command:
200,207c200,206
< 21044	/registry/docker/registry/v2/blobs/sha256/94/9406c100a1c33ea3f04818a99e6c65b594ac0c571b2042fa07b0ad0043b64dce
< 21044	/registry/docker/registry/v2/blobs/sha256/94
< 341124	/registry/docker/registry/v2/blobs/sha256
< 341124	/registry/docker/registry/v2/blobs
< 341368	/registry/docker/registry/v2
< 341368	/registry/docker/registry
< 341368	/registry/docker
< 341368	/registry
---
> 0	/registry/docker/registry/v2/blobs/sha256/94
> 320080	/registry/docker/registry/v2/blobs/sha256
> 320080	/registry/docker/registry/v2/blobs
> 320324	/registry/docker/registry/v2
> 320324	/registry/docker/registry
> 320324	/registry/docker
> 320324	/registry

There was no error output from the command.
[INFO] Validated image pruning
[INFO] Configure registry to accept manifest V2 schema 2
Running test/end-to-end/core.sh:619: executing 'oc project 'default/127-0-0-1:8443/system:admin'' expecting success...
SUCCESS after 0.165s: test/end-to-end/core.sh:619: executing 'oc project 'default/127-0-0-1:8443/system:admin'' expecting success
Standard output from the command:
Already on project "default" on server "https://127.0.0.1:8443".

There was no error output from the command.
Running test/end-to-end/core.sh:620: executing 'oc env -n default dc/docker-registry REGISTRY_MIDDLEWARE_REPOSITORY_OPENSHIFT_ACCEPTSCHEMA2=true' expecting success...
SUCCESS after 0.207s: test/end-to-end/core.sh:620: executing 'oc env -n default dc/docker-registry REGISTRY_MIDDLEWARE_REPOSITORY_OPENSHIFT_ACCEPTSCHEMA2=true' expecting success
Standard output from the command:
deploymentconfig "docker-registry" updated

There was no error output from the command.
Running test/end-to-end/core.sh:621: executing 'oc rollout status dc/docker-registry' expecting success...
SUCCESS after 15.997s: test/end-to-end/core.sh:621: executing 'oc rollout status dc/docker-registry' expecting success
Standard output from the command:
Waiting for rollout to finish: 0 out of 1 new replicas have been updated...
Waiting for rollout to finish: 0 out of 1 new replicas have been updated...
Waiting for rollout to finish: 1 old replicas are pending termination...
Waiting for rollout to finish: 1 old replicas are pending termination...
Waiting for latest deployment config spec to be observed by the controller loop...
replication controller "docker-registry-4" successfully rolled out

There was no error output from the command.
[INFO] Registry configured to accept manifest V2 schema 2
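A quick way to confirm the toggle landed on the deployment config (sketch; 'oc env --list' prints the container's environment):

  oc env -n default dc/docker-registry --list | grep ACCEPTSCHEMA2
  # REGISTRY_MIDDLEWARE_REPOSITORY_OPENSHIFT_ACCEPTSCHEMA2=true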
[INFO] Accept manifest V2 schema 2
Running test/end-to-end/core.sh:625: executing 'oc login -u schema2-user -p pass' expecting success...
SUCCESS after 0.252s: test/end-to-end/core.sh:625: executing 'oc login -u schema2-user -p pass' expecting success
Standard output from the command:
Login successful.

You have one project on this server: "schema2"

Using project "schema2".

There was no error output from the command.
Running test/end-to-end/core.sh:626: executing 'oc project schema2' expecting success...
SUCCESS after 0.223s: test/end-to-end/core.sh:626: executing 'oc project schema2' expecting success
Standard output from the command:
Already on project "schema2" on server "https://127.0.0.1:8443".

There was no error output from the command.
Running test/end-to-end/core.sh:628: executing 'docker tag busybox '172.30.1.1:5000/schema2/busybox'' expecting success...
SUCCESS after 0.033s: test/end-to-end/core.sh:628: executing 'docker tag busybox '172.30.1.1:5000/schema2/busybox'' expecting success
There was no output from the command.
There was no error output from the command.
Running test/end-to-end/core.sh:629: executing 'docker login -u e2e-user -p 'P6ifQeVkm-SuNYTb-ILVcIDoUAX2mKlSJQPixur37OM' -e e2e-user@openshift.com '172.30.1.1:5000'' expecting success...
SUCCESS after 0.048s: test/end-to-end/core.sh:629: executing 'docker login -u e2e-user -p 'P6ifQeVkm-SuNYTb-ILVcIDoUAX2mKlSJQPixur37OM' -e e2e-user@openshift.com '172.30.1.1:5000'' expecting success
Standard output from the command:
Login Succeeded

Standard error from the command:
Flag --email has been deprecated, will be removed in 1.13.

Running test/end-to-end/core.sh:630: executing 'docker push '172.30.1.1:5000/schema2/busybox'' expecting success...
SUCCESS after 0.612s: test/end-to-end/core.sh:630: executing 'docker push '172.30.1.1:5000/schema2/busybox'' expecting success
Standard output from the command:
The push refers to a repository [172.30.1.1:5000/schema2/busybox]
08c2295a7fa5: Preparing
08c2295a7fa5: Pushed
latest: digest: sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac size: 527

There was no error output from the command.
Running test/end-to-end/core.sh:632: executing 'oc get -o jsonpath='{.image.dockerImageManifestMediaType}' istag busybox:latest' expecting success and text 'application/vnd\.docker\.distribution\.manifest\.v2\+json'...
SUCCESS after 0.193s: test/end-to-end/core.sh:632: executing 'oc get -o jsonpath='{.image.dockerImageManifestMediaType}' istag busybox:latest' expecting success and text 'application/vnd\.docker\.distribution\.manifest\.v2\+json'
Standard output from the command:
application/vnd.docker.distribution.manifest.v2+json
There was no error output from the command.
[INFO] Manifest V2 schema 2 successfully accepted
[INFO] Convert manifest V2 schema 2 to schema 1 for older client
Running test/end-to-end/core.sh:636: executing 'oc login -u schema2-user -p pass' expecting success...
SUCCESS after 0.231s: test/end-to-end/core.sh:636: executing 'oc login -u schema2-user -p pass' expecting success
Standard output from the command:
Login successful.

You have one project on this server: "schema2"

Using project "schema2".

There was no error output from the command.
Running test/end-to-end/core.sh:637: executing 'oc new-project schema2tagged' expecting success...
SUCCESS after 0.306s: test/end-to-end/core.sh:637: executing 'oc new-project schema2tagged' expecting success
Standard output from the command:
Now using project "schema2tagged" on server "https://127.0.0.1:8443".

You can add applications to this project with the 'new-app' command. For example, try:

    oc new-app centos/ruby-22-centos7~https://github.com/openshift/ruby-ex.git

to build a new example application in Ruby.

There was no error output from the command.
Running test/end-to-end/core.sh:638: executing 'oc tag --source=istag schema2/busybox:latest busybox:latest' expecting success...
SUCCESS after 0.217s: test/end-to-end/core.sh:638: executing 'oc tag --source=istag schema2/busybox:latest busybox:latest' expecting success
Standard output from the command:
Tag busybox:latest set to schema2/busybox@sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac.

There was no error output from the command.
Running test/end-to-end/core.sh:640: executing 'echo 'sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac'' expecting success and text '.+'...
SUCCESS after 0.020s: test/end-to-end/core.sh:640: executing 'echo 'sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac'' expecting success and text '.+'
Standard output from the command:
sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac

There was no error output from the command.
Running test/end-to-end/core.sh:644: executing 'jq -r '.schemaVersion' '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/artifacts/busybox-manifest.json'' expecting success and text '^1$'...
SUCCESS after 0.020s: test/end-to-end/core.sh:644: executing 'jq -r '.schemaVersion' '/data/src/github.com/openshift/origin/_output/scripts/test-end-to-end/artifacts/busybox-manifest.json'' expecting success and text '^1$'
Standard output from the command:
1

There was no error output from the command.
Running test/end-to-end/core.sh:645: executing 'echo 'sha256:ddd9a1619d640a74871704874f17aebdb6429f0f39676cb15f63e6a74d72862f'' expecting success and not text 'sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac'...
SUCCESS after 0.016s: test/end-to-end/core.sh:645: executing 'echo 'sha256:ddd9a1619d640a74871704874f17aebdb6429f0f39676cb15f63e6a74d72862f'' expecting success and not text 'sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac'
Standard output from the command:
sha256:ddd9a1619d640a74871704874f17aebdb6429f0f39676cb15f63e6a74d72862f

There was no error output from the command.
Running test/end-to-end/core.sh:646: executing 'echo 'sha256:ddd9a1619d640a74871704874f17aebdb6429f0f39676cb15f63e6a74d72862f'' expecting success and text '.+'...
SUCCESS after 0.014s: test/end-to-end/core.sh:646: executing 'echo 'sha256:ddd9a1619d640a74871704874f17aebdb6429f0f39676cb15f63e6a74d72862f'' expecting success and text '.+'
Standard output from the command:
sha256:ddd9a1619d640a74871704874f17aebdb6429f0f39676cb15f63e6a74d72862f

There was no error output from the command.
Running test/end-to-end/core.sh:648: executing 'curl -I -u 'schema2-user:P6ifQeVkm-SuNYTb-ILVcIDoUAX2mKlSJQPixur37OM' '172.30.1.1:5000/v2/schema2tagged/busybox/manifests/sha256:ddd9a1619d640a74871704874f17aebdb6429f0f39676cb15f63e6a74d72862f'' expecting success and text '404 Not Found'...
SUCCESS after 0.036s: test/end-to-end/core.sh:648: executing 'curl -I -u 'schema2-user:P6ifQeVkm-SuNYTb-ILVcIDoUAX2mKlSJQPixur37OM' '172.30.1.1:5000/v2/schema2tagged/busybox/manifests/sha256:ddd9a1619d640a74871704874f17aebdb6429f0f39676cb15f63e6a74d72862f'' expecting success and text '404 Not Found'
Standard output from the command:
HTTP/1.1 404 Not Found
Content-Type: application/json; charset=utf-8
Docker-Distribution-Api-Version: registry/2.0
X-Registry-Supports-Signatures: 1
Date: Wed, 16 Aug 2017 17:34:45 GMT
Content-Length: 183


Standard error from the command:
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
  0   183    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0

Running test/end-to-end/core.sh:650: executing 'curl -I -u 'schema2-user:P6ifQeVkm-SuNYTb-ILVcIDoUAX2mKlSJQPixur37OM' -H 'Accept: application/vnd.docker.distribution.manifest.v2+json' '172.30.1.1:5000/v2/schema2tagged/busybox/manifests/latest'' expecting success and text 'Docker-Content-Digest:\s*sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac'...
SUCCESS after 0.041s: test/end-to-end/core.sh:650: executing 'curl -I -u 'schema2-user:P6ifQeVkm-SuNYTb-ILVcIDoUAX2mKlSJQPixur37OM' -H 'Accept: application/vnd.docker.distribution.manifest.v2+json' '172.30.1.1:5000/v2/schema2tagged/busybox/manifests/latest'' expecting success and text 'Docker-Content-Digest:\s*sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac'
Standard output from the command:
HTTP/1.1 200 OK
Content-Length: 527
Content-Type: application/vnd.docker.distribution.manifest.v2+json
Docker-Content-Digest: sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac
Docker-Distribution-Api-Version: registry/2.0
Etag: "sha256:8573b4a813d7b90ef3876c6bec33db1272c02f0f90c406b25a5f9729169548ac"
X-Registry-Supports-Signatures: 1
Date: Wed, 16 Aug 2017 17:34:45 GMT


Standard error from the command:
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
  0   527    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0

[INFO] Manifest V2 schema 2 successfully converted to schema 1
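In other words, the registry negotiates on the Accept header: a client that advertises schema 2 support gets the stored manifest back under its original digest, while an older client gets an on-the-fly schema 1 rendering with a different digest. Both requests side by side (sketch; token elided):

  # schema 2: returns the pushed digest sha256:8573b4a8...
  curl -sI -u "schema2-user:${token}" \
    -H 'Accept: application/vnd.docker.distribution.manifest.v2+json' \
    'http://172.30.1.1:5000/v2/schema2tagged/busybox/manifests/latest' | grep Docker-Content-Digest
  # no Accept header: converted manifest with schemaVersion 1
  curl -s -u "schema2-user:${token}" \
    'http://172.30.1.1:5000/v2/schema2tagged/busybox/manifests/latest' | jq -r '.schemaVersion'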
[INFO] Verify image size calculation
Running test/end-to-end/core.sh:655: executing 'echo '716618'' expecting success and text '^[1-9][0-9]*$'...
SUCCESS after 0.014s: test/end-to-end/core.sh:655: executing 'echo '716618'' expecting success and text '^[1-9][0-9]*$'
Standard output from the command:
716618

There was no error output from the command.
[INFO] Image size matches
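The 716618 checked above is a byte count for the pushed busybox image; the harness presumably reads it off the image object, along the lines of (sketch; the jsonpath is an assumption, not quoted from core.sh):

  oc get istag busybox:latest -n schema2tagged \
    -o jsonpath='{.image.dockerImageMetadata.Size}'
  # 716618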
/data/src/github.com/openshift/origin/hack/lib/log/system.sh: line 31: 31648 Terminated              sar -A -o "${binary_logfile}" 1 86400 > /dev/null 2> "${stderr_logfile}"
[INFO] jUnit XML report placed at _output/scripts/test-end-to-end/artifacts/oscmd_report_d2Dm5.xml
Of 271 tests executed in 366.810s, 271 succeeded, 0 failed, and 0 were skipped.
[INFO] [CLEANUP] Beginning cleanup routines...
[INFO] [CLEANUP] Dumping cluster events to _output/scripts/test-end-to-end/artifacts/events.txt
Logged into "https://127.0.0.1:8443" as "system:admin" using existing credentials.

You have access to the following projects and can switch between them with 'oc project <projectname>':

    cache
    crossmount
    custom
    default
    docker
    kube-public
    kube-system
    myproject
    node-selector
    openshift
    openshift-infra
    project0
    schema2
  * schema2tagged
    test
    verify-manifest

Using project "schema2tagged".
[INFO] [CLEANUP] Dumping etcd contents to _output/scripts/test-end-to-end/artifacts/etcd
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v2alpha1.batch): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1beta1.storage.k8s.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1beta1.rbac.authorization.k8s.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1beta1.policy): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1beta1.extensions): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1beta1.certificates.k8s.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1beta1.authorization.k8s.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1beta1.authentication.k8s.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1beta1.apps): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1beta1.apiextensions.k8s.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.user.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.template.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.storage.k8s.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.security.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.route.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.quota.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.project.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.oauth.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.networking.k8s.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.network.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.image.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.build.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.batch): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.autoscaling): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.authorization.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.authorization.k8s.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.authentication.k8s.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s (apiregistration.k8s.io/v1beta1 APIService v1.apps.openshift.io): no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
J
	AvailableTrue����"Local*&Local APIServices are always available": no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
WARN: error decoding value k8s
,
apiregistration.k8s.io/v1beta1
APIService�
�
v1."4/apis/apiregistration.k8s.io/v1beta1/apiservices/v1.*$460336b2-82a8-11e7-8311-0e96909a262628B����Z1
)kube-aggregator.kubernetes.io/automanagedtruezv1 *8Ќ@L
J
	AvailableTrue����"Local*&Local APIServices are always available": no kind "APIService" is registered for version "apiregistration.k8s.io/v1beta1"
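Note: the repeated "error decoding value" warnings above are emitted while the end-to-end teardown dumps raw etcd contents; the dump tool's decoder has no APIService kind registered for apiregistration.k8s.io/v1beta1, so each stored object is printed as undecoded protobuf. A minimal sketch of listing the same keys directly, assuming an etcd3 backend and the kubernetes.io key prefix (both assumptions, neither confirmed by this log):

  $ ETCDCTL_API=3 etcdctl get /kubernetes.io/apiregistration.k8s.io/apiservices \
      --prefix --keys-only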
[INFO] [CLEANUP] Dumping container logs to _output/scripts/test-end-to-end/logs/containers
[INFO] [CLEANUP] Truncating log files over 200M
[INFO] [CLEANUP] Stopping docker containers
[INFO] [CLEANUP] Removing docker containers
[INFO] [CLEANUP] Killing child processes
[INFO] [CLEANUP] Pruning etcd data directory
[INFO] Restoring journald limits
[INFO] hack/test-end-to-end-docker.sh exited with code 0 after 00h 07m 39s
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: RUN INTEGRATION TESTS [00h 43m 52s] ##########
[workspace] $ /bin/bash /tmp/hudson1809709136425406330.sh
########## STARTING STAGE: RELEASE THE AMI ##########
+ [[ -s /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate ]]
+ source /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ export PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
+ trello_tag=
+ [[ -n https://trello.com/c/Vac6mHz5/900-8-registry-rebase-docker-distribution-to-26x ]]
+ [[ https://trello.com/c/Vac6mHz5/900-8-registry-rebase-docker-distribution-to-26x != \<\n\o\n\e\> ]]
+ trello_tag='--tag trello=https://trello.com/c/Vac6mHz5/900-8-registry-rebase-docker-distribution-to-26x'
+ oct package ami --mark-ready --tag qe=ready --tag trello=https://trello.com/c/Vac6mHz5/900-8-registry-rebase-docker-distribution-to-26x

PLAYBOOK: ami-mark-ready.yml ***************************************************
2 plays in /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/package/ami-mark-ready.yml

PLAY [ensure we have the parameters necessary to package the VM image] *********

TASK [ensure all required variables are set] ***********************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/package/ami-mark-ready.yml:9
skipping: [localhost] => (item=origin_ci_aws_region)  => {
    "changed": false, 
    "generated_timestamp": "2017-08-16 13:35:22.957964", 
    "item": "origin_ci_aws_region", 
    "skip_reason": "Conditional check failed", 
    "skipped": true
}
skipping: [localhost] => (item=origin_ci_hosts)  => {
    "changed": false, 
    "generated_timestamp": "2017-08-16 13:35:22.961831", 
    "item": "origin_ci_hosts", 
    "skip_reason": "Conditional check failed", 
    "skipped": true
}

TASK [ensure only one AWS instance is running] *********************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/package/ami-mark-ready.yml:17
skipping: [localhost] => {
    "changed": false, 
    "generated_timestamp": "2017-08-16 13:35:23.018973", 
    "skip_reason": "Conditional check failed", 
    "skipped": true
}

PLAY [package the VM image] ****************************************************

TASK [determine the inventory hostname for the host we are packaging] **********
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/package/ami-mark-ready.yml:29
ok: [localhost] => {
    "ansible_facts": {
        "origin_ci_aws_hostname": "172.18.1.94"
    }, 
    "changed": false, 
    "generated_timestamp": "2017-08-16 13:35:23.086885"
}

TASK [determine AWS EC2 AMI name] **********************************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/package/ami-mark-ready.yml:33
ok: [localhost] => {
    "ansible_facts": {
        "origin_ci_aws_ami_name": "ami_build_origin_int_rhel_fork_43"
    }, 
    "changed": false, 
    "generated_timestamp": "2017-08-16 13:35:23.149444"
}

TASK [search for an AMI to update] *********************************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/package/ami-mark-ready.yml:37
ok: [localhost] => {
    "attempts": 1, 
    "changed": false, 
    "generated_timestamp": "2017-08-16 13:35:27.786547", 
    "results": [
        {
            "ami_id": "ami-bfcbf9c4", 
            "architecture": "x86_64", 
            "block_device_mapping": {
                "/dev/sda1": {
                    "delete_on_termination": true, 
                    "encrypted": false, 
                    "size": 35, 
                    "snapshot_id": "snap-0413785f3d5a8dcf8", 
                    "volume_type": "gp2"
                }, 
                "/dev/sdb": {
                    "delete_on_termination": true, 
                    "encrypted": false, 
                    "size": 35, 
                    "snapshot_id": "snap-04f757913ebbdca38", 
                    "volume_type": "gp2"
                }
            }, 
            "creationDate": "2017-08-16T14:45:23.000Z", 
            "description": "OpenShift Origin development AMI on rhel at the fork stage.", 
            "hypervisor": "xen", 
            "is_public": false, 
            "location": "531415883065/ami_build_origin_int_rhel_fork_43", 
            "name": "ami_build_origin_int_rhel_fork_43", 
            "owner_id": "531415883065", 
            "platform": null, 
            "root_device_name": "/dev/sda1", 
            "root_device_type": "ebs", 
            "state": "available", 
            "tags": {
                "Name": "ami_build_origin_int_rhel_fork_43", 
                "image_stage": "fork", 
                "operating_system": "rhel", 
                "ready": "no"
            }, 
            "virtualization_type": "hvm"
        }
    ]
}

TASK [determine which AMI to update] *******************************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/package/ami-mark-ready.yml:51
ok: [localhost] => {
    "ansible_facts": {
        "origin_ci_aws_ami_id": "ami-bfcbf9c4"
    }, 
    "changed": false, 
    "generated_timestamp": "2017-08-16 13:35:27.845025"
}

TASK [mark the AMI ready] ******************************************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/package/ami-mark-ready.yml:55
changed: [localhost] => (item={'key': 'ready', 'value': 'yes'}) => {
    "changed": true, 
    "generated_timestamp": "2017-08-16 13:35:28.556789", 
    "item": {
        "key": "ready", 
        "value": "yes"
    }, 
    "msg": "Tags {'ready': 'yes'} created for resource ami-bfcbf9c4."
}
changed: [localhost] => (item={'key': u'trello', 'value': u'https://trello.com/c/Vac6mHz5/900-8-registry-rebase-docker-distribution-to-26x'}) => {
    "changed": true, 
    "generated_timestamp": "2017-08-16 13:35:29.034173", 
    "item": {
        "key": "trello", 
        "value": "https://trello.com/c/Vac6mHz5/900-8-registry-rebase-docker-distribution-to-26x"
    }, 
    "msg": "Tags {'trello': 'https://trello.com/c/Vac6mHz5/900-8-registry-rebase-docker-distribution-to-26x'} created for resource ami-bfcbf9c4."
}
changed: [localhost] => (item={'key': u'qe', 'value': u'ready'}) => {
    "changed": true, 
    "generated_timestamp": "2017-08-16 13:35:29.500560", 
    "item": {
        "key": "qe", 
        "value": "ready"
    }, 
    "msg": "Tags {'qe': 'ready'} created for resource ami-bfcbf9c4."
}

PLAY RECAP *********************************************************************
localhost                  : ok=5    changed=1    unreachable=0    failed=0   

+ set +o xtrace
########## FINISHED STAGE: SUCCESS: RELEASE THE AMI [00h 00m 08s] ##########
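Note: oct package ami --mark-ready drives the ec2_tag tasks shown in the play above. A rough AWS CLI equivalent of the tagging step, for illustration only (the job itself goes through Ansible, not the CLI):

  $ aws ec2 create-tags --resources ami-bfcbf9c4 \
      --tags Key=ready,Value=yes Key=qe,Value=ready
  $ aws ec2 describe-images --image-ids ami-bfcbf9c4 --query 'Images[0].Tags'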
[workspace] $ /bin/bash /tmp/hudson4521966534198871204.sh
########## STARTING STAGE: MAKE A TRELLO COMMENT ##########
+ [[ -s /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate ]]
+ source /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ export PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
+ [[ https://trello.com/c/Vac6mHz5/900-8-registry-rebase-docker-distribution-to-26x == \<\n\o\n\e\> ]]
+ [[ -z https://trello.com/c/Vac6mHz5/900-8-registry-rebase-docker-distribution-to-26x ]]
++ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel curl -s http://169.254.169.254/latest/meta-data/ami-id
+ AMI_ID=ami-2810943f
+ trello comment 'A fork AMI (id: ami-2810943f) has been created for this card' --card-url https://trello.com/c/Vac6mHz5/900-8-registry-rebase-docker-distribution-to-26x
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: MAKE A TRELLO COMMENT [00h 00m 02s] ##########
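Note: AMI_ID is read from the EC2 instance metadata service over ssh, so it resolves to the image the build host was launched from (ami-2810943f, matching the image_id in the deprovision output further below) rather than the freshly tagged ami-bfcbf9c4. The metadata endpoint answers plain HTTP GETs:

  $ curl -s http://169.254.169.254/latest/meta-data/ami-id
  $ curl -s http://169.254.169.254/latest/meta-data/instance-id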
[workspace] $ /bin/bash /tmp/hudson260819129262998725.sh
########## STARTING STAGE: RUN EXTENDED CONFORMANCE SUITES ##########
+ [[ -s /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate ]]
+ source /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ export PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
+ cat
+ [[ false == \t\r\u\e ]]
+ set +o xtrace
########## FINISHED STAGE: SUCCESS: RUN EXTENDED CONFORMANCE SUITES [00h 00m 00s] ##########
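Note: the conformance stage is gated on a literal string comparison; under set -o xtrace bash echoes the right-hand side of [[ false == \t\r\u\e ]] with each character escaped, so the test is simply "false" == "true" and the stage body is skipped. A standalone sketch (run_suite is a hypothetical variable name, not one from this job):

  $ run_suite=false
  $ [[ "${run_suite}" == \t\r\u\e ]] && echo run || echo skip
  skip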
[PostBuildScript] - Executing post build scripts.
[workspace] $ /bin/bash /tmp/hudson7542805922850901938.sh
########## STARTING STAGE: DOWNLOAD ARTIFACTS FROM THE REMOTE HOST ##########
+ [[ -s /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate ]]
+ source /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ export PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
+ trap 'exit 0' EXIT
++ pwd
+ ARTIFACT_DIR=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts
+ rm -rf /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts
+ mkdir /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo stat /data/src/github/openshift/origin/_output/scripts
stat: cannot stat ‘/data/src/github/openshift/origin/_output/scripts’: No such file or directory
+ tree /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts
/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts

0 directories, 0 files
+ exit 0
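Note: the stat target /data/src/github/openshift/origin/_output/scripts omits the ".com" in "github.com" that every other path in this log uses, so the check fails and nothing is copied into the artifact directory; the trap 'exit 0' EXIT keeps the stage green anyway. Checking the path as spelled elsewhere in the log would look like this (an assumption about the intended path, not a verified fix):

  $ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel \
      sudo stat /data/src/github.com/openshift/origin/_output/scripts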
[workspace] $ /bin/bash /tmp/hudson3241415958289918448.sh
########## STARTING STAGE: GENERATE ARTIFACTS FROM THE REMOTE HOST ##########
+ [[ -s /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate ]]
+ source /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ export PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
+ trap 'exit 0' EXIT
++ pwd
+ ARTIFACT_DIR=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts/generated
+ rm -rf /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts/generated
+ mkdir /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts/generated
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo docker version && sudo docker info && sudo docker images && sudo docker ps -a 2>&1'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo df -h && sudo pvs && sudo vgs && sudo lvs 2>&1'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo yum list installed 2>&1'
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel 'sudo ausearch -m AVC -m SELINUX_ERR -m USER_AVC 2>&1'
+ true
+ tree /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts/generated
/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts/generated
├── avc_denials.log
├── docker.info
├── filesystem.info
└── installed_packages.log

0 directories, 4 files
+ exit 0
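Note: each ssh invocation above feeds one of the four files listed by tree (docker state, filesystem state, installed packages, AVC denials), and the bare "+ true" swallows ausearch's nonzero exit when no denials match; the journal stage below follows the same pattern for its three files. The redirections are not echoed by xtrace, so this reconstruction is an assumption:

  $ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel \
      'sudo ausearch -m AVC -m SELINUX_ERR -m USER_AVC 2>&1' \
      > artifacts/generated/avc_denials.log || true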
[workspace] $ /bin/bash /tmp/hudson7837081175569310657.sh
########## STARTING STAGE: FETCH SYSTEMD JOURNALS FROM THE REMOTE HOST ##########
+ [[ -s /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate ]]
+ source /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ export PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
+ trap 'exit 0' EXIT
++ pwd
+ ARTIFACT_DIR=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts/journals
+ rm -rf /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts/journals
+ mkdir /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts/journals
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo journalctl --unit docker.service --no-pager --all --lines=all
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo journalctl --unit origin-master.service --no-pager --all --lines=all
+ ssh -F ./.config/origin-ci-tool/inventory/.ssh_config openshiftdevel sudo journalctl --unit origin-node.service --no-pager --all --lines=all
+ tree /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts/journals
/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/artifacts/journals
├── docker.service
├── origin-master.service
└── origin-node.service

0 directories, 3 files
+ exit 0
[workspace] $ /bin/bash /tmp/hudson5256392735966723237.sh
########## STARTING STAGE: DEPROVISION CLOUD RESOURCES ##########
+ [[ -s /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate ]]
+ source /var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/activate
++ export VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ VIRTUAL_ENV=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0
++ export PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ PATH=/var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/bin:/sbin:/usr/sbin:/bin:/usr/bin
++ unset PYTHON_HOME
++ export OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
++ OCT_CONFIG_HOME=/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config
+ oct deprovision

PLAYBOOK: main.yml *************************************************************
4 plays in /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml

PLAY [ensure we have the parameters necessary to deprovision virtual hosts] ****

TASK [ensure all required variables are set] ***********************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml:9
skipping: [localhost] => (item=origin_ci_inventory_dir)  => {
    "changed": false, 
    "generated_timestamp": "2017-08-16 13:35:44.546428", 
    "item": "origin_ci_inventory_dir", 
    "skip_reason": "Conditional check failed", 
    "skipped": true
}
skipping: [localhost] => (item=origin_ci_aws_region)  => {
    "changed": false, 
    "generated_timestamp": "2017-08-16 13:35:44.553752", 
    "item": "origin_ci_aws_region", 
    "skip_reason": "Conditional check failed", 
    "skipped": true
}

PLAY [deprovision virtual hosts in EC2] ****************************************

TASK [Gathering Facts] *********************************************************
ok: [localhost]

TASK [deprovision a virtual EC2 host] ******************************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml:28
included: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml for localhost

TASK [update the SSH configuration to remove AWS EC2 specifics] ****************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:2
ok: [localhost] => {
    "changed": false, 
    "generated_timestamp": "2017-08-16 13:35:45.481507", 
    "msg": ""
}

TASK [rename EC2 instance for termination reaper] ******************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:8
changed: [localhost] => {
    "changed": true, 
    "generated_timestamp": "2017-08-16 13:35:46.156021", 
    "msg": "Tags {'Name': 'terminate'} created for resource i-0e0bd19e9c2a65c49."
}

TASK [tear down the EC2 instance] **********************************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:15
changed: [localhost] => {
    "changed": true, 
    "generated_timestamp": "2017-08-16 13:35:47.087984", 
    "instance_ids": [
        "i-0e0bd19e9c2a65c49"
    ], 
    "instances": [
        {
            "ami_launch_index": "0", 
            "architecture": "x86_64", 
            "block_device_mapping": {
                "/dev/sda1": {
                    "delete_on_termination": true, 
                    "status": "attached", 
                    "volume_id": "vol-0543e043cc997679d"
                }, 
                "/dev/sdb": {
                    "delete_on_termination": true, 
                    "status": "attached", 
                    "volume_id": "vol-0306826da2eb94694"
                }
            }, 
            "dns_name": "ec2-54-173-2-140.compute-1.amazonaws.com", 
            "ebs_optimized": false, 
            "groups": {
                "sg-7e73221a": "default"
            }, 
            "hypervisor": "xen", 
            "id": "i-0e0bd19e9c2a65c49", 
            "image_id": "ami-2810943f", 
            "instance_type": "m4.xlarge", 
            "kernel": null, 
            "key_name": "libra", 
            "launch_time": "2017-08-16T14:11:53.000Z", 
            "placement": "us-east-1d", 
            "private_dns_name": "ip-172-18-1-94.ec2.internal", 
            "private_ip": "172.18.1.94", 
            "public_dns_name": "ec2-54-173-2-140.compute-1.amazonaws.com", 
            "public_ip": "54.173.2.140", 
            "ramdisk": null, 
            "region": "us-east-1", 
            "root_device_name": "/dev/sda1", 
            "root_device_type": "ebs", 
            "state": "running", 
            "state_code": 16, 
            "tags": {
                "Name": "terminate", 
                "openshift_etcd": "", 
                "openshift_master": "", 
                "openshift_node": ""
            }, 
            "tenancy": "default", 
            "virtualization_type": "hvm"
        }
    ], 
    "tagged_instances": []
}

TASK [remove the serialized host variables] ************************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/roles/aws-down/tasks/main.yml:21
changed: [localhost] => {
    "changed": true, 
    "generated_timestamp": "2017-08-16 13:35:47.354201", 
    "path": "/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config/origin-ci-tool/inventory/host_vars/172.18.1.94.yml", 
    "state": "absent"
}

PLAY [deprovision virtual hosts locally managed by Vagrant] ********************

TASK [Gathering Facts] *********************************************************
ok: [localhost]

PLAY [clean up local configuration for deprovisioned instances] ****************

TASK [remove inventory configuration directory] ********************************
task path: /var/lib/jenkins/origin-ci-tool/3e39bcd027e6f86329e676a27a3ffa25107c9cd0/lib/python2.7/site-packages/oct/ansible/oct/playbooks/deprovision/main.yml:61
changed: [localhost] => {
    "changed": true, 
    "generated_timestamp": "2017-08-16 13:35:47.818227", 
    "path": "/var/lib/jenkins/jobs/ami_build_origin_int_rhel_fork/workspace/.config/origin-ci-tool/inventory", 
    "state": "absent"
}

PLAY RECAP *********************************************************************
localhost                  : ok=8    changed=4    unreachable=0    failed=0   

+ set +o xtrace
########## FINISHED STAGE: SUCCESS: DEPROVISION CLOUD RESOURCES [00h 00m 04s] ##########
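Note: teardown first renames the instance to "terminate" so a reaper job can catch any stragglers, then terminates it directly; the "state": "running" in the module output is the snapshot taken before termination completed. A rough AWS CLI equivalent of the two changed tasks, for illustration only (oct runs these through Ansible):

  $ aws ec2 create-tags --resources i-0e0bd19e9c2a65c49 --tags Key=Name,Value=terminate
  $ aws ec2 terminate-instances --instance-ids i-0e0bd19e9c2a65c49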
Archiving artifacts
Recording test results
Finished: SUCCESS