Console Output (build aborted)

[Skipping 291 KB of earlier log output]
vider": "openshift"
                    }, 
                    "name": "logging-kibana-ops", 
                    "namespace": "logging", 
                    "resourceVersion": "1607", 
                    "selfLink": "/oapi/v1/namespaces/logging/routes/logging-kibana-ops", 
                    "uid": "91bf830e-4c7b-11e7-a20a-0e795be4d69c"
                }, 
                "spec": {
                    "host": "kibana-ops.router.default.svc.cluster.local", 
                    "tls": {
                        "caCertificate": "-----BEGIN CERTIFICATE-----\nMIIC2jCCAcKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAeMRwwGgYDVQQDExNsb2dn\naW5nLXNpZ25lci10ZXN0MB4XDTE3MDYwODE4NTEwNloXDTIyMDYwNzE4NTEwN1ow\nHjEcMBoGA1UEAxMTbG9nZ2luZy1zaWduZXItdGVzdDCCASIwDQYJKoZIhvcNAQEB\nBQADggEPADCCAQoCggEBAPR1P+eWaCPVc6Uy+bxIm0JI2jbMla7JpiVgQSOlBwBg\nK071H5327tsWDbj09HLuH/2C7MYDBjKdViT1Ylp1f1nFNIH7DuonQImrK/c0+G2p\n7igkX3mN2lXUzAi7p6a5cqzgOJXsVj4j3lpjHDpmgdeEVYpVFCF4Bibutn2RCByK\nzoYZmxF6ZQ4mt8t5oj3qMtikVxwFegn7a7rE9zuHQm1PYrfyiyxsi4xihocTSc73\n/Lv0Im/xkvkHHjQ98QS8QWUBFaRgwGSd9IgyO3JLr7XMrMHllkeSXQ7EXoWFBrCV\nBCrPqraH2V/tIb6iFiCOgqJq6besR/IIlI6CUpMr1AsCAwEAAaMjMCEwDgYDVR0P\nAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBACEK\n4MIoV7djD1ebbuOBXJZo1VkuI8B5yJFHIZy6JU7NcKbB/ijNstWPZCbZfh/XZ9s0\nqISIu0F+h3kq0jlGGOsbGM+TGIIto5vzld5P2dnswo7CeGyFhhYzKrmOldJKdgDE\nom1nfPxEj00/hJPGPNxyg7R66rit2nV8IJgkCBYX6f4dqZzyCKrHqAszKh8M3smg\n0V9LddkvAUio0uZB81btfQ3k35JvjUxTiy+xUuqWd6hDb0yTfoIEiuBRLC/uVR5Z\nom+uwq3NJPYBbNrNtZXb32GIP1tShYhzn9DcxbEsBeloY1xvEdCE0eJYwIzBBVEm\nuDJQAN5ovA5yu+rK+/M=\n-----END CERTIFICATE-----\n", 
                        "destinationCACertificate": "-----BEGIN CERTIFICATE-----\nMIIC2jCCAcKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAeMRwwGgYDVQQDExNsb2dn\naW5nLXNpZ25lci10ZXN0MB4XDTE3MDYwODE4NTEwNloXDTIyMDYwNzE4NTEwN1ow\nHjEcMBoGA1UEAxMTbG9nZ2luZy1zaWduZXItdGVzdDCCASIwDQYJKoZIhvcNAQEB\nBQADggEPADCCAQoCggEBAPR1P+eWaCPVc6Uy+bxIm0JI2jbMla7JpiVgQSOlBwBg\nK071H5327tsWDbj09HLuH/2C7MYDBjKdViT1Ylp1f1nFNIH7DuonQImrK/c0+G2p\n7igkX3mN2lXUzAi7p6a5cqzgOJXsVj4j3lpjHDpmgdeEVYpVFCF4Bibutn2RCByK\nzoYZmxF6ZQ4mt8t5oj3qMtikVxwFegn7a7rE9zuHQm1PYrfyiyxsi4xihocTSc73\n/Lv0Im/xkvkHHjQ98QS8QWUBFaRgwGSd9IgyO3JLr7XMrMHllkeSXQ7EXoWFBrCV\nBCrPqraH2V/tIb6iFiCOgqJq6besR/IIlI6CUpMr1AsCAwEAAaMjMCEwDgYDVR0P\nAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBACEK\n4MIoV7djD1ebbuOBXJZo1VkuI8B5yJFHIZy6JU7NcKbB/ijNstWPZCbZfh/XZ9s0\nqISIu0F+h3kq0jlGGOsbGM+TGIIto5vzld5P2dnswo7CeGyFhhYzKrmOldJKdgDE\nom1nfPxEj00/hJPGPNxyg7R66rit2nV8IJgkCBYX6f4dqZzyCKrHqAszKh8M3smg\n0V9LddkvAUio0uZB81btfQ3k35JvjUxTiy+xUuqWd6hDb0yTfoIEiuBRLC/uVR5Z\nom+uwq3NJPYBbNrNtZXb32GIP1tShYhzn9DcxbEsBeloY1xvEdCE0eJYwIzBBVEm\nuDJQAN5ovA5yu+rK+/M=\n-----END CERTIFICATE-----\n", 
                        "insecureEdgeTerminationPolicy": "Redirect", 
                        "termination": "reencrypt"
                    }, 
                    "to": {
                        "kind": "Service", 
                        "name": "logging-kibana-ops", 
                        "weight": 100
                    }, 
                    "wildcardPolicy": "None"
                }, 
                "status": {
                    "ingress": [
                        {
                            "conditions": [
                                {
                                    "lastTransitionTime": "2017-06-08T18:52:06Z", 
                                    "status": "True", 
                                    "type": "Admitted"
                                }
                            ], 
                            "host": "kibana-ops.router.default.svc.cluster.local", 
                            "routerName": "router", 
                            "wildcardPolicy": "None"
                        }
                    ]
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
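
For reference, a reencrypt route equivalent to the one admitted above can be created by hand with the oc client. This is a sketch only: the certificate paths are placeholders, and --insecure-policy support on reencrypt routes varies by oc version.

    # Hand-rolled equivalent of the route above (sketch; cert paths are placeholders)
    oc create route reencrypt logging-kibana-ops \
        --service=logging-kibana-ops \
        --hostname=kibana-ops.router.default.svc.cluster.local \
        --ca-cert=/path/to/ca.crt \
        --dest-ca-cert=/path/to/ca.crt \
        --insecure-policy=Redirect \
        -n logging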

TASK [openshift_logging_kibana : Generate proxy session] ***********************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:125
ok: [openshift] => {
    "ansible_facts": {
        "session_secret": "EBtWLxSNkhHnty49JmjCGS4jxXjAzll6HQMyGxXjlLEgQV1SUO3V2MTifeifaX2pAtr5s8YmsafceVSsM9kg4Z1fg8NaLqLWdn71PO6eFjQsBJlkUBCw1o24qFz6amm7pUW6geZYwk8irxtJUf3cvBesGHx48wWbhfcYbDyOYSu4pS3gp5QxE3Wkyc6JPxILxEt9HUb8"
    }, 
    "changed": false
}

TASK [openshift_logging_kibana : Generate oauth client secret] *****************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:132
ok: [openshift] => {
    "ansible_facts": {
        "oauth_secret": "eyeadORqPAm76VknVrgD1vLYplFWWgD8ITypIiGMpnQd40wOx7fSruVlbc5guvFY"
    }, 
    "changed": false
}
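
Both facts are freshly generated random alphanumeric strings, used for the proxy's session encryption and as the OAuth client secret respectively. A comparable value can be produced locally, e.g.:

    # Generate a 64-character alphanumeric secret (sketch; any CSPRNG source works)
    openssl rand -base64 64 | tr -dc 'a-zA-Z0-9' | head -c 64; echo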

TASK [openshift_logging_kibana : Create oauth-client template] *****************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:138
changed: [openshift] => {
    "changed": true, 
    "checksum": "63af488453f16bc81b671f9effafb9bcb2e5e216", 
    "dest": "/tmp/openshift-logging-ansible-9NqgYe/templates/oauth-client.yml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "cb1113e10213c698bff6c1682eaaaad1", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 332, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496947927.18-155144523373391/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_kibana : Set kibana-proxy oauth-client] ****************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:146
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get oauthclient kibana-proxy -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "kind": "OAuthClient", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:51:58Z", 
                    "labels": {
                        "logging-infra": "support"
                    }, 
                    "name": "kibana-proxy", 
                    "resourceVersion": "1609", 
                    "selfLink": "/oapi/v1/oauthclients/kibana-proxy", 
                    "uid": "8d0c0fbd-4c7b-11e7-a20a-0e795be4d69c"
                }, 
                "redirectURIs": [
                    "https://kibana-ops.router.default.svc.cluster.local"
                ], 
                "scopeRestrictions": [
                    {
                        "literals": [
                            "user:info", 
                            "user:check-access", 
                            "user:list-projects"
                        ]
                    }
                ], 
                "secret": "eyeadORqPAm76VknVrgD1vLYplFWWgD8ITypIiGMpnQd40wOx7fSruVlbc5guvFY"
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
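
The oauth-client.yml rendered by the previous task expands to roughly the object printed above. Reconstructed as YAML from that JSON (a sketch, with the secret value elided):

    # oauth-client.yml, reconstructed from the output above (sketch; secret elided)
    apiVersion: v1
    kind: OAuthClient
    metadata:
      name: kibana-proxy
      labels:
        logging-infra: support
    secret: "<oauth_secret>"
    redirectURIs:
    - https://kibana-ops.router.default.svc.cluster.local
    scopeRestrictions:
    - literals:
      - user:info
      - user:check-access
      - user:list-projects

Since the client already exists at this point, a manual update would go through oc replace -f oauth-client.yml rather than oc create.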

TASK [openshift_logging_kibana : Set Kibana secret] ****************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:157
ok: [openshift] => {
    "changed": false, 
    "results": {
        "apiVersion": "v1", 
        "data": {
            "ca": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREU0TlRFd05sb1hEVEl5TURZd056RTROVEV3TjFvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQVBSMVArZVdhQ1BWYzZVeStieEltMEpJMmpiTWxhN0pwaVZnUVNPbEJ3QmcKSzA3MUg1MzI3dHNXRGJqMDlITHVILzJDN01ZREJqS2RWaVQxWWxwMWYxbkZOSUg3RHVvblFJbXJLL2MwK0cycAo3aWdrWDNtTjJsWFV6QWk3cDZhNWNxemdPSlhzVmo0ajNscGpIRHBtZ2RlRVZZcFZGQ0Y0QmlidXRuMlJDQnlLCnpvWVpteEY2WlE0bXQ4dDVvajNxTXRpa1Z4d0ZlZ243YTdyRTl6dUhRbTFQWXJmeWl5eHNpNHhpaG9jVFNjNzMKL0x2MEltL3hrdmtISGpROThRUzhRV1VCRmFSZ3dHU2Q5SWd5TzNKTHI3WE1yTUhsbGtlU1hRN0VYb1dGQnJDVgpCQ3JQcXJhSDJWL3RJYjZpRmlDT2dxSnE2YmVzUi9JSWxJNkNVcE1yMUFzQ0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFDRUsKNE1Jb1Y3ZGpEMWViYnVPQlhKWm8xVmt1SThCNXlKRkhJWnk2SlU3TmNLYkIvaWpOc3RXUFpDYlpmaC9YWjlzMApxSVNJdTBGK2gza3EwamxHR09zYkdNK1RHSUl0bzV2emxkNVAyZG5zd283Q2VHeUZoaFl6S3JtT2xkSktkZ0RFCm9tMW5mUHhFajAwL2hKUEdQTnh5ZzdSNjZyaXQyblY4SUpna0NCWVg2ZjRkcVp6eUNLckhxQXN6S2g4TTNzbWcKMFY5TGRka3ZBVWlvMHVaQjgxYnRmUTNrMzVKdmpVeFRpeSt4VXVxV2Q2aERiMHlUZm9JRWl1QlJMQy91VlI1WgpvbSt1d3EzTkpQWUJiTnJOdFpYYjMyR0lQMXRTaFloem45RGN4YkVzQmVsb1kxeHZFZENFMGVKWXdJekJCVkVtCnVESlFBTjVvdkE1eXUrcksrL009Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K", 
            "cert": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURSVENDQWkyZ0F3SUJBZ0lCQXpBTkJna3Foa2lHOXcwQkFRVUZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREU0TlRFeE1sb1hEVEU1TURZd09ERTROVEV4TWxvdwpSakVRTUE0R0ExVUVDZ3dIVEc5bloybHVaekVTTUJBR0ExVUVDd3dKVDNCbGJsTm9hV1owTVI0d0hBWURWUVFECkRCVnplWE4wWlcwdWJHOW5aMmx1Wnk1cmFXSmhibUV3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUURhWjhpemswM1lXQkFkQmtGV25sdG5wQmNyNUh4NnJpTmxXdnYrcTFRMFl3cTkwVjk3ZkhSUwpGOTRwTEFvSlNab3psbmpFWlpoM2xsd012WGxiMUdSWExMeHYxS25nUEVhejdQV211cmw3YzVTMmdVYm9LN2FiCjY5YU9BZlJ6a045QW1oY0xhY0ZNUzhjUkFxRVdmdStKWDZDZkJCZU1jQ1BSY3hOMytxVVNsS3k5VGZ5YXN2bnEKNGpmL2x0TE5XRENqTHhZUkVLeUQ0dzRQelVmU2pLay9ZNmpyK1o2Q1ZrQmwwbDlmZFp4K0NaeTlPM0ZCbWM1WApteVFUWlhFd2w4R09vTGNMWThCMjVmTEN2Mzdna0pmYTZXNTczZnB5WWpPSEFtQzU2SzhCNFgydmJyV0RNU1RFClZibm1YT045NDZEd3cvaDg5ODRHMFkzSFg1emF2TE8xQWdNQkFBR2paakJrTUE0R0ExVWREd0VCL3dRRUF3SUYKb0RBSkJnTlZIUk1FQWpBQU1CMEdBMVVkSlFRV01CUUdDQ3NHQVFVRkJ3TUJCZ2dyQmdFRkJRY0RBakFkQmdOVgpIUTRFRmdRVTI5NVBIcDArY2xWTTJBbFdaOXhuY0tiS2ZSSXdDUVlEVlIwakJBSXdBREFOQmdrcWhraUc5dzBCCkFRVUZBQU9DQVFFQXdGT2MxYTl2U1kwUnE1R1QwNXlVb2dSdmJuSHJqYmFubWgzaFhIZGgvaEl4cm5oeUZIRVcKVmZ1WW5MY2ZkSDR1U0xuSkp5cm9IcVVBQUNLTGVLSTFlczNLQkN4NzRoVXFqbnlsMEpVWG40R2pGUmhiNE5WUgpCVVhoeS9MRVlkc3JyT0pqZmNyWWhXMm1YYThHKy84ZHhFRXBOd09TTTNpaUdYYm5TUEZuaHVFQjdNTWFQU2Q3CkE4LzZZUU1OTE12Ym9nZmRYNVFNL1dleTdwWXAxV1Vkb1BUZzAwd3hKSG5SbExGcmxpVnh5K2JUd3QrVHd0MW8KcVYwZlh0OVJUaklDWXJDS0xWK3pOYnRaRmJhNDJVdi80Mk5RVlFSbi9zQWgwTGMyakl5Z0ZKMnB6WlA2VHYrawpvRTV0WHdMTy9HQ2E3SUNvWnZLTlBBV0RTOW5JWU5TSjRBPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", 
            "key": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2Z0lCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktnd2dnU2tBZ0VBQW9JQkFRRGFaOGl6azAzWVdCQWQKQmtGV25sdG5wQmNyNUh4NnJpTmxXdnYrcTFRMFl3cTkwVjk3ZkhSU0Y5NHBMQW9KU1pvemxuakVaWmgzbGx3TQp2WGxiMUdSWExMeHYxS25nUEVhejdQV211cmw3YzVTMmdVYm9LN2FiNjlhT0FmUnprTjlBbWhjTGFjRk1TOGNSCkFxRVdmdStKWDZDZkJCZU1jQ1BSY3hOMytxVVNsS3k5VGZ5YXN2bnE0amYvbHRMTldEQ2pMeFlSRUt5RDR3NFAKelVmU2pLay9ZNmpyK1o2Q1ZrQmwwbDlmZFp4K0NaeTlPM0ZCbWM1WG15UVRaWEV3bDhHT29MY0xZOEIyNWZMQwp2Mzdna0pmYTZXNTczZnB5WWpPSEFtQzU2SzhCNFgydmJyV0RNU1RFVmJubVhPTjk0NkR3dy9oODk4NEcwWTNIClg1emF2TE8xQWdNQkFBRUNnZ0VBTnlGaGdTcmF5SlNhSktseWpzblVhdG41MjJPR3BIVVdqRHdtUmdlN2JxbmQKaVVLenBua281NXdNd1RuVG9ycUlpU01mZ3Z2ZU83bWhyTG1iUzduV1JrY05uZkVCbkkrNmpTcHdhOURRaDBBSwp0dkwxRWlBT3JWY2Z4cUxwc001Ti9JR285WUdSZ0kvVVJRL3oxN2pkQUVFNlMwbCtKRDRlWEgxKzJaZFVXZGJrClNZQW1MUGh0Y1JGQXZ4ZXNES2lrWWVYcEJlRklJWVNqZkdmdlhFUjN6N1cxaUFxaitsTUhoNGc0UTFGTC9PcGMKMUVRM3dxRmVTWnMyNndTb2VHb0FtT0RlTmVSMnlFckN0a2w0QnBHNmViVnIxWXlBSE1jamY0SG8vSGx2UStqSwpEaFcwNXd5UDNlcWlNdzFURWEvZldKSVRta0dJOGI2SENMb2RTbnB5Z1FLQmdRRHhOQmhOOEZkTjRBLzhiek9kCkR5S3lBaysrVW9Ua2NZRitlSC94SXlFK2k1b0hrcjVzUHFndGVPUEhFR3MvYXVvR25UeHhjMUZIMjFwVWEwaXIKSTNjdVUzcFlsZWRScXVHR0xydjJxdlE5Mis0SjJwcmwxYVFESjdlVXlVMkczSmhIbVkxVXd5am5jMExPcDJ5YQpKNjFDcEJMcWNydUE1NXM0UzdWUjYwT1o0UUtCZ1FEbnphbmZUUTBSckZvbW92OU5ZaWR0YVh2SGFmUzNWTVdMCkxoVVBiNTZiTm82cTNwTTdlUEczZGV0Rll1YklRQUtxL2FRcFg1N3dJZU5OM2FHQ1ZSUHVtS1E5WkplMFdEWDEKZjZsZTgvUnk4eno3Z29vUm14bEUvWHduVi95akFxci9Tb09SMndOWnpHYUhFUlFmMFZERWxOQXdBSWZjbWFSSgpOU1RtUXpZY1ZRS0JnUUM4UjNMMVc3ZDZkWDFsQ2hTYTg5YWNaMHVCLy8zMktGZnh0ZlFBOGZBZ1YyNkxuTk9CCndVTWNBN0toajhKM3hXZ2FjNkJiMnExR3p3WHF3YmYreHZkZ05ROWFtT0R4RWRkckc0M2psVHlFRU9uMXpTdCsKWHM3aEc1Nmt5S0g0dExjMXBMbjZ3VUJyc3NBaWVmUS9QTnQxR29nZGRDUERFb2RLWVdhUEpIc25ZUUtCZ1FEYgprbzRKenBQRm5Pd2NVeVBncWt4TXNMR21PSEN0UkdpSEtSU2I5WkJZMkRvV0V4V0dMVTlTQ0NuZkpRc1V2c1dJCk1SRFNmQmd5d1BidWFEVTRIdHp0a3J0b3lDT0htbE1xaEUzc2VzdWdJMVl0cS82YkpMNndBaWtibjVYWTE0c2cKbmZXNU1LdWc0U1J4WnJVWVdvN1dWMXhKVEFkbDZFZWRiUERTQ1BKMXJRS0JnRzkydnVVTGlRMU9HVTZvZHVHRgpFbUd4R3QxNmgwVlZjeXJBWjViQTl3ZzhQeVFYb3o1dEE3MWJCdjJlb28rd0EvV0VKZDNBVWlYOUFJVC9RTlRiCmF0a1oxcFZDZnA1UW5NZzFPRUozcVFWQmZMaFFSM3I3WlpxNVJTRGVtWlZSOVY4NTQxSXd3T25ibnFnNC9jNVAKZE5xY3p4ZFI0WjcxYm83TUhEb2pBL2VLCi0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K"
        }, 
        "kind": "Secret", 
        "metadata": {
            "creationTimestamp": null, 
            "name": "logging-kibana"
        }, 
        "type": "Opaque"
    }, 
    "state": "present"
}
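
The ca, cert, and key entries above are base64-encoded PEM blobs. The server certificate can be pulled out of the live secret and inspected like so:

    # Decode and inspect the Kibana server certificate (sketch)
    oc get secret logging-kibana -n logging -o jsonpath='{.data.cert}' \
        | base64 -d | openssl x509 -noout -subject -enddate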

TASK [openshift_logging_kibana : Set Kibana Proxy secret] **********************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:171
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc replace -f /tmp/logging-kibana-proxy -n logging", 
        "results": "", 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_kibana : Generate Kibana DC template] ******************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:197
changed: [openshift] => {
    "changed": true, 
    "checksum": "348859494a83091ee685e4af5497009390489095", 
    "dest": "/tmp/openshift-logging-ansible-9NqgYe/templates/kibana-dc.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "e1907e1b972239afac3bf66068395ccd", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 3761, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496947930.57-167410824209162/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_kibana : Set Kibana DC] ********************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:216
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get dc logging-kibana-ops -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "kind": "DeploymentConfig", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:52:11Z", 
                    "generation": 2, 
                    "labels": {
                        "component": "kibana-ops", 
                        "logging-infra": "kibana", 
                        "provider": "openshift"
                    }, 
                    "name": "logging-kibana-ops", 
                    "namespace": "logging", 
                    "resourceVersion": "1624", 
                    "selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-kibana-ops", 
                    "uid": "94905893-4c7b-11e7-a20a-0e795be4d69c"
                }, 
                "spec": {
                    "replicas": 1, 
                    "selector": {
                        "component": "kibana-ops", 
                        "logging-infra": "kibana", 
                        "provider": "openshift"
                    }, 
                    "strategy": {
                        "activeDeadlineSeconds": 21600, 
                        "resources": {}, 
                        "rollingParams": {
                            "intervalSeconds": 1, 
                            "maxSurge": "25%", 
                            "maxUnavailable": "25%", 
                            "timeoutSeconds": 600, 
                            "updatePeriodSeconds": 1
                        }, 
                        "type": "Rolling"
                    }, 
                    "template": {
                        "metadata": {
                            "creationTimestamp": null, 
                            "labels": {
                                "component": "kibana-ops", 
                                "logging-infra": "kibana", 
                                "provider": "openshift"
                            }, 
                            "name": "logging-kibana-ops"
                        }, 
                        "spec": {
                            "containers": [
                                {
                                    "env": [
                                        {
                                            "name": "ES_HOST", 
                                            "value": "logging-es-ops"
                                        }, 
                                        {
                                            "name": "ES_PORT", 
                                            "value": "9200"
                                        }, 
                                        {
                                            "name": "KIBANA_MEMORY_LIMIT", 
                                            "valueFrom": {
                                                "resourceFieldRef": {
                                                    "containerName": "kibana", 
                                                    "divisor": "0", 
                                                    "resource": "limits.memory"
                                                }
                                            }
                                        }
                                    ], 
                                    "image": "172.30.46.236:5000/logging/logging-kibana:latest", 
                                    "imagePullPolicy": "Always", 
                                    "name": "kibana", 
                                    "readinessProbe": {
                                        "exec": {
                                            "command": [
                                                "/usr/share/kibana/probe/readiness.sh"
                                            ]
                                        }, 
                                        "failureThreshold": 3, 
                                        "initialDelaySeconds": 5, 
                                        "periodSeconds": 5, 
                                        "successThreshold": 1, 
                                        "timeoutSeconds": 4
                                    }, 
                                    "resources": {
                                        "limits": {
                                            "memory": "736Mi"
                                        }
                                    }, 
                                    "terminationMessagePath": "/dev/termination-log", 
                                    "terminationMessagePolicy": "File", 
                                    "volumeMounts": [
                                        {
                                            "mountPath": "/etc/kibana/keys", 
                                            "name": "kibana", 
                                            "readOnly": true
                                        }
                                    ]
                                }, 
                                {
                                    "env": [
                                        {
                                            "name": "OAP_BACKEND_URL", 
                                            "value": "http://localhost:5601"
                                        }, 
                                        {
                                            "name": "OAP_AUTH_MODE", 
                                            "value": "oauth2"
                                        }, 
                                        {
                                            "name": "OAP_TRANSFORM", 
                                            "value": "user_header,token_header"
                                        }, 
                                        {
                                            "name": "OAP_OAUTH_ID", 
                                            "value": "kibana-proxy"
                                        }, 
                                        {
                                            "name": "OAP_MASTER_URL", 
                                            "value": "https://kubernetes.default.svc.cluster.local"
                                        }, 
                                        {
                                            "name": "OAP_PUBLIC_MASTER_URL", 
                                            "value": "https://172.18.14.90:8443"
                                        }, 
                                        {
                                            "name": "OAP_LOGOUT_REDIRECT", 
                                            "value": "https://172.18.14.90:8443/console/logout"
                                        }, 
                                        {
                                            "name": "OAP_MASTER_CA_FILE", 
                                            "value": "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt"
                                        }, 
                                        {
                                            "name": "OAP_DEBUG", 
                                            "value": "False"
                                        }, 
                                        {
                                            "name": "OAP_OAUTH_SECRET_FILE", 
                                            "value": "/secret/oauth-secret"
                                        }, 
                                        {
                                            "name": "OAP_SERVER_CERT_FILE", 
                                            "value": "/secret/server-cert"
                                        }, 
                                        {
                                            "name": "OAP_SERVER_KEY_FILE", 
                                            "value": "/secret/server-key"
                                        }, 
                                        {
                                            "name": "OAP_SERVER_TLS_FILE", 
                                            "value": "/secret/server-tls.json"
                                        }, 
                                        {
                                            "name": "OAP_SESSION_SECRET_FILE", 
                                            "value": "/secret/session-secret"
                                        }, 
                                        {
                                            "name": "OCP_AUTH_PROXY_MEMORY_LIMIT", 
                                            "valueFrom": {
                                                "resourceFieldRef": {
                                                    "containerName": "kibana-proxy", 
                                                    "divisor": "0", 
                                                    "resource": "limits.memory"
                                                }
                                            }
                                        }
                                    ], 
                                    "image": "172.30.46.236:5000/logging/logging-auth-proxy:latest", 
                                    "imagePullPolicy": "Always", 
                                    "name": "kibana-proxy", 
                                    "ports": [
                                        {
                                            "containerPort": 3000, 
                                            "name": "oaproxy", 
                                            "protocol": "TCP"
                                        }
                                    ], 
                                    "resources": {
                                        "limits": {
                                            "memory": "96Mi"
                                        }
                                    }, 
                                    "terminationMessagePath": "/dev/termination-log", 
                                    "terminationMessagePolicy": "File", 
                                    "volumeMounts": [
                                        {
                                            "mountPath": "/secret", 
                                            "name": "kibana-proxy", 
                                            "readOnly": true
                                        }
                                    ]
                                }
                            ], 
                            "dnsPolicy": "ClusterFirst", 
                            "restartPolicy": "Always", 
                            "schedulerName": "default-scheduler", 
                            "securityContext": {}, 
                            "serviceAccount": "aggregated-logging-kibana", 
                            "serviceAccountName": "aggregated-logging-kibana", 
                            "terminationGracePeriodSeconds": 30, 
                            "volumes": [
                                {
                                    "name": "kibana", 
                                    "secret": {
                                        "defaultMode": 420, 
                                        "secretName": "logging-kibana"
                                    }
                                }, 
                                {
                                    "name": "kibana-proxy", 
                                    "secret": {
                                        "defaultMode": 420, 
                                        "secretName": "logging-kibana-proxy"
                                    }
                                }
                            ]
                        }
                    }, 
                    "test": false, 
                    "triggers": [
                        {
                            "type": "ConfigChange"
                        }
                    ]
                }, 
                "status": {
                    "availableReplicas": 0, 
                    "conditions": [
                        {
                            "lastTransitionTime": "2017-06-08T18:52:11Z", 
                            "lastUpdateTime": "2017-06-08T18:52:11Z", 
                            "message": "Deployment config does not have minimum availability.", 
                            "status": "False", 
                            "type": "Available"
                        }, 
                        {
                            "lastTransitionTime": "2017-06-08T18:52:11Z", 
                            "lastUpdateTime": "2017-06-08T18:52:11Z", 
                            "message": "replication controller \"logging-kibana-ops-1\" is waiting for pod \"logging-kibana-ops-1-deploy\" to run", 
                            "status": "Unknown", 
                            "type": "Progressing"
                        }
                    ], 
                    "details": {
                        "causes": [
                            {
                                "type": "ConfigChange"
                            }
                        ], 
                        "message": "config change"
                    }, 
                    "latestVersion": 1, 
                    "observedGeneration": 2, 
                    "replicas": 0, 
                    "unavailableReplicas": 0, 
                    "updatedReplicas": 0
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
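
The DC above runs two containers per pod: kibana itself and the kibana-proxy OAuth sidecar listening on port 3000 (the oaproxy container port), which fronts Kibana at http://localhost:5601 per its OAP_BACKEND_URL env var. The Available=False and Progressing=Unknown conditions are expected immediately after creation, while the first deployer pod is still starting; the rollout can be followed with:

    # Follow the first rollout of the ops Kibana (sketch)
    oc rollout status dc/logging-kibana-ops -n logging
    oc get pods -n logging -l component=kibana-ops -w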

TASK [openshift_logging_kibana : Delete temp directory] ************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:228
ok: [openshift] => {
    "changed": false, 
    "path": "/tmp/openshift-logging-ansible-9NqgYe", 
    "state": "absent"
}

TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:195
statically included: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml

TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:3
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:7
ok: [openshift] => {
    "ansible_facts": {
        "curator_version": "3_5"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:12
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:15
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Create temp directory for doing work in] *****
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:5
ok: [openshift] => {
    "changed": false, 
    "cmd": [
        "mktemp", 
        "-d", 
        "/tmp/openshift-logging-ansible-XXXXXX"
    ], 
    "delta": "0:00:00.002411", 
    "end": "2017-06-08 14:52:13.834551", 
    "rc": 0, 
    "start": "2017-06-08 14:52:13.832140"
}

STDOUT:

/tmp/openshift-logging-ansible-R39Avs

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:10
ok: [openshift] => {
    "ansible_facts": {
        "tempdir": "/tmp/openshift-logging-ansible-R39Avs"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : Create templates subdirectory] ***************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:14
ok: [openshift] => {
    "changed": false, 
    "gid": 0, 
    "group": "root", 
    "mode": "0755", 
    "owner": "root", 
    "path": "/tmp/openshift-logging-ansible-R39Avs/templates", 
    "secontext": "unconfined_u:object_r:user_tmp_t:s0", 
    "size": 6, 
    "state": "directory", 
    "uid": 0
}

TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:24
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:32
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get sa aggregated-logging-curator -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "imagePullSecrets": [
                    {
                        "name": "aggregated-logging-curator-dockercfg-5s0sc"
                    }
                ], 
                "kind": "ServiceAccount", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:52:15Z", 
                    "name": "aggregated-logging-curator", 
                    "namespace": "logging", 
                    "resourceVersion": "1641", 
                    "selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-curator", 
                    "uid": "96b4348d-4c7b-11e7-a20a-0e795be4d69c"
                }, 
                "secrets": [
                    {
                        "name": "aggregated-logging-curator-token-0gtjv"
                    }, 
                    {
                        "name": "aggregated-logging-curator-dockercfg-5s0sc"
                    }
                ]
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
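
Note that the recorded cmd is an oc get: the module first ensures the service account exists, then reads it back to report the resulting object. A hand-rolled equivalent (a sketch):

    # Create the service account, then read it back (sketch)
    oc create serviceaccount aggregated-logging-curator -n logging
    oc get sa aggregated-logging-curator -o json -n logging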

TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:41
ok: [openshift] => {
    "changed": false, 
    "checksum": "9008efd9a8892dcc42c28c6dfb6708527880a6d8", 
    "dest": "/tmp/openshift-logging-ansible-R39Avs/curator.yml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "5498c5fd98f3dd06e34b20eb1f55dc12", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 320, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496947935.68-145156536777554/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:47
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Set Curator configmap] ***********************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:53
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get configmap logging-curator -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "data": {
                    "config.yaml": "# Logging example curator config file\n\n# uncomment and use this to override the defaults from env vars\n#.defaults:\n#  delete:\n#    days: 30\n#  runhour: 0\n#  runminute: 0\n\n# to keep ops logs for a different duration:\n#.operations:\n#  delete:\n#    weeks: 8\n\n# example for a normal project\n#myapp:\n#  delete:\n#    weeks: 1\n"
                }, 
                "kind": "ConfigMap", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:52:16Z", 
                    "name": "logging-curator", 
                    "namespace": "logging", 
                    "resourceVersion": "1643", 
                    "selfLink": "/api/v1/namespaces/logging/configmaps/logging-curator", 
                    "uid": "97b22114-4c7b-11e7-a20a-0e795be4d69c"
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
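
The shipped config.yaml is entirely commented out, so retention is actually driven by the env vars on the curator DC created below. Uncommented, a policy file following the template above would look like this sketch ("myapp" stands for a hypothetical project):

    # Example curator policy, per the commented template above (sketch)
    .defaults:
      delete:
        days: 30
    .operations:
      delete:
        weeks: 8
    myapp:
      delete:
        weeks: 1

A modified file can be loaded back with oc create configmap logging-curator --from-file=config.yaml=<file> -n logging (after deleting the old configmap), or edited in place with oc edit configmap logging-curator -n logging.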

TASK [openshift_logging_curator : Set Curator secret] **************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:62
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc secrets new logging-curator ca=/etc/origin/logging/ca.crt key=/etc/origin/logging/system.logging.curator.key cert=/etc/origin/logging/system.logging.curator.crt -n logging", 
        "results": "", 
        "returncode": 0
    }, 
    "state": "present"
}
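
oc secrets new is the legacy OpenShift 3.x spelling; the same secret expressed with the generic syntax, using the same key names and files as the command above:

    # Equivalent using the newer generic secret syntax (sketch)
    oc create secret generic logging-curator \
        --from-file=ca=/etc/origin/logging/ca.crt \
        --from-file=key=/etc/origin/logging/system.logging.curator.key \
        --from-file=cert=/etc/origin/logging/system.logging.curator.crt \
        -n logging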

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:75
ok: [openshift] => {
    "ansible_facts": {
        "curator_component": "curator", 
        "curator_name": "logging-curator"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : Generate Curator deploymentconfig] ***********
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:81
ok: [openshift] => {
    "changed": false, 
    "checksum": "bfc66395c8615270cc162ebf7fb40ecc45498532", 
    "dest": "/tmp/openshift-logging-ansible-R39Avs/templates/curator-dc.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "d710f66f509dcb4c4bce9e4b46860a65", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 2340, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496947938.44-242734797861420/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_curator : Set Curator DC] ******************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:99
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get dc logging-curator -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "kind": "DeploymentConfig", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:52:19Z", 
                    "generation": 2, 
                    "labels": {
                        "component": "curator", 
                        "logging-infra": "curator", 
                        "provider": "openshift"
                    }, 
                    "name": "logging-curator", 
                    "namespace": "logging", 
                    "resourceVersion": "1679", 
                    "selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-curator", 
                    "uid": "9968b294-4c7b-11e7-a20a-0e795be4d69c"
                }, 
                "spec": {
                    "replicas": 1, 
                    "selector": {
                        "component": "curator", 
                        "logging-infra": "curator", 
                        "provider": "openshift"
                    }, 
                    "strategy": {
                        "activeDeadlineSeconds": 21600, 
                        "recreateParams": {
                            "timeoutSeconds": 600
                        }, 
                        "resources": {}, 
                        "rollingParams": {
                            "intervalSeconds": 1, 
                            "maxSurge": "25%", 
                            "maxUnavailable": "25%", 
                            "timeoutSeconds": 600, 
                            "updatePeriodSeconds": 1
                        }, 
                        "type": "Recreate"
                    }, 
                    "template": {
                        "metadata": {
                            "creationTimestamp": null, 
                            "labels": {
                                "component": "curator", 
                                "logging-infra": "curator", 
                                "provider": "openshift"
                            }, 
                            "name": "logging-curator"
                        }, 
                        "spec": {
                            "containers": [
                                {
                                    "env": [
                                        {
                                            "name": "K8S_HOST_URL", 
                                            "value": "https://kubernetes.default.svc.cluster.local"
                                        }, 
                                        {
                                            "name": "ES_HOST", 
                                            "value": "logging-es"
                                        }, 
                                        {
                                            "name": "ES_PORT", 
                                            "value": "9200"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_CERT", 
                                            "value": "/etc/curator/keys/cert"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_KEY", 
                                            "value": "/etc/curator/keys/key"
                                        }, 
                                        {
                                            "name": "ES_CA", 
                                            "value": "/etc/curator/keys/ca"
                                        }, 
                                        {
                                            "name": "CURATOR_DEFAULT_DAYS", 
                                            "value": "30"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_HOUR", 
                                            "value": "0"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_MINUTE", 
                                            "value": "0"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_TIMEZONE", 
                                            "value": "UTC"
                                        }, 
                                        {
                                            "name": "CURATOR_SCRIPT_LOG_LEVEL", 
                                            "value": "INFO"
                                        }, 
                                        {
                                            "name": "CURATOR_LOG_LEVEL", 
                                            "value": "ERROR"
                                        }
                                    ], 
                                    "image": "172.30.46.236:5000/logging/logging-curator:latest", 
                                    "imagePullPolicy": "Always", 
                                    "name": "curator", 
                                    "resources": {
                                        "limits": {
                                            "cpu": "100m"
                                        }
                                    }, 
                                    "terminationMessagePath": "/dev/termination-log", 
                                    "terminationMessagePolicy": "File", 
                                    "volumeMounts": [
                                        {
                                            "mountPath": "/etc/curator/keys", 
                                            "name": "certs", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/curator/settings", 
                                            "name": "config", 
                                            "readOnly": true
                                        }
                                    ]
                                }
                            ], 
                            "dnsPolicy": "ClusterFirst", 
                            "restartPolicy": "Always", 
                            "schedulerName": "default-scheduler", 
                            "securityContext": {}, 
                            "serviceAccount": "aggregated-logging-curator", 
                            "serviceAccountName": "aggregated-logging-curator", 
                            "terminationGracePeriodSeconds": 30, 
                            "volumes": [
                                {
                                    "name": "certs", 
                                    "secret": {
                                        "defaultMode": 420, 
                                        "secretName": "logging-curator"
                                    }
                                }, 
                                {
                                    "configMap": {
                                        "defaultMode": 420, 
                                        "name": "logging-curator"
                                    }, 
                                    "name": "config"
                                }
                            ]
                        }
                    }, 
                    "test": false, 
                    "triggers": [
                        {
                            "type": "ConfigChange"
                        }
                    ]
                }, 
                "status": {
                    "availableReplicas": 0, 
                    "conditions": [
                        {
                            "lastTransitionTime": "2017-06-08T18:52:19Z", 
                            "lastUpdateTime": "2017-06-08T18:52:19Z", 
                            "message": "Deployment config does not have minimum availability.", 
                            "status": "False", 
                            "type": "Available"
                        }, 
                        {
                            "lastTransitionTime": "2017-06-08T18:52:19Z", 
                            "lastUpdateTime": "2017-06-08T18:52:19Z", 
                            "message": "replication controller \"logging-curator-1\" is waiting for pod \"logging-curator-1-deploy\" to run", 
                            "status": "Unknown", 
                            "type": "Progressing"
                        }
                    ], 
                    "details": {
                        "causes": [
                            {
                                "type": "ConfigChange"
                            }
                        ], 
                        "message": "config change"
                    }, 
                    "latestVersion": 1, 
                    "observedGeneration": 2, 
                    "replicas": 0, 
                    "unavailableReplicas": 0, 
                    "updatedReplicas": 0
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
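
Curator's schedule and default retention come entirely from the env block above: it runs daily at CURATOR_RUN_HOUR:CURATOR_RUN_MINUTE in CURATOR_RUN_TIMEZONE and deletes indices older than CURATOR_DEFAULT_DAYS. These can be adjusted on the live DC, which redeploys via the ConfigChange trigger:

    # Adjust schedule/retention on the running DC (sketch; values illustrative)
    oc set env dc/logging-curator -n logging \
        CURATOR_RUN_HOUR=3 CURATOR_DEFAULT_DAYS=14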

TASK [openshift_logging_curator : Delete temp directory] ***********************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:109
ok: [openshift] => {
    "changed": false, 
    "path": "/tmp/openshift-logging-ansible-R39Avs", 
    "state": "absent"
}

TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:207
statically included: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml

TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:3
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:7
ok: [openshift] => {
    "ansible_facts": {
        "curator_version": "3_5"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:12
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:15
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Create temp directory for doing work in] *****
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:5
ok: [openshift] => {
    "changed": false, 
    "cmd": [
        "mktemp", 
        "-d", 
        "/tmp/openshift-logging-ansible-XXXXXX"
    ], 
    "delta": "0:00:01.003694", 
    "end": "2017-06-08 14:52:23.933637", 
    "rc": 0, 
    "start": "2017-06-08 14:52:22.929943"
}

STDOUT:

/tmp/openshift-logging-ansible-b33dab

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:10
ok: [openshift] => {
    "ansible_facts": {
        "tempdir": "/tmp/openshift-logging-ansible-b33dab"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : Create templates subdirectory] ***************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:14
ok: [openshift] => {
    "changed": false, 
    "gid": 0, 
    "group": "root", 
    "mode": "0755", 
    "owner": "root", 
    "path": "/tmp/openshift-logging-ansible-b33dab/templates", 
    "secontext": "unconfined_u:object_r:user_tmp_t:s0", 
    "size": 6, 
    "state": "directory", 
    "uid": 0
}

TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:24
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:32
ok: [openshift] => {
    "changed": false, 
    "results": {
        "cmd": "/bin/oc get sa aggregated-logging-curator -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "imagePullSecrets": [
                    {
                        "name": "aggregated-logging-curator-dockercfg-5s0sc"
                    }
                ], 
                "kind": "ServiceAccount", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:52:15Z", 
                    "name": "aggregated-logging-curator", 
                    "namespace": "logging", 
                    "resourceVersion": "1641", 
                    "selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-curator", 
                    "uid": "96b4348d-4c7b-11e7-a20a-0e795be4d69c"
                }, 
                "secrets": [
                    {
                        "name": "aggregated-logging-curator-token-0gtjv"
                    }, 
                    {
                        "name": "aggregated-logging-curator-dockercfg-5s0sc"
                    }
                ]
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:41
ok: [openshift] => {
    "changed": false, 
    "checksum": "9008efd9a8892dcc42c28c6dfb6708527880a6d8", 
    "dest": "/tmp/openshift-logging-ansible-b33dab/curator.yml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "5498c5fd98f3dd06e34b20eb1f55dc12", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 320, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496947944.77-52297343065915/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:47
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Set Curator configmap] ***********************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:53
ok: [openshift] => {
    "changed": false, 
    "results": {
        "cmd": "/bin/oc get configmap logging-curator -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "data": {
                    "config.yaml": "# Logging example curator config file\n\n# uncomment and use this to override the defaults from env vars\n#.defaults:\n#  delete:\n#    days: 30\n#  runhour: 0\n#  runminute: 0\n\n# to keep ops logs for a different duration:\n#.operations:\n#  delete:\n#    weeks: 8\n\n# example for a normal project\n#myapp:\n#  delete:\n#    weeks: 1\n"
                }, 
                "kind": "ConfigMap", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:52:16Z", 
                    "name": "logging-curator", 
                    "namespace": "logging", 
                    "resourceVersion": "1643", 
                    "selfLink": "/api/v1/namespaces/logging/configmaps/logging-curator", 
                    "uid": "97b22114-4c7b-11e7-a20a-0e795be4d69c"
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_curator : Set Curator secret] **************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:62
ok: [openshift] => {
    "changed": false, 
    "results": {
        "apiVersion": "v1", 
        "data": {
            "ca": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREU0TlRFd05sb1hEVEl5TURZd056RTROVEV3TjFvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQVBSMVArZVdhQ1BWYzZVeStieEltMEpJMmpiTWxhN0pwaVZnUVNPbEJ3QmcKSzA3MUg1MzI3dHNXRGJqMDlITHVILzJDN01ZREJqS2RWaVQxWWxwMWYxbkZOSUg3RHVvblFJbXJLL2MwK0cycAo3aWdrWDNtTjJsWFV6QWk3cDZhNWNxemdPSlhzVmo0ajNscGpIRHBtZ2RlRVZZcFZGQ0Y0QmlidXRuMlJDQnlLCnpvWVpteEY2WlE0bXQ4dDVvajNxTXRpa1Z4d0ZlZ243YTdyRTl6dUhRbTFQWXJmeWl5eHNpNHhpaG9jVFNjNzMKL0x2MEltL3hrdmtISGpROThRUzhRV1VCRmFSZ3dHU2Q5SWd5TzNKTHI3WE1yTUhsbGtlU1hRN0VYb1dGQnJDVgpCQ3JQcXJhSDJWL3RJYjZpRmlDT2dxSnE2YmVzUi9JSWxJNkNVcE1yMUFzQ0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFDRUsKNE1Jb1Y3ZGpEMWViYnVPQlhKWm8xVmt1SThCNXlKRkhJWnk2SlU3TmNLYkIvaWpOc3RXUFpDYlpmaC9YWjlzMApxSVNJdTBGK2gza3EwamxHR09zYkdNK1RHSUl0bzV2emxkNVAyZG5zd283Q2VHeUZoaFl6S3JtT2xkSktkZ0RFCm9tMW5mUHhFajAwL2hKUEdQTnh5ZzdSNjZyaXQyblY4SUpna0NCWVg2ZjRkcVp6eUNLckhxQXN6S2g4TTNzbWcKMFY5TGRka3ZBVWlvMHVaQjgxYnRmUTNrMzVKdmpVeFRpeSt4VXVxV2Q2aERiMHlUZm9JRWl1QlJMQy91VlI1WgpvbSt1d3EzTkpQWUJiTnJOdFpYYjMyR0lQMXRTaFloem45RGN4YkVzQmVsb1kxeHZFZENFMGVKWXdJekJCVkVtCnVESlFBTjVvdkE1eXUrcksrL009Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K", 
            "cert": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURSakNDQWk2Z0F3SUJBZ0lCQkRBTkJna3Foa2lHOXcwQkFRVUZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREU0TlRFeE0xb1hEVEU1TURZd09ERTROVEV4TTFvdwpSekVRTUE0R0ExVUVDZ3dIVEc5bloybHVaekVTTUJBR0ExVUVDd3dKVDNCbGJsTm9hV1owTVI4d0hRWURWUVFECkRCWnplWE4wWlcwdWJHOW5aMmx1Wnk1amRYSmhkRzl5TUlJQklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEEKTUlJQkNnS0NBUUVBdUMwQ3JPYTBENCtrdW5jakZIVmZkRWdqalNJUk13djRzVHRkQnJUS3FTTktDdHN3K2lzeQo4YnVFZXFaWm9Fem9JQTAwaGZLOEVmV2NiRlRTQkxQdXAwWjhkTnFCakFxRmdJWDM0QThwdmxta1RjTGJJb1V0CkJmREh4RTRFbGVta3dsN0FHdFJiUDd3Q3NuSTVXMnVDZzRYVlpkcFZwazlQaWg4ZzBnMVpZWkVLcEI3VEd6UWwKaFNGUkVLYlBjNk9GWDYwUDhxbVJFMzJrbm8raXJFMEw1SFR2NVl0NzRycTg0a1RmNGlHOGVDTzFBbmQ5MEpLUwpndEsrWmFIdUpkdWl5cVVCNW8yazc0ZVpUUE9FNmdsaEwvaTdpNU01MUhSdmNGOGVZd3R3UlFFY2lBNlViVXRxCkRaSUxLN2hHVlBuUGhMWUNsd3RzVTZRcFRMdFZGZ1R6UXdJREFRQUJvMll3WkRBT0JnTlZIUThCQWY4RUJBTUMKQmFBd0NRWURWUjBUQkFJd0FEQWRCZ05WSFNVRUZqQVVCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3SFFZRApWUjBPQkJZRUZHMmxTbllLalBSeG9NYi9qa01mdzVoVnFnUytNQWtHQTFVZEl3UUNNQUF3RFFZSktvWklodmNOCkFRRUZCUUFEZ2dFQkFCN0hndzQ3RGVqQWNTZ2RBNTdmZ29VeHpPSWxBcWVtQnhJZldPWXZJN3BLQlhjSUFubmMKbmZsL3pBR0VoWDBoZnM5Q1FrTUV2WVJGeGZzQ0xkZE11V2RsUXZwaDJ0QTBBWlZpbEswWVk0WjIrdkVtdHJpbQovSmhaK2l3R3NNMWNCYUtJNFJOZEh1eWZMR01nYzhBSjNEcitDTUFkZzgzc0pnSXZWRlkxNUJRcmdxZkZCNHNBClJOMXRseHRUQUI2QWdScjEyK2Q4eWEyNUZXcHJVbmYxaFpVZnVtSFBnd3FhUmxmRFpDa0lFQVg0RVhtZzZHdFEKREcwM2YvM3lYeURralFCY24wOTdzajd2T242SVFnM2dlRVNEb3FwU0JYYVhRWW5ZMno2RmkyVDl0anpQL1dzSApEZFhFVXRPMEdSQSs1TWNnSEVCZXliSXdEWjlLVWlocFh5RT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", 
            "key": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQzRMUUtzNXJRUGo2UzYKZHlNVWRWOTBTQ09OSWhFekMvaXhPMTBHdE1xcEkwb0syekQ2S3pMeHU0UjZwbG1nVE9nZ0RUU0Y4cndSOVp4cwpWTklFcys2blJueDAyb0dNQ29XQWhmZmdEeW0rV2FSTnd0c2loUzBGOE1mRVRnU1Y2YVRDWHNBYTFGcy92QUt5CmNqbGJhNEtEaGRWbDJsV21UMCtLSHlEU0RWbGhrUXFrSHRNYk5DV0ZJVkVRcHM5em80VmZyUS95cVpFVGZhU2UKajZLc1RRdmtkTy9saTN2aXVyemlSTi9pSWJ4NEk3VUNkMzNRa3BLQzByNWxvZTRsMjZMS3BRSG1qYVR2aDVsTQo4NFRxQ1dFditMdUxrem5VZEc5d1h4NWpDM0JGQVJ5SURwUnRTMm9Oa2dzcnVFWlUrYytFdGdLWEMyeFRwQ2xNCnUxVVdCUE5EQWdNQkFBRUNnZ0VCQUk3WWFqRE1KbS9EUzFKTVFPc0hOZWVzWTlaOHk3WTUzTU1wZGJkVDFiTmwKYVhJT0cyaU14TkIwVGdZS3E0MEcyU3F5MHovb3lqRnJ1OHpSNUtvOUhDYWxKclpER3lNUXJnYkc0VC9vd3NtZQpJUzhtcUd1WHV4bldiRG8zQUp0OVFZcDNqcXZBZEoxMEp1dGNhRndkZ0hhZUFLc2tzalZ4WWhHUW54WUx3SDFJCmZNWWxtaXJFMU1seTRkdEw5UDdwRGF6dkdiU0M0VzB2UkNhVlVhbm5KVzZ5c3Ivdjd6bVB3RHBaelQ3Rlk2aUUKeHoxK1FvVTc1cEdWOFpUdlJSUzlBaWFKU0ZiWldoL1FHNFoxU2JPbW16WUR6ZnJtakdHUGc2VFIwMlB3VTdUeQpqcnBaREMzeTl4YUZ2UWdEK1FINUNQelR5SEQ5UTRzdW9HYmJjWFY2NFlFQ2dZRUE4TVZkbnpWL2xwaTJNZHZLCkJnQ2tYd0Fjc1ZDZHlmNFZoREpRbU1MQWI4ZHRySEVTOWJNc1VGUGtTb2tyeklWVm5Tc1JXWnJTeFVWdGc2bzAKVlNtK1NKSzFRelRwUmFsV2NSb3VmUWtZbDBaeGdrY0w3WkR1MGlzUDU5TmYvSEpaY1RnamJIVlkrSFhpMjdJdwpFeFNBT3ExSnZhOTFWRzhBN2lDQlRqdlhBMk1DZ1lFQXc5TTlWcFlMYmFRL29ZNitMeTRXcFpLVUoyQW9MRlhDCklXRnBISTBHem1NeDNUYUNkY3pFZnZCQkkxNzRrR3NuRHFkVmlwbzVJN28zQW5OMnU4bEpjbFhHWkVVUnFzaTIKem1CYTRnVTRvak8vMzRQOEE5UnJhTEJuOE5hcUNMRTdheVRaTkNsamZFV3luTWt4VThtSkFoSlFqWEhxM1A3SgpKYkNxSFgzQ2hxRUNnWUJJV1RZcHVoeXR0eFBBWmFnT0o3RThycjd3ZFNZRTVXbTN3SnJCWEh3UTJwOVZHdUUwCm5YTnFhOXNFbnJWZmpvMU1YZjJuckVNN2JnSmJkeENrcXBkNEhVV0VmV2kvT3dRemxrZHo0UURYckI0OXBIV3MKSEVuM00yTmpLUHdGMHFLWHgrTzFHb255bHhIbExHVVB4ckJGY1g3Z0ZkSXUzZkFzbmUzZEFWUTV1d0tCZ0VUbgpTeW53V2MwV2doMUNzTURCSXM3WHFMSENMK0NKdkdrcWNXWE9hWnF0WkpwRjE5bkRWMUNqRVFoMUhsa1Ntd2VZCjhZeDNmNnVHbGx5U2JERGZQcXl3YWNKV1cwVVFvdUl6SFVkeU50ODcva09UbVZWdnBPdU5Sc1dEZ3dqVDgraGIKYXVIanZsRzM1SVV1ZGVEWmhRL3BpZFo3UXZ6SDJkY2RObjV5QmNTQkFvR0FIcnVTWEtSaVJRWHp4QXRnYzB0UApVZVJxVEJxaFFWaEh2eUFTbjBERWlKWTFobGxaNDVVdnN6YkhTeUlqVlFxbTV6UzVDLzNIR2VjcmYydHNKTjk4CitYQkV5UzZmdnRnTEVnMEljMHlLcUNCakpYSlJtNkEyQWVMNEdrNUZaVjUxWkZWZGU5Q2lBb1djSjVCNEtNM00KQWdMSndOcVdqTGw1TWRYRUxxN2dZN0U9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K"
        }, 
        "kind": "Secret", 
        "metadata": {
            "creationTimestamp": null, 
            "name": "logging-curator"
        }, 
        "type": "Opaque"
    }, 
    "state": "present"
}
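
NOTE: the ca, cert, and key values above are base64-encoded PEM blobs; the deploymentconfig created below mounts this secret at /etc/curator/keys. If you want to sanity-check the certificates by hand, a minimal sketch (not part of the playbook run; the secret and namespace names are taken from the output above):

    # Decode the CA certificate from the secret and print its subject
    # and validity window.
    oc get secret logging-curator -n logging -o jsonpath='{.data.ca}' \
        | base64 -d \
        | openssl x509 -noout -subject -dates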

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:75
ok: [openshift] => {
    "ansible_facts": {
        "curator_component": "curator-ops", 
        "curator_name": "logging-curator-ops"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : Generate Curator deploymentconfig] ***********
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:81
ok: [openshift] => {
    "changed": false, 
    "checksum": "9a7ee05bba3f10bbd8dcbc47cb1ea2a48dfb6662", 
    "dest": "/tmp/openshift-logging-ansible-b33dab/templates/curator-dc.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "b46648590747938a0c574d531017b33d", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 2364, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496947946.72-264673800349506/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_curator : Set Curator DC] ******************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:99
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get dc logging-curator-ops -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "kind": "DeploymentConfig", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:52:27Z", 
                    "generation": 2, 
                    "labels": {
                        "component": "curator-ops", 
                        "logging-infra": "curator", 
                        "provider": "openshift"
                    }, 
                    "name": "logging-curator-ops", 
                    "namespace": "logging", 
                    "resourceVersion": "1696", 
                    "selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-curator-ops", 
                    "uid": "9e2047a3-4c7b-11e7-a20a-0e795be4d69c"
                }, 
                "spec": {
                    "replicas": 1, 
                    "selector": {
                        "component": "curator-ops", 
                        "logging-infra": "curator", 
                        "provider": "openshift"
                    }, 
                    "strategy": {
                        "activeDeadlineSeconds": 21600, 
                        "recreateParams": {
                            "timeoutSeconds": 600
                        }, 
                        "resources": {}, 
                        "rollingParams": {
                            "intervalSeconds": 1, 
                            "maxSurge": "25%", 
                            "maxUnavailable": "25%", 
                            "timeoutSeconds": 600, 
                            "updatePeriodSeconds": 1
                        }, 
                        "type": "Recreate"
                    }, 
                    "template": {
                        "metadata": {
                            "creationTimestamp": null, 
                            "labels": {
                                "component": "curator-ops", 
                                "logging-infra": "curator", 
                                "provider": "openshift"
                            }, 
                            "name": "logging-curator-ops"
                        }, 
                        "spec": {
                            "containers": [
                                {
                                    "env": [
                                        {
                                            "name": "K8S_HOST_URL", 
                                            "value": "https://kubernetes.default.svc.cluster.local"
                                        }, 
                                        {
                                            "name": "ES_HOST", 
                                            "value": "logging-es-ops"
                                        }, 
                                        {
                                            "name": "ES_PORT", 
                                            "value": "9200"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_CERT", 
                                            "value": "/etc/curator/keys/cert"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_KEY", 
                                            "value": "/etc/curator/keys/key"
                                        }, 
                                        {
                                            "name": "ES_CA", 
                                            "value": "/etc/curator/keys/ca"
                                        }, 
                                        {
                                            "name": "CURATOR_DEFAULT_DAYS", 
                                            "value": "30"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_HOUR", 
                                            "value": "0"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_MINUTE", 
                                            "value": "0"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_TIMEZONE", 
                                            "value": "UTC"
                                        }, 
                                        {
                                            "name": "CURATOR_SCRIPT_LOG_LEVEL", 
                                            "value": "INFO"
                                        }, 
                                        {
                                            "name": "CURATOR_LOG_LEVEL", 
                                            "value": "ERROR"
                                        }
                                    ], 
                                    "image": "172.30.46.236:5000/logging/logging-curator:latest", 
                                    "imagePullPolicy": "Always", 
                                    "name": "curator", 
                                    "resources": {
                                        "limits": {
                                            "cpu": "100m"
                                        }
                                    }, 
                                    "terminationMessagePath": "/dev/termination-log", 
                                    "terminationMessagePolicy": "File", 
                                    "volumeMounts": [
                                        {
                                            "mountPath": "/etc/curator/keys", 
                                            "name": "certs", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/curator/settings", 
                                            "name": "config", 
                                            "readOnly": true
                                        }
                                    ]
                                }
                            ], 
                            "dnsPolicy": "ClusterFirst", 
                            "restartPolicy": "Always", 
                            "schedulerName": "default-scheduler", 
                            "securityContext": {}, 
                            "serviceAccount": "aggregated-logging-curator", 
                            "serviceAccountName": "aggregated-logging-curator", 
                            "terminationGracePeriodSeconds": 30, 
                            "volumes": [
                                {
                                    "name": "certs", 
                                    "secret": {
                                        "defaultMode": 420, 
                                        "secretName": "logging-curator"
                                    }
                                }, 
                                {
                                    "configMap": {
                                        "defaultMode": 420, 
                                        "name": "logging-curator"
                                    }, 
                                    "name": "config"
                                }
                            ]
                        }
                    }, 
                    "test": false, 
                    "triggers": [
                        {
                            "type": "ConfigChange"
                        }
                    ]
                }, 
                "status": {
                    "availableReplicas": 0, 
                    "conditions": [
                        {
                            "lastTransitionTime": "2017-06-08T18:52:27Z", 
                            "lastUpdateTime": "2017-06-08T18:52:27Z", 
                            "message": "Deployment config does not have minimum availability.", 
                            "status": "False", 
                            "type": "Available"
                        }, 
                        {
                            "lastTransitionTime": "2017-06-08T18:52:27Z", 
                            "lastUpdateTime": "2017-06-08T18:52:27Z", 
                            "message": "replication controller \"logging-curator-ops-1\" is waiting for pod \"logging-curator-ops-1-deploy\" to run", 
                            "status": "Unknown", 
                            "type": "Progressing"
                        }
                    ], 
                    "details": {
                        "causes": [
                            {
                                "type": "ConfigChange"
                            }
                        ], 
                        "message": "config change"
                    }, 
                    "latestVersion": 1, 
                    "observedGeneration": 2, 
                    "replicas": 0, 
                    "unavailableReplicas": 0, 
                    "updatedReplicas": 0
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
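
NOTE: the environment variables in this deploymentconfig set the run schedule (CURATOR_RUN_HOUR and CURATOR_RUN_MINUTE in CURATOR_RUN_TIMEZONE, i.e. daily at 00:00 UTC here) and the default retention (CURATOR_DEFAULT_DAYS=30); per-project retention comes from the logging-curator configmap mounted at /etc/curator/settings. A sketch of that settings format, assuming the documented aggregated-logging curator config layout (the project name is an example, not from this run):

    .defaults:
      delete:
        days: 30        # matches CURATOR_DEFAULT_DAYS above
    .operations:
      delete:
        weeks: 8        # example: keep operations logs longer
    myapp-project:
      delete:
        days: 7         # hypothetical per-project override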

TASK [openshift_logging_curator : Delete temp directory] ***********************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:109
ok: [openshift] => {
    "changed": false, 
    "path": "/tmp/openshift-logging-ansible-b33dab", 
    "state": "absent"
}

TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:226
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:241
statically included: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:2
 [WARNING]: when statements should not include jinja2 templating delimiters such as {{ }} or {% %}.
 Found: {{ openshift_logging_fluentd_nodeselector.keys() | count }} > 1
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}
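
NOTE: the warning above comes from the role's when: condition wrapping its expression in Jinja2 delimiters; Ansible already evaluates when: as a raw Jinja2 expression, so the braces are redundant (though harmless here). A sketch of the silenced form, with the task body paraphrased since only the condition appears in the log:

    - fail:
        msg: Only one Fluentd nodeselector key/value pair may be provided
      when: openshift_logging_fluentd_nodeselector.keys() | count > 1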

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:6
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:10
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:14
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:3
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:7
ok: [openshift] => {
    "ansible_facts": {
        "fluentd_version": "3_5"
    }, 
    "changed": false
}

TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:12
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:15
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:20
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:26
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : Create temp directory for doing work in] *****
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:33
ok: [openshift] => {
    "changed": false, 
    "cmd": [
        "mktemp", 
        "-d", 
        "/tmp/openshift-logging-ansible-XXXXXX"
    ], 
    "delta": "0:00:00.002186", 
    "end": "2017-06-08 14:52:32.466339", 
    "rc": 0, 
    "start": "2017-06-08 14:52:32.464153"
}

STDOUT:

/tmp/openshift-logging-ansible-6TUYM0

TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:38
ok: [openshift] => {
    "ansible_facts": {
        "tempdir": "/tmp/openshift-logging-ansible-6TUYM0"
    }, 
    "changed": false
}

TASK [openshift_logging_fluentd : Create templates subdirectory] ***************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:41
ok: [openshift] => {
    "changed": false, 
    "gid": 0, 
    "group": "root", 
    "mode": "0755", 
    "owner": "root", 
    "path": "/tmp/openshift-logging-ansible-6TUYM0/templates", 
    "secontext": "unconfined_u:object_r:user_tmp_t:s0", 
    "size": 6, 
    "state": "directory", 
    "uid": 0
}

TASK [openshift_logging_fluentd : Create Fluentd service account] **************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:51
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : Create Fluentd service account] **************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:59
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get sa aggregated-logging-fluentd -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "imagePullSecrets": [
                    {
                        "name": "aggregated-logging-fluentd-dockercfg-z5f4g"
                    }
                ], 
                "kind": "ServiceAccount", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:52:33Z", 
                    "name": "aggregated-logging-fluentd", 
                    "namespace": "logging", 
                    "resourceVersion": "1728", 
                    "selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-fluentd", 
                    "uid": "a1a99675-4c7b-11e7-a20a-0e795be4d69c"
                }, 
                "secrets": [
                    {
                        "name": "aggregated-logging-fluentd-token-f4pzb"
                    }, 
                    {
                        "name": "aggregated-logging-fluentd-dockercfg-z5f4g"
                    }
                ]
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_fluentd : Set privileged permissions for Fluentd] ******
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:68
changed: [openshift] => {
    "changed": true, 
    "present": "present", 
    "results": {
        "cmd": "/bin/oc adm policy add-scc-to-user privileged system:serviceaccount:logging:aggregated-logging-fluentd -n logging", 
        "results": "", 
        "returncode": 0
    }
}

TASK [openshift_logging_fluentd : Set cluster-reader permissions for Fluentd] ***
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:77
changed: [openshift] => {
    "changed": true, 
    "present": "present", 
    "results": {
        "cmd": "/bin/oc adm policy add-cluster-role-to-user cluster-reader system:serviceaccount:logging:aggregated-logging-fluentd -n logging", 
        "results": "", 
        "returncode": 0
    }
}
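
NOTE: these two grants work together: the privileged SCC lets the Fluentd daemonset created below run privileged containers with hostPath mounts into /var/log and the journal, and cluster-reader lets the collector resolve pod and namespace metadata across all projects. An optional spot-check that the SCC grant landed, assuming a cluster-admin session (output format varies by oc version):

    oc get scc privileged -o jsonpath='{.users}' \
        | grep system:serviceaccount:logging:aggregated-logging-fluentd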

TASK [openshift_logging_fluentd : template] ************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:86
ok: [openshift] => {
    "changed": false, 
    "checksum": "a8c8596f5fc2c5dd7c8d33d244af17a2555be086", 
    "dest": "/tmp/openshift-logging-ansible-6TUYM0/fluent.conf", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "579698b48ffce6276ee0e8d5ac71a338", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 1301, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496947955.48-3426845963888/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:94
ok: [openshift] => {
    "changed": false, 
    "checksum": "b3e75eddc4a0765edc77da092384c0c6f95440e1", 
    "dest": "/tmp/openshift-logging-ansible-6TUYM0/fluentd-throttle-config.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "25871b8e0a9bedc166a6029872a6c336", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 133, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496947955.98-98216043331320/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:100
ok: [openshift] => {
    "changed": false, 
    "checksum": "a3aa36da13f3108aa4ad5b98d4866007b44e9798", 
    "dest": "/tmp/openshift-logging-ansible-6TUYM0/secure-forward.conf", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "1084b00c427f4fa48dfc66d6ad6555d4", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 563, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496947956.34-176005224691546/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:107
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:113
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:119
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : Set Fluentd configmap] ***********************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:125
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get configmap logging-fluentd -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "data": {
                    "fluent.conf": "# This file is the fluentd configuration entrypoint. Edit with care.\n\n@include configs.d/openshift/system.conf\n\n# In each section below, pre- and post- includes don't include anything initially;\n# they exist to enable future additions to openshift conf as needed.\n\n## sources\n## ordered so that syslog always runs last...\n@include configs.d/openshift/input-pre-*.conf\n@include configs.d/dynamic/input-docker-*.conf\n@include configs.d/dynamic/input-syslog-*.conf\n@include configs.d/openshift/input-post-*.conf\n##\n\n<label @INGRESS>\n## filters\n  @include configs.d/openshift/filter-pre-*.conf\n  @include configs.d/openshift/filter-retag-journal.conf\n  @include configs.d/openshift/filter-k8s-meta.conf\n  @include configs.d/openshift/filter-kibana-transform.conf\n  @include configs.d/openshift/filter-k8s-flatten-hash.conf\n  @include configs.d/openshift/filter-k8s-record-transform.conf\n  @include configs.d/openshift/filter-syslog-record-transform.conf\n  @include configs.d/openshift/filter-viaq-data-model.conf\n  @include configs.d/openshift/filter-post-*.conf\n##\n\n## matches\n  @include configs.d/openshift/output-pre-*.conf\n  @include configs.d/openshift/output-operations.conf\n  @include configs.d/openshift/output-applications.conf\n  # no post - applications.conf matches everything left\n##\n</label>\n", 
                    "secure-forward.conf": "# @type secure_forward\n\n# self_hostname ${HOSTNAME}\n# shared_key <SECRET_STRING>\n\n# secure yes\n# enable_strict_verification yes\n\n# ca_cert_path /etc/fluent/keys/your_ca_cert\n# ca_private_key_path /etc/fluent/keys/your_private_key\n  # for private CA secret key\n# ca_private_key_passphrase passphrase\n\n# <server>\n  # or IP\n#   host server.fqdn.example.com\n#   port 24284\n# </server>\n# <server>\n  # ip address to connect\n#   host 203.0.113.8\n  # specify hostlabel for FQDN verification if ipaddress is used for host\n#   hostlabel server.fqdn.example.com\n# </server>\n", 
                    "throttle-config.yaml": "# Logging example fluentd throttling config file\n\n#example-project:\n#  read_lines_limit: 10\n#\n#.operations:\n#  read_lines_limit: 100\n"
                }, 
                "kind": "ConfigMap", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:52:37Z", 
                    "name": "logging-fluentd", 
                    "namespace": "logging", 
                    "resourceVersion": "1736", 
                    "selfLink": "/api/v1/namespaces/logging/configmaps/logging-fluentd", 
                    "uid": "a3f4a92a-4c7b-11e7-a20a-0e795be4d69c"
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
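
NOTE: secure-forward.conf ships fully commented out; it only takes effect if you uncomment it to forward a copy of the logs to an external Fluentd receiver. A hypothetical filled-in version, following the commented template above (host, port, and shared_key are placeholders):

    @type secure_forward
    self_hostname ${HOSTNAME}
    shared_key example-shared-secret
    secure yes
    ca_cert_path /etc/fluent/keys/your_ca_cert
    <server>
      host server.fqdn.example.com
      port 24284
    </server>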

TASK [openshift_logging_fluentd : Set logging-fluentd secret] ******************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:137
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc secrets new logging-fluentd ca=/etc/origin/logging/ca.crt key=/etc/origin/logging/system.logging.fluentd.key cert=/etc/origin/logging/system.logging.fluentd.crt -n logging", 
        "results": "", 
        "returncode": 0
    }, 
    "state": "present"
}
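
NOTE: oc secrets new with key=path pairs was the client syntax of this era; it was later deprecated in favor of oc create secret generic. The equivalent command on newer clients:

    oc create secret generic logging-fluentd \
        --from-file=ca=/etc/origin/logging/ca.crt \
        --from-file=key=/etc/origin/logging/system.logging.fluentd.key \
        --from-file=cert=/etc/origin/logging/system.logging.fluentd.crt \
        -n logging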

TASK [openshift_logging_fluentd : Generate logging-fluentd daemonset definition] ***
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:154
ok: [openshift] => {
    "changed": false, 
    "checksum": "1559c3daf0093ad279587d1c778589900168d682", 
    "dest": "/tmp/openshift-logging-ansible-6TUYM0/templates/logging-fluentd.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "91f7d59dfd6ee648fab3e64ad704c140", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 3414, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496947958.37-151832799701802/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_fluentd : Set logging-fluentd daemonset] ***************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:172
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get daemonset logging-fluentd -o json -n logging", 
        "results": [
            {
                "apiVersion": "extensions/v1beta1", 
                "kind": "DaemonSet", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T18:52:39Z", 
                    "generation": 1, 
                    "labels": {
                        "component": "fluentd", 
                        "logging-infra": "fluentd", 
                        "provider": "openshift"
                    }, 
                    "name": "logging-fluentd", 
                    "namespace": "logging", 
                    "resourceVersion": "1750", 
                    "selfLink": "/apis/extensions/v1beta1/namespaces/logging/daemonsets/logging-fluentd", 
                    "uid": "a52b5b99-4c7b-11e7-a20a-0e795be4d69c"
                }, 
                "spec": {
                    "selector": {
                        "matchLabels": {
                            "component": "fluentd", 
                            "provider": "openshift"
                        }
                    }, 
                    "template": {
                        "metadata": {
                            "creationTimestamp": null, 
                            "labels": {
                                "component": "fluentd", 
                                "logging-infra": "fluentd", 
                                "provider": "openshift"
                            }, 
                            "name": "fluentd-elasticsearch"
                        }, 
                        "spec": {
                            "containers": [
                                {
                                    "env": [
                                        {
                                            "name": "K8S_HOST_URL", 
                                            "value": "https://kubernetes.default.svc.cluster.local"
                                        }, 
                                        {
                                            "name": "ES_HOST", 
                                            "value": "logging-es"
                                        }, 
                                        {
                                            "name": "ES_PORT", 
                                            "value": "9200"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_CERT", 
                                            "value": "/etc/fluent/keys/cert"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_KEY", 
                                            "value": "/etc/fluent/keys/key"
                                        }, 
                                        {
                                            "name": "ES_CA", 
                                            "value": "/etc/fluent/keys/ca"
                                        }, 
                                        {
                                            "name": "OPS_HOST", 
                                            "value": "logging-es-ops"
                                        }, 
                                        {
                                            "name": "OPS_PORT", 
                                            "value": "9200"
                                        }, 
                                        {
                                            "name": "OPS_CLIENT_CERT", 
                                            "value": "/etc/fluent/keys/cert"
                                        }, 
                                        {
                                            "name": "OPS_CLIENT_KEY", 
                                            "value": "/etc/fluent/keys/key"
                                        }, 
                                        {
                                            "name": "OPS_CA", 
                                            "value": "/etc/fluent/keys/ca"
                                        }, 
                                        {
                                            "name": "ES_COPY", 
                                            "value": "false"
                                        }, 
                                        {
                                            "name": "USE_JOURNAL", 
                                            "value": "true"
                                        }, 
                                        {
                                            "name": "JOURNAL_SOURCE"
                                        }, 
                                        {
                                            "name": "JOURNAL_READ_FROM_HEAD", 
                                            "value": "false"
                                        }
                                    ], 
                                    "image": "172.30.46.236:5000/logging/logging-fluentd:latest", 
                                    "imagePullPolicy": "Always", 
                                    "name": "fluentd-elasticsearch", 
                                    "resources": {
                                        "limits": {
                                            "cpu": "100m", 
                                            "memory": "512Mi"
                                        }
                                    }, 
                                    "securityContext": {
                                        "privileged": true
                                    }, 
                                    "terminationMessagePath": "/dev/termination-log", 
                                    "terminationMessagePolicy": "File", 
                                    "volumeMounts": [
                                        {
                                            "mountPath": "/run/log/journal", 
                                            "name": "runlogjournal"
                                        }, 
                                        {
                                            "mountPath": "/var/log", 
                                            "name": "varlog"
                                        }, 
                                        {
                                            "mountPath": "/var/lib/docker/containers", 
                                            "name": "varlibdockercontainers", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/fluent/configs.d/user", 
                                            "name": "config", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/fluent/keys", 
                                            "name": "certs", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/docker-hostname", 
                                            "name": "dockerhostname", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/localtime", 
                                            "name": "localtime", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/sysconfig/docker", 
                                            "name": "dockercfg", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/docker", 
                                            "name": "dockerdaemoncfg", 
                                            "readOnly": true
                                        }
                                    ]
                                }
                            ], 
                            "dnsPolicy": "ClusterFirst", 
                            "nodeSelector": {
                                "logging-infra-fluentd": "true"
                            }, 
                            "restartPolicy": "Always", 
                            "schedulerName": "default-scheduler", 
                            "securityContext": {}, 
                            "serviceAccount": "aggregated-logging-fluentd", 
                            "serviceAccountName": "aggregated-logging-fluentd", 
                            "terminationGracePeriodSeconds": 30, 
                            "volumes": [
                                {
                                    "hostPath": {
                                        "path": "/run/log/journal"
                                    }, 
                                    "name": "runlogjournal"
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/var/log"
                                    }, 
                                    "name": "varlog"
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/var/lib/docker/containers"
                                    }, 
                                    "name": "varlibdockercontainers"
                                }, 
                                {
                                    "configMap": {
                                        "defaultMode": 420, 
                                        "name": "logging-fluentd"
                                    }, 
                                    "name": "config"
                                }, 
                                {
                                    "name": "certs", 
                                    "secret": {
                                        "defaultMode": 420, 
                                        "secretName": "logging-fluentd"
                                    }
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/etc/hostname"
                                    }, 
                                    "name": "dockerhostname"
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/etc/localtime"
                                    }, 
                                    "name": "localtime"
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/etc/sysconfig/docker"
                                    }, 
                                    "name": "dockercfg"
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/etc/docker"
                                    }, 
                                    "name": "dockerdaemoncfg"
                                }
                            ]
                        }
                    }, 
                    "templateGeneration": 1, 
                    "updateStrategy": {
                        "rollingUpdate": {
                            "maxUnavailable": 1
                        }, 
                        "type": "RollingUpdate"
                    }
                }, 
                "status": {
                    "currentNumberScheduled": 0, 
                    "desiredNumberScheduled": 0, 
                    "numberMisscheduled": 0, 
                    "numberReady": 0, 
                    "observedGeneration": 1
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
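
NOTE: desiredNumberScheduled is 0 because the pod template's nodeSelector requires logging-infra-fluentd=true and no node carries that label yet; the host list gathered in the next task feeds the labeling step that actually schedules the pods. The manual equivalent, using the node name from the output below:

    oc label node 172.18.14.90 logging-infra-fluentd=true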

TASK [openshift_logging_fluentd : Retrieve list of Fluentd hosts] **************
task path: /tmp/tmp.aBpGK5BHS9/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:183
ok: [openshift] => {
    "changed": false, 
    "results": {
        "cmd": "/bin/oc get node -o json -n default", 
        "results": [
            {
                "apiVersion": "v1", 
                "items": [
                    {
                        "apiVersion": "v1", 
                        "kind": "Node", 
                        "metadata": {
                            "annotations": {
                                "volumes.kubernetes.io/controller-managed-attach-detach": "true"
                            }, 
                            "creationTimestamp": "2017-06-08T18:27:25Z", 
                            "labels": {
                                "beta.kubernetes.io/arch": "amd64", 
                                "beta.kubernetes.io/os": "linux", 
                                "kubernetes.io/hostname": "172.18.14.90"
                            }, 
                            "name": "172.18.14.90", 
                            "namespace": "", 
                            "resourceVersion": "1732", 
                            "selfLink": "/api/v1/nodes/172.18.14.90", 
                            "uid": "1f156474-4c78-11e7-a20a-0e795be4d69c"
                        }, 
                        "spec": {
                            "externalID": "172.18.14.90", 
                            "providerID": "aws:////i-01db74e94a06d1a91"
                        }, 
                        "status": {
                            "addresses": [
                                {
                                    "address": "172.18.14.90", 
                                    "type": "LegacyHostIP"
                                }, 
                                {
                                    "address": "172.18.14.90", 
                                    "type": "InternalIP"
                                }, 
                                {
                                    "address": "172.18.14.90", 
                                    "type": "Hostname"
                                }
                            ], 
                            "allocatable": {
                                "cpu": "4", 
                                "memory": "7129288Ki", 
                                "pods": "40"
                            }, 
                            "capacity": {
                                "cpu": "4", 
                                "memory": "7231688Ki", 
                                "pods": "40"
                            }, 
                            "conditions": [
                                {
                                    "lastHeartbeatTime": "2017-06-08T18:52:35Z", 
                                    "lastTransitionTime": "2017-06-08T18:27:25Z", 
                                    "message": "kubelet has sufficient disk space available", 
                                    "reason": "KubeletHasSufficientDisk", 
                                    "status": "False", 
                                    "type": "OutOfDisk"
                                }, 
                                {
                                    "lastHeartbeatTime": "2017-06-08T18:52:35Z", 
                                    "lastTransitionTime": "2017-06-08T18:27:25Z", 
                                    "message": "kubelet has sufficient memory available", 
                                    "reason": "KubeletHasSufficientMemory", 
                                    "status": "False", 
                                    "type": "MemoryPressure"
                                }, 
                                {
                                    "lastHeartbeatTime": "2017-06-08T18:52:35Z", 
                                    "lastTransitionTime": "2017-06-08T18:27:25Z", 
                                    "message": "kubelet has no disk pressure", 
                                    "reason": "KubeletHasNoDiskPressure", 
                                    "status": "False", 
                                    "type": "DiskPressure"
                                }, 
                                {
                                    "lastHeartbeatTime": "2017-06-08T18:52:35Z", 
                                    "lastTransitionTime": "2017-06-08T18:27:25Z", 
                                    "message": "kubelet is posting ready status", 
                                    "reason": "KubeletReady", 
                                    "status": "True", 
                                    "type": "Ready"
                                }
                            ], 
                            "daemonEndpoints": {
                                "kubeletEndpoint": {
                                    "Port": 10250
                                }
                            }, 
                            "images": [
                                {
                                    "names": [
                                        "docker.io/openshift/origin-docker-registry@sha256:ae8bef1a8222265fb34dd33dc434c766e63d492522798038f4c15bcf67e0fde2", 
                                        "docker.io/openshift/origin-docker-registry:latest"
                                    ], 
                                    "sizeBytes": 1100553430
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-docker-registry:latest"
                                    ], 
                                    "sizeBytes": 1100164272
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-gitserver:6acabdc", 
                                        "openshift/origin-gitserver:latest"
                                    ], 
                                    "sizeBytes": 1086520226
                                }, 
                                {
                                    "names": [
                                        "openshift/openvswitch:6acabdc", 
                                        "openshift/openvswitch:latest"
                                    ], 
                                    "sizeBytes": 1053403667
                                }, 
                                {
                                    "names": [
                                        "openshift/node:6acabdc", 
                                        "openshift/node:latest"
                                    ], 
                                    "sizeBytes": 1051721928
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-haproxy-router:latest"
                                    ], 
                                    "sizeBytes": 1022758742
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-docker-builder:latest"
                                    ], 
                                    "sizeBytes": 1001728427
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-recycler:6acabdc", 
                                        "openshift/origin-recycler:latest"
                                    ], 
                                    "sizeBytes": 1001728427
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-deployer:6acabdc", 
                                        "openshift/origin-deployer:latest"
                                    ], 
                                    "sizeBytes": 1001728427
                                }, 
                                {
                                    "names": [
                                        "openshift/origin:6acabdc", 
                                        "openshift/origin:latest"
                                    ], 
                                    "sizeBytes": 1001728427
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-sti-builder:6acabdc", 
                                        "openshift/origin-sti-builder:latest"
                                    ], 
                                    "sizeBytes": 1001728427
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-cluster-capacity:6acabdc", 
                                        "openshift/origin-cluster-capacity:latest"
                                    ], 
                                    "sizeBytes": 962455026
                                }, 
                                {
                                    "names": [
                                        "rhel7.1:latest"
                                    ], 
                                    "sizeBytes": 765301508
                                }, 
                                {
                                    "names": [
                                        "openshift/dind-master:latest"
                                    ], 
                                    "sizeBytes": 731456758
                                }, 
                                {
                                    "names": [
                                        "openshift/dind-node:latest"
                                    ], 
                                    "sizeBytes": 731453034
                                }, 
                                {
                                    "names": [
                                        "172.30.46.236:5000/logging/logging-auth-proxy@sha256:80105c09d7d6c9fcf84634b46864ad17b10e582f9fa6f0be4a08111c7450ab44", 
                                        "172.30.46.236:5000/logging/logging-auth-proxy:latest"
                                    ], 
                                    "sizeBytes": 715536037
                                }, 
                                {
                                    "names": [
                                        "<none>@<none>", 
                                        "<none>:<none>"
                                    ], 
                                    "sizeBytes": 709532011
                                }, 
                                {
                                    "names": [
                                        "docker.io/node@sha256:46db0dd19955beb87b841c30a6b9812ba626473283e84117d1c016deee5949a9", 
                                        "docker.io/node:0.10.36"
                                    ], 
                                    "sizeBytes": 697128386
                                }, 
                                {
                                    "names": [
                                        "172.30.46.236:5000/logging/logging-kibana@sha256:600ab33441ad8f4013f39a205ef6f1c4310ec43273bcc1b1867d1cd4a9a75966", 
                                        "172.30.46.236:5000/logging/logging-kibana:latest"
                                    ], 
                                    "sizeBytes": 682851502
                                }, 
                                {
                                    "names": [
                                        "openshift/dind:latest"
                                    ], 
                                    "sizeBytes": 640650210
                                }, 
                                {
                                    "names": [
                                        "172.30.46.236:5000/logging/logging-elasticsearch@sha256:b17b203f8ccb5c9e0fc30787e2a2ac0539d7cf15e0125e093999e2a2fba11367", 
                                        "172.30.46.236:5000/logging/logging-elasticsearch:latest"
                                    ], 
                                    "sizeBytes": 623379790
                                }, 
                                {
                                    "names": [
                                        "172.30.46.236:5000/logging/logging-fluentd@sha256:0fe24293418bd591e59d2cbd4fcc3282d57b0f4a8484ef0c94515682860a36c5", 
                                        "172.30.46.236:5000/logging/logging-fluentd:latest"
                                    ], 
                                    "sizeBytes": 472182625
                                }, 
                                {
                                    "names": [
                                        "docker.io/openshift/origin-logging-elasticsearch@sha256:273c11b82929c5f946ddb48bd49dbe1e77ba40d9c15275dc7619fd07b9c8ec54", 
                                        "docker.io/openshift/origin-logging-elasticsearch:latest"
                                    ], 
                                    "sizeBytes": 425433879
                                }, 
                                {
                                    "names": [
                                        "172.30.46.236:5000/logging/logging-curator@sha256:164cd389805d374154b2b9c2895d82f455ba9dcfc98c15b6577424bc26e3a2f1", 
                                        "172.30.46.236:5000/logging/logging-curator:latest"
                                    ], 
                                    "sizeBytes": 418287799
                                }, 
                                {
                                    "names": [
                                        "docker.io/openshift/base-centos7@sha256:aea292a3bddba020cde0ee83e6a45807931eb607c164ec6a3674f67039d8cd7c", 
                                        "docker.io/openshift/base-centos7:latest"
                                    ], 
                                    "sizeBytes": 383049978
                                }, 
                                {
                                    "names": [
                                        "rhel7.2:latest"
                                    ], 
                                    "sizeBytes": 377493597
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-egress-router:6acabdc", 
                                        "openshift/origin-egress-router:latest"
                                    ], 
                                    "sizeBytes": 364745713
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-base:latest"
                                    ], 
                                    "sizeBytes": 363070172
                                }, 
                                {
                                    "names": [
                                        "docker.io/openshift/origin-logging-fluentd@sha256:b55877bf5f5624c0111688db16bac54ed7b64291114323fb010d9e8e630b1c89", 
                                        "docker.io/openshift/origin-logging-fluentd:latest"
                                    ], 
                                    "sizeBytes": 359219273
                                }, 
                                {
                                    "names": [
                                        "docker.io/fedora@sha256:69281ddd7b2600e5f2b17f1e12d7fba25207f459204fb2d15884f8432c479136", 
                                        "docker.io/fedora:25"
                                    ], 
                                    "sizeBytes": 230864375
                                }, 
                                {
                                    "names": [
                                        "rhel7.3:latest", 
                                        "rhel7:latest"
                                    ], 
                                    "sizeBytes": 219121266
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-pod:6acabdc", 
                                        "openshift/origin-pod:latest"
                                    ], 
                                    "sizeBytes": 213199843
                                }, 
                                {
                                    "names": [
                                        "registry.access.redhat.com/rhel7.2@sha256:98e6ca5d226c26e31a95cd67716afe22833c943e1926a21daf1a030906a02249", 
                                        "registry.access.redhat.com/rhel7.2:latest"
                                    ], 
                                    "sizeBytes": 201376319
                                }, 
                                {
                                    "names": [
                                        "registry.access.redhat.com/rhel7.3@sha256:1e232401d8e0ba53b36b757b4712fbcbd1dab9c21db039c45a84871a74e89e68", 
                                        "registry.access.redhat.com/rhel7.3:latest"
                                    ], 
                                    "sizeBytes": 192693772
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-source:latest"
                                    ], 
                                    "sizeBytes": 192548894
                                }, 
                                {
                                    "names": [
                                        "registry.access.redhat.com/rhel7.1@sha256:1bc5a4c43bbb29a5a96a61896ff696933be3502e2f5fdc4cde02d9e101731fdd", 
                                        "registry.access.redhat.com/rhel7.1:latest"
                                    ], 
                                    "sizeBytes": 158229901
                                }, 
                                {
                                    "names": [
                                        "openshift/hello-openshift:6acabdc", 
                                        "openshift/hello-openshift:latest"
                                    ], 
                                    "sizeBytes": 5643318
                                }
                            ], 
                            "nodeInfo": {
                                "architecture": "amd64", 
                                "bootID": "47ad466c-9d62-4e83-a9c3-4d8f1d5f21b8", 
                                "containerRuntimeVersion": "docker://1.12.6", 
                                "kernelVersion": "3.10.0-327.22.2.el7.x86_64", 
                                "kubeProxyVersion": "v1.6.1+5115d708d7", 
                                "kubeletVersion": "v1.6.1+5115d708d7", 
                                "machineID": "f9370ed252a14f73b014c1301a9b6d1b", 
                                "operatingSystem": "linux", 
                                "osImage": "Red Hat Enterprise Linux Server 7.3 (Maipo)", 
                                "systemUUID": "EC291DF8-F7E8-E3A3-DEB2-5EA827F2E128"
                            }
                        }
                    }
                ], 
                "kind": "List", 
                "metadata": {}, 
                "resourceVersion": "", 
                "selfLink": ""
            }
        ], 
        "returncode": 0
    }, 
    "state": "list"
}
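
The JSON above is the tail of the node object the role inspected while choosing Fluentd hosts; the images array lists every image cached on the node along with its size. For reference, the same inventory can be pulled by hand; a minimal sketch, assuming jq is available on the workstation (jq is not part of the harness):

    # Sketch: list cached images per node, largest first
    oc get nodes -o json \
      | jq -r '.items[].status.images[] | "\(.sizeBytes)\t\(.names[0])"' \
      | sort -rn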

TASK [openshift_logging_fluentd : Set openshift_logging_fluentd_hosts] *********
task path: /tmp/tmp.aBpGK5BHS9/openshift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:190
ok: [openshift] => {
    "ansible_facts": {
        "openshift_logging_fluentd_hosts": [
            "172.18.14.90"
        ]
    }, 
    "changed": false
}

TASK [openshift_logging_fluentd : include] *************************************
task path: /tmp/tmp.aBpGK5BHS9/openshift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:195
included: /tmp/tmp.aBpGK5BHS9/openshift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml for openshift

TASK [openshift_logging_fluentd : Label 172.18.14.90 for Fluentd deployment] ***
task path: /tmp/tmp.aBpGK5BHS9/openshift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml:2
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc label node 172.18.14.90 logging-infra-fluentd=true --overwrite", 
        "results": "", 
        "returncode": 0
    }, 
    "state": "add"
}
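
Labeling is the whole deployment step here: logging-fluentd is a daemonset whose nodeSelector is logging-infra-fluentd=true, so adding the label is what actually schedules a Fluentd pod onto the node. A minimal sketch of the same step done by hand (node name taken from the task above):

    # Label the node; the daemonset's nodeSelector then schedules Fluentd there
    oc label node 172.18.14.90 logging-infra-fluentd=true --overwrite
    # To unschedule Fluentd again, drop the label:
    oc label node 172.18.14.90 logging-infra-fluentd-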

TASK [openshift_logging_fluentd : command] *************************************
task path: /tmp/tmp.aBpGK5BHS9/openshift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml:10
changed: [openshift -> 127.0.0.1] => {
    "changed": true, 
    "cmd": [
        "sleep", 
        "0.5"
    ], 
    "delta": "0:00:00.502326", 
    "end": "2017-06-08 14:52:41.993382", 
    "rc": 0, 
    "start": "2017-06-08 14:52:41.491056"
}

TASK [openshift_logging_fluentd : Delete temp directory] ***********************
task path: /tmp/tmp.aBpGK5BHS9/openshift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:202
ok: [openshift] => {
    "changed": false, 
    "path": "/tmp/openshift-logging-ansible-6TUYM0", 
    "state": "absent"
}

TASK [openshift_logging : include] *********************************************
task path: /tmp/tmp.aBpGK5BHS9/openshift-ansible/roles/openshift_logging/tasks/install_logging.yaml:253
included: /tmp/tmp.aBpGK5BHS9/openshift-ansible/roles/openshift_logging/tasks/update_master_config.yaml for openshift

TASK [openshift_logging : include] *********************************************
task path: /tmp/tmp.aBpGK5BHS9/openshift-ansible/roles/openshift_logging/tasks/main.yaml:36
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging : Cleaning up local temp dir] **************************
task path: /tmp/tmp.aBpGK5BHS9/openshift-ansible/roles/openshift_logging/tasks/main.yaml:40
ok: [openshift -> 127.0.0.1] => {
    "changed": false, 
    "path": "/tmp/openshift-logging-ansible-eowPYv", 
    "state": "absent"
}
META: ran handlers
META: ran handlers

PLAY [Update Master configs] ***************************************************
skipping: no hosts matched

PLAY RECAP *********************************************************************
localhost                  : ok=2    changed=0    unreachable=0    failed=0   
openshift                  : ok=207  changed=70   unreachable=0    failed=0   

/data/src/github.com/openshift/origin-aggregated-logging
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:170: executing 'oc get pods -l component=es' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 0.309s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:170: executing 'oc get pods -l component=es' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME                                      READY     STATUS    RESTARTS   AGE
logging-es-data-master-wzqmb0lv-1-s77n8   1/1       Running   0          58s

There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:171: executing 'oc get pods -l component=kibana' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 17.655s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:171: executing 'oc get pods -l component=kibana' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          27s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          28s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          29s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          30s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          31s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          32s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          33s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          34s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          35s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          36s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          37s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          38s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          39s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          40s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          41s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          42s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          43s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-n3w3w   0/2       ContainerCreating   0          44s
... repeated 2 times
NAME                     READY     STATUS    RESTARTS   AGE
logging-kibana-1-n3w3w   1/2       Running   0          44s
Standard error from the command:
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:172: executing 'oc get pods -l component=curator' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 4.373s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:172: executing 'oc get pods -l component=curator' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME                      READY     STATUS              RESTARTS   AGE
logging-curator-1-gsn2z   0/1       ContainerCreating   0          24s
... repeated 2 times
NAME                      READY     STATUS              RESTARTS   AGE
logging-curator-1-gsn2z   0/1       ContainerCreating   0          25s
... repeated 2 times
NAME                      READY     STATUS              RESTARTS   AGE
logging-curator-1-gsn2z   0/1       ContainerCreating   0          26s
NAME                      READY     STATUS              RESTARTS   AGE
logging-curator-1-gsn2z   0/1       ContainerCreating   0          27s
... repeated 2 times
NAME                      READY     STATUS    RESTARTS   AGE
logging-curator-1-gsn2z   1/1       Running   0          28s
Standard error from the command:
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:175: executing 'oc get pods -l component=es-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 0.285s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:175: executing 'oc get pods -l component=es-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME                                          READY     STATUS    RESTARTS   AGE
logging-es-ops-data-master-gcngcleo-1-6p3b6   1/1       Running   0          1m

There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:176: executing 'oc get pods -l component=kibana-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 2.261s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:176: executing 'oc get pods -l component=kibana-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME                         READY     STATUS              RESTARTS   AGE
logging-kibana-ops-1-0clp0   0/2       ContainerCreating   0          39s
... repeated 2 times
NAME                         READY     STATUS              RESTARTS   AGE
logging-kibana-ops-1-0clp0   0/2       ContainerCreating   0          40s
... repeated 2 times
NAME                         READY     STATUS    RESTARTS   AGE
logging-kibana-ops-1-0clp0   1/2       Running   0          41s
Standard error from the command:
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:177: executing 'oc get pods -l component=curator-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 0.240s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:177: executing 'oc get pods -l component=curator-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME                          READY     STATUS    RESTARTS   AGE
logging-curator-ops-1-xbk84   1/1       Running   0          13s

There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:185: executing 'oc project logging > /dev/null' expecting success...
SUCCESS after 0.254s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:185: executing 'oc project logging > /dev/null' expecting success
There was no output from the command.
There was no error output from the command.
/data/src/github.com/openshift/origin-aggregated-logging/hack/testing /data/src/github.com/openshift/origin-aggregated-logging
--> Deploying template "logging/logging-fluentd-template-maker" for "-" to project logging

     logging-fluentd-template-maker
     ---------
     Template to create template for fluentd

     * With parameters:
        * MASTER_URL=https://kubernetes.default.svc.cluster.local
        * ES_HOST=logging-es
        * ES_PORT=9200
        * ES_CLIENT_CERT=/etc/fluent/keys/cert
        * ES_CLIENT_KEY=/etc/fluent/keys/key
        * ES_CA=/etc/fluent/keys/ca
        * OPS_HOST=logging-es-ops
        * OPS_PORT=9200
        * OPS_CLIENT_CERT=/etc/fluent/keys/cert
        * OPS_CLIENT_KEY=/etc/fluent/keys/key
        * OPS_CA=/etc/fluent/keys/ca
        * ES_COPY=false
        * ES_COPY_HOST=
        * ES_COPY_PORT=
        * ES_COPY_SCHEME=https
        * ES_COPY_CLIENT_CERT=
        * ES_COPY_CLIENT_KEY=
        * ES_COPY_CA=
        * ES_COPY_USERNAME=
        * ES_COPY_PASSWORD=
        * OPS_COPY_HOST=
        * OPS_COPY_PORT=
        * OPS_COPY_SCHEME=https
        * OPS_COPY_CLIENT_CERT=
        * OPS_COPY_CLIENT_KEY=
        * OPS_COPY_CA=
        * OPS_COPY_USERNAME=
        * OPS_COPY_PASSWORD=
        * IMAGE_PREFIX_DEFAULT=172.30.46.236:5000/logging/
        * IMAGE_VERSION_DEFAULT=latest
        * USE_JOURNAL=
        * JOURNAL_SOURCE=
        * JOURNAL_READ_FROM_HEAD=false
        * USE_MUX=false
        * USE_MUX_CLIENT=false
        * MUX_ALLOW_EXTERNAL=false
        * BUFFER_QUEUE_LIMIT=1024
        * BUFFER_SIZE_LIMIT=16777216

--> Creating resources ...
    template "logging-fluentd-template" created
--> Success
    Run 'oc status' to view your app.
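
The template-maker template only stamps out the real logging-fluentd-template, with the defaults listed above baked in; the generated template is then instantiated separately. A minimal sketch of that second step, using standard oc template processing rather than the harness's exact invocation:

    # Sketch: instantiate the generated fluentd template
    oc process logging-fluentd-template | oc create -f -
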
WARNING: bridge-nf-call-ip6tables is disabled
START wait_for_fluentd_to_catch_up at 2017-06-08 18:53:23.440297597+00:00
added es message c373da95-175e-40f1-8065-36b8d6fc00dd
added es-ops message f945881e-a1ce-4fa2-8409-60d21c9d5c1f
good - wait_for_fluentd_to_catch_up: found 1 record project logging for c373da95-175e-40f1-8065-36b8d6fc00dd
good - wait_for_fluentd_to_catch_up: found 1 record project .operations for f945881e-a1ce-4fa2-8409-60d21c9d5c1f
END wait_for_fluentd_to_catch_up took 12 seconds at 2017-06-08 18:53:35.513980361+00:00
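
wait_for_fluentd_to_catch_up writes one uniquely tagged message per cluster and then polls Elasticsearch until each message is indexed, which bounds the end-to-end pipeline latency (12 seconds here). A minimal sketch of the idea; the pod-name variable, query, and certificate paths are illustrative, not the harness's actual code:

    # Sketch: emit a marker, then poll ES until it is indexed
    uuid=$(uuidgen)
    logger "$uuid"    # journald/syslog record, routed to the .operations index
    until oc exec "$es_ops_pod" -- curl -s \
            --cacert /etc/elasticsearch/secret/admin-ca \
            --cert /etc/elasticsearch/secret/admin-cert \
            --key /etc/elasticsearch/secret/admin-key \
            "https://localhost:9200/.operations.*/_count?q=message:$uuid" \
          | grep -q '"count":1'; do
        sleep 1
    done
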
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:223: executing 'oc login --username=admin --password=admin' expecting success...
SUCCESS after 0.233s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:223: executing 'oc login --username=admin --password=admin' expecting success
Standard output from the command:
Login successful.

You don't have any projects. You can try to create a new project, by running

    oc new-project <projectname>


There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:224: executing 'oc login --username=system:admin' expecting success...
SUCCESS after 0.221s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:224: executing 'oc login --username=system:admin' expecting success
Standard output from the command:
Logged into "https://172.18.14.90:8443" as "system:admin" using existing credentials.

You have access to the following projects and can switch between them with 'oc project <projectname>':

  * default
    kube-public
    kube-system
    logging
    openshift
    openshift-infra

Using project "default".

There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:225: executing 'oadm policy add-cluster-role-to-user cluster-admin admin' expecting success...
SUCCESS after 0.252s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:225: executing 'oadm policy add-cluster-role-to-user cluster-admin admin' expecting success
Standard output from the command:
cluster role "cluster-admin" added: "admin"

There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:226: executing 'oc login --username=loguser --password=loguser' expecting success...
SUCCESS after 0.243s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:226: executing 'oc login --username=loguser --password=loguser' expecting success
Standard output from the command:
Login successful.

You don't have any projects. You can try to create a new project, by running

    oc new-project <projectname>


There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:227: executing 'oc login --username=system:admin' expecting success...
SUCCESS after 0.247s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:227: executing 'oc login --username=system:admin' expecting success
Standard output from the command:
Logged into "https://172.18.14.90:8443" as "system:admin" using existing credentials.

You have access to the following projects and can switch between them with 'oc project <projectname>':

  * default
    kube-public
    kube-system
    logging
    openshift
    openshift-infra

Using project "default".

There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:228: executing 'oc project logging > /dev/null' expecting success...
SUCCESS after 0.441s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:228: executing 'oc project logging > /dev/null' expecting success
There was no output from the command.
There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:229: executing 'oadm policy add-role-to-user view loguser' expecting success...
SUCCESS after 0.226s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:229: executing 'oadm policy add-role-to-user view loguser' expecting success
Standard output from the command:
role "view" added: "loguser"

There was no error output from the command.
Checking if Elasticsearch logging-es-data-master-wzqmb0lv-1-s77n8 is ready
{
    "_id": "0",
    "_index": ".searchguard.logging-es-data-master-wzqmb0lv-1-s77n8",
    "_shards": {
        "failed": 0,
        "successful": 1,
        "total": 1
    },
    "_type": "rolesmapping",
    "_version": 2,
    "created": false
}
Checking if Elasticsearch logging-es-ops-data-master-gcngcleo-1-6p3b6 is ready
{
    "_id": "0",
    "_index": ".searchguard.logging-es-ops-data-master-gcngcleo-1-6p3b6",
    "_shards": {
        "failed": 0,
        "successful": 1,
        "total": 1
    },
    "_type": "rolesmapping",
    "_version": 2,
    "created": false
}
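
Both readiness checks above work by indexing a rolesmapping document into each cluster's .searchguard.* index; "created": false with "_version": 2 means the document already existed and was overwritten, i.e. the write (and therefore the cluster) is serviceable. A lighter-weight probe for comparison; this is a sketch, not the harness's check, and the certificate paths are the stack's conventional ones:

    # Sketch: query cluster health instead of writing to .searchguard
    oc exec logging-es-data-master-wzqmb0lv-1-s77n8 -- curl -s \
        --cacert /etc/elasticsearch/secret/admin-ca \
        --cert /etc/elasticsearch/secret/admin-cert \
        --key /etc/elasticsearch/secret/admin-key \
        "https://localhost:9200/_cluster/health?pretty"
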
------------------------------------------
     Test 'admin' user can access cluster stats
------------------------------------------
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:265: executing 'test 200 = 200' expecting success...
SUCCESS after 0.009s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:265: executing 'test 200 = 200' expecting success
There was no output from the command.
There was no error output from the command.
------------------------------------------
     Test 'admin' user can access cluster stats for OPS cluster
------------------------------------------
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:274: executing 'test 200 = 200' expecting success...
SUCCESS after 0.010s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:274: executing 'test 200 = 200' expecting success
There was no output from the command.
There was no error output from the command.
Running e2e tests
Checking installation of the EFK stack...
Running test/cluster/rollout.sh:20: executing 'oc project logging' expecting success...
SUCCESS after 0.274s: test/cluster/rollout.sh:20: executing 'oc project logging' expecting success
Standard output from the command:
Already on project "logging" on server "https://172.18.14.90:8443".

There was no error output from the command.
[INFO] Checking for DeploymentConfigurations...
Running test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-kibana' expecting success...
SUCCESS after 0.217s: test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-kibana' expecting success
Standard output from the command:
NAME             REVISION   DESIRED   CURRENT   TRIGGERED BY
logging-kibana   1          1         1         config

There was no error output from the command.
Running test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-kibana' expecting success...
SUCCESS after 0.268s: test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-kibana' expecting success
Standard output from the command:
replication controller "logging-kibana-1" successfully rolled out

There was no error output from the command.
Running test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-curator' expecting success...
SUCCESS after 0.230s: test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-curator' expecting success
Standard output from the command:
NAME              REVISION   DESIRED   CURRENT   TRIGGERED BY
logging-curator   1          1         1         config

There was no error output from the command.
Running test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-curator' expecting success...
SUCCESS after 0.220s: test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-curator' expecting success
Standard output from the command:
replication controller "logging-curator-1" successfully rolled out

There was no error output from the command.
Running test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-kibana-ops' expecting success...
SUCCESS after 0.219s: test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-kibana-ops' expecting success
Standard output from the command:
NAME                 REVISION   DESIRED   CURRENT   TRIGGERED BY
logging-kibana-ops   1          1         1         config

There was no error output from the command.
Running test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-kibana-ops' expecting success...
SUCCESS after 0.216s: test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-kibana-ops' expecting success
Standard output from the command:
replication controller "logging-kibana-ops-1" successfully rolled out

There was no error output from the command.
Running test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-curator-ops' expecting success...
SUCCESS after 0.251s: test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-curator-ops' expecting success
Standard output from the command:
NAME                  REVISION   DESIRED   CURRENT   TRIGGERED BY
logging-curator-ops   1          1         1         config

There was no error output from the command.
Running test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-curator-ops' expecting success...
SUCCESS after 0.313s: test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-curator-ops' expecting success
Standard output from the command:
replication controller "logging-curator-ops-1" successfully rolled out

There was no error output from the command.
Running test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-es-data-master-wzqmb0lv' expecting success...
SUCCESS after 0.211s: test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-es-data-master-wzqmb0lv' expecting success
Standard output from the command:
NAME                              REVISION   DESIRED   CURRENT   TRIGGERED BY
logging-es-data-master-wzqmb0lv   1          1         1         config

There was no error output from the command.
Running test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-es-data-master-wzqmb0lv' expecting success...
SUCCESS after 0.270s: test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-es-data-master-wzqmb0lv' expecting success
Standard output from the command:
replication controller "logging-es-data-master-wzqmb0lv-1" successfully rolled out

There was no error output from the command.
Running test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-es-ops-data-master-gcngcleo' expecting success...
SUCCESS after 0.212s: test/cluster/rollout.sh:24: executing 'oc get deploymentconfig logging-es-ops-data-master-gcngcleo' expecting success
Standard output from the command:
NAME                                  REVISION   DESIRED   CURRENT   TRIGGERED BY
logging-es-ops-data-master-gcngcleo   1          1         1         config

There was no error output from the command.
Running test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-es-ops-data-master-gcngcleo' expecting success...
SUCCESS after 0.219s: test/cluster/rollout.sh:25: executing 'oc rollout status deploymentconfig/logging-es-ops-data-master-gcngcleo' expecting success
Standard output from the command:
replication controller "logging-es-ops-data-master-gcngcleo-1" successfully rolled out

There was no error output from the command.
[INFO] Checking for Routes...
Running test/cluster/rollout.sh:30: executing 'oc get route logging-kibana' expecting success...
SUCCESS after 0.257s: test/cluster/rollout.sh:30: executing 'oc get route logging-kibana' expecting success
Standard output from the command:
NAME             HOST/PORT                                 PATH      SERVICES         PORT      TERMINATION          WILDCARD
logging-kibana   kibana.router.default.svc.cluster.local             logging-kibana   <all>     reencrypt/Redirect   None

There was no error output from the command.
Running test/cluster/rollout.sh:30: executing 'oc get route logging-kibana-ops' expecting success...
SUCCESS after 0.259s: test/cluster/rollout.sh:30: executing 'oc get route logging-kibana-ops' expecting success
Standard output from the command:
NAME                 HOST/PORT                                     PATH      SERVICES             PORT      TERMINATION          WILDCARD
logging-kibana-ops   kibana-ops.router.default.svc.cluster.local             logging-kibana-ops   <all>     reencrypt/Redirect   None

There was no error output from the command.
[INFO] Checking for Services...
Running test/cluster/rollout.sh:35: executing 'oc get service logging-es' expecting success...
SUCCESS after 0.240s: test/cluster/rollout.sh:35: executing 'oc get service logging-es' expecting success
Standard output from the command:
NAME         CLUSTER-IP       EXTERNAL-IP   PORT(S)    AGE
logging-es   172.30.186.149   <none>        9200/TCP   2m

There was no error output from the command.
Running test/cluster/rollout.sh:35: executing 'oc get service logging-es-cluster' expecting success...
SUCCESS after 0.212s: test/cluster/rollout.sh:35: executing 'oc get service logging-es-cluster' expecting success
Standard output from the command:
NAME                 CLUSTER-IP      EXTERNAL-IP   PORT(S)    AGE
logging-es-cluster   172.30.196.81   <none>        9300/TCP   2m

There was no error output from the command.
Running test/cluster/rollout.sh:35: executing 'oc get service logging-kibana' expecting success...
SUCCESS after 0.242s: test/cluster/rollout.sh:35: executing 'oc get service logging-kibana' expecting success
Standard output from the command:
NAME             CLUSTER-IP      EXTERNAL-IP   PORT(S)   AGE
logging-kibana   172.30.120.17   <none>        443/TCP   2m

There was no error output from the command.
Running test/cluster/rollout.sh:35: executing 'oc get service logging-es-ops' expecting success...
SUCCESS after 0.211s: test/cluster/rollout.sh:35: executing 'oc get service logging-es-ops' expecting success
Standard output from the command:
NAME             CLUSTER-IP       EXTERNAL-IP   PORT(S)    AGE
logging-es-ops   172.30.231.128   <none>        9200/TCP   2m

There was no error output from the command.
Running test/cluster/rollout.sh:35: executing 'oc get service logging-es-ops-cluster' expecting success...
SUCCESS after 0.215s: test/cluster/rollout.sh:35: executing 'oc get service logging-es-ops-cluster' expecting success
Standard output from the command:
NAME                     CLUSTER-IP      EXTERNAL-IP   PORT(S)    AGE
logging-es-ops-cluster   172.30.209.93   <none>        9300/TCP   2m

There was no error output from the command.
Running test/cluster/rollout.sh:35: executing 'oc get service logging-kibana-ops' expecting success...
SUCCESS after 0.254s: test/cluster/rollout.sh:35: executing 'oc get service logging-kibana-ops' expecting success
Standard output from the command:
NAME                 CLUSTER-IP       EXTERNAL-IP   PORT(S)   AGE
logging-kibana-ops   172.30.200.101   <none>        443/TCP   1m

There was no error output from the command.
[INFO] Checking for OAuthClients...
Running test/cluster/rollout.sh:40: executing 'oc get oauthclient kibana-proxy' expecting success...
SUCCESS after 0.211s: test/cluster/rollout.sh:40: executing 'oc get oauthclient kibana-proxy' expecting success
Standard output from the command:
NAME           SECRET                                                             WWW-CHALLENGE   REDIRECT URIS
kibana-proxy   eyeadORqPAm76VknVrgD1vLYplFWWgD8ITypIiGMpnQd40wOx7fSruVlbc5guvFY   FALSE           https://kibana-ops.router.default.svc.cluster.local

There was no error output from the command.
[INFO] Checking for DaemonSets...
Running test/cluster/rollout.sh:45: executing 'oc get daemonset logging-fluentd' expecting success...
SUCCESS after 0.209s: test/cluster/rollout.sh:45: executing 'oc get daemonset logging-fluentd' expecting success
Standard output from the command:
NAME              DESIRED   CURRENT   READY     UP-TO-DATE   AVAILABLE   NODE-SELECTOR                AGE
logging-fluentd   1         1         1         1            1           logging-infra-fluentd=true   1m

There was no error output from the command.
Running test/cluster/rollout.sh:47: executing 'oc get daemonset logging-fluentd -o jsonpath='{ .status.numberReady }'' expecting any result and text '1'; re-trying every 0.2s until completion or 60.000s...
SUCCESS after 0.216s: test/cluster/rollout.sh:47: executing 'oc get daemonset logging-fluentd -o jsonpath='{ .status.numberReady }'' expecting any result and text '1'; re-trying every 0.2s until completion or 60.000s
Standard output from the command:
1
There was no error output from the command.
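
The jsonpath probe above is the harness's generic "wait until a status field reaches a value" pattern. A standalone version of the same loop, with the retry interval from the log and the timeout handling omitted (a sketch):

    # Sketch: poll the daemonset until one pod reports ready
    until [ "$(oc get daemonset logging-fluentd \
               -o jsonpath='{ .status.numberReady }')" = "1" ]; do
        sleep 0.2
    done
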
Checking for log entry matches between ES and their sources...
WARNING: bridge-nf-call-ip6tables is disabled
Running test/cluster/functionality.sh:40: executing 'oc login --username=admin --password=admin' expecting success...
SUCCESS after 0.287s: test/cluster/functionality.sh:40: executing 'oc login --username=admin --password=admin' expecting success
Standard output from the command:
Login successful.

You have access to the following projects and can switch between them with 'oc project <projectname>':

    default
    kube-public
    kube-system
  * logging
    openshift
    openshift-infra

Using project "logging".

There was no error output from the command.
Running test/cluster/functionality.sh:44: executing 'oc login --username=system:admin' expecting success...
SUCCESS after 0.235s: test/cluster/functionality.sh:44: executing 'oc login --username=system:admin' expecting success
Standard output from the command:
Logged into "https://172.18.14.90:8443" as "system:admin" using existing credentials.

You have access to the following projects and can switch between them with 'oc project <projectname>':

    default
    kube-public
    kube-system
  * logging
    openshift
    openshift-infra

Using project "logging".

There was no error output from the command.
Running test/cluster/functionality.sh:45: executing 'oc project logging' expecting success...
SUCCESS after 0.233s: test/cluster/functionality.sh:45: executing 'oc project logging' expecting success
Standard output from the command:
Already on project "logging" on server "https://172.18.14.90:8443".

There was no error output from the command.
[INFO] Testing Kibana pod logging-kibana-1-n3w3w for a successful start...
Running test/cluster/functionality.sh:52: executing 'oc exec logging-kibana-1-n3w3w -c kibana -- curl -s --request HEAD --write-out '%{response_code}' http://localhost:5601/' expecting any result and text '200'; re-trying every 0.2s until completion or 600.000s...
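
The probe interrupted below HEADs Kibana's HTTP port from inside the pod and compares the status code to 200; the build was aborted while it was still polling. Run by hand it looks like this (note that curl --request HEAD can stall waiting for a body that never arrives; --head is the safer spelling for ad-hoc use):

    # Sketch: the same in-pod readiness check, run manually
    oc exec logging-kibana-1-n3w3w -c kibana -- \
        curl -s --head -o /dev/null --write-out '%{response_code}' http://localhost:5601/
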
Error while running ssh/sudo command: 
set -e
pushd /data/src/github.com/openshift/origin-aggregated-logging/hack/testing >/dev/null
export PATH=$GOPATH/bin:$PATH

echo '***************************************************'
echo 'Running GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh...'
time GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh
echo 'Finished GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh'
echo '***************************************************'

popd >/dev/null
        
Build was aborted
Aborted by Rich Megginson
Publish artifacts to S3 Bucket
Skipping publishing on S3 because build aborted
[description-setter] Could not determine description.
[PostBuildScript] - Execution post build scripts.
[workspace] $ /bin/sh -xe /tmp/hudson7718430364696348438.sh
+ INSTANCE_NAME=origin_logging-rhel7-1634
+ pushd origin
~/jobs/test-origin-aggregated-logging/workspace/origin ~/jobs/test-origin-aggregated-logging/workspace
+ rc=0
+ '[' -f .vagrant-openshift.json ']'
++ /usr/bin/vagrant ssh -c 'sudo ausearch -m avc'
+ ausearchresult=
+ rc=255
+ '[' '' = '<no matches>' ']'
+ /usr/bin/vagrant destroy -f
==> openshiftdev: Terminating the instance...
==> openshiftdev: Running cleanup tasks for 'shell' provisioner...
+ popd
~/jobs/test-origin-aggregated-logging/workspace
+ exit 255
Build step 'Execute a set of scripts' marked build as failure
[BFA] Scanning build for known causes...
[BFA] No failure causes found
[BFA] Done. 0s
Finished: ABORTED