Console Output (Failed)

Skipping 277 KB..
********************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:42
ok: [openshift] => {
    "ansible_facts": {
        "kibana_component": "kibana-ops", 
        "kibana_name": "logging-kibana-ops"
    }, 
    "changed": false
}

TASK [openshift_logging_kibana : Retrieving the cert to use when generating secrets for the logging components] ***
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:46
ok: [openshift] => (item={u'name': u'ca_file', u'file': u'ca.crt'}) => {
    "changed": false, 
    "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREl5TURReE0xb1hEVEl5TURZd056SXlNRFF4TkZvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQU52Z3IrSlp0dk9HVUxjTU9EUmZkemIrdktmclZaOHZnTXo2SmU3eHQ0dW4KeGdvZksrRTlHaWJsTENEdktSUXBvLzBNUk1Fa1dndWtVRm1WSkZBMHlwQjlSZjRJVit6dCtSQkszRklLZzB1dQpvYXV3ZE1XTEhrZkxnNXUvdzBjMnZLVmhDb0NOaEhSSUxCcnJCSkhWQnhMQXZOTyszNnF5RS9wcFJoSGs0T1E2CnV4MSszYU4xSkprcmhHU3JYMXU3QnVQdFZxaFRBOU1sR3R6Y3RYNFM1QngrbzJVcHJxaTJLTTZzSURtcGhhU0QKVHZJeUJZVHJSOE1FVi9CM0xVRHJSNC9XUndNUVJlRUtFYko4MEw2SXgydlRBYW9lZ3dmY3FWeFN1U2xvY0Q3NApFNUJsaHZiQW5VT1g1RDhDNWZ3WXBrRHppZWJ4YURGakRIRDZVdm96QkVrQ0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFJVXMKQVdNcXl6bFNoSEtacTFvWEx3SjhHbmloL201S3JQa1U3bUVHMDhkdW1yZEVxaDVTSU1oTGowaWtVNnJTVDN2RgpxMEVqUnNvc3pQWVFwU05iR0V6WWE4QkVwM1VPa1FyTFExcWlVaXFGODhqa0p5anhqTnhJMEtUL2FmL1pPZ0RCCnJ3OW01UG4yeWVvK2tpUnlrVDREb2FGcyszWWxYU2w0UHRlY1JwRVhrcFZIcit3MlBiMHF0K0hxcFlSMkxTQUcKR0Y5NktBbTNON0VCUjc2dnVGS05zNkU3M1RQSWRzMHVxK1dGS2dxRHFXN0wzamJ1WUR2aERMZ0pHUXdOa1VVcwpqWWZpcm51cWNJUVlRZWZyMWw4VllvZ2RjQTExOXR5ZnJERzU3RHJudGtRNjJoMUh2S0liQjNrZy9VRVowQStHCkJGaHk2bkRQOFhaVlB2azg2NDA9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K", 
    "encoding": "base64", 
    "item": {
        "file": "ca.crt", 
        "name": "ca_file"
    }, 
    "source": "/etc/origin/logging/ca.crt"
}
ok: [openshift] => (item={u'name': u'kibana_internal_key', u'file': u'kibana-internal.key'}) => {
    "changed": false, 
    "content": "LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFcFFJQkFBS0NBUUVBOTQ1MUpZeHhyZVB5NE53ZXZLd3RmeXpMRndIQWRjOGlqYWZwaFZDZERLV1E5REVkCm5qcTA4elQxSjB1QmZQREJHUEFxL2JKeGRXNk5xdnE2WUlteUt2aE9ac1FudXAyei9TcmtsVXFkYnJaTi95aTEKc2IxU1h3NXhKamE3TlZFMGlSTUhjdVgrVGtJVGZDQ2FWZ3U2REUrdE5UTnhIRnY3MXAyc2VTKzhueVY5V1RwSQpDRVZZZjVIcmdrSVRUcTMwOG1pVndpTGZlTGI4M3FjL3kxZXZoMWI3WHJ2L2phNUhkcEM4SC96NC9uQmtJeG00CkdSNlR4L1ZMRXhwZER1MENwWCtoM3gwZFpaWUFTeUtnQSsvZTlseUxNTDNEN29BQ0hZVE4zUjVVazRTYVVNWFoKWEpZVW8wbzMwOWJGWG9qL3FYZ0VPRmE1MFBpcklpNkV3Y1E4c1FJREFRQUJBb0lCQVFDa2dBQnVOQll5UTkvWAo5YzFpK0ZLL1NHZW40UjBmNjhWOU5nL2g2OENBYitDVHM3T0h1bUVWbjZJc08ydFBON2hJdHdjS1VESStvM2oyCkhlZ05VRktaWlNqTDl4cWpVam9SU0JoZThCY1BRNHN1dThHSTIxSktJaEdrSUFoa2xKS0JaNml5ZkF4eHVreXkKVmdQWW03QkVrQWw3NmJRRzhVUzJ4MDJkdGMvME82bXN5RU5EdWgxcnp5WnFHbHpFV0twNFU0MnJ2d0JOczNpdwpKM3N1T2Fma2dldDRHSmFIQ25WQmJVUkZyTGRzTUZOaERsa3hubk0wamp4U2dCYlgyZkVWQUowMUl4aVVoOXRNClN1a3liSWhnQjQraFE2ODNvc1ZPdk53cVhaekR0RkxYVHU2VVFlT2lFdkNrWlVlVDBCTjJBWDhrdEhzWDM2RjgKQ2lTZG9HZ0JBb0dCQVBuYzV6eXQ1Qjk3azFTYzRLTW44Mk9iYUQxRUwrMFNsaW5DekUwcitrSmI4Mk14aktaKwpzemJtRUhtQ2FiZlpSbmhLYUtmVlMvN2YvT1pwT0JVZ2hjSTJxN3N6Q0xWWXQ0U21zMjc5MWhlUjRRTTQ0NUt1CkI3MzZCeWVEaEt0dEpic0FYQkJ2YmdZWkMrYnhjUXNnUVM2OGRtTE5VUHpPaThlRGxLRWlnbFdCQW9HQkFQMmoKRFVqZ2R2UlZHSTJjeTRWblRvWHVDcStVU2Z1U01EeFJQNE9aSllLVDZpUGxJM1ZBOFE3UU9GNWtPQ2xpbFVnYwp0TzRaZ3Y2Yk00SHFqOW4rUERkUFZyZnlLcURyb3IzUElsSjdYd2N0RmFSbk42bWQ1dGNDSkRkQ0MvRlJUT0FPCmxQcnpaWWk4T3ZNRCtiZWdlRmRMYTVDZ3lHMXJVK0ZTc0c5YWwxOHhBb0dCQU1JVjJMem1xa3ZXTnRSRDQ2NTQKeXZOMWRSd2FDa3hSNHJqZnFzakxQMk1xZjcxdWNMMm5yMWdXajNlRFdORkpnaGJRWGNqNHcwWmhBTmFOcHRXcQpaSncrMFZqaFJJTnhQV1JhQlFsVFBXa2psTEJuRVVjUHpjM0JHS3VWMm9rNmlGTmc3U0tMRytDNnRlY1hadUxvCmxCbFVwWFVMM1R2eXlOc2FsMm92SHlTQkFvR0JBSTIwZGl5Y00vbUlQWUtJOG5BaTZvMExGRzlLTXhkWmNwZVEKY1NMT0FsWDAyK0NZR1hwZUFRL2NwMUdkUG45b3M2eXBWVENka2tFN1pzaVR5ekRveUszQ0p0bjRjM2Fhajc3cQpKZXZkUDVkYnlmYWpNRWNVa0M3WjVHLzI5NE1vZ0FIQ1E5djg5bUl4ZitIYis3MXE2ekVpVXdObndQTFkvdkVwCmFLZWQvbXBSQW9HQUJvSmlDVmVNZlpLWVlPcm1pVE13VjI3TlNHODROczRDajRveHRTa1A3ZHh0Wjl0emRSZXkKcGd3emsrOHZKRm4rRjJyMDJZbXhrYnNzK1RDemRQN1NlS1ZkZGllTlJHY1MwdkcxcURLbklCK2tmSVdMaDc2eApIbGpaMDN1TTdEbTMzYVJNdkZFR2QrM3lBWXh4ODBhOXl1bEsyelpEMTk3R01hUHZGOUpXR1VzPQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQo=", 
    "encoding": "base64", 
    "item": {
        "file": "kibana-internal.key", 
        "name": "kibana_internal_key"
    }, 
    "source": "/etc/origin/logging/kibana-internal.key"
}
ok: [openshift] => (item={u'name': u'kibana_internal_cert', u'file': u'kibana-internal.crt'}) => {
    "changed": false, 
    "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURUakNDQWphZ0F3SUJBZ0lCQWpBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREl5TURReE5Wb1hEVEU1TURZd09ESXlNRFF4TmxvdwpGakVVTUJJR0ExVUVBeE1MSUd0cFltRnVZUzF2Y0hNd2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUJEd0F3CmdnRUtBb0lCQVFEM2puVWxqSEd0NC9MZzNCNjhyQzEvTE1zWEFjQjF6eUtOcCttRlVKME1wWkQwTVIyZU9yVHoKTlBVblM0Rjg4TUVZOENyOXNuRjFibzJxK3JwZ2liSXErRTVteENlNm5iUDlLdVNWU3AxdXRrMy9LTFd4dlZKZgpEbkVtTnJzMVVUU0pFd2R5NWY1T1FoTjhJSnBXQzdvTVQ2MDFNM0VjVy92V25heDVMN3lmSlgxWk9rZ0lSVmgvCmtldUNRaE5PcmZUeWFKWENJdDk0dHZ6ZXB6L0xWNitIVnZ0ZXUvK05ya2Qya0x3Zi9QaitjR1FqR2JnWkhwUEgKOVVzVEdsME83UUtsZjZIZkhSMWxsZ0JMSXFBRDc5NzJYSXN3dmNQdWdBSWRoTTNkSGxTVGhKcFF4ZGxjbGhTagpTamZUMXNWZWlQK3BlQVE0VnJuUStLc2lMb1RCeER5eEFnTUJBQUdqZ1o0d2dac3dEZ1lEVlIwUEFRSC9CQVFECkFnV2dNQk1HQTFVZEpRUU1NQW9HQ0NzR0FRVUZCd01CTUF3R0ExVWRFd0VCL3dRQ01BQXdaZ1lEVlIwUkJGOHcKWFlJTElHdHBZbUZ1WVMxdmNIT0NMQ0JyYVdKaGJtRXRiM0J6TG5KdmRYUmxjaTVrWldaaGRXeDBMbk4yWXk1agpiSFZ6ZEdWeUxteHZZMkZzZ2hnZ2EybGlZVzVoTGpFeU55NHdMakF1TVM1NGFYQXVhVytDQm10cFltRnVZVEFOCkJna3Foa2lHOXcwQkFRc0ZBQU9DQVFFQTFTNi9VRUhGeVp0YWlWL005MHNFTEE2TjNYSGpObVBGQk82UXVEL3YKUTFkSytGWHdub0prcm5QbDhtcGR6M2dXWnE0Qjh1QnliUkRlYXdrMXdvcE10bTBucXFhaXRKZ0s3WlBRbHl5NwpZVmlScGR3MS90dXJCdjg5Rlp0STJURnd5RWIyVVFSd0ZnODZhY0M2ekdCUHF0MHYzWG00SGdpc0JPM3lNT0srClJLQzNmZ0RiTXE4SGY5bGpFZWVuTnJHNG04MTd6SGtwK1pCcGZmaFhLRTVneFdzNXM5SFBxVE5SSFAwY0R1UXQKM0FSeDhvV3pzSkoxYUdINzJaQVlad2RpaUZrZUFpNWtGenhZUEIwQlZCWXpZQzBqSndBRDNEVThzenc2T2pJWQpXR1BZaTBieURYRy9MVDNPNytUUmRNaTlMSlUzM3pQamd4R2RDRmZPcXlHM2JnPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQotLS0tLUJFR0lOIENFUlRJRklDQVRFLS0tLS0KTUlJQzJqQ0NBY0tnQXdJQkFnSUJBVEFOQmdrcWhraUc5dzBCQVFzRkFEQWVNUnd3R2dZRFZRUURFeE5zYjJkbgphVzVuTFhOcFoyNWxjaTEwWlhOME1CNFhEVEUzTURZd09ESXlNRFF4TTFvWERUSXlNRFl3TnpJeU1EUXhORm93CkhqRWNNQm9HQTFVRUF4TVRiRzluWjJsdVp5MXphV2R1WlhJdGRHVnpkRENDQVNJd0RRWUpLb1pJaHZjTkFRRUIKQlFBRGdnRVBBRENDQVFvQ2dnRUJBTnZncitKWnR2T0dVTGNNT0RSZmR6Yit2S2ZyVlo4dmdNejZKZTd4dDR1bgp4Z29mSytFOUdpYmxMQ0R2S1JRcG8vME1STUVrV2d1a1VGbVZKRkEweXBCOVJmNElWK3p0K1JCSzNGSUtnMHV1Cm9hdXdkTVdMSGtmTGc1dS93MGMydktWaENvQ05oSFJJTEJyckJKSFZCeExBdk5PKzM2cXlFL3BwUmhIazRPUTYKdXgxKzNhTjFKSmtyaEdTclgxdTdCdVB0VnFoVEE5TWxHdHpjdFg0UzVCeCtvMlVwcnFpMktNNnNJRG1waGFTRApUdkl5QllUclI4TUVWL0IzTFVEclI0L1dSd01RUmVFS0ViSjgwTDZJeDJ2VEFhb2Vnd2ZjcVZ4U3VTbG9jRDc0CkU1QmxodmJBblVPWDVEOEM1ZndZcGtEemllYnhhREZqREhENlV2b3pCRWtDQXdFQUFhTWpNQ0V3RGdZRFZSMFAKQVFIL0JBUURBZ0trTUE4R0ExVWRFd0VCL3dRRk1BTUJBZjh3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUlVcwpBV01xeXpsU2hIS1pxMW9YTHdKOEduaWgvbTVLclBrVTdtRUcwOGR1bXJkRXFoNVNJTWhMajBpa1U2clNUM3ZGCnEwRWpSc29zelBZUXBTTmJHRXpZYThCRXAzVU9rUXJMUTFxaVVpcUY4OGprSnlqeGpOeEkwS1QvYWYvWk9nREIKcnc5bTVQbjJ5ZW8ra2lSeWtUNERvYUZzKzNZbFhTbDRQdGVjUnBFWGtwVkhyK3cyUGIwcXQrSHFwWVIyTFNBRwpHRjk2S0FtM043RUJSNzZ2dUZLTnM2RTczVFBJZHMwdXErV0ZLZ3FEcVc3TDNqYnVZRHZoRExnSkdRd05rVVVzCmpZZmlybnVxY0lRWVFlZnIxbDhWWW9nZGNBMTE5dHlmckRHNTdEcm50a1E2MmgxSHZLSWJCM2tnL1VFWjBBK0cKQkZoeTZuRFA4WFpWUHZrODY0MD0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", 
    "encoding": "base64", 
    "item": {
        "file": "kibana-internal.crt", 
        "name": "kibana_internal_cert"
    }, 
    "source": "/etc/origin/logging/kibana-internal.crt"
}
ok: [openshift] => (item={u'name': u'server_tls', u'file': u'server-tls.json'}) => {
    "changed": false, 
    "content": "Ly8gU2VlIGZvciBhdmFpbGFibGUgb3B0aW9uczogaHR0cHM6Ly9ub2RlanMub3JnL2FwaS90bHMuaHRtbCN0bHNfdGxzX2NyZWF0ZXNlcnZlcl9vcHRpb25zX3NlY3VyZWNvbm5lY3Rpb25saXN0ZW5lcgp0bHNfb3B0aW9ucyA9IHsKCWNpcGhlcnM6ICdrRUVDREg6K2tFRUNESCtTSEE6a0VESDora0VESCtTSEE6K2tFREgrQ0FNRUxMSUE6a0VDREg6K2tFQ0RIK1NIQTprUlNBOitrUlNBK1NIQTora1JTQStDQU1FTExJQTohYU5VTEw6IWVOVUxMOiFTU0x2MjohUkM0OiFERVM6IUVYUDohU0VFRDohSURFQTorM0RFUycsCglob25vckNpcGhlck9yZGVyOiB0cnVlCn0K", 
    "encoding": "base64", 
    "item": {
        "file": "server-tls.json", 
        "name": "server_tls"
    }, 
    "source": "/etc/origin/logging/server-tls.json"
}

TASK [openshift_logging_kibana : Set logging-kibana-ops service] ***************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:57
changed: [openshift] => {
    "changed": true, 
    "results": {
        "clusterip": "172.30.107.167", 
        "cmd": "/bin/oc get service logging-kibana-ops -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "kind": "Service", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:08Z", 
                    "name": "logging-kibana-ops", 
                    "namespace": "logging", 
                    "resourceVersion": "1478", 
                    "selfLink": "/api/v1/namespaces/logging/services/logging-kibana-ops", 
                    "uid": "892968f5-4c96-11e7-82d5-0ed67230a962"
                }, 
                "spec": {
                    "clusterIP": "172.30.107.167", 
                    "ports": [
                        {
                            "port": 443, 
                            "protocol": "TCP", 
                            "targetPort": "oaproxy"
                        }
                    ], 
                    "selector": {
                        "component": "kibana-ops", 
                        "provider": "openshift"
                    }, 
                    "sessionAffinity": "None", 
                    "type": "ClusterIP"
                }, 
                "status": {
                    "loadBalancer": {}
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:74
 [WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_kibana_key | trim | length
> 0 }}
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}
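The warning above is Ansible flagging that the when: clause in this role is wrapped in Jinja2 delimiters, which is discouraged because when: is already evaluated as a bare Jinja2 expression. A minimal sketch of the corrected form, reusing the variable name from the warning (the task body and the fact name are illustrative only, not taken from this log):

    # when: takes a bare expression; no "{{ ... }}" wrapper is needed
    - set_fact:
        kibana_key_content: "{{ openshift_logging_kibana_key }}"   # hypothetical fact name
      when: openshift_logging_kibana_key | trim | length > 0

The same fix would apply to the two identical warnings below for openshift_logging_kibana_cert and openshift_logging_kibana_ca.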

TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:79
 [WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_kibana_cert | trim | length
> 0 }}
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:84
 [WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_kibana_ca | trim | length >
0 }}
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:89
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_kibana : Generating Kibana route template] *************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:94
ok: [openshift] => {
    "changed": false, 
    "checksum": "d1b3cb31518d545f749c0d096d9fdb79a24bc3c5", 
    "dest": "/tmp/openshift-logging-ansible-fNe9g4/templates/kibana-route.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "86d27484021acc9e563caf99a7b5e928", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 2726, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959509.23-48406767454254/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_kibana : Setting Kibana route] *************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:114
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get route logging-kibana-ops -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "kind": "Route", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:10Z", 
                    "labels": {
                        "component": "support", 
                        "logging-infra": "support", 
                        "provider": "openshift"
                    }, 
                    "name": "logging-kibana-ops", 
                    "namespace": "logging", 
                    "resourceVersion": "1481", 
                    "selfLink": "/oapi/v1/namespaces/logging/routes/logging-kibana-ops", 
                    "uid": "89e146d8-4c96-11e7-82d5-0ed67230a962"
                }, 
                "spec": {
                    "host": "kibana-ops.router.default.svc.cluster.local", 
                    "tls": {
                        "caCertificate": "-----BEGIN CERTIFICATE-----\nMIIC2jCCAcKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAeMRwwGgYDVQQDExNsb2dn\naW5nLXNpZ25lci10ZXN0MB4XDTE3MDYwODIyMDQxM1oXDTIyMDYwNzIyMDQxNFow\nHjEcMBoGA1UEAxMTbG9nZ2luZy1zaWduZXItdGVzdDCCASIwDQYJKoZIhvcNAQEB\nBQADggEPADCCAQoCggEBANvgr+JZtvOGULcMODRfdzb+vKfrVZ8vgMz6Je7xt4un\nxgofK+E9GiblLCDvKRQpo/0MRMEkWgukUFmVJFA0ypB9Rf4IV+zt+RBK3FIKg0uu\noauwdMWLHkfLg5u/w0c2vKVhCoCNhHRILBrrBJHVBxLAvNO+36qyE/ppRhHk4OQ6\nux1+3aN1JJkrhGSrX1u7BuPtVqhTA9MlGtzctX4S5Bx+o2Uprqi2KM6sIDmphaSD\nTvIyBYTrR8MEV/B3LUDrR4/WRwMQReEKEbJ80L6Ix2vTAaoegwfcqVxSuSlocD74\nE5BlhvbAnUOX5D8C5fwYpkDziebxaDFjDHD6UvozBEkCAwEAAaMjMCEwDgYDVR0P\nAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAIUs\nAWMqyzlShHKZq1oXLwJ8Gnih/m5KrPkU7mEG08dumrdEqh5SIMhLj0ikU6rST3vF\nq0EjRsoszPYQpSNbGEzYa8BEp3UOkQrLQ1qiUiqF88jkJyjxjNxI0KT/af/ZOgDB\nrw9m5Pn2yeo+kiRykT4DoaFs+3YlXSl4PtecRpEXkpVHr+w2Pb0qt+HqpYR2LSAG\nGF96KAm3N7EBR76vuFKNs6E73TPIds0uq+WFKgqDqW7L3jbuYDvhDLgJGQwNkUUs\njYfirnuqcIQYQefr1l8VYogdcA119tyfrDG57DrntkQ62h1HvKIbB3kg/UEZ0A+G\nBFhy6nDP8XZVPvk8640=\n-----END CERTIFICATE-----\n", 
                        "destinationCACertificate": "-----BEGIN CERTIFICATE-----\nMIIC2jCCAcKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAeMRwwGgYDVQQDExNsb2dn\naW5nLXNpZ25lci10ZXN0MB4XDTE3MDYwODIyMDQxM1oXDTIyMDYwNzIyMDQxNFow\nHjEcMBoGA1UEAxMTbG9nZ2luZy1zaWduZXItdGVzdDCCASIwDQYJKoZIhvcNAQEB\nBQADggEPADCCAQoCggEBANvgr+JZtvOGULcMODRfdzb+vKfrVZ8vgMz6Je7xt4un\nxgofK+E9GiblLCDvKRQpo/0MRMEkWgukUFmVJFA0ypB9Rf4IV+zt+RBK3FIKg0uu\noauwdMWLHkfLg5u/w0c2vKVhCoCNhHRILBrrBJHVBxLAvNO+36qyE/ppRhHk4OQ6\nux1+3aN1JJkrhGSrX1u7BuPtVqhTA9MlGtzctX4S5Bx+o2Uprqi2KM6sIDmphaSD\nTvIyBYTrR8MEV/B3LUDrR4/WRwMQReEKEbJ80L6Ix2vTAaoegwfcqVxSuSlocD74\nE5BlhvbAnUOX5D8C5fwYpkDziebxaDFjDHD6UvozBEkCAwEAAaMjMCEwDgYDVR0P\nAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAIUs\nAWMqyzlShHKZq1oXLwJ8Gnih/m5KrPkU7mEG08dumrdEqh5SIMhLj0ikU6rST3vF\nq0EjRsoszPYQpSNbGEzYa8BEp3UOkQrLQ1qiUiqF88jkJyjxjNxI0KT/af/ZOgDB\nrw9m5Pn2yeo+kiRykT4DoaFs+3YlXSl4PtecRpEXkpVHr+w2Pb0qt+HqpYR2LSAG\nGF96KAm3N7EBR76vuFKNs6E73TPIds0uq+WFKgqDqW7L3jbuYDvhDLgJGQwNkUUs\njYfirnuqcIQYQefr1l8VYogdcA119tyfrDG57DrntkQ62h1HvKIbB3kg/UEZ0A+G\nBFhy6nDP8XZVPvk8640=\n-----END CERTIFICATE-----\n", 
                        "insecureEdgeTerminationPolicy": "Redirect", 
                        "termination": "reencrypt"
                    }, 
                    "to": {
                        "kind": "Service", 
                        "name": "logging-kibana-ops", 
                        "weight": 100
                    }, 
                    "wildcardPolicy": "None"
                }, 
                "status": {
                    "ingress": [
                        {
                            "conditions": [
                                {
                                    "lastTransitionTime": "2017-06-08T22:05:10Z", 
                                    "status": "True", 
                                    "type": "Admitted"
                                }
                            ], 
                            "host": "kibana-ops.router.default.svc.cluster.local", 
                            "routerName": "router", 
                            "wildcardPolicy": "None"
                        }
                    ]
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_kibana : Generate proxy session] ***********************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:125
ok: [openshift] => {
    "ansible_facts": {
        "session_secret": "jxDCBgeNKWot47Wl6p10UdLnswe0KrbJYJbsirbebJ2CdmDRw4lofQpBsYiGyxgWfqPVg94l9xAYAnTjXAYHlLL8MX4jY3a98F66JLBvxGt9XietKfTEB4GFCQBqygDtnv7QHPV6FbnJiXSiRfU8Uf0iVErbP5l0WlelSrs6kbUfCdnxyL0ypcRbXqa6830dK277isNu"
    }, 
    "changed": false
}

TASK [openshift_logging_kibana : Generate oauth client secret] *****************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:132
ok: [openshift] => {
    "ansible_facts": {
        "oauth_secret": "aplh9WBpRVqrnOaFfKQTr4CIS5suYDR1P9QZLUks9KEpsWrF1HlZ7g80eXj0XvD1"
    }, 
    "changed": false
}

TASK [openshift_logging_kibana : Create oauth-client template] *****************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:138
changed: [openshift] => {
    "changed": true, 
    "checksum": "1c082caf6a85a1e6a49e093c8f1a33fa7965366d", 
    "dest": "/tmp/openshift-logging-ansible-fNe9g4/templates/oauth-client.yml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "872f72624b4507d6baecf0c9440582c4", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 332, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959510.35-246705307513070/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_kibana : Set kibana-proxy oauth-client] ****************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:146
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get oauthclient kibana-proxy -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "kind": "OAuthClient", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:03Z", 
                    "labels": {
                        "logging-infra": "support"
                    }, 
                    "name": "kibana-proxy", 
                    "resourceVersion": "1482", 
                    "selfLink": "/oapi/v1/oauthclients/kibana-proxy", 
                    "uid": "8600b8a6-4c96-11e7-82d5-0ed67230a962"
                }, 
                "redirectURIs": [
                    "https://kibana-ops.router.default.svc.cluster.local"
                ], 
                "scopeRestrictions": [
                    {
                        "literals": [
                            "user:info", 
                            "user:check-access", 
                            "user:list-projects"
                        ]
                    }
                ], 
                "secret": "aplh9WBpRVqrnOaFfKQTr4CIS5suYDR1P9QZLUks9KEpsWrF1HlZ7g80eXj0XvD1"
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_kibana : Set Kibana secret] ****************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:157
ok: [openshift] => {
    "changed": false, 
    "results": {
        "apiVersion": "v1", 
        "data": {
            "ca": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREl5TURReE0xb1hEVEl5TURZd056SXlNRFF4TkZvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQU52Z3IrSlp0dk9HVUxjTU9EUmZkemIrdktmclZaOHZnTXo2SmU3eHQ0dW4KeGdvZksrRTlHaWJsTENEdktSUXBvLzBNUk1Fa1dndWtVRm1WSkZBMHlwQjlSZjRJVit6dCtSQkszRklLZzB1dQpvYXV3ZE1XTEhrZkxnNXUvdzBjMnZLVmhDb0NOaEhSSUxCcnJCSkhWQnhMQXZOTyszNnF5RS9wcFJoSGs0T1E2CnV4MSszYU4xSkprcmhHU3JYMXU3QnVQdFZxaFRBOU1sR3R6Y3RYNFM1QngrbzJVcHJxaTJLTTZzSURtcGhhU0QKVHZJeUJZVHJSOE1FVi9CM0xVRHJSNC9XUndNUVJlRUtFYko4MEw2SXgydlRBYW9lZ3dmY3FWeFN1U2xvY0Q3NApFNUJsaHZiQW5VT1g1RDhDNWZ3WXBrRHppZWJ4YURGakRIRDZVdm96QkVrQ0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFJVXMKQVdNcXl6bFNoSEtacTFvWEx3SjhHbmloL201S3JQa1U3bUVHMDhkdW1yZEVxaDVTSU1oTGowaWtVNnJTVDN2RgpxMEVqUnNvc3pQWVFwU05iR0V6WWE4QkVwM1VPa1FyTFExcWlVaXFGODhqa0p5anhqTnhJMEtUL2FmL1pPZ0RCCnJ3OW01UG4yeWVvK2tpUnlrVDREb2FGcyszWWxYU2w0UHRlY1JwRVhrcFZIcit3MlBiMHF0K0hxcFlSMkxTQUcKR0Y5NktBbTNON0VCUjc2dnVGS05zNkU3M1RQSWRzMHVxK1dGS2dxRHFXN0wzamJ1WUR2aERMZ0pHUXdOa1VVcwpqWWZpcm51cWNJUVlRZWZyMWw4VllvZ2RjQTExOXR5ZnJERzU3RHJudGtRNjJoMUh2S0liQjNrZy9VRVowQStHCkJGaHk2bkRQOFhaVlB2azg2NDA9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K", 
            "cert": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURSVENDQWkyZ0F3SUJBZ0lCQXpBTkJna3Foa2lHOXcwQkFRVUZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREl5TURReE9Wb1hEVEU1TURZd09ESXlNRFF4T1ZvdwpSakVRTUE0R0ExVUVDZ3dIVEc5bloybHVaekVTTUJBR0ExVUVDd3dKVDNCbGJsTm9hV1owTVI0d0hBWURWUVFECkRCVnplWE4wWlcwdWJHOW5aMmx1Wnk1cmFXSmhibUV3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUUN3K2VMaWQ0QUtEc0dsRkl3WDYvSW80cWdBZHVCblZaUDQydlg0Wi82OFNmVFB3cHJRV0dhdwplbTh1eFZQVElzbFZXa0R5blovbXhicXNtSW9pbzdqN3Qwc2RnSTJVRk1oYnhWS0dEbFVqbkUvbFc1YXB4aXVnCi94TWt6bjd2V1FOZHVYMVBUUVpSbFpKcmpGWFQzMU1iVnlUeWZSWG84bHkvdHAwQm5pQnFNZzJxa2N6NHpzUWwKbHE2RWdPS1BBbDA0T2hjM01yQUcxakJkUDlmNnZtYlJjVXBaRDUrV05KaVRrTEl6QlgxZTh1MFJsamFyTlQrUwpnbTlkeGlTTURENnhoQ2NCWFZkWGo3eE9lV0VNcmZ6b3ZWbTNvTUJtVG5ya2J2OWxEN3B1aEVNTktFS2NNclBTCjFMbzZGYjhuMWMxTVFVVm9UV3czVllSb1hTVGVrU1E1QWdNQkFBR2paakJrTUE0R0ExVWREd0VCL3dRRUF3SUYKb0RBSkJnTlZIUk1FQWpBQU1CMEdBMVVkSlFRV01CUUdDQ3NHQVFVRkJ3TUJCZ2dyQmdFRkJRY0RBakFkQmdOVgpIUTRFRmdRVVJ2ZitOSGZpejVxem91VnhwVWUvNU9odU50WXdDUVlEVlIwakJBSXdBREFOQmdrcWhraUc5dzBCCkFRVUZBQU9DQVFFQVBqeUVEODhSa1BaZ0ozWklMWmphUWZ0RmVIZG4rNC92eG1VY3NxMzU4RHZ5QUlkakFVRU0KRE9hVlFyNFlvVndwbVFheFNHdWRTQjl3a0ZwNmlpaDkxVFA2eUlmR01GM0NHZGd2bmpjb3M3b0UzdHM2Z1BrQwplK3duU3NBc2NPODBMR3NsaFg1bm9OTGQ2RmlNTVBBbWFJUVdseEFOMWJyK2dzZGNWekxZakltYmVkSnpuaHJ4CnFrTXc0TjJ4Z3lRREliVFgwNzhEaURkaG81ZmpZTm9sTnRWMXNhTFJraXdob1pTQ2hGZERuci83ZlRIK1o0VlIKTVpsVVlkTDMyYU1MRHFVZFFEdG9kRWErb1VTZ2NhOS92QUw0RGx6UlQyWERjVXFieWwxellseW9jeXF1aEdCcgpLMmQ1ZnJ4MmVMUHJEekdmbExBbFNqRUZsWWl4UE91bzNRPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", 
            "key": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQ3crZUxpZDRBS0RzR2wKRkl3WDYvSW80cWdBZHVCblZaUDQydlg0Wi82OFNmVFB3cHJRV0dhd2VtOHV4VlBUSXNsVldrRHluWi9teGJxcwptSW9pbzdqN3Qwc2RnSTJVRk1oYnhWS0dEbFVqbkUvbFc1YXB4aXVnL3hNa3puN3ZXUU5kdVgxUFRRWlJsWkpyCmpGWFQzMU1iVnlUeWZSWG84bHkvdHAwQm5pQnFNZzJxa2N6NHpzUWxscTZFZ09LUEFsMDRPaGMzTXJBRzFqQmQKUDlmNnZtYlJjVXBaRDUrV05KaVRrTEl6QlgxZTh1MFJsamFyTlQrU2dtOWR4aVNNREQ2eGhDY0JYVmRYajd4TwplV0VNcmZ6b3ZWbTNvTUJtVG5ya2J2OWxEN3B1aEVNTktFS2NNclBTMUxvNkZiOG4xYzFNUVVWb1RXdzNWWVJvClhTVGVrU1E1QWdNQkFBRUNnZ0VBSGk2djVDR3ZRNGZhdkxXeFlzOEpvTEZ2SjQwZW5tRDQrSU1YV2dNYWJiQkwKUVRlV1Fad2gvMDBTTXk3U0tmc1B6UyswVGV4Vi9meVE1cWlwUGxrZnprWW5XR081UVp6OXdyRDNid2Q0dTdjSAo1VHptaUJQVkV3TU5sOFJqMzV3VnQrRlBucnVyTFRUT0lPdHhJODQ5bFRjYWhRU1NQUVZBamkydHZ6M2hsMXFqCldBTVJIMXpBOVIxdVpKMVNaODdSVGp1NHNEVktLYlV1bHVXaWtRbGpDYmMyMjNXdS82M0pFR2VVN2kyOGFaNnQKZ1IvL1VDS0tKMlpsbjJ4dWVRbGZHWkpiUUc3bGQ0UzVOeGN2Y3FxY1FKdVd0cTVWOGp0YmlWUTlza1JsTGxrSwprUWp4U1NKZkpZL01WczdVY0pPYjZMUkI4cG0wblZ3UHAveFRaQUdweFFLQmdRRGIycUtCQkJuUkU4aGZLQjBrClJmOFpuZ1d0RG5uaHFUNTFYWGNYM3VLMjNjL2VIZWlGRU1RTUdOVjlGWmw5UkNuZ3VyeTRSdGtZay9pTm1jUGUKeU9BNHNaeGFwczNRZDJDazRGU1pnMy9LQ1d5ZENPQUFYMWp2UUFHMGlHNi93dGdNeGxiWnR0UXJaakkvSmwxVwpiUThlc2ZPR3grV0Q5Vm5OeVFnL1pYRkw0d0tCZ1FET0VwT1YxYjVEb0dYK0pFSU5BUDgxR2xTQlZ2ZEh2Q0ljClZ4RWQ5R3NRMVlGN01lajBQWFR0Nk5FZWZiZjdvVktLQk4weHRMQTBPL3BYUktrUUoraDUwODN4aURTTndxZjIKaFcyVVdUSGpYZTd0eUhOR0dHSGRXVUt3cU1XTUFPVGhBS3g3R21DY1dqNXdCZmxXTnJXaFZZVEV1T3ZhY2UzYgpJVzJDZngvQ013S0JnRGJzLzA1R0xRRDc5VUk0cmxjaW1hSFh4Y2svWlpuSUE0K2t3eDBXbFFOa2xRc0Q0OVJpCnBPZjB5RmxRclNFTGgzN2FpRTF0ZmMxVEluU1Y1c1pnM3d1cTY4b0dCRU5CbjhaeE9SR0ZOY2JPNWZDeGgrenYKNVRNWWhaU0p4VWxwTHpxNXpuYmo0MHZZWTFNOWpYSUxHd3pySGxFYzNhMUMvYVdZeE5PNXp6MUxBb0dCQUw3aApCQ1c1Qy9rV1BRTXdVS0k4OXlnWUVJaVJwcEdpMlRISkVFamYyc0NJRTl5cU9XN3E4a0ZnOHpzOEZreVZmSi90CnEzbmNOWTdjczZGY0x3R1JCY3k2L1RzcUFBd0RiNU53dWpTOGZmSnVVdCtjV2ZKbXFWZmYwMTlScFZKdWpaTkIKRFlNbHhyRnRBZ0svOWRuKzZUVGN3eE41ZXJZbjY3dWJkaFYzYlZKekFvR0FDWEpJR1l5ZExuZEZ2SEdsaHI1YgpHZStJQXI4UkVXTWRWSm0yY1YwQVp5eW40SGJYaW8vM24xN0crOGxlVmgrN2U0STg3Uk5UUUwwRXhnZE1mek5kCnJSVWRld3pjdERWdFljTVk2YTJOcUhKalJxZVNEUHBKZExQMjNxczJOdGNwK0hXNEZKcnJnNisyU244RHNqZmMKcW1nSnBzMkRFTlpBTXdZc3dyU05oTW89Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K"
        }, 
        "kind": "Secret", 
        "metadata": {
            "creationTimestamp": null, 
            "name": "logging-kibana"
        }, 
        "type": "Opaque"
    }, 
    "state": "present"
}

TASK [openshift_logging_kibana : Set Kibana Proxy secret] **********************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:171
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc replace -f /tmp/logging-kibana-proxy -n logging", 
        "results": "", 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_kibana : Generate Kibana DC template] ******************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:197
changed: [openshift] => {
    "changed": true, 
    "checksum": "14670bc209a1b2381574bb935673f032f7767a12", 
    "dest": "/tmp/openshift-logging-ansible-fNe9g4/templates/kibana-dc.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "ef074a3e06ec50d36039245b4e0ee1d6", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 3759, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959513.16-277243306100038/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_kibana : Set Kibana DC] ********************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:216
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get dc logging-kibana-ops -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "kind": "DeploymentConfig", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:13Z", 
                    "generation": 2, 
                    "labels": {
                        "component": "kibana-ops", 
                        "logging-infra": "kibana", 
                        "provider": "openshift"
                    }, 
                    "name": "logging-kibana-ops", 
                    "namespace": "logging", 
                    "resourceVersion": "1498", 
                    "selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-kibana-ops", 
                    "uid": "8c387502-4c96-11e7-82d5-0ed67230a962"
                }, 
                "spec": {
                    "replicas": 1, 
                    "selector": {
                        "component": "kibana-ops", 
                        "logging-infra": "kibana", 
                        "provider": "openshift"
                    }, 
                    "strategy": {
                        "activeDeadlineSeconds": 21600, 
                        "resources": {}, 
                        "rollingParams": {
                            "intervalSeconds": 1, 
                            "maxSurge": "25%", 
                            "maxUnavailable": "25%", 
                            "timeoutSeconds": 600, 
                            "updatePeriodSeconds": 1
                        }, 
                        "type": "Rolling"
                    }, 
                    "template": {
                        "metadata": {
                            "creationTimestamp": null, 
                            "labels": {
                                "component": "kibana-ops", 
                                "logging-infra": "kibana", 
                                "provider": "openshift"
                            }, 
                            "name": "logging-kibana-ops"
                        }, 
                        "spec": {
                            "containers": [
                                {
                                    "env": [
                                        {
                                            "name": "ES_HOST", 
                                            "value": "logging-es-ops"
                                        }, 
                                        {
                                            "name": "ES_PORT", 
                                            "value": "9200"
                                        }, 
                                        {
                                            "name": "KIBANA_MEMORY_LIMIT", 
                                            "valueFrom": {
                                                "resourceFieldRef": {
                                                    "containerName": "kibana", 
                                                    "divisor": "0", 
                                                    "resource": "limits.memory"
                                                }
                                            }
                                        }
                                    ], 
                                    "image": "172.30.72.153:5000/logging/logging-kibana:latest", 
                                    "imagePullPolicy": "Always", 
                                    "name": "kibana", 
                                    "readinessProbe": {
                                        "exec": {
                                            "command": [
                                                "/usr/share/kibana/probe/readiness.sh"
                                            ]
                                        }, 
                                        "failureThreshold": 3, 
                                        "initialDelaySeconds": 5, 
                                        "periodSeconds": 5, 
                                        "successThreshold": 1, 
                                        "timeoutSeconds": 4
                                    }, 
                                    "resources": {
                                        "limits": {
                                            "memory": "736Mi"
                                        }
                                    }, 
                                    "terminationMessagePath": "/dev/termination-log", 
                                    "terminationMessagePolicy": "File", 
                                    "volumeMounts": [
                                        {
                                            "mountPath": "/etc/kibana/keys", 
                                            "name": "kibana", 
                                            "readOnly": true
                                        }
                                    ]
                                }, 
                                {
                                    "env": [
                                        {
                                            "name": "OAP_BACKEND_URL", 
                                            "value": "http://localhost:5601"
                                        }, 
                                        {
                                            "name": "OAP_AUTH_MODE", 
                                            "value": "oauth2"
                                        }, 
                                        {
                                            "name": "OAP_TRANSFORM", 
                                            "value": "user_header,token_header"
                                        }, 
                                        {
                                            "name": "OAP_OAUTH_ID", 
                                            "value": "kibana-proxy"
                                        }, 
                                        {
                                            "name": "OAP_MASTER_URL", 
                                            "value": "https://kubernetes.default.svc.cluster.local"
                                        }, 
                                        {
                                            "name": "OAP_PUBLIC_MASTER_URL", 
                                            "value": "https://172.18.2.35:8443"
                                        }, 
                                        {
                                            "name": "OAP_LOGOUT_REDIRECT", 
                                            "value": "https://172.18.2.35:8443/console/logout"
                                        }, 
                                        {
                                            "name": "OAP_MASTER_CA_FILE", 
                                            "value": "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt"
                                        }, 
                                        {
                                            "name": "OAP_DEBUG", 
                                            "value": "False"
                                        }, 
                                        {
                                            "name": "OAP_OAUTH_SECRET_FILE", 
                                            "value": "/secret/oauth-secret"
                                        }, 
                                        {
                                            "name": "OAP_SERVER_CERT_FILE", 
                                            "value": "/secret/server-cert"
                                        }, 
                                        {
                                            "name": "OAP_SERVER_KEY_FILE", 
                                            "value": "/secret/server-key"
                                        }, 
                                        {
                                            "name": "OAP_SERVER_TLS_FILE", 
                                            "value": "/secret/server-tls.json"
                                        }, 
                                        {
                                            "name": "OAP_SESSION_SECRET_FILE", 
                                            "value": "/secret/session-secret"
                                        }, 
                                        {
                                            "name": "OCP_AUTH_PROXY_MEMORY_LIMIT", 
                                            "valueFrom": {
                                                "resourceFieldRef": {
                                                    "containerName": "kibana-proxy", 
                                                    "divisor": "0", 
                                                    "resource": "limits.memory"
                                                }
                                            }
                                        }
                                    ], 
                                    "image": "172.30.72.153:5000/logging/logging-auth-proxy:latest", 
                                    "imagePullPolicy": "Always", 
                                    "name": "kibana-proxy", 
                                    "ports": [
                                        {
                                            "containerPort": 3000, 
                                            "name": "oaproxy", 
                                            "protocol": "TCP"
                                        }
                                    ], 
                                    "resources": {
                                        "limits": {
                                            "memory": "96Mi"
                                        }
                                    }, 
                                    "terminationMessagePath": "/dev/termination-log", 
                                    "terminationMessagePolicy": "File", 
                                    "volumeMounts": [
                                        {
                                            "mountPath": "/secret", 
                                            "name": "kibana-proxy", 
                                            "readOnly": true
                                        }
                                    ]
                                }
                            ], 
                            "dnsPolicy": "ClusterFirst", 
                            "restartPolicy": "Always", 
                            "schedulerName": "default-scheduler", 
                            "securityContext": {}, 
                            "serviceAccount": "aggregated-logging-kibana", 
                            "serviceAccountName": "aggregated-logging-kibana", 
                            "terminationGracePeriodSeconds": 30, 
                            "volumes": [
                                {
                                    "name": "kibana", 
                                    "secret": {
                                        "defaultMode": 420, 
                                        "secretName": "logging-kibana"
                                    }
                                }, 
                                {
                                    "name": "kibana-proxy", 
                                    "secret": {
                                        "defaultMode": 420, 
                                        "secretName": "logging-kibana-proxy"
                                    }
                                }
                            ]
                        }
                    }, 
                    "test": false, 
                    "triggers": [
                        {
                            "type": "ConfigChange"
                        }
                    ]
                }, 
                "status": {
                    "availableReplicas": 0, 
                    "conditions": [
                        {
                            "lastTransitionTime": "2017-06-08T22:05:13Z", 
                            "lastUpdateTime": "2017-06-08T22:05:13Z", 
                            "message": "Deployment config does not have minimum availability.", 
                            "status": "False", 
                            "type": "Available"
                        }, 
                        {
                            "lastTransitionTime": "2017-06-08T22:05:14Z", 
                            "lastUpdateTime": "2017-06-08T22:05:14Z", 
                            "message": "replication controller \"logging-kibana-ops-1\" is waiting for pod \"logging-kibana-ops-1-deploy\" to run", 
                            "status": "Unknown", 
                            "type": "Progressing"
                        }
                    ], 
                    "details": {
                        "causes": [
                            {
                                "type": "ConfigChange"
                            }
                        ], 
                        "message": "config change"
                    }, 
                    "latestVersion": 1, 
                    "observedGeneration": 2, 
                    "replicas": 0, 
                    "unavailableReplicas": 0, 
                    "updatedReplicas": 0
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_kibana : Delete temp directory] ************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:228
ok: [openshift] => {
    "changed": false, 
    "path": "/tmp/openshift-logging-ansible-fNe9g4", 
    "state": "absent"
}

TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:195
statically included: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml

TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:3
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:7
ok: [openshift] => {
    "ansible_facts": {
        "curator_version": "3_5"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:12
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:15
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Create temp directory for doing work in] *****
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:5
ok: [openshift] => {
    "changed": false, 
    "cmd": [
        "mktemp", 
        "-d", 
        "/tmp/openshift-logging-ansible-XXXXXX"
    ], 
    "delta": "0:00:01.003274", 
    "end": "2017-06-08 18:05:16.376582", 
    "rc": 0, 
    "start": "2017-06-08 18:05:15.373308"
}

STDOUT:

/tmp/openshift-logging-ansible-R0Loa0

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:10
ok: [openshift] => {
    "ansible_facts": {
        "tempdir": "/tmp/openshift-logging-ansible-R0Loa0"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : Create templates subdirectory] ***************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:14
ok: [openshift] => {
    "changed": false, 
    "gid": 0, 
    "group": "root", 
    "mode": "0755", 
    "owner": "root", 
    "path": "/tmp/openshift-logging-ansible-R0Loa0/templates", 
    "secontext": "unconfined_u:object_r:user_tmp_t:s0", 
    "size": 6, 
    "state": "directory", 
    "uid": 0
}

TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:24
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:32
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get sa aggregated-logging-curator -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "imagePullSecrets": [
                    {
                        "name": "aggregated-logging-curator-dockercfg-03l6b"
                    }
                ], 
                "kind": "ServiceAccount", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:17Z", 
                    "name": "aggregated-logging-curator", 
                    "namespace": "logging", 
                    "resourceVersion": "1506", 
                    "selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-curator", 
                    "uid": "8e213a1f-4c96-11e7-82d5-0ed67230a962"
                }, 
                "secrets": [
                    {
                        "name": "aggregated-logging-curator-token-3zbtt"
                    }, 
                    {
                        "name": "aggregated-logging-curator-dockercfg-03l6b"
                    }
                ]
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:41
ok: [openshift] => {
    "changed": false, 
    "checksum": "9008efd9a8892dcc42c28c6dfb6708527880a6d8", 
    "dest": "/tmp/openshift-logging-ansible-R0Loa0/curator.yml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "5498c5fd98f3dd06e34b20eb1f55dc12", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 320, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959517.52-2149907488828/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:47
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Set Curator configmap] ***********************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:53
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get configmap logging-curator -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "data": {
                    "config.yaml": "# Logging example curator config file\n\n# uncomment and use this to override the defaults from env vars\n#.defaults:\n#  delete:\n#    days: 30\n#  runhour: 0\n#  runminute: 0\n\n# to keep ops logs for a different duration:\n#.operations:\n#  delete:\n#    weeks: 8\n\n# example for a normal project\n#myapp:\n#  delete:\n#    weeks: 1\n"
                }, 
                "kind": "ConfigMap", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:18Z", 
                    "name": "logging-curator", 
                    "namespace": "logging", 
                    "resourceVersion": "1507", 
                    "selfLink": "/api/v1/namespaces/logging/configmaps/logging-curator", 
                    "uid": "8ec600ed-4c96-11e7-82d5-0ed67230a962"
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_curator : Set Curator secret] **************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:62
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc secrets new logging-curator ca=/etc/origin/logging/ca.crt key=/etc/origin/logging/system.logging.curator.key cert=/etc/origin/logging/system.logging.curator.crt -n logging", 
        "results": "", 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:75
ok: [openshift] => {
    "ansible_facts": {
        "curator_component": "curator", 
        "curator_name": "logging-curator"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : Generate Curator deploymentconfig] ***********
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:81
ok: [openshift] => {
    "changed": false, 
    "checksum": "e7a42a4f1d62786335acea7004db737970afbaf8", 
    "dest": "/tmp/openshift-logging-ansible-R0Loa0/templates/curator-dc.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "3ad9454ff8a4983b89456493a92c2b5c", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 2340, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959519.53-206766053719908/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_curator : Set Curator DC] ******************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:99
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get dc logging-curator -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "kind": "DeploymentConfig", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:20Z", 
                    "generation": 2, 
                    "labels": {
                        "component": "curator", 
                        "logging-infra": "curator", 
                        "provider": "openshift"
                    }, 
                    "name": "logging-curator", 
                    "namespace": "logging", 
                    "resourceVersion": "1521", 
                    "selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-curator", 
                    "uid": "901faa9b-4c96-11e7-82d5-0ed67230a962"
                }, 
                "spec": {
                    "replicas": 1, 
                    "selector": {
                        "component": "curator", 
                        "logging-infra": "curator", 
                        "provider": "openshift"
                    }, 
                    "strategy": {
                        "activeDeadlineSeconds": 21600, 
                        "recreateParams": {
                            "timeoutSeconds": 600
                        }, 
                        "resources": {}, 
                        "rollingParams": {
                            "intervalSeconds": 1, 
                            "maxSurge": "25%", 
                            "maxUnavailable": "25%", 
                            "timeoutSeconds": 600, 
                            "updatePeriodSeconds": 1
                        }, 
                        "type": "Recreate"
                    }, 
                    "template": {
                        "metadata": {
                            "creationTimestamp": null, 
                            "labels": {
                                "component": "curator", 
                                "logging-infra": "curator", 
                                "provider": "openshift"
                            }, 
                            "name": "logging-curator"
                        }, 
                        "spec": {
                            "containers": [
                                {
                                    "env": [
                                        {
                                            "name": "K8S_HOST_URL", 
                                            "value": "https://kubernetes.default.svc.cluster.local"
                                        }, 
                                        {
                                            "name": "ES_HOST", 
                                            "value": "logging-es"
                                        }, 
                                        {
                                            "name": "ES_PORT", 
                                            "value": "9200"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_CERT", 
                                            "value": "/etc/curator/keys/cert"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_KEY", 
                                            "value": "/etc/curator/keys/key"
                                        }, 
                                        {
                                            "name": "ES_CA", 
                                            "value": "/etc/curator/keys/ca"
                                        }, 
                                        {
                                            "name": "CURATOR_DEFAULT_DAYS", 
                                            "value": "30"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_HOUR", 
                                            "value": "0"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_MINUTE", 
                                            "value": "0"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_TIMEZONE", 
                                            "value": "UTC"
                                        }, 
                                        {
                                            "name": "CURATOR_SCRIPT_LOG_LEVEL", 
                                            "value": "INFO"
                                        }, 
                                        {
                                            "name": "CURATOR_LOG_LEVEL", 
                                            "value": "ERROR"
                                        }
                                    ], 
                                    "image": "172.30.72.153:5000/logging/logging-curator:latest", 
                                    "imagePullPolicy": "Always", 
                                    "name": "curator", 
                                    "resources": {
                                        "limits": {
                                            "cpu": "100m"
                                        }
                                    }, 
                                    "terminationMessagePath": "/dev/termination-log", 
                                    "terminationMessagePolicy": "File", 
                                    "volumeMounts": [
                                        {
                                            "mountPath": "/etc/curator/keys", 
                                            "name": "certs", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/curator/settings", 
                                            "name": "config", 
                                            "readOnly": true
                                        }
                                    ]
                                }
                            ], 
                            "dnsPolicy": "ClusterFirst", 
                            "restartPolicy": "Always", 
                            "schedulerName": "default-scheduler", 
                            "securityContext": {}, 
                            "serviceAccount": "aggregated-logging-curator", 
                            "serviceAccountName": "aggregated-logging-curator", 
                            "terminationGracePeriodSeconds": 30, 
                            "volumes": [
                                {
                                    "name": "certs", 
                                    "secret": {
                                        "defaultMode": 420, 
                                        "secretName": "logging-curator"
                                    }
                                }, 
                                {
                                    "configMap": {
                                        "defaultMode": 420, 
                                        "name": "logging-curator"
                                    }, 
                                    "name": "config"
                                }
                            ]
                        }
                    }, 
                    "test": false, 
                    "triggers": [
                        {
                            "type": "ConfigChange"
                        }
                    ]
                }, 
                "status": {
                    "availableReplicas": 0, 
                    "conditions": [
                        {
                            "lastTransitionTime": "2017-06-08T22:05:20Z", 
                            "lastUpdateTime": "2017-06-08T22:05:20Z", 
                            "message": "Deployment config does not have minimum availability.", 
                            "status": "False", 
                            "type": "Available"
                        }, 
                        {
                            "lastTransitionTime": "2017-06-08T22:05:20Z", 
                            "lastUpdateTime": "2017-06-08T22:05:20Z", 
                            "message": "replication controller \"logging-curator-1\" is waiting for pod \"logging-curator-1-deploy\" to run", 
                            "status": "Unknown", 
                            "type": "Progressing"
                        }
                    ], 
                    "details": {
                        "causes": [
                            {
                                "type": "ConfigChange"
                            }
                        ], 
                        "message": "config change"
                    }, 
                    "latestVersion": 1, 
                    "observedGeneration": 2, 
                    "replicas": 0, 
                    "unavailableReplicas": 0, 
                    "updatedReplicas": 0
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
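
For reference: with the environment shown above (CURATOR_DEFAULT_DAYS=30, CURATOR_RUN_HOUR=0, CURATOR_RUN_MINUTE=0, CURATOR_RUN_TIMEZONE=UTC), the curator pod runs once a day at 00:00 UTC and deletes indices older than 30 days for any project that has no explicit rule of its own. As a quick illustration (the value 14 is made up, and re-running the playbook will regenerate the DC from the role's variables), the default could be changed on a live cluster with:

    oc set env dc/logging-curator CURATOR_DEFAULT_DAYS=14 -n logging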

TASK [openshift_logging_curator : Delete temp directory] ***********************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:109
ok: [openshift] => {
    "changed": false, 
    "path": "/tmp/openshift-logging-ansible-R0Loa0", 
    "state": "absent"
}

TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:207
statically included: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml

TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:3
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:7
ok: [openshift] => {
    "ansible_facts": {
        "curator_version": "3_5"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:12
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:15
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Create temp directory for doing work in] *****
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:5
ok: [openshift] => {
    "changed": false, 
    "cmd": [
        "mktemp", 
        "-d", 
        "/tmp/openshift-logging-ansible-XXXXXX"
    ], 
    "delta": "0:00:00.002795", 
    "end": "2017-06-08 18:05:23.180163", 
    "rc": 0, 
    "start": "2017-06-08 18:05:23.177368"
}

STDOUT:

/tmp/openshift-logging-ansible-hSzKvb

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:10
ok: [openshift] => {
    "ansible_facts": {
        "tempdir": "/tmp/openshift-logging-ansible-hSzKvb"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : Create templates subdirectory] ***************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:14
ok: [openshift] => {
    "changed": false, 
    "gid": 0, 
    "group": "root", 
    "mode": "0755", 
    "owner": "root", 
    "path": "/tmp/openshift-logging-ansible-hSzKvb/templates", 
    "secontext": "unconfined_u:object_r:user_tmp_t:s0", 
    "size": 6, 
    "state": "directory", 
    "uid": 0
}

TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:24
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:32
ok: [openshift] => {
    "changed": false, 
    "results": {
        "cmd": "/bin/oc get sa aggregated-logging-curator -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "imagePullSecrets": [
                    {
                        "name": "aggregated-logging-curator-dockercfg-03l6b"
                    }
                ], 
                "kind": "ServiceAccount", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:17Z", 
                    "name": "aggregated-logging-curator", 
                    "namespace": "logging", 
                    "resourceVersion": "1506", 
                    "selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-curator", 
                    "uid": "8e213a1f-4c96-11e7-82d5-0ed67230a962"
                }, 
                "secrets": [
                    {
                        "name": "aggregated-logging-curator-token-3zbtt"
                    }, 
                    {
                        "name": "aggregated-logging-curator-dockercfg-03l6b"
                    }
                ]
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:41
ok: [openshift] => {
    "changed": false, 
    "checksum": "9008efd9a8892dcc42c28c6dfb6708527880a6d8", 
    "dest": "/tmp/openshift-logging-ansible-hSzKvb/curator.yml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "5498c5fd98f3dd06e34b20eb1f55dc12", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 320, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959524.07-264395072116652/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:47
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_curator : Set Curator configmap] ***********************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:53
ok: [openshift] => {
    "changed": false, 
    "results": {
        "cmd": "/bin/oc get configmap logging-curator -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "data": {
                    "config.yaml": "# Logging example curator config file\n\n# uncomment and use this to override the defaults from env vars\n#.defaults:\n#  delete:\n#    days: 30\n#  runhour: 0\n#  runminute: 0\n\n# to keep ops logs for a different duration:\n#.operations:\n#  delete:\n#    weeks: 8\n\n# example for a normal project\n#myapp:\n#  delete:\n#    weeks: 1\n"
                }, 
                "kind": "ConfigMap", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:18Z", 
                    "name": "logging-curator", 
                    "namespace": "logging", 
                    "resourceVersion": "1507", 
                    "selfLink": "/api/v1/namespaces/logging/configmaps/logging-curator", 
                    "uid": "8ec600ed-4c96-11e7-82d5-0ed67230a962"
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
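
The logging-curator ConfigMap retrieved above contains only commented-out examples, so the CURATOR_* environment defaults stay in effect. Uncommenting the shipped example (illustration only; "myapp" is the placeholder project name from the comments) would keep operations logs for 8 weeks and "myapp" logs for 1 week:

    .operations:
      delete:
        weeks: 8
    myapp:
      delete:
        weeks: 1

The map can be edited with "oc edit configmap logging-curator -n logging"; redeploying the curator (oc rollout latest dc/logging-curator -n logging) makes sure the new rules are picked up.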

TASK [openshift_logging_curator : Set Curator secret] **************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:62
ok: [openshift] => {
    "changed": false, 
    "results": {
        "apiVersion": "v1", 
        "data": {
            "ca": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREl5TURReE0xb1hEVEl5TURZd056SXlNRFF4TkZvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQU52Z3IrSlp0dk9HVUxjTU9EUmZkemIrdktmclZaOHZnTXo2SmU3eHQ0dW4KeGdvZksrRTlHaWJsTENEdktSUXBvLzBNUk1Fa1dndWtVRm1WSkZBMHlwQjlSZjRJVit6dCtSQkszRklLZzB1dQpvYXV3ZE1XTEhrZkxnNXUvdzBjMnZLVmhDb0NOaEhSSUxCcnJCSkhWQnhMQXZOTyszNnF5RS9wcFJoSGs0T1E2CnV4MSszYU4xSkprcmhHU3JYMXU3QnVQdFZxaFRBOU1sR3R6Y3RYNFM1QngrbzJVcHJxaTJLTTZzSURtcGhhU0QKVHZJeUJZVHJSOE1FVi9CM0xVRHJSNC9XUndNUVJlRUtFYko4MEw2SXgydlRBYW9lZ3dmY3FWeFN1U2xvY0Q3NApFNUJsaHZiQW5VT1g1RDhDNWZ3WXBrRHppZWJ4YURGakRIRDZVdm96QkVrQ0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFJVXMKQVdNcXl6bFNoSEtacTFvWEx3SjhHbmloL201S3JQa1U3bUVHMDhkdW1yZEVxaDVTSU1oTGowaWtVNnJTVDN2RgpxMEVqUnNvc3pQWVFwU05iR0V6WWE4QkVwM1VPa1FyTFExcWlVaXFGODhqa0p5anhqTnhJMEtUL2FmL1pPZ0RCCnJ3OW01UG4yeWVvK2tpUnlrVDREb2FGcyszWWxYU2w0UHRlY1JwRVhrcFZIcit3MlBiMHF0K0hxcFlSMkxTQUcKR0Y5NktBbTNON0VCUjc2dnVGS05zNkU3M1RQSWRzMHVxK1dGS2dxRHFXN0wzamJ1WUR2aERMZ0pHUXdOa1VVcwpqWWZpcm51cWNJUVlRZWZyMWw4VllvZ2RjQTExOXR5ZnJERzU3RHJudGtRNjJoMUh2S0liQjNrZy9VRVowQStHCkJGaHk2bkRQOFhaVlB2azg2NDA9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K", 
            "cert": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURSakNDQWk2Z0F3SUJBZ0lCQkRBTkJna3Foa2lHOXcwQkFRVUZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREl5TURReU1Gb1hEVEU1TURZd09ESXlNRFF5TUZvdwpSekVRTUE0R0ExVUVDZ3dIVEc5bloybHVaekVTTUJBR0ExVUVDd3dKVDNCbGJsTm9hV1owTVI4d0hRWURWUVFECkRCWnplWE4wWlcwdWJHOW5aMmx1Wnk1amRYSmhkRzl5TUlJQklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEEKTUlJQkNnS0NBUUVBdVAyRllZbDBTQjlFS0NyMFJYLzdnSUxJZXJLRTRCZGIzZU5pcHlXUEk4c1Rhenh4MEw2bApUb3Z5WlAvQVUySzNucGdjZXlwUkNsSFFodXlXWHVCSTBacko4cmt4eno5dHEwNytEN0hrdWxkNExKWDJEL2NlCjdVZ1RrTGFJNFFNN0xqMWxDS1Jyc1RRSkZLQ21jek52N0hPcXVKaU1ySk4vcCtRNVJwOTR6N2ZRcllxUjZqQUYKL3VxU2dDanpwTHNDcnpCVGNTSWpxb29pZEhNSVRwaXBzVmZHUnpHSWpBMUZzbWVVUkMzbGtCLzF4U2wveVdXdgo1TGZXSlg2Tzhsdkwxd2lDU2E3aDhRcHFmZXlzMkJzMTd4UFRzbXh5dGFGRUoxU2dFQWNLMkxPMkFIcUhtc05WClJseWM4MlZEU05KTlpFZ3pXMU1rWjNZUXhqQmVTT1BocHdJREFRQUJvMll3WkRBT0JnTlZIUThCQWY4RUJBTUMKQmFBd0NRWURWUjBUQkFJd0FEQWRCZ05WSFNVRUZqQVVCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3SFFZRApWUjBPQkJZRUZNODVYWkhBY3VVZU9wWUl5Mzc3eG13S01VditNQWtHQTFVZEl3UUNNQUF3RFFZSktvWklodmNOCkFRRUZCUUFEZ2dFQkFNUmx0Qk9la1dsc25WbGVSanpuNEhwREtqTkpYVWE3YTlaYTFTTGg4OFhhZUVVRXd5UEgKTExpNjJGQmhkamtCSTNZU0phdUJISGRlMUJyR1RnZDlwRWNZM1N4dFhEWjFTelhaZDdxY2lvT3NDUFZiRWJWeQpKeitQRXMrRmJ4MXhBZVluRThRczRsY2lnd3BUZzZDSjhzd3FKT2dCN1FsQzAxR1JpMmlWOTVXNndxTmNaUDhUCjcrOWMyK0VzcWIvRmVEaThHQmpwUW1lZFNSanl4R0N1STVWaVBCcGd2NTNBWWVrQVVoTXN5Z1pGRlh6cUtTN2kKRHg1WDZnTzRGUVNzQVprTzliNVFGUUZpOWhtZDdGUHNQOG5uR3Yyd2F5RzBOM0JOMDhhVnFiN0VoM0thOXBadQpoWlBCNHZ3R3JkTkFHcjcyc3dHaGRUb2E3NnYxNDUxOGo2RT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", 
            "key": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2QUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktZd2dnU2lBZ0VBQW9JQkFRQzQvWVZoaVhSSUgwUW8KS3ZSRmYvdUFnc2g2c29UZ0YxdmQ0MktuSlk4anl4TnJQSEhRdnFWT2kvSmsvOEJUWXJlZW1CeDdLbEVLVWRDRwo3SlplNEVqUm1zbnl1VEhQUDIyclR2NFBzZVM2VjNnc2xmWVA5eDd0U0JPUXRvamhBenN1UFdVSXBHdXhOQWtVCm9LWnpNMi9zYzZxNG1JeXNrMytuNURsR24zalB0OUN0aXBIcU1BWCs2cEtBS1BPa3V3S3ZNRk54SWlPcWlpSjAKY3doT21LbXhWOFpITVlpTURVV3laNVJFTGVXUUgvWEZLWC9KWmEva3Q5WWxmbzd5Vzh2WENJSkpydUh4Q21wOQo3S3pZR3pYdkU5T3liSEsxb1VRblZLQVFCd3JZczdZQWVvZWF3MVZHWEp6elpVTkkwazFrU0ROYlV5Um5kaERHCk1GNUk0K0duQWdNQkFBRUNnZ0VBSkF4clNJV0RubVpSWHM1NENXOFdNSVhMMFhVbVJaRG51d2FMZSt6cXh0dlUKN2VtNmVDWWVpbjdrL2wrRU4vdWRzbzhMeGNpS1FFdVBLaXZDWmJ5cEZBZG80UjdMb1FwSU1jRFQ2M1VNRUcvcApJQzhjNHlHUjZrVklZc1BmU3BIWm4zb2hUUnpWUzVQTFRUUzlHdWZrSUp5dG9RbEJLSVFadFRyWkduVUxFdGIzClBYb2xwYk0veXpRaEQ3NWNobFNhM2NjeXFVMW9LOFBZa1dGZjNNMzQ5d1lHNmFvR3JTYUxHd2hHaVhjTjl3NWYKK0JZWTFEajBpSWk1ZGNRMlR5RWZEd0FKMytqOWU4ZTBkejFKaGM3SEEvaDU4bWp6dE1VbTVzOXU5RE9WVkMvdQplR0RqaS9uRktkYlhwd015Wmo1bEFwSHZiTU5SUE5OQU45WjFLbXpZOFFLQmdRRGdiNlQwYVhCQnRDTmVNTGt1Cm5oRTlwL2JHQitqS2drejgyZU1jbmlVNUF0RHZMNUJqVENVSnUvaVVvYzBqVExHNU5CZXV5SlhqeDRUU3ZsMEsKQlZxS281cVAycTFidDhqaWRZQ1dyeVRsYmloK1gybm5SbUhMR2trcDFYTStDZm1vQWx1VzdQcE4wYXZuR0NiZwo0YmZtMU4zMFZmRVVpM1JqbmZNYTA5c0Fmd0tCZ1FEVEFiWjhhVGZFemVCdnF0WFF3ajdkRGhROU13VVVsQkFUCi85VXhWKzhHVjBUcGdBd1ozRHlwRHV2ajRHZ2JYUEVOYmovWHB0OU1IVUVWMWJWQW44MG5JN0VmSisxNGs4d2UKMWU3SmxhbVZpNGlTT1hsZkhxWHVlazQ4NHQ0dGpTSUpmd3UzMVRBdUYxYXlWTko0dnBzQnhjZC9YM1BJTnRlegpUekd0WEFTSzJRS0JnQ1FKYVNnaGdmZmF6ODhBN2NzUmNGMUJDZnUwSzJuUTBFSE5UbzA2eTJsRERYOUtXTm5JCjBTMmw4LzBSbWRtd0o5MUFBd2w2aGE3RVM0eDNLb05BWnRCYUFxaFVpWDl2bkI3WWxTUWN6VVRWa2tUd1Z4WGUKbERkRGFqb0FYdmU0TEZFUXg5b05CUWZYd2MxaWNHT3RlcFRpMnh5dmEvMUxndzIzditBdjRqdExBb0dBU3JDSwpoZE5BdUZzZm5LZGQ4LzB0LzFWaWVKODhweUlaNXRwaC9vdGZ3bURWdGNscmZ5UGtIUHQ1S0RwZzMzNnduTTV2CnJzZ1V0bjZNY1V3cVVtdStWS0pmcWVXYU8xb2xvS3lkVDZHNFc0VCtuTFBYY0lJS3ZqVnd5RUNONEpYYWZINEsKQ2RKZXNaSDFVRDZTR2NkdVd0eEZWNkxUK3dUZXFVdlpDOEpNb1ZFQ2dZQXVwN1JReE1QRHdERVZyNGthT2RRSwpiWW1TZGNaRk52WVQ5Y1l5TnpvZmdtREFJa2ZyZElic2E2emFtREpjeTFaalc5UGdsZFRIaEV3RnAwaXV6U2YwCjhXOWZnRkx0bHpyRDE4SFovYUQ4TnlycjZnZnhON29ydDgyYWRHSU1odXFCWEx1WEVidUZ6QS94MFVFdzBwQlMKekVaanJsMzVWeGMrQlZOTVA1V3JPZz09Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K"
        }, 
        "kind": "Secret", 
        "metadata": {
            "creationTimestamp": null, 
            "name": "logging-curator"
        }, 
        "type": "Opaque"
    }, 
    "state": "present"
}

TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:75
ok: [openshift] => {
    "ansible_facts": {
        "curator_component": "curator-ops", 
        "curator_name": "logging-curator-ops"
    }, 
    "changed": false
}

TASK [openshift_logging_curator : Generate Curator deploymentconfig] ***********
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:81
ok: [openshift] => {
    "changed": false, 
    "checksum": "45633b1da3343529acb88db161ab6398154960cf", 
    "dest": "/tmp/openshift-logging-ansible-hSzKvb/templates/curator-dc.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "0a14b8cd1e652ea27312cd398c3b204b", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 2364, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959525.81-19948668921198/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_curator : Set Curator DC] ******************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:99
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get dc logging-curator-ops -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "kind": "DeploymentConfig", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:26Z", 
                    "generation": 2, 
                    "labels": {
                        "component": "curator-ops", 
                        "logging-infra": "curator", 
                        "provider": "openshift"
                    }, 
                    "name": "logging-curator-ops", 
                    "namespace": "logging", 
                    "resourceVersion": "1536", 
                    "selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-curator-ops", 
                    "uid": "93dc6bcf-4c96-11e7-82d5-0ed67230a962"
                }, 
                "spec": {
                    "replicas": 1, 
                    "selector": {
                        "component": "curator-ops", 
                        "logging-infra": "curator", 
                        "provider": "openshift"
                    }, 
                    "strategy": {
                        "activeDeadlineSeconds": 21600, 
                        "recreateParams": {
                            "timeoutSeconds": 600
                        }, 
                        "resources": {}, 
                        "rollingParams": {
                            "intervalSeconds": 1, 
                            "maxSurge": "25%", 
                            "maxUnavailable": "25%", 
                            "timeoutSeconds": 600, 
                            "updatePeriodSeconds": 1
                        }, 
                        "type": "Recreate"
                    }, 
                    "template": {
                        "metadata": {
                            "creationTimestamp": null, 
                            "labels": {
                                "component": "curator-ops", 
                                "logging-infra": "curator", 
                                "provider": "openshift"
                            }, 
                            "name": "logging-curator-ops"
                        }, 
                        "spec": {
                            "containers": [
                                {
                                    "env": [
                                        {
                                            "name": "K8S_HOST_URL", 
                                            "value": "https://kubernetes.default.svc.cluster.local"
                                        }, 
                                        {
                                            "name": "ES_HOST", 
                                            "value": "logging-es-ops"
                                        }, 
                                        {
                                            "name": "ES_PORT", 
                                            "value": "9200"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_CERT", 
                                            "value": "/etc/curator/keys/cert"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_KEY", 
                                            "value": "/etc/curator/keys/key"
                                        }, 
                                        {
                                            "name": "ES_CA", 
                                            "value": "/etc/curator/keys/ca"
                                        }, 
                                        {
                                            "name": "CURATOR_DEFAULT_DAYS", 
                                            "value": "30"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_HOUR", 
                                            "value": "0"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_MINUTE", 
                                            "value": "0"
                                        }, 
                                        {
                                            "name": "CURATOR_RUN_TIMEZONE", 
                                            "value": "UTC"
                                        }, 
                                        {
                                            "name": "CURATOR_SCRIPT_LOG_LEVEL", 
                                            "value": "INFO"
                                        }, 
                                        {
                                            "name": "CURATOR_LOG_LEVEL", 
                                            "value": "ERROR"
                                        }
                                    ], 
                                    "image": "172.30.72.153:5000/logging/logging-curator:latest", 
                                    "imagePullPolicy": "Always", 
                                    "name": "curator", 
                                    "resources": {
                                        "limits": {
                                            "cpu": "100m"
                                        }
                                    }, 
                                    "terminationMessagePath": "/dev/termination-log", 
                                    "terminationMessagePolicy": "File", 
                                    "volumeMounts": [
                                        {
                                            "mountPath": "/etc/curator/keys", 
                                            "name": "certs", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/curator/settings", 
                                            "name": "config", 
                                            "readOnly": true
                                        }
                                    ]
                                }
                            ], 
                            "dnsPolicy": "ClusterFirst", 
                            "restartPolicy": "Always", 
                            "schedulerName": "default-scheduler", 
                            "securityContext": {}, 
                            "serviceAccount": "aggregated-logging-curator", 
                            "serviceAccountName": "aggregated-logging-curator", 
                            "terminationGracePeriodSeconds": 30, 
                            "volumes": [
                                {
                                    "name": "certs", 
                                    "secret": {
                                        "defaultMode": 420, 
                                        "secretName": "logging-curator"
                                    }
                                }, 
                                {
                                    "configMap": {
                                        "defaultMode": 420, 
                                        "name": "logging-curator"
                                    }, 
                                    "name": "config"
                                }
                            ]
                        }
                    }, 
                    "test": false, 
                    "triggers": [
                        {
                            "type": "ConfigChange"
                        }
                    ]
                }, 
                "status": {
                    "availableReplicas": 0, 
                    "conditions": [
                        {
                            "lastTransitionTime": "2017-06-08T22:05:26Z", 
                            "lastUpdateTime": "2017-06-08T22:05:26Z", 
                            "message": "Deployment config does not have minimum availability.", 
                            "status": "False", 
                            "type": "Available"
                        }, 
                        {
                            "lastTransitionTime": "2017-06-08T22:05:26Z", 
                            "lastUpdateTime": "2017-06-08T22:05:26Z", 
                            "message": "replication controller \"logging-curator-ops-1\" is waiting for pod \"logging-curator-ops-1-deploy\" to run", 
                            "status": "Unknown", 
                            "type": "Progressing"
                        }
                    ], 
                    "details": {
                        "causes": [
                            {
                                "type": "ConfigChange"
                            }
                        ], 
                        "message": "config change"
                    }, 
                    "latestVersion": 1, 
                    "observedGeneration": 2, 
                    "replicas": 0, 
                    "unavailableReplicas": 0, 
                    "updatedReplicas": 0
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_curator : Delete temp directory] ***********************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:109
ok: [openshift] => {
    "changed": false, 
    "path": "/tmp/openshift-logging-ansible-hSzKvb", 
    "state": "absent"
}

TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:226
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:241
statically included: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:2
 [WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_fluentd_nodeselector.keys()
| count }} > 1
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:6
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:10
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:14
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:3
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:7
ok: [openshift] => {
    "ansible_facts": {
        "fluentd_version": "3_5"
    }, 
    "changed": false
}

TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:12
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:15
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:20
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:26
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : Create temp directory for doing work in] *****
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:33
ok: [openshift] => {
    "changed": false, 
    "cmd": [
        "mktemp", 
        "-d", 
        "/tmp/openshift-logging-ansible-XXXXXX"
    ], 
    "delta": "0:00:00.002383", 
    "end": "2017-06-08 18:05:30.344703", 
    "rc": 0, 
    "start": "2017-06-08 18:05:30.342320"
}

STDOUT:

/tmp/openshift-logging-ansible-myKFHx

TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:38
ok: [openshift] => {
    "ansible_facts": {
        "tempdir": "/tmp/openshift-logging-ansible-myKFHx"
    }, 
    "changed": false
}

TASK [openshift_logging_fluentd : Create templates subdirectory] ***************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:41
ok: [openshift] => {
    "changed": false, 
    "gid": 0, 
    "group": "root", 
    "mode": "0755", 
    "owner": "root", 
    "path": "/tmp/openshift-logging-ansible-myKFHx/templates", 
    "secontext": "unconfined_u:object_r:user_tmp_t:s0", 
    "size": 6, 
    "state": "directory", 
    "uid": 0
}

TASK [openshift_logging_fluentd : Create Fluentd service account] **************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:51
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : Create Fluentd service account] **************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:59
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get sa aggregated-logging-fluentd -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "imagePullSecrets": [
                    {
                        "name": "aggregated-logging-fluentd-dockercfg-gck51"
                    }
                ], 
                "kind": "ServiceAccount", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:31Z", 
                    "name": "aggregated-logging-fluentd", 
                    "namespace": "logging", 
                    "resourceVersion": "1546", 
                    "selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-fluentd", 
                    "uid": "9676d7ce-4c96-11e7-82d5-0ed67230a962"
                }, 
                "secrets": [
                    {
                        "name": "aggregated-logging-fluentd-token-kw94p"
                    }, 
                    {
                        "name": "aggregated-logging-fluentd-dockercfg-gck51"
                    }
                ]
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}

TASK [openshift_logging_fluentd : Set privileged permissions for Fluentd] ******
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:68
changed: [openshift] => {
    "changed": true, 
    "present": "present", 
    "results": {
        "cmd": "/bin/oc adm policy add-scc-to-user privileged system:serviceaccount:logging:aggregated-logging-fluentd -n logging", 
        "results": "", 
        "returncode": 0
    }
}

TASK [openshift_logging_fluentd : Set cluster-reader permissions for Fluentd] ***
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:77
changed: [openshift] => {
    "changed": true, 
    "present": "present", 
    "results": {
        "cmd": "/bin/oc adm policy add-cluster-role-to-user cluster-reader system:serviceaccount:logging:aggregated-logging-fluentd -n logging", 
        "results": "", 
        "returncode": 0
    }
}
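
The two oc adm policy calls above give the aggregated-logging-fluentd service account the privileged SCC (the daemonset created below runs privileged and mounts host paths such as /var/log and /var/lib/docker/containers) and the cluster-reader role (so fluentd's Kubernetes metadata filter can look up pod and namespace information from the API). An illustrative after-the-fact check that the SCC grant landed:

    oc get scc privileged -o yaml | grep aggregated-logging-fluentd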

TASK [openshift_logging_fluentd : template] ************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:86
ok: [openshift] => {
    "changed": false, 
    "checksum": "a8c8596f5fc2c5dd7c8d33d244af17a2555be086", 
    "dest": "/tmp/openshift-logging-ansible-myKFHx/fluent.conf", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "579698b48ffce6276ee0e8d5ac71a338", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 1301, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959532.63-242011723332087/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:94
ok: [openshift] => {
    "changed": false, 
    "checksum": "b3e75eddc4a0765edc77da092384c0c6f95440e1", 
    "dest": "/tmp/openshift-logging-ansible-myKFHx/fluentd-throttle-config.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "25871b8e0a9bedc166a6029872a6c336", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 133, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959533.02-114021546658631/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:100
ok: [openshift] => {
    "changed": false, 
    "checksum": "a3aa36da13f3108aa4ad5b98d4866007b44e9798", 
    "dest": "/tmp/openshift-logging-ansible-myKFHx/secure-forward.conf", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "1084b00c427f4fa48dfc66d6ad6555d4", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 563, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959533.37-98971340767874/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:107
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:113
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:119
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging_fluentd : Set Fluentd configmap] ***********************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:125
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get configmap logging-fluentd -o json -n logging", 
        "results": [
            {
                "apiVersion": "v1", 
                "data": {
                    "fluent.conf": "# This file is the fluentd configuration entrypoint. Edit with care.\n\n@include configs.d/openshift/system.conf\n\n# In each section below, pre- and post- includes don't include anything initially;\n# they exist to enable future additions to openshift conf as needed.\n\n## sources\n## ordered so that syslog always runs last...\n@include configs.d/openshift/input-pre-*.conf\n@include configs.d/dynamic/input-docker-*.conf\n@include configs.d/dynamic/input-syslog-*.conf\n@include configs.d/openshift/input-post-*.conf\n##\n\n<label @INGRESS>\n## filters\n  @include configs.d/openshift/filter-pre-*.conf\n  @include configs.d/openshift/filter-retag-journal.conf\n  @include configs.d/openshift/filter-k8s-meta.conf\n  @include configs.d/openshift/filter-kibana-transform.conf\n  @include configs.d/openshift/filter-k8s-flatten-hash.conf\n  @include configs.d/openshift/filter-k8s-record-transform.conf\n  @include configs.d/openshift/filter-syslog-record-transform.conf\n  @include configs.d/openshift/filter-viaq-data-model.conf\n  @include configs.d/openshift/filter-post-*.conf\n##\n\n## matches\n  @include configs.d/openshift/output-pre-*.conf\n  @include configs.d/openshift/output-operations.conf\n  @include configs.d/openshift/output-applications.conf\n  # no post - applications.conf matches everything left\n##\n</label>\n", 
                    "secure-forward.conf": "# @type secure_forward\n\n# self_hostname ${HOSTNAME}\n# shared_key <SECRET_STRING>\n\n# secure yes\n# enable_strict_verification yes\n\n# ca_cert_path /etc/fluent/keys/your_ca_cert\n# ca_private_key_path /etc/fluent/keys/your_private_key\n  # for private CA secret key\n# ca_private_key_passphrase passphrase\n\n# <server>\n  # or IP\n#   host server.fqdn.example.com\n#   port 24284\n# </server>\n# <server>\n  # ip address to connect\n#   host 203.0.113.8\n  # specify hostlabel for FQDN verification if ipaddress is used for host\n#   hostlabel server.fqdn.example.com\n# </server>\n", 
                    "throttle-config.yaml": "# Logging example fluentd throttling config file\n\n#example-project:\n#  read_lines_limit: 10\n#\n#.operations:\n#  read_lines_limit: 100\n"
                }, 
                "kind": "ConfigMap", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:34Z", 
                    "name": "logging-fluentd", 
                    "namespace": "logging", 
                    "resourceVersion": "1552", 
                    "selfLink": "/api/v1/namespaces/logging/configmaps/logging-fluentd", 
                    "uid": "984eb474-4c96-11e7-82d5-0ed67230a962"
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
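
The secure-forward.conf shipped in the ConfigMap above is entirely commented out, so no copy of the logs is forwarded to an external collector by default. A filled-in sketch using only the directives from the shipped template (host, port and shared key here are placeholders):

    @type secure_forward
    self_hostname ${HOSTNAME}
    shared_key example_shared_key
    secure yes
    ca_cert_path /etc/fluent/keys/your_ca_cert
    <server>
      host server.fqdn.example.com
      port 24284
    </server>

It can be edited in place with "oc edit configmap logging-fluentd -n logging"; the fluentd pods typically need to be restarted before the change takes effect.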

TASK [openshift_logging_fluentd : Set logging-fluentd secret] ******************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:137
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc secrets new logging-fluentd ca=/etc/origin/logging/ca.crt key=/etc/origin/logging/system.logging.fluentd.key cert=/etc/origin/logging/system.logging.fluentd.crt -n logging", 
        "results": "", 
        "returncode": 0
    }, 
    "state": "present"
}
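
The secret created above bundles the signer CA with the client certificate and key generated earlier for system.logging.fluentd (data keys ca, cert and key). The daemonset that follows mounts it at /etc/fluent/keys, which is exactly where its ES_CLIENT_CERT, ES_CLIENT_KEY and ES_CA environment variables point. To inspect it after the run (illustration):

    oc get secret logging-fluentd -o yaml -n logging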

TASK [openshift_logging_fluentd : Generate logging-fluentd daemonset definition] ***
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:154
ok: [openshift] => {
    "changed": false, 
    "checksum": "3564bc243932f7877601224379b9eec08ea5bba4", 
    "dest": "/tmp/openshift-logging-ansible-myKFHx/templates/logging-fluentd.yaml", 
    "gid": 0, 
    "group": "root", 
    "md5sum": "ba4832d8047c015c3b7e5015883f1483", 
    "mode": "0644", 
    "owner": "root", 
    "secontext": "unconfined_u:object_r:admin_home_t:s0", 
    "size": 3414, 
    "src": "/root/.ansible/tmp/ansible-tmp-1496959535.12-276503391162037/source", 
    "state": "file", 
    "uid": 0
}

TASK [openshift_logging_fluentd : Set logging-fluentd daemonset] ***************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:172
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc get daemonset logging-fluentd -o json -n logging", 
        "results": [
            {
                "apiVersion": "extensions/v1beta1", 
                "kind": "DaemonSet", 
                "metadata": {
                    "creationTimestamp": "2017-06-08T22:05:35Z", 
                    "generation": 1, 
                    "labels": {
                        "component": "fluentd", 
                        "logging-infra": "fluentd", 
                        "provider": "openshift"
                    }, 
                    "name": "logging-fluentd", 
                    "namespace": "logging", 
                    "resourceVersion": "1555", 
                    "selfLink": "/apis/extensions/v1beta1/namespaces/logging/daemonsets/logging-fluentd", 
                    "uid": "995b9070-4c96-11e7-82d5-0ed67230a962"
                }, 
                "spec": {
                    "selector": {
                        "matchLabels": {
                            "component": "fluentd", 
                            "provider": "openshift"
                        }
                    }, 
                    "template": {
                        "metadata": {
                            "creationTimestamp": null, 
                            "labels": {
                                "component": "fluentd", 
                                "logging-infra": "fluentd", 
                                "provider": "openshift"
                            }, 
                            "name": "fluentd-elasticsearch"
                        }, 
                        "spec": {
                            "containers": [
                                {
                                    "env": [
                                        {
                                            "name": "K8S_HOST_URL", 
                                            "value": "https://kubernetes.default.svc.cluster.local"
                                        }, 
                                        {
                                            "name": "ES_HOST", 
                                            "value": "logging-es"
                                        }, 
                                        {
                                            "name": "ES_PORT", 
                                            "value": "9200"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_CERT", 
                                            "value": "/etc/fluent/keys/cert"
                                        }, 
                                        {
                                            "name": "ES_CLIENT_KEY", 
                                            "value": "/etc/fluent/keys/key"
                                        }, 
                                        {
                                            "name": "ES_CA", 
                                            "value": "/etc/fluent/keys/ca"
                                        }, 
                                        {
                                            "name": "OPS_HOST", 
                                            "value": "logging-es-ops"
                                        }, 
                                        {
                                            "name": "OPS_PORT", 
                                            "value": "9200"
                                        }, 
                                        {
                                            "name": "OPS_CLIENT_CERT", 
                                            "value": "/etc/fluent/keys/cert"
                                        }, 
                                        {
                                            "name": "OPS_CLIENT_KEY", 
                                            "value": "/etc/fluent/keys/key"
                                        }, 
                                        {
                                            "name": "OPS_CA", 
                                            "value": "/etc/fluent/keys/ca"
                                        }, 
                                        {
                                            "name": "ES_COPY", 
                                            "value": "false"
                                        }, 
                                        {
                                            "name": "USE_JOURNAL", 
                                            "value": "true"
                                        }, 
                                        {
                                            "name": "JOURNAL_SOURCE"
                                        }, 
                                        {
                                            "name": "JOURNAL_READ_FROM_HEAD", 
                                            "value": "false"
                                        }
                                    ], 
                                    "image": "172.30.72.153:5000/logging/logging-fluentd:latest", 
                                    "imagePullPolicy": "Always", 
                                    "name": "fluentd-elasticsearch", 
                                    "resources": {
                                        "limits": {
                                            "cpu": "100m", 
                                            "memory": "512Mi"
                                        }
                                    }, 
                                    "securityContext": {
                                        "privileged": true
                                    }, 
                                    "terminationMessagePath": "/dev/termination-log", 
                                    "terminationMessagePolicy": "File", 
                                    "volumeMounts": [
                                        {
                                            "mountPath": "/run/log/journal", 
                                            "name": "runlogjournal"
                                        }, 
                                        {
                                            "mountPath": "/var/log", 
                                            "name": "varlog"
                                        }, 
                                        {
                                            "mountPath": "/var/lib/docker/containers", 
                                            "name": "varlibdockercontainers", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/fluent/configs.d/user", 
                                            "name": "config", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/fluent/keys", 
                                            "name": "certs", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/docker-hostname", 
                                            "name": "dockerhostname", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/localtime", 
                                            "name": "localtime", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/sysconfig/docker", 
                                            "name": "dockercfg", 
                                            "readOnly": true
                                        }, 
                                        {
                                            "mountPath": "/etc/docker", 
                                            "name": "dockerdaemoncfg", 
                                            "readOnly": true
                                        }
                                    ]
                                }
                            ], 
                            "dnsPolicy": "ClusterFirst", 
                            "nodeSelector": {
                                "logging-infra-fluentd": "true"
                            }, 
                            "restartPolicy": "Always", 
                            "schedulerName": "default-scheduler", 
                            "securityContext": {}, 
                            "serviceAccount": "aggregated-logging-fluentd", 
                            "serviceAccountName": "aggregated-logging-fluentd", 
                            "terminationGracePeriodSeconds": 30, 
                            "volumes": [
                                {
                                    "hostPath": {
                                        "path": "/run/log/journal"
                                    }, 
                                    "name": "runlogjournal"
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/var/log"
                                    }, 
                                    "name": "varlog"
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/var/lib/docker/containers"
                                    }, 
                                    "name": "varlibdockercontainers"
                                }, 
                                {
                                    "configMap": {
                                        "defaultMode": 420, 
                                        "name": "logging-fluentd"
                                    }, 
                                    "name": "config"
                                }, 
                                {
                                    "name": "certs", 
                                    "secret": {
                                        "defaultMode": 420, 
                                        "secretName": "logging-fluentd"
                                    }
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/etc/hostname"
                                    }, 
                                    "name": "dockerhostname"
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/etc/localtime"
                                    }, 
                                    "name": "localtime"
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/etc/sysconfig/docker"
                                    }, 
                                    "name": "dockercfg"
                                }, 
                                {
                                    "hostPath": {
                                        "path": "/etc/docker"
                                    }, 
                                    "name": "dockerdaemoncfg"
                                }
                            ]
                        }
                    }, 
                    "templateGeneration": 1, 
                    "updateStrategy": {
                        "rollingUpdate": {
                            "maxUnavailable": 1
                        }, 
                        "type": "RollingUpdate"
                    }
                }, 
                "status": {
                    "currentNumberScheduled": 0, 
                    "desiredNumberScheduled": 0, 
                    "numberMisscheduled": 0, 
                    "numberReady": 0, 
                    "observedGeneration": 1
                }
            }
        ], 
        "returncode": 0
    }, 
    "state": "present"
}
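
Note the nodeSelector ("logging-infra-fluentd": "true") and the status block reporting desiredNumberScheduled: 0 -- at this point no node carries that label, so no fluentd pod has been scheduled yet. The tasks that follow retrieve the node list, and the role then labels the selected nodes, which is roughly equivalent to (illustration; node name taken from the inventory below):

    oc label node 172.18.2.35 logging-infra-fluentd=true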

TASK [openshift_logging_fluentd : Retrieve list of Fluentd hosts] **************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:183
ok: [openshift] => {
    "changed": false, 
    "results": {
        "cmd": "/bin/oc get node -o json -n default", 
        "results": [
            {
                "apiVersion": "v1", 
                "items": [
                    {
                        "apiVersion": "v1", 
                        "kind": "Node", 
                        "metadata": {
                            "annotations": {
                                "volumes.kubernetes.io/controller-managed-attach-detach": "true"
                            }, 
                            "creationTimestamp": "2017-06-08T21:45:29Z", 
                            "labels": {
                                "beta.kubernetes.io/arch": "amd64", 
                                "beta.kubernetes.io/os": "linux", 
                                "kubernetes.io/hostname": "172.18.2.35"
                            }, 
                            "name": "172.18.2.35", 
                            "namespace": "", 
                            "resourceVersion": "1549", 
                            "selfLink": "/api/v1/nodes/172.18.2.35", 
                            "uid": "ca1d8b00-4c93-11e7-82d5-0ed67230a962"
                        }, 
                        "spec": {
                            "externalID": "172.18.2.35", 
                            "providerID": "aws:////i-014bd4ec99e8f9b77"
                        }, 
                        "status": {
                            "addresses": [
                                {
                                    "address": "172.18.2.35", 
                                    "type": "LegacyHostIP"
                                }, 
                                {
                                    "address": "172.18.2.35", 
                                    "type": "InternalIP"
                                }, 
                                {
                                    "address": "172.18.2.35", 
                                    "type": "Hostname"
                                }
                            ], 
                            "allocatable": {
                                "cpu": "4", 
                                "memory": "7129288Ki", 
                                "pods": "40"
                            }, 
                            "capacity": {
                                "cpu": "4", 
                                "memory": "7231688Ki", 
                                "pods": "40"
                            }, 
                            "conditions": [
                                {
                                    "lastHeartbeatTime": "2017-06-08T22:05:32Z", 
                                    "lastTransitionTime": "2017-06-08T21:45:29Z", 
                                    "message": "kubelet has sufficient disk space available", 
                                    "reason": "KubeletHasSufficientDisk", 
                                    "status": "False", 
                                    "type": "OutOfDisk"
                                }, 
                                {
                                    "lastHeartbeatTime": "2017-06-08T22:05:32Z", 
                                    "lastTransitionTime": "2017-06-08T21:45:29Z", 
                                    "message": "kubelet has sufficient memory available", 
                                    "reason": "KubeletHasSufficientMemory", 
                                    "status": "False", 
                                    "type": "MemoryPressure"
                                }, 
                                {
                                    "lastHeartbeatTime": "2017-06-08T22:05:32Z", 
                                    "lastTransitionTime": "2017-06-08T21:45:29Z", 
                                    "message": "kubelet has no disk pressure", 
                                    "reason": "KubeletHasNoDiskPressure", 
                                    "status": "False", 
                                    "type": "DiskPressure"
                                }, 
                                {
                                    "lastHeartbeatTime": "2017-06-08T22:05:32Z", 
                                    "lastTransitionTime": "2017-06-08T21:45:29Z", 
                                    "message": "kubelet is posting ready status", 
                                    "reason": "KubeletReady", 
                                    "status": "True", 
                                    "type": "Ready"
                                }
                            ], 
                            "daemonEndpoints": {
                                "kubeletEndpoint": {
                                    "Port": 10250
                                }
                            }, 
                            "images": [
                                {
                                    "names": [
                                        "openshift/origin-federation:6acabdc", 
                                        "openshift/origin-federation:latest"
                                    ], 
                                    "sizeBytes": 1205885664
                                }, 
                                {
                                    "names": [
                                        "docker.io/openshift/origin-docker-registry@sha256:6ac12709d3236e73885b51f9d34af2c4443f988d913b24cf4fa8d446c076e901", 
                                        "docker.io/openshift/origin-docker-registry:latest"
                                    ], 
                                    "sizeBytes": 1100552981
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-docker-registry:latest"
                                    ], 
                                    "sizeBytes": 1100164272
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-gitserver:6acabdc", 
                                        "openshift/origin-gitserver:latest"
                                    ], 
                                    "sizeBytes": 1086520226
                                }, 
                                {
                                    "names": [
                                        "openshift/node:6acabdc", 
                                        "openshift/node:latest"
                                    ], 
                                    "sizeBytes": 1051721928
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-keepalived-ipfailover:6acabdc", 
                                        "openshift/origin-keepalived-ipfailover:latest"
                                    ], 
                                    "sizeBytes": 1028529711
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-haproxy-router:latest"
                                    ], 
                                    "sizeBytes": 1022758742
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-docker-builder:latest"
                                    ], 
                                    "sizeBytes": 1001728427
                                }, 
                                {
                                    "names": [
                                        "openshift/origin:6acabdc", 
                                        "openshift/origin:latest"
                                    ], 
                                    "sizeBytes": 1001728427
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-f5-router:6acabdc", 
                                        "openshift/origin-f5-router:latest"
                                    ], 
                                    "sizeBytes": 1001728427
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-sti-builder:6acabdc", 
                                        "openshift/origin-sti-builder:latest"
                                    ], 
                                    "sizeBytes": 1001728427
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-cluster-capacity:6acabdc", 
                                        "openshift/origin-cluster-capacity:latest"
                                    ], 
                                    "sizeBytes": 962455026
                                }, 
                                {
                                    "names": [
                                        "openshift/dind-master:latest"
                                    ], 
                                    "sizeBytes": 731456758
                                }, 
                                {
                                    "names": [
                                        "openshift/dind-node:latest"
                                    ], 
                                    "sizeBytes": 731453034
                                }, 
                                {
                                    "names": [
                                        "172.30.72.153:5000/logging/logging-auth-proxy@sha256:3686627029d142b4e048406e9210909fb1edfbcee625fe80b9c7eb7ea7f72994", 
                                        "172.30.72.153:5000/logging/logging-auth-proxy:latest"
                                    ], 
                                    "sizeBytes": 715536037
                                }, 
                                {
                                    "names": [
                                        "<none>@<none>", 
                                        "<none>:<none>"
                                    ], 
                                    "sizeBytes": 709532011
                                }, 
                                {
                                    "names": [
                                        "docker.io/node@sha256:46db0dd19955beb87b841c30a6b9812ba626473283e84117d1c016deee5949a9", 
                                        "docker.io/node:0.10.36"
                                    ], 
                                    "sizeBytes": 697128386
                                }, 
                                {
                                    "names": [
                                        "172.30.72.153:5000/logging/logging-kibana@sha256:b6ea373785abd677e1e096b0a035bea974d72dbbeaceed4a686a596370b1fb11", 
                                        "172.30.72.153:5000/logging/logging-kibana:latest"
                                    ], 
                                    "sizeBytes": 682851529
                                }, 
                                {
                                    "names": [
                                        "docker.io/openshift/origin-logging-kibana@sha256:3b95068a8514b13780c047f955e99d481af5597bf2b214501030e7be2f4d6dc1", 
                                        "docker.io/openshift/origin-logging-kibana:latest"
                                    ], 
                                    "sizeBytes": 682851503
                                }, 
                                {
                                    "names": [
                                        "openshift/dind:latest"
                                    ], 
                                    "sizeBytes": 640650210
                                }, 
                                {
                                    "names": [
                                        "172.30.72.153:5000/logging/logging-elasticsearch@sha256:8b05778c4673106be06272dff2bcbe47e57d3e1481627b5de9d662498a955319", 
                                        "172.30.72.153:5000/logging/logging-elasticsearch:latest"
                                    ], 
                                    "sizeBytes": 623379825
                                }, 
                                {
                                    "names": [
                                        "172.30.72.153:5000/logging/logging-fluentd@sha256:57aa0cfe5686e352cce101a146d79fc7769bc414762f7e4845232bc9abf956e4", 
                                        "172.30.72.153:5000/logging/logging-fluentd:latest"
                                    ], 
                                    "sizeBytes": 472182652
                                }, 
                                {
                                    "names": [
                                        "172.30.72.153:5000/logging/logging-curator@sha256:fe447b96afb1e193cb985e219454693d54a99d87fb2b7f476e5ae3f39338b1e0", 
                                        "172.30.72.153:5000/logging/logging-curator:latest"
                                    ], 
                                    "sizeBytes": 418287928
                                }, 
                                {
                                    "names": [
                                        "docker.io/openshift/base-centos7@sha256:aea292a3bddba020cde0ee83e6a45807931eb607c164ec6a3674f67039d8cd7c", 
                                        "docker.io/openshift/base-centos7:latest"
                                    ], 
                                    "sizeBytes": 383049978
                                }, 
                                {
                                    "names": [
                                        "rhel7.2:latest"
                                    ], 
                                    "sizeBytes": 377493597
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-base:latest"
                                    ], 
                                    "sizeBytes": 363070172
                                }, 
                                {
                                    "names": [
                                        "docker.io/fedora@sha256:69281ddd7b2600e5f2b17f1e12d7fba25207f459204fb2d15884f8432c479136", 
                                        "docker.io/fedora:25"
                                    ], 
                                    "sizeBytes": 230864375
                                }, 
                                {
                                    "names": [
                                        "docker.io/openshift/origin-logging-curator@sha256:c09a1f9ef6f2ca23b4d3dca5f4a2c0f5bb76ed65351d9295d7d3344e5bce3f89", 
                                        "docker.io/openshift/origin-logging-curator:latest"
                                    ], 
                                    "sizeBytes": 224977447
                                }, 
                                {
                                    "names": [
                                        "rhel7:latest"
                                    ], 
                                    "sizeBytes": 219121266
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-pod:6acabdc", 
                                        "openshift/origin-pod:latest"
                                    ], 
                                    "sizeBytes": 213199843
                                }, 
                                {
                                    "names": [
                                        "registry.access.redhat.com/rhel7.2@sha256:98e6ca5d226c26e31a95cd67716afe22833c943e1926a21daf1a030906a02249", 
                                        "registry.access.redhat.com/rhel7.2:latest"
                                    ], 
                                    "sizeBytes": 201376319
                                }, 
                                {
                                    "names": [
                                        "registry.access.redhat.com/rhel7.3@sha256:1e232401d8e0ba53b36b757b4712fbcbd1dab9c21db039c45a84871a74e89e68", 
                                        "registry.access.redhat.com/rhel7.3:latest"
                                    ], 
                                    "sizeBytes": 192693772
                                }, 
                                {
                                    "names": [
                                        "docker.io/centos@sha256:bba1de7c9d900a898e3cadbae040dfe8a633c06bc104a0df76ae24483e03c077"
                                    ], 
                                    "sizeBytes": 192548999
                                }, 
                                {
                                    "names": [
                                        "openshift/origin-source:latest"
                                    ], 
                                    "sizeBytes": 192548894
                                }, 
                                {
                                    "names": [
                                        "docker.io/centos@sha256:aebf12af704307dfa0079b3babdca8d7e8ff6564696882bcb5d11f1d461f9ee9", 
                                        "docker.io/centos:7", 
                                        "docker.io/centos:centos7"
                                    ], 
                                    "sizeBytes": 192548537
                                }, 
                                {
                                    "names": [
                                        "openshift/hello-openshift:6acabdc", 
                                        "openshift/hello-openshift:latest"
                                    ], 
                                    "sizeBytes": 5643318
                                }
                            ], 
                            "nodeInfo": {
                                "architecture": "amd64", 
                                "bootID": "b9c36265-d2ac-491b-a967-2dc10011c5be", 
                                "containerRuntimeVersion": "docker://1.12.6", 
                                "kernelVersion": "3.10.0-327.22.2.el7.x86_64", 
                                "kubeProxyVersion": "v1.6.1+5115d708d7", 
                                "kubeletVersion": "v1.6.1+5115d708d7", 
                                "machineID": "f9370ed252a14f73b014c1301a9b6d1b", 
                                "operatingSystem": "linux", 
                                "osImage": "Red Hat Enterprise Linux Server 7.3 (Maipo)", 
                                "systemUUID": "EC2A35E6-5819-BBD7-C1B5-A3E52E25F68B"
                            }
                        }
                    }
                ], 
                "kind": "List", 
                "metadata": {}, 
                "resourceVersion": "", 
                "selfLink": ""
            }
        ], 
        "returncode": 0
    }, 
    "state": "list"
}
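
The task above shells out to '/bin/oc get node -o json -n default' and the role then pulls the node names out of the returned item list. A minimal way to reproduce that lookup by hand (assuming a logged-in oc client; jq is optional and only used in the second form):

    # list node names the same way the role derives its Fluentd host list
    oc get nodes -o jsonpath='{.items[*].metadata.name}'
    # equivalent, filtering the same raw JSON that the task captured
    oc get nodes -o json | jq -r '.items[].metadata.name'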

TASK [openshift_logging_fluentd : Set openshift_logging_fluentd_hosts] *********
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:190
ok: [openshift] => {
    "ansible_facts": {
        "openshift_logging_fluentd_hosts": [
            "172.18.2.35"
        ]
    }, 
    "changed": false
}

TASK [openshift_logging_fluentd : include] *************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:195
included: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml for openshift

TASK [openshift_logging_fluentd : Label 172.18.2.35 for Fluentd deployment] ****
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml:2
changed: [openshift] => {
    "changed": true, 
    "results": {
        "cmd": "/bin/oc label node 172.18.2.35 logging-infra-fluentd=true --overwrite", 
        "results": "", 
        "returncode": 0
    }, 
    "state": "add"
}
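
The label applied above ('logging-infra-fluentd=true') is what the Fluentd daemonset's node selector matches, so a pod should land on 172.18.2.35 shortly afterwards. A hedged way to verify both steps (the 'logging' namespace and the 'component=fluentd' selector are assumptions based on the conventions used elsewhere in this run):

    # confirm the node carries the label the daemonset selects on
    oc get nodes -l logging-infra-fluentd=true
    # confirm a Fluentd pod was scheduled onto the labelled node
    oc get pods -n logging -l component=fluentd -o wide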

TASK [openshift_logging_fluentd : command] *************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml:10
changed: [openshift -> 127.0.0.1] => {
    "changed": true, 
    "cmd": [
        "sleep", 
        "0.5"
    ], 
    "delta": "0:00:00.502142", 
    "end": "2017-06-08 18:05:38.406623", 
    "rc": 0, 
    "start": "2017-06-08 18:05:37.904481"
}
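
The role only pauses for half a second after labelling the node. A sketch of a more explicit wait, polling until a Fluentd pod reports Running or a time budget runs out (namespace and label selector are the same assumptions as above):

    # poll for up to ~2 minutes instead of relying on a fixed sleep
    for i in $(seq 1 60); do
        oc get pods -n logging -l component=fluentd 2>/dev/null | grep -q Running && break
        sleep 2
    done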

TASK [openshift_logging_fluentd : Delete temp directory] ***********************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:202
ok: [openshift] => {
    "changed": false, 
    "path": "/tmp/openshift-logging-ansible-myKFHx", 
    "state": "absent"
}

TASK [openshift_logging : include] *********************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:253
included: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging/tasks/update_master_config.yaml for openshift

TASK [openshift_logging : include] *********************************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging/tasks/main.yaml:36
skipping: [openshift] => {
    "changed": false, 
    "skip_reason": "Conditional result was False", 
    "skipped": true
}

TASK [openshift_logging : Cleaning up local temp dir] **************************
task path: /tmp/tmp.SyrcokxiSR/openhift-ansible/roles/openshift_logging/tasks/main.yaml:40
ok: [openshift -> 127.0.0.1] => {
    "changed": false, 
    "path": "/tmp/openshift-logging-ansible-WzWxcq", 
    "state": "absent"
}
META: ran handlers
META: ran handlers

PLAY [Update Master configs] ***************************************************
skipping: no hosts matched

PLAY RECAP *********************************************************************
localhost                  : ok=2    changed=0    unreachable=0    failed=0   
openshift                  : ok=207  changed=70   unreachable=0    failed=0   
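
The plays themselves finished with failed=0; the failure reported further down comes from the logging.sh verification steps that follow. Those checks use the harness helper os::cmd::try_until_text; a minimal stand-alone equivalent of the first poll (selector, 0.2s interval and 180s budget taken from the lines below) might look like:

    # retry 'oc get pods -l component=es' until the output contains Running
    end=$((SECONDS + 180))
    until oc get pods -l component=es 2>/dev/null | grep -q Running; do
        [ "$SECONDS" -ge "$end" ] && { echo "timed out waiting for es pod"; exit 1; }
        sleep 0.2
    done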

/data/src/github.com/openshift/origin-aggregated-logging
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:170: executing 'oc get pods -l component=es' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 26.040s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:170: executing 'oc get pods -l component=es' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME                                      READY     STATUS    RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       Pending   0          1s
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          1s
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          2s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          3s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          4s
... repeated 3 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          5s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          6s
... repeated 3 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          7s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          8s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          9s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          10s
... repeated 3 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          11s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          12s
... repeated 3 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          13s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          14s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          15s
... repeated 3 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          16s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          17s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          18s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          19s
... repeated 3 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          20s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          21s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          22s
... repeated 3 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          23s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          24s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          25s
... repeated 2 times
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       ContainerCreating   0          26s
... repeated 2 times
NAME                                      READY     STATUS    RESTARTS   AGE
logging-es-data-master-9hufah3f-1-tkzw9   0/1       Running   0          27s
Standard error from the command:
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:171: executing 'oc get pods -l component=kibana' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
FAILURE after 179.648s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:171: executing 'oc get pods -l component=kibana' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s: the command timed out
Standard output from the command:
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          19s
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          20s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          21s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          22s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          23s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          24s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          25s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          26s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          27s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          28s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          29s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          30s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          31s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          32s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          33s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          34s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          35s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          36s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          37s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          38s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          39s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          40s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          41s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          42s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          43s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          44s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          45s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          46s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          47s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          48s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          49s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          50s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          51s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          52s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          53s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          54s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          55s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          56s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          57s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          58s
... repeated 2 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          59s
... repeated 3 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          1m
... repeated 141 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          2m
... repeated 142 times
NAME                     READY     STATUS              RESTARTS   AGE
logging-kibana-1-x9cxn   0/2       ContainerCreating   0          3m
... repeated 45 times
Standard error from the command:
[ERROR] PID 4238: hack/lib/cmd.sh:617: `return "${return_code}"` exited with status 1.
[INFO] 		Stack Trace: 
[INFO] 		  1: hack/lib/cmd.sh:617: `return "${return_code}"`
[INFO] 		  2: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:171: os::cmd::try_until_text
[INFO]   Exiting with code 1.
/data/src/github.com/openshift/origin-aggregated-logging/hack/lib/log/system.sh: line 31:  4603 Terminated              sar -A -o "${binary_logfile}" 1 86400 > /dev/null 2> "${stderr_logfile}"
[INFO] [CLEANUP] Beginning cleanup routines...
[INFO] [CLEANUP] Dumping cluster events to /tmp/origin-aggregated-logging/artifacts/events.txt
[INFO] [CLEANUP] Dumping etcd contents to /tmp/origin-aggregated-logging/artifacts/etcd
[WARNING] No compiled `etcdhelper` binary was found. Attempting to build one using:
[WARNING]   $ hack/build-go.sh tools/etcdhelper
++ Building go targets for linux/amd64: tools/etcdhelper
/data/src/github.com/openshift/origin-aggregated-logging/../origin/hack/build-go.sh took 200 seconds
2017-06-08 18:12:32.390394 I | warning: ignoring ServerName for user-provided CA for backwards compatibility is deprecated
[INFO] [CLEANUP] Dumping container logs to /tmp/origin-aggregated-logging/logs/containers
[INFO] [CLEANUP] Truncating log files over 200M
[INFO] [CLEANUP] Stopping docker containers
[INFO] [CLEANUP] Removing docker containers
[INFO] [CLEANUP] Killing child processes
[INFO] [CLEANUP] Pruning etcd data directory
[ERROR] /data/src/github.com/openshift/origin-aggregated-logging/logging.sh exited with code 1 after 00h 33m 59s
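
The underlying problem is the Kibana pod that never left ContainerCreating during the 180s window above. Were the cluster still running, the usual places to look would be the pod description and namespace events, which typically surface image-pull or secret/volume-mount problems (the 'logging' namespace and 'component=kibana' selector mirror the check that timed out):

    # inspect why the Kibana pod is stuck in ContainerCreating
    oc describe pod -n logging -l component=kibana
    oc get events -n logging --sort-by='.lastTimestamp' | tail -20
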
Error while running ssh/sudo command: 
set -e
pushd /data/src/github.com/openshift//origin-aggregated-logging/hack/testing >/dev/null
export PATH=$GOPATH/bin:$PATH

echo '***************************************************'
echo 'Running GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh...'
time GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh
echo 'Finished GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh'
echo '***************************************************'

popd >/dev/null
        
The SSH command responded with a non-zero exit status. Vagrant
assumes that this means the command failed. The output for this command
should be in the log above. Please read the output to determine what
went wrong.
==> openshiftdev: Downloading logs
==> openshiftdev: Downloading artifacts from '/var/log/yum.log' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace@5/origin/artifacts/yum.log'
==> openshiftdev: Downloading artifacts from '/var/log/secure' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace@5/origin/artifacts/secure'
==> openshiftdev: Downloading artifacts from '/var/log/audit/audit.log' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace@5/origin/artifacts/audit.log'
==> openshiftdev: Downloading artifacts from '/tmp/origin-aggregated-logging/' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace@5/origin/artifacts'
Build step 'Execute shell' marked build as failure
[description-setter] Could not determine description.
[PostBuildScript] - Execution post build scripts.
[workspace@5] $ /bin/sh -xe /tmp/hudson7637018335540499011.sh
+ INSTANCE_NAME=origin_logging-rhel7-1643
+ pushd origin
~/jobs/test-origin-aggregated-logging/workspace@5/origin ~/jobs/test-origin-aggregated-logging/workspace@5
+ rc=0
+ '[' -f .vagrant-openshift.json ']'
++ /usr/bin/vagrant ssh -c 'sudo ausearch -m avc'
+ ausearchresult='<no matches>'
+ rc=1
+ '[' '<no matches>' = '<no matches>' ']'
+ rc=0
+ /usr/bin/vagrant destroy -f
==> openshiftdev: Terminating the instance...
==> openshiftdev: Running cleanup tasks for 'shell' provisioner...
+ popd
~/jobs/test-origin-aggregated-logging/workspace@5
+ exit 0
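
The traced post-build step above asks the VM for SELinux AVC denials and only treats the build as dirty when ausearch returns matches; the '<no matches>' result therefore leaves rc at 0. A compacted sketch of the same check:

    # fail only if ausearch reports actual AVC denials
    ausearchresult=$(/usr/bin/vagrant ssh -c 'sudo ausearch -m avc')
    if [ "$ausearchresult" = '<no matches>' ]; then
        echo "no AVC denials found"
    else
        echo "AVC denials detected"; echo "$ausearchresult"; exit 1
    fi
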
[BFA] Scanning build for known causes...
[BFA] Found failure cause(s):
[BFA] Command Failure from category failure
[BFA] Done. 0s
Finished: FAILURE