Console Output
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/determine_version.yaml:7
ok: [openshift] => {
"ansible_facts": {
"kibana_version": "3_5"
},
"changed": false
}
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/determine_version.yaml:12
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : fail] *****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/determine_version.yaml:15
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : Create temp directory for doing work in] ******
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:7
ok: [openshift] => {
"changed": false,
"cmd": [
"mktemp",
"-d",
"/tmp/openshift-logging-ansible-XXXXXX"
],
"delta": "0:00:00.001929",
"end": "2017-06-08 12:04:50.318888",
"rc": 0,
"start": "2017-06-08 12:04:50.316959"
}
STDOUT:
/tmp/openshift-logging-ansible-cxQHsS
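A task of roughly this shape produces the mktemp output above; this is a sketch of the pattern, not necessarily the role's exact source:

    - name: Create temp directory for doing work in
      command: mktemp -d /tmp/openshift-logging-ansible-XXXXXX
      register: mktemp_output   # assumed register name; stdout holds the generated path
      changed_when: false       # reported as "changed": false above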
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:12
ok: [openshift] => {
"ansible_facts": {
"tempdir": "/tmp/openshift-logging-ansible-cxQHsS"
},
"changed": false
}
TASK [openshift_logging_kibana : Create templates subdirectory] ****************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:16
ok: [openshift] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/tmp/openshift-logging-ansible-cxQHsS/templates",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [openshift_logging_kibana : Create Kibana service account] ****************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:26
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : Create Kibana service account] ****************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:34
ok: [openshift] => {
"changed": false,
"results": {
"cmd": "/bin/oc get sa aggregated-logging-kibana -o json -n logging",
"results": [
{
"apiVersion": "v1",
"imagePullSecrets": [
{
"name": "aggregated-logging-kibana-dockercfg-3854b"
}
],
"kind": "ServiceAccount",
"metadata": {
"creationTimestamp": "2017-06-08T16:04:43Z",
"name": "aggregated-logging-kibana",
"namespace": "logging",
"resourceVersion": "1404",
"selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-kibana",
"uid": "2f4e827f-4c64-11e7-bcff-0eb8998b3e80"
},
"secrets": [
{
"name": "aggregated-logging-kibana-token-nj5w5"
},
{
"name": "aggregated-logging-kibana-dockercfg-3854b"
}
]
}
],
"returncode": 0
},
"state": "present"
}
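The same lookup can be reproduced by hand; the token and dockercfg secret names are generated per cluster, so they will differ from the values above:

    # Inspect the service account the role just ensured exists
    oc get sa aggregated-logging-kibana -o json -n logging
    # List only its attached secrets
    oc get sa aggregated-logging-kibana -o jsonpath='{.secrets[*].name}' -n logging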
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:42
ok: [openshift] => {
"ansible_facts": {
"kibana_component": "kibana-ops",
"kibana_name": "logging-kibana-ops"
},
"changed": false
}
TASK [openshift_logging_kibana : Retrieving the cert to use when generating secrets for the logging components] ***
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:46
ok: [openshift] => (item={u'name': u'ca_file', u'file': u'ca.crt'}) => {
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUyTURNMU5Gb1hEVEl5TURZd056RTJNRE0xTlZvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQU8vdzcra0pnNU43TW5UbzBFMGd6YTNRLzB4alpUQ2Vpalk1ZmlUWUhVKzAKR2ZNaXFWbHUxUW5NZE13aDFjU245VkhkOGV0V0hMMVpaQVBCcEVIbUYzTmZEYmV0RTdweWhkRm9RVmE5QytNRQppMDhjd3FVeSttQnpuSUJjQy9vU3NIZUdCVDk1YVF6ckNnbXRCdzVkenJnOWZnMDljM3YwSi8vRUxoYXJjRWNCCjRBT3RCejVrVHBjUnFLUDN2MDJVb3RtZVcwZ3FleXg4ZW5CM0VmZWN3K29McENjeXVXNGYvb0RzVHZnQTBMbUsKQWFDaUZDczlzTFNQUS8rVG1NbS91bW9OazZMa0JLSUlGeFFxN3BhYlB5R1lOdnlrVDl5TDNZbk5lN21VNlpVZApQSnBqdS9BOXM0Z3o1TmUvdWpPa0J1OVZEUkZndXBsaE80cnY5TzN5YjE4Q0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFHa1MKcVpkeXd5cGxxK3FTNVVJMUhRWjg3MUhDRGdaMmM5YXcydytqalRxemVzUHhmekV6VTlwV2dJNWJFdTJ4akxvKwpVQUM0VzZoZUhaQWFiQk9nUFRYazVFM2ZTVTZYRmdLV1VEeDR0NDF3TGdFdU1QWmJhZkVVdkl5QjlHTjN3SGV0CjRDOUp0Q213V3RZbzJuQ2czMmt3MmdNbzU5VlExajZDbHZOdUdtVVFiTGRCMzQ5Rm1VbXh4bjkvRExTaURZNGgKQnJ4YlRiZFM5NHk5bHNMdmM1MWRiMFJYRytoclF1b3ZMeDdXaWltOE1kc3JaUDEwM2IyT2hYZ3A5MUdkQjA5bQpmRTRVS3l6VG1kTXZPVHlzOG50RzhFOFFXNjRDOVp5cjEyOVRLRUF0cS91QTNlRmZoQzJqaytQTEtXR3ZBSW0xCnFob1UwVEVBRlptVGtWcDUyMGc9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K",
"encoding": "base64",
"item": {
"file": "ca.crt",
"name": "ca_file"
},
"source": "/etc/origin/logging/ca.crt"
}
ok: [openshift] => (item={u'name': u'kibana_internal_key', u'file': u'kibana-internal.key'}) => {
"changed": false,
"content": "LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFcFFJQkFBS0NBUUVBNE1CbW56NUVVa3prU2orMUpyVng2Slg0Mzk0dDVHVmpoM0F0MGFVdDJWOVhKNFd0Cmt4dGpyeEEwRnpucDRuZ3lydVRMZE1obUlDcHM1eURlaTd3WVZLN3laSXpUMUo1RWthekw4NGRlZTlrV0JpajcKMGFyWkRCbEdtVVVKdTZ6c0xyd244MDFqS3MvR1lOdzJRRnlPZW94dEs2bDNWR1BRdXptRUh0b3dueDVJZmJ5TwpnTFlLNkduL0U0ZThOcFg4TGtsRm5MdGpwdENETnV2UXBxR2hWeHg0blA4Y1V0MGhpQ3Z0SzlQVFJVQm83cUE0ClMvRFJ1eEtuMGd6djgyQWZiMVBtTkdFUWVkcENBdnRJTHpVbzQ3MlY0V3J5ZEVQblZKWUw5SWlXdHlPMHlyWS8KWjljMWU0akVvZktxMkRmSUVEUHYwc0h1TWdhZFR5MkZrSXJQVXdJREFRQUJBb0lCQUI3YXF2dnZpMmVWQUw0awpzdFRHa2dXeEdRSjZ1T2dpOXgvdnlZcC9WNlVBenJFeFM5dm5KYTh4MVVUZkFYbkNzSXFNQkZaYThKSTM0TTEyCkdMY3NNbGJsMFliMlFscHRBOUxQMnd2cXFKRHVBM1VEQ1ZjR1VsZXozSnBML1FNVHdUVEZSZUxEMi9wVG5kTk0KUi9KY0FuOVdhZUpWbUhrd1hzWmxKZnNxVHdSNDNPc2xnNmIvMThDL2YrNnZXZ2p0OTFxekNWMVBpbXE5Y0lscAozVDJNUU5GSXFRcU5NUW9veDd5SlduOGI5ZTc3a3oxUWprSE5kM1FOdnoxMkZLSUQ5SUorWXkrVVhrTGZjTk5nCnF6SHk0QVhuTGRwdkYyeVNOekM0RmNhU2Y2ZThNQ2FZR3VJSnJ3K2tQeENQVC83dy9xcUxZRk15QjhiODR4VE8KQ3ZsbWJvRUNnWUVBNStpamhFelhVN3VwZVFJQ3NQTHRGa1Via3VhSGZLajU2cjUyY0doSk1ZMjVXejNMeXVidgpWRmhQVHZsS1A4YnpFRUg1aWlINVloUlkrdWRYbVdxN3lWdklWalZ3Q3Z3dktZRjVTWlpGd0czbE9ZZlR0MHpBCkJrRzlETEp5UjJDakJ4Q2JCczZQNkU4VFVwOE9lVFFjbzRhLzEyd3JMMWNNM1JoNXo5MnU1RUVDZ1lFQStCbHMKdW9SYUlKUnVWRk5oNlUrTnNDUjNJc2dvN1VXYWFGdnZya2ZybmxKdnAxYkZmMTFkbVpkS0RRTW1Dd2VweVhEQwo2emRETEZmSjFEU3Q3L1JoVnpydHB2elI5RXhBbWV0bU5RcG9UMEducDVDUXVCcnJtYjNSeU1BTGZhN0ZiVE1kCkZlR0J4WDcrcitBcXR5UEFLSUZRZ005MXZXQVFiVGpxK1R4V1BwTUNnWUVBMUJ3OTR2WDlSSUt5UVJHTHNKaWcKTE94TWloSDcvaEwzY3JOOTFOWXRpL3RhZTQrTDVDdFEzMDVlUzVkTFNBWUpydWNJL0VTU3VoaHVRaDV5U3NvSwpzK01VMjNUSTQrTkFwNi9hWlduaXBseGRlajgzYjhIdEtFdmNRODY3STNuSjR0NVhUT1NGVHk5ME51OXY1Mys0CmFnRUhXbHQvTEM5c2ZwU3laV01YWVlFQ2dZRUFnaHZMUW5GZ1ZvK2c0SVl0NWMzN2p6RkFiVFF0Q21takZUR0oKbTR0S0RHaE05NlB3eE45dzA3aGNkRUIvZGU5WjRqTkpkVk13M2hOWDIvcVljU3RtMVBrcFlwWlpnTlZ3VjRmMAp2TUl0ZThzQm9rV1ZMNGVtb0tURVhKUWlNcEl5SlJkV0hJQjdydGZ2dHNwSGZHMjZPME85dFIrWExtNHZqeWYzClNpZVRUeE1DZ1lFQTRJMXFhMjNKWTNPdEtYcC9BcW13V3J3d1c2NkJQdC9TRHcvZUdZWTR4d1J3TENRZWZVd2IKeDFoenViNG52d0ZRSlZ2V3lMTVg1Z08vVFlTaVlsTklDTTZzaE1DWDQ0YlBuSS8xem5WajZvbFJMbGM0L1QwRwpXK2VsWlRIUStqME9ydkNuYytWMDQzNklOWUxyYzl6b0h5cU04bTN0NmhHem54WDZzZkRNRnIwPQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQo=",
"encoding": "base64",
"item": {
"file": "kibana-internal.key",
"name": "kibana_internal_key"
},
"source": "/etc/origin/logging/kibana-internal.key"
}
ok: [openshift] => (item={u'name': u'kibana_internal_cert', u'file': u'kibana-internal.crt'}) => {
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURUakNDQWphZ0F3SUJBZ0lCQWpBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUyTURNMU4xb1hEVEU1TURZd09ERTJNRE0xT0ZvdwpGakVVTUJJR0ExVUVBeE1MSUd0cFltRnVZUzF2Y0hNd2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUJEd0F3CmdnRUtBb0lCQVFEZ3dHYWZQa1JTVE9SS1A3VW10WEhvbGZqZjNpM2taV09IY0MzUnBTM1pYMWNuaGEyVEcyT3YKRURRWE9lbmllREt1NU10MHlHWWdLbXpuSU42THZCaFVydkprak5QVW5rU1JyTXZ6aDE1NzJSWUdLUHZScXRrTQpHVWFaUlFtN3JPd3V2Q2Z6VFdNcXo4WmczRFpBWEk1NmpHMHJxWGRVWTlDN09ZUWUyakNmSGtoOXZJNkF0Z3JvCmFmOFRoN3cybGZ3dVNVV2N1Mk9tMElNMjY5Q21vYUZYSEhpYy94eFMzU0dJSyswcjA5TkZRR2p1b0RoTDhORzcKRXFmU0RPL3pZQjl2VStZMFlSQjUya0lDKzBndk5Tamp2WlhoYXZKMFErZFVsZ3YwaUphM0k3VEt0ajluMXpWNwppTVNoOHFyWU44Z1FNKy9Td2U0eUJwMVBMWVdRaXM5VEFnTUJBQUdqZ1o0d2dac3dEZ1lEVlIwUEFRSC9CQVFECkFnV2dNQk1HQTFVZEpRUU1NQW9HQ0NzR0FRVUZCd01CTUF3R0ExVWRFd0VCL3dRQ01BQXdaZ1lEVlIwUkJGOHcKWFlJTElHdHBZbUZ1WVMxdmNIT0NMQ0JyYVdKaGJtRXRiM0J6TG5KdmRYUmxjaTVrWldaaGRXeDBMbk4yWXk1agpiSFZ6ZEdWeUxteHZZMkZzZ2hnZ2EybGlZVzVoTGpFeU55NHdMakF1TVM1NGFYQXVhVytDQm10cFltRnVZVEFOCkJna3Foa2lHOXcwQkFRc0ZBQU9DQVFFQVdFRHllZ29FdkllT2hUOGI4eFFzMzh6K3IvYUJibnMxeUtiay82OVMKVXpFd0FoVHJNb1B2TVg2TzJpRzJscVhzRGdIdzBJc1VqRW9tbFhiNGNBcDZxWnR3LzEvNm1QVnJCNHdubmtQSQpiY2hoZFVFb2NteFowS24zN2dmb3JsUldXbDcrZDNTSUFwa1I3eGJtMHFiUkdnaC90a2JHSWZBVGQzOUdJbkVpCnZXMCtyZW1BOFA4TGhFbUkrZU15NnlGZEpBRUIya2NBWlhYZGRKL1BFZVczV2lqNmcxTzljdms2Ym9QYUo3VEkKdXNqMERqa05SSXBOejYxRnJLR3FFeHRtMHVxOWlPa3ZvQVdING5GSFoyQk5zR0hCdEUwbHlQQmN1bUg2enI1YwpvYlFVUnlNMHh0Zm1Xd29lY0FJTVJjNmdlSVZnUUlQM1ZSWGh6aE1wZnlCMnFBPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQotLS0tLUJFR0lOIENFUlRJRklDQVRFLS0tLS0KTUlJQzJqQ0NBY0tnQXdJQkFnSUJBVEFOQmdrcWhraUc5dzBCQVFzRkFEQWVNUnd3R2dZRFZRUURFeE5zYjJkbgphVzVuTFhOcFoyNWxjaTEwWlhOME1CNFhEVEUzTURZd09ERTJNRE0xTkZvWERUSXlNRFl3TnpFMk1ETTFOVm93CkhqRWNNQm9HQTFVRUF4TVRiRzluWjJsdVp5MXphV2R1WlhJdGRHVnpkRENDQVNJd0RRWUpLb1pJaHZjTkFRRUIKQlFBRGdnRVBBRENDQVFvQ2dnRUJBTy93NytrSmc1TjdNblRvMEUwZ3phM1EvMHhqWlRDZWlqWTVmaVRZSFUrMApHZk1pcVZsdTFRbk1kTXdoMWNTbjlWSGQ4ZXRXSEwxWlpBUEJwRUhtRjNOZkRiZXRFN3B5aGRGb1FWYTlDK01FCmkwOGN3cVV5K21Cem5JQmNDL29Tc0hlR0JUOTVhUXpyQ2dtdEJ3NWR6cmc5ZmcwOWMzdjBKLy9FTGhhcmNFY0IKNEFPdEJ6NWtUcGNScUtQM3YwMlVvdG1lVzBncWV5eDhlbkIzRWZlY3crb0xwQ2N5dVc0Zi9vRHNUdmdBMExtSwpBYUNpRkNzOXNMU1BRLytUbU1tL3Vtb05rNkxrQktJSUZ4UXE3cGFiUHlHWU52eWtUOXlMM1luTmU3bVU2WlVkClBKcGp1L0E5czRnejVOZS91ak9rQnU5VkRSRmd1cGxoTzRydjlPM3liMThDQXdFQUFhTWpNQ0V3RGdZRFZSMFAKQVFIL0JBUURBZ0trTUE4R0ExVWRFd0VCL3dRRk1BTUJBZjh3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUdrUwpxWmR5d3lwbHErcVM1VUkxSFFaODcxSENEZ1oyYzlhdzJ3K2pqVHF6ZXNQeGZ6RXpVOXBXZ0k1YkV1MnhqTG8rClVBQzRXNmhlSFpBYWJCT2dQVFhrNUUzZlNVNlhGZ0tXVUR4NHQ0MXdMZ0V1TVBaYmFmRVV2SXlCOUdOM3dIZXQKNEM5SnRDbXdXdFlvMm5DZzMya3cyZ01vNTlWUTFqNkNsdk51R21VUWJMZEIzNDlGbVVteHhuOS9ETFNpRFk0aApCcnhiVGJkUzk0eTlsc0x2YzUxZGIwUlhHK2hyUXVvdkx4N1dpaW04TWRzclpQMTAzYjJPaFhncDkxR2RCMDltCmZFNFVLeXpUbWRNdk9UeXM4bnRHOEU4UVc2NEM5WnlyMTI5VEtFQXRxL3VBM2VGZmhDMmprK1BMS1dHdkFJbTEKcWhvVTBURUFGWm1Ua1ZwNTIwZz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": {
"file": "kibana-internal.crt",
"name": "kibana_internal_cert"
},
"source": "/etc/origin/logging/kibana-internal.crt"
}
ok: [openshift] => (item={u'name': u'server_tls', u'file': u'server-tls.json'}) => {
"changed": false,
"content": "Ly8gU2VlIGZvciBhdmFpbGFibGUgb3B0aW9uczogaHR0cHM6Ly9ub2RlanMub3JnL2FwaS90bHMuaHRtbCN0bHNfdGxzX2NyZWF0ZXNlcnZlcl9vcHRpb25zX3NlY3VyZWNvbm5lY3Rpb25saXN0ZW5lcgp0bHNfb3B0aW9ucyA9IHsKCWNpcGhlcnM6ICdrRUVDREg6K2tFRUNESCtTSEE6a0VESDora0VESCtTSEE6K2tFREgrQ0FNRUxMSUE6a0VDREg6K2tFQ0RIK1NIQTprUlNBOitrUlNBK1NIQTora1JTQStDQU1FTExJQTohYU5VTEw6IWVOVUxMOiFTU0x2MjohUkM0OiFERVM6IUVYUDohU0VFRDohSURFQTorM0RFUycsCglob25vckNpcGhlck9yZGVyOiB0cnVlCn0K",
"encoding": "base64",
"item": {
"file": "server-tls.json",
"name": "server_tls"
},
"source": "/etc/origin/logging/server-tls.json"
}
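Each item above is read with Ansible's slurp module, which always returns file content base64-encoded (hence "encoding": "base64"). A minimal sketch of the loop; the register name is an assumption:

    - name: Retrieving the cert to use when generating secrets for the logging components
      slurp:
        src: "/etc/origin/logging/{{ item.file }}"
      register: key_pairs    # assumed name
      with_items:
        - { name: ca_file, file: ca.crt }
        - { name: kibana_internal_key, file: kibana-internal.key }
        - { name: kibana_internal_cert, file: kibana-internal.crt }
        - { name: server_tls, file: server-tls.json }

To inspect one of the captured payloads, pipe the content field through base64 -d.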
TASK [openshift_logging_kibana : Set logging-kibana-ops service] ***************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:57
changed: [openshift] => {
"changed": true,
"results": {
"clusterip": "172.30.34.57",
"cmd": "/bin/oc get service logging-kibana-ops -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "Service",
"metadata": {
"creationTimestamp": "2017-06-08T16:04:52Z",
"name": "logging-kibana-ops",
"namespace": "logging",
"resourceVersion": "1428",
"selfLink": "/api/v1/namespaces/logging/services/logging-kibana-ops",
"uid": "349b7e34-4c64-11e7-bcff-0eb8998b3e80"
},
"spec": {
"clusterIP": "172.30.34.57",
"ports": [
{
"port": 443,
"protocol": "TCP",
"targetPort": "oaproxy"
}
],
"selector": {
"component": "kibana-ops",
"provider": "openshift"
},
"sessionAffinity": "None",
"type": "ClusterIP"
},
"status": {
"loadBalancer": {}
}
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:74
[WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_kibana_key | trim | length
> 0 }}
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:79
[WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_kibana_cert | trim | length
> 0 }}
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:84
[WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_kibana_ca | trim | length >
0 }}
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
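The three warnings above come from wrapping a when: conditional in Jinja2 delimiters. Ansible already evaluates when: as a raw Jinja2 expression, so the braces are redundant; the general fix is to drop them:

    # Triggers the warning:
    when: "{{ openshift_logging_kibana_key | trim | length > 0 }}"
    # Preferred, equivalent form:
    when: openshift_logging_kibana_key | trim | length > 0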
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:89
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : Generating Kibana route template] *************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:94
ok: [openshift] => {
"changed": false,
"checksum": "2ec7e9a79caf52f48f42961532f0ec2f28fa52fe",
"dest": "/tmp/openshift-logging-ansible-cxQHsS/templates/kibana-route.yaml",
"gid": 0,
"group": "root",
"md5sum": "e32e309b373769c6ae26717d062b581f",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 2726,
"src": "/root/.ansible/tmp/ansible-tmp-1496937892.55-86900678848426/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_kibana : Setting Kibana route] *************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:114
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get route logging-kibana-ops -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "Route",
"metadata": {
"creationTimestamp": "2017-06-08T16:04:53Z",
"labels": {
"component": "support",
"logging-infra": "support",
"provider": "openshift"
},
"name": "logging-kibana-ops",
"namespace": "logging",
"resourceVersion": "1431",
"selfLink": "/oapi/v1/namespaces/logging/routes/logging-kibana-ops",
"uid": "3551d037-4c64-11e7-bcff-0eb8998b3e80"
},
"spec": {
"host": "kibana-ops.router.default.svc.cluster.local",
"tls": {
"caCertificate": "-----BEGIN CERTIFICATE-----\nMIIC2jCCAcKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAeMRwwGgYDVQQDExNsb2dn\naW5nLXNpZ25lci10ZXN0MB4XDTE3MDYwODE2MDM1NFoXDTIyMDYwNzE2MDM1NVow\nHjEcMBoGA1UEAxMTbG9nZ2luZy1zaWduZXItdGVzdDCCASIwDQYJKoZIhvcNAQEB\nBQADggEPADCCAQoCggEBAO/w7+kJg5N7MnTo0E0gza3Q/0xjZTCeijY5fiTYHU+0\nGfMiqVlu1QnMdMwh1cSn9VHd8etWHL1ZZAPBpEHmF3NfDbetE7pyhdFoQVa9C+ME\ni08cwqUy+mBznIBcC/oSsHeGBT95aQzrCgmtBw5dzrg9fg09c3v0J//ELharcEcB\n4AOtBz5kTpcRqKP3v02UotmeW0gqeyx8enB3Efecw+oLpCcyuW4f/oDsTvgA0LmK\nAaCiFCs9sLSPQ/+TmMm/umoNk6LkBKIIFxQq7pabPyGYNvykT9yL3YnNe7mU6ZUd\nPJpju/A9s4gz5Ne/ujOkBu9VDRFguplhO4rv9O3yb18CAwEAAaMjMCEwDgYDVR0P\nAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGkS\nqZdywyplq+qS5UI1HQZ871HCDgZ2c9aw2w+jjTqzesPxfzEzU9pWgI5bEu2xjLo+\nUAC4W6heHZAabBOgPTXk5E3fSU6XFgKWUDx4t41wLgEuMPZbafEUvIyB9GN3wHet\n4C9JtCmwWtYo2nCg32kw2gMo59VQ1j6ClvNuGmUQbLdB349FmUmxxn9/DLSiDY4h\nBrxbTbdS94y9lsLvc51db0RXG+hrQuovLx7Wiim8MdsrZP103b2OhXgp91GdB09m\nfE4UKyzTmdMvOTys8ntG8E8QW64C9Zyr129TKEAtq/uA3eFfhC2jk+PLKWGvAIm1\nqhoU0TEAFZmTkVp520g=\n-----END CERTIFICATE-----\n",
"destinationCACertificate": "-----BEGIN CERTIFICATE-----\nMIIC2jCCAcKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAeMRwwGgYDVQQDExNsb2dn\naW5nLXNpZ25lci10ZXN0MB4XDTE3MDYwODE2MDM1NFoXDTIyMDYwNzE2MDM1NVow\nHjEcMBoGA1UEAxMTbG9nZ2luZy1zaWduZXItdGVzdDCCASIwDQYJKoZIhvcNAQEB\nBQADggEPADCCAQoCggEBAO/w7+kJg5N7MnTo0E0gza3Q/0xjZTCeijY5fiTYHU+0\nGfMiqVlu1QnMdMwh1cSn9VHd8etWHL1ZZAPBpEHmF3NfDbetE7pyhdFoQVa9C+ME\ni08cwqUy+mBznIBcC/oSsHeGBT95aQzrCgmtBw5dzrg9fg09c3v0J//ELharcEcB\n4AOtBz5kTpcRqKP3v02UotmeW0gqeyx8enB3Efecw+oLpCcyuW4f/oDsTvgA0LmK\nAaCiFCs9sLSPQ/+TmMm/umoNk6LkBKIIFxQq7pabPyGYNvykT9yL3YnNe7mU6ZUd\nPJpju/A9s4gz5Ne/ujOkBu9VDRFguplhO4rv9O3yb18CAwEAAaMjMCEwDgYDVR0P\nAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAGkS\nqZdywyplq+qS5UI1HQZ871HCDgZ2c9aw2w+jjTqzesPxfzEzU9pWgI5bEu2xjLo+\nUAC4W6heHZAabBOgPTXk5E3fSU6XFgKWUDx4t41wLgEuMPZbafEUvIyB9GN3wHet\n4C9JtCmwWtYo2nCg32kw2gMo59VQ1j6ClvNuGmUQbLdB349FmUmxxn9/DLSiDY4h\nBrxbTbdS94y9lsLvc51db0RXG+hrQuovLx7Wiim8MdsrZP103b2OhXgp91GdB09m\nfE4UKyzTmdMvOTys8ntG8E8QW64C9Zyr129TKEAtq/uA3eFfhC2jk+PLKWGvAIm1\nqhoU0TEAFZmTkVp520g=\n-----END CERTIFICATE-----\n",
"insecureEdgeTerminationPolicy": "Redirect",
"termination": "reencrypt"
},
"to": {
"kind": "Service",
"name": "logging-kibana-ops",
"weight": 100
},
"wildcardPolicy": "None"
},
"status": {
"ingress": []
}
}
],
"returncode": 0
},
"state": "present"
}
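The route uses reencrypt termination, so the router decrypts client traffic and re-encrypts it to the service, validating the backend against the destinationCACertificate above. Once created, the externally visible host can be read back with:

    oc get route logging-kibana-ops -o jsonpath='{.spec.host}' -n logging
    # -> kibana-ops.router.default.svc.cluster.local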
TASK [openshift_logging_kibana : Generate proxy session] ***********************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:125
ok: [openshift] => {
"ansible_facts": {
"session_secret": "30X7dyqGA1WrRbZFCpE8acrZp0q19wkI1h4bwCaj8ur5rJor4lRPuDcG83DPDn7bqJAVQIxj8jPl8fpMigT1S1FXzfxuxA5ncy2elazB4SCWSwFXabOOfXPcUqXuHhNQaIwqQ5Yi5AsLKQToXWfFcLVpLjiA3VWZpwYztseZ7x8oi5jDC7QJ8mRrARWAqRwhpzsMlxU1"
},
"changed": false
}
TASK [openshift_logging_kibana : Generate oauth client secret] *****************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:132
ok: [openshift] => {
"ansible_facts": {
"oauth_secret": "GNi8SN4k9exFRSqZqzzpkuO9W9Wbb7hApK7FpBxIR5tHYdznDvRbWxFsM6ULPf5Y"
},
"changed": false
}
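Both facts above are random strings minted at install time. One common Ansible idiom for generating such a secret (a sketch; the role may generate it differently):

    - name: Generate oauth client secret
      set_fact:
        oauth_secret: "{{ lookup('password', '/dev/null length=64 chars=ascii_letters,digits') }}"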
TASK [openshift_logging_kibana : Create oauth-client template] *****************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:138
changed: [openshift] => {
"changed": true,
"checksum": "5bab7c7e64c855105558bd34128a3f09f71af0de",
"dest": "/tmp/openshift-logging-ansible-cxQHsS/templates/oauth-client.yml",
"gid": 0,
"group": "root",
"md5sum": "9c9c4f5ba87058ce1e3ce65f5cabb1a3",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 332,
"src": "/root/.ansible/tmp/ansible-tmp-1496937893.64-90436484836641/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_kibana : Set kibana-proxy oauth-client] ****************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:146
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get oauthclient kibana-proxy -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "OAuthClient",
"metadata": {
"creationTimestamp": "2017-06-08T16:04:46Z",
"labels": {
"logging-infra": "support"
},
"name": "kibana-proxy",
"resourceVersion": "1433",
"selfLink": "/oapi/v1/oauthclients/kibana-proxy",
"uid": "3180f14e-4c64-11e7-bcff-0eb8998b3e80"
},
"redirectURIs": [
"https://kibana-ops.router.default.svc.cluster.local"
],
"scopeRestrictions": [
{
"literals": [
"user:info",
"user:check-access",
"user:list-projects"
]
}
],
"secret": "GNi8SN4k9exFRSqZqzzpkuO9W9Wbb7hApK7FpBxIR5tHYdznDvRbWxFsM6ULPf5Y"
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_kibana : Set Kibana secret] ****************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:157
ok: [openshift] => {
"changed": false,
"results": {
"apiVersion": "v1",
"data": {
"ca": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUyTURNMU5Gb1hEVEl5TURZd056RTJNRE0xTlZvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQU8vdzcra0pnNU43TW5UbzBFMGd6YTNRLzB4alpUQ2Vpalk1ZmlUWUhVKzAKR2ZNaXFWbHUxUW5NZE13aDFjU245VkhkOGV0V0hMMVpaQVBCcEVIbUYzTmZEYmV0RTdweWhkRm9RVmE5QytNRQppMDhjd3FVeSttQnpuSUJjQy9vU3NIZUdCVDk1YVF6ckNnbXRCdzVkenJnOWZnMDljM3YwSi8vRUxoYXJjRWNCCjRBT3RCejVrVHBjUnFLUDN2MDJVb3RtZVcwZ3FleXg4ZW5CM0VmZWN3K29McENjeXVXNGYvb0RzVHZnQTBMbUsKQWFDaUZDczlzTFNQUS8rVG1NbS91bW9OazZMa0JLSUlGeFFxN3BhYlB5R1lOdnlrVDl5TDNZbk5lN21VNlpVZApQSnBqdS9BOXM0Z3o1TmUvdWpPa0J1OVZEUkZndXBsaE80cnY5TzN5YjE4Q0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFHa1MKcVpkeXd5cGxxK3FTNVVJMUhRWjg3MUhDRGdaMmM5YXcydytqalRxemVzUHhmekV6VTlwV2dJNWJFdTJ4akxvKwpVQUM0VzZoZUhaQWFiQk9nUFRYazVFM2ZTVTZYRmdLV1VEeDR0NDF3TGdFdU1QWmJhZkVVdkl5QjlHTjN3SGV0CjRDOUp0Q213V3RZbzJuQ2czMmt3MmdNbzU5VlExajZDbHZOdUdtVVFiTGRCMzQ5Rm1VbXh4bjkvRExTaURZNGgKQnJ4YlRiZFM5NHk5bHNMdmM1MWRiMFJYRytoclF1b3ZMeDdXaWltOE1kc3JaUDEwM2IyT2hYZ3A5MUdkQjA5bQpmRTRVS3l6VG1kTXZPVHlzOG50RzhFOFFXNjRDOVp5cjEyOVRLRUF0cS91QTNlRmZoQzJqaytQTEtXR3ZBSW0xCnFob1UwVEVBRlptVGtWcDUyMGc9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K",
"cert": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURSVENDQWkyZ0F3SUJBZ0lCQXpBTkJna3Foa2lHOXcwQkFRVUZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUyTURRd01Wb1hEVEU1TURZd09ERTJNRFF3TVZvdwpSakVRTUE0R0ExVUVDZ3dIVEc5bloybHVaekVTTUJBR0ExVUVDd3dKVDNCbGJsTm9hV1owTVI0d0hBWURWUVFECkRCVnplWE4wWlcwdWJHOW5aMmx1Wnk1cmFXSmhibUV3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUUNuZ0ltTS9MdGV3ZE1oTXozdEZEUWRqMmZURDdSdFRiZkVHUmRQb3Y1WTZ6YWF0Y1dDZitHSwp0a1ZPbDNGdk1rcGhhQmp1WGNFOXlHNmJqbUhnY3FlYy9wSXB5WlRWSEpRcy9sODlRalZlVFJoUkUwNDJ5dytUCi9SY1RDU0xuR0NvV0kvZE1DZVJXZ21lS24vWS9tOHl1QVowK2Z2TktIWEdOc2oydko3TGZNT0l2VTFJdmN3bE8KQ3FkWkxKMW1acTlDOTNnYUFCOWR2ZWVrSGI1eTRsQWFJQXlUZ2RId0hpTlRadzdBQ2FIUEVXSGV1cStLb3B4Vwp6ZC9NTHRURTZnNmFVQ0o2cUxRcktsb25CMHRYT24wcTA0ZlNoSXBubjJWQ2tQVXZtaVRBT3JhUkpEMlVOOTJjClZwaHRvY3NwQ0NxOWFRS3NYaXQ4cndtekloSUhjOFo3QWdNQkFBR2paakJrTUE0R0ExVWREd0VCL3dRRUF3SUYKb0RBSkJnTlZIUk1FQWpBQU1CMEdBMVVkSlFRV01CUUdDQ3NHQVFVRkJ3TUJCZ2dyQmdFRkJRY0RBakFkQmdOVgpIUTRFRmdRVVBSbUIzVnU1R1Z3dGRpODRCUW4veG9DSGNuc3dDUVlEVlIwakJBSXdBREFOQmdrcWhraUc5dzBCCkFRVUZBQU9DQVFFQXB2OVAyYlhHZkFhVzlIaW9nek4vT1JSL3c2TERRdGMzTS9nN3JJWTN3T2ZoSGtWVVh2aXkKZ2RvZUJTUDhnWHYvTGFBV0VPN2FQem4yaEZiQk1FbGVnbU9ERHRjNGdKTGZYTHNoWWxuaG55RmpybW9OSEowRgpUaEIyMjJ1b2plMFZpM1NTRlVxRHJDcHJmUjkyS0FvK0VkUDE0UkUwd3BGRHVubTllR0x2RWg1c29odkdtSldtClNPa2RoL1hhMUp3YVJ6bkxQcjUvL0pkNXVQNG9aN0Q2eUhIS3F1WHBXR3VtVUFhL2VlMnNMOTlGcGVIc2pFZTcKcVR3dFFEWTYzdG9BMzV2OXRCdkEyeGNsSkdoZkgxYWJRRklwOXdSeVNWWGJkek03cm10MGRHeE1ZbUdXRlB2cwptOHVkR3RyUHB1YjM3Q2lRbS9kNFljMzBFV2xwRFoxSUx3PT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"key": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2Z0lCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktnd2dnU2tBZ0VBQW9JQkFRQ25nSW1NL0x0ZXdkTWgKTXozdEZEUWRqMmZURDdSdFRiZkVHUmRQb3Y1WTZ6YWF0Y1dDZitHS3RrVk9sM0Z2TWtwaGFCanVYY0U5eUc2YgpqbUhnY3FlYy9wSXB5WlRWSEpRcy9sODlRalZlVFJoUkUwNDJ5dytUL1JjVENTTG5HQ29XSS9kTUNlUldnbWVLCm4vWS9tOHl1QVowK2Z2TktIWEdOc2oydko3TGZNT0l2VTFJdmN3bE9DcWRaTEoxbVpxOUM5M2dhQUI5ZHZlZWsKSGI1eTRsQWFJQXlUZ2RId0hpTlRadzdBQ2FIUEVXSGV1cStLb3B4V3pkL01MdFRFNmc2YVVDSjZxTFFyS2xvbgpCMHRYT24wcTA0ZlNoSXBubjJWQ2tQVXZtaVRBT3JhUkpEMlVOOTJjVnBodG9jc3BDQ3E5YVFLc1hpdDhyd216CkloSUhjOFo3QWdNQkFBRUNnZ0VBQTh6djBnb2QrajF0bS9BOHRIZGo0MVBRcUVKUDlyYnZ0M3hXUWhGUnNoVE0KTFdCNGRjaVBCb1d5TnVGS3lwb2FnL0ZIdDRqYkRKclNuNmJ3WTVaV1pVS0ZjSHRKYkRoMHlEeVJUcU9FOEhuUgpWbTJZdGJlUHgrelRMRFJsM1ZaNEJmNlU2dDNDbDQvbjU0WlBvUXZZN2czTlVwQ3cxbnFxaSt1Q2FNbit6ZitWCml0R0w0RnhGZlFVVkdPcExteUhxUlg0V1l1TjlqenFTQ2RsT0Uyd09PSlIybGFBelZSWC9RR1FuQmV6RHNDTGsKUElOMGhwWkVOM0ZHTFg3ZHJiNlhDc0NMT29OanlrN1pEMzNZVFVXVEFSaHZFdWRsTEJBV3FVcyt2MWhmQzdvZgpWdEdWckxlV2sybXI1WGFRckVObFc5RWw0M29DUHFydjNXZlJvSTBlcVFLQmdRRFhMTmdvZzVtVitxMkdUNmQ2CnlhNTU2UFRKQWtsRFpsUmZIRnZNSlZ6R2x5V1VKdCtBa25mMFFuc3ZIL3c1eURXcHVuY3pFZ2FmZm1WZ3ZLNHgKUUsyY29oY0xCcDdXSWxlNGN4Lzh0c1NVS09PTFlJWHFoQ3JTYjdNdjVoalFkMFVMa3BFc003RVNIamxUeWNXRwpWbXRSOG5kazMybmlIV0xsbFAvNEJYV1ZMd0tCZ1FESFNEQ3Jxc3AvcThGT0pLZE9HRDFqdzZkMThLZk41ZFJsCjJxM1JINGhhWnlTZStQcjhuVXFKamdMZkdHMkpuV1c1OXpZTkU4S1Z4YnNHbHMvTlB2dG43TzZNMnc0ZWVVNEIKaVdXSTkvQkVldy9PYTdkV2VGUHBMbmlJd2Y3d0VsczY3ZU5oalJJRlZubXFCTFRTTWMzNko2WnZDYjV5Nnc4YQo3ekEyaGJ6b2RRS0JnUUNEY1M5eVFFRzM3blBGMGk2NndTekY1OXB3VitjdE52dGZSbEdGT1dXZElvT2dKc2J0CklZd0pOL3RMNUR3TmlCSFA1c29kOW1yNVR4cmFweWU1aW9zRFhiaTUwWWVCdzJUV2xlejZVU053WmpIcDJFSzEKNlpySEJkanQ3NTlFNlRiU1JPeE5saDRhdFQvTHdUdmFQanc4eU9MS1NmZkNXclVwRFlYM21rNGpuUUtCZ0RSSwp1Q0xEdlVGRkk0TkswUEFJSWt2RWNuUGJRMTBlK3Q0RFJPc0VHRGFJckM1ZTdSVk9mTm1kZnhsbGdwREMwWWU3CjI5NFFtbFhNdmVmYnVRbUZobGpkN2piZDRGV1V4U2hZRW1HZmxkZVNaekptUjRTVUxJaWFuaHhjczNYeGl5anEKZFdrS3FIam9HU0xJKzEzUW5ka1BnSGZRbWIzeitsRzZBKys2cGpobEFvR0JBSTRyMUtJd0lhUW5tQVAwNkdIRApQWCtLQWpHWW9iM3JVMWdLUE1QcGEycHR6eVE2TzNEanV5UHFYb2FsVXFLd1VwK1htZ0hzenZ5R1lhNFIxTDBCCk9UL0R4OW9Pdm41UTR1eUp3MEd5QnBleVI3cWVrWWF4eWkxeXhQd21QT3BOaW1Lck96d1QzcitZaFdLVWVrUGwKbzd6WFRBenA1OS91Z20yMkhmN3VodWx2Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K"
},
"kind": "Secret",
"metadata": {
"creationTimestamp": null,
"name": "logging-kibana"
},
"type": "Opaque"
},
"state": "present"
}
TASK [openshift_logging_kibana : Set Kibana Proxy secret] **********************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:171
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc replace -f /tmp/logging-kibana-proxy -n logging",
"results": "",
"returncode": 0
},
"state": "present"
}
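Unlike the oc get-based tasks, this one pushes a prepared secret file with oc replace, which overwrites the object in place. oc replace fails if the object does not exist yet, so one common create-or-replace pattern is:

    oc create -f /tmp/logging-kibana-proxy -n logging || \
      oc replace -f /tmp/logging-kibana-proxy -n logging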
TASK [openshift_logging_kibana : Generate Kibana DC template] ******************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:197
changed: [openshift] => {
"changed": true,
"checksum": "4d7335c43b8778c73bfca3b81b4eccfcb884fafc",
"dest": "/tmp/openshift-logging-ansible-cxQHsS/templates/kibana-dc.yaml",
"gid": 0,
"group": "root",
"md5sum": "71e15725f5d0476df5758fddb88b074d",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 3765,
"src": "/root/.ansible/tmp/ansible-tmp-1496937896.41-69231813526599/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_kibana : Set Kibana DC] ********************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:216
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get dc logging-kibana-ops -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "DeploymentConfig",
"metadata": {
"creationTimestamp": "2017-06-08T16:04:57Z",
"generation": 2,
"labels": {
"component": "kibana-ops",
"logging-infra": "kibana",
"provider": "openshift"
},
"name": "logging-kibana-ops",
"namespace": "logging",
"resourceVersion": "1437",
"selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-kibana-ops",
"uid": "379ef175-4c64-11e7-bcff-0eb8998b3e80"
},
"spec": {
"replicas": 1,
"selector": {
"component": "kibana-ops",
"logging-infra": "kibana",
"provider": "openshift"
},
"strategy": {
"activeDeadlineSeconds": 21600,
"resources": {},
"rollingParams": {
"intervalSeconds": 1,
"maxSurge": "25%",
"maxUnavailable": "25%",
"timeoutSeconds": 600,
"updatePeriodSeconds": 1
},
"type": "Rolling"
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"component": "kibana-ops",
"logging-infra": "kibana",
"provider": "openshift"
},
"name": "logging-kibana-ops"
},
"spec": {
"containers": [
{
"env": [
{
"name": "ES_HOST",
"value": "logging-es-ops"
},
{
"name": "ES_PORT",
"value": "9200"
},
{
"name": "KIBANA_MEMORY_LIMIT",
"valueFrom": {
"resourceFieldRef": {
"containerName": "kibana",
"divisor": "0",
"resource": "limits.memory"
}
}
}
],
"image": "172.30.177.244:5000/logging/logging-kibana:latest",
"imagePullPolicy": "Always",
"name": "kibana",
"readinessProbe": {
"exec": {
"command": [
"/usr/share/kibana/probe/readiness.sh"
]
},
"failureThreshold": 3,
"initialDelaySeconds": 5,
"periodSeconds": 5,
"successThreshold": 1,
"timeoutSeconds": 4
},
"resources": {
"limits": {
"memory": "736Mi"
}
},
"terminationMessagePath": "/dev/termination-log",
"terminationMessagePolicy": "File",
"volumeMounts": [
{
"mountPath": "/etc/kibana/keys",
"name": "kibana",
"readOnly": true
}
]
},
{
"env": [
{
"name": "OAP_BACKEND_URL",
"value": "http://localhost:5601"
},
{
"name": "OAP_AUTH_MODE",
"value": "oauth2"
},
{
"name": "OAP_TRANSFORM",
"value": "user_header,token_header"
},
{
"name": "OAP_OAUTH_ID",
"value": "kibana-proxy"
},
{
"name": "OAP_MASTER_URL",
"value": "https://kubernetes.default.svc.cluster.local"
},
{
"name": "OAP_PUBLIC_MASTER_URL",
"value": "https://172.18.15.100:8443"
},
{
"name": "OAP_LOGOUT_REDIRECT",
"value": "https://172.18.15.100:8443/console/logout"
},
{
"name": "OAP_MASTER_CA_FILE",
"value": "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt"
},
{
"name": "OAP_DEBUG",
"value": "False"
},
{
"name": "OAP_OAUTH_SECRET_FILE",
"value": "/secret/oauth-secret"
},
{
"name": "OAP_SERVER_CERT_FILE",
"value": "/secret/server-cert"
},
{
"name": "OAP_SERVER_KEY_FILE",
"value": "/secret/server-key"
},
{
"name": "OAP_SERVER_TLS_FILE",
"value": "/secret/server-tls.json"
},
{
"name": "OAP_SESSION_SECRET_FILE",
"value": "/secret/session-secret"
},
{
"name": "OCP_AUTH_PROXY_MEMORY_LIMIT",
"valueFrom": {
"resourceFieldRef": {
"containerName": "kibana-proxy",
"divisor": "0",
"resource": "limits.memory"
}
}
}
],
"image": "172.30.177.244:5000/logging/logging-auth-proxy:latest",
"imagePullPolicy": "Always",
"name": "kibana-proxy",
"ports": [
{
"containerPort": 3000,
"name": "oaproxy",
"protocol": "TCP"
}
],
"resources": {
"limits": {
"memory": "96Mi"
}
},
"terminationMessagePath": "/dev/termination-log",
"terminationMessagePolicy": "File",
"volumeMounts": [
{
"mountPath": "/secret",
"name": "kibana-proxy",
"readOnly": true
}
]
}
],
"dnsPolicy": "ClusterFirst",
"restartPolicy": "Always",
"schedulerName": "default-scheduler",
"securityContext": {},
"serviceAccount": "aggregated-logging-kibana",
"serviceAccountName": "aggregated-logging-kibana",
"terminationGracePeriodSeconds": 30,
"volumes": [
{
"name": "kibana",
"secret": {
"defaultMode": 420,
"secretName": "logging-kibana"
}
},
{
"name": "kibana-proxy",
"secret": {
"defaultMode": 420,
"secretName": "logging-kibana-proxy"
}
}
]
}
},
"test": false,
"triggers": [
{
"type": "ConfigChange"
}
]
},
"status": {
"availableReplicas": 0,
"conditions": [
{
"lastTransitionTime": "2017-06-08T16:04:57Z",
"lastUpdateTime": "2017-06-08T16:04:57Z",
"message": "Deployment config does not have minimum availability.",
"status": "False",
"type": "Available"
}
],
"details": {
"causes": [
{
"type": "ConfigChange"
}
],
"message": "config change"
},
"latestVersion": 1,
"observedGeneration": 1,
"replicas": 0,
"unavailableReplicas": 0,
"updatedReplicas": 0
}
}
],
"returncode": 0
},
"state": "present"
}
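At this point the DC reports availableReplicas: 0 because the first deployment is still rolling out. Progress can be followed with:

    oc rollout status dc/logging-kibana-ops -n logging
    oc get pods -l component=kibana-ops -n logging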
TASK [openshift_logging_kibana : Delete temp directory] ************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:228
ok: [openshift] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-cxQHsS",
"state": "absent"
}
TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:195
statically included: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml
TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:3
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:7
ok: [openshift] => {
"ansible_facts": {
"curator_version": "3_5"
},
"changed": false
}
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:12
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:15
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Create temp directory for doing work in] *****
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:5
ok: [openshift] => {
"changed": false,
"cmd": [
"mktemp",
"-d",
"/tmp/openshift-logging-ansible-XXXXXX"
],
"delta": "0:00:00.001996",
"end": "2017-06-08 12:04:58.767208",
"rc": 0,
"start": "2017-06-08 12:04:58.765212"
}
STDOUT:
/tmp/openshift-logging-ansible-hsGLSj
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:10
ok: [openshift] => {
"ansible_facts": {
"tempdir": "/tmp/openshift-logging-ansible-hsGLSj"
},
"changed": false
}
TASK [openshift_logging_curator : Create templates subdirectory] ***************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:14
ok: [openshift] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/tmp/openshift-logging-ansible-hsGLSj/templates",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:24
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:32
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get sa aggregated-logging-curator -o json -n logging",
"results": [
{
"apiVersion": "v1",
"imagePullSecrets": [
{
"name": "aggregated-logging-curator-dockercfg-569lv"
}
],
"kind": "ServiceAccount",
"metadata": {
"creationTimestamp": "2017-06-08T16:04:59Z",
"name": "aggregated-logging-curator",
"namespace": "logging",
"resourceVersion": "1457",
"selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-curator",
"uid": "3919a3e9-4c64-11e7-bcff-0eb8998b3e80"
},
"secrets": [
{
"name": "aggregated-logging-curator-token-1fhsb"
},
{
"name": "aggregated-logging-curator-dockercfg-569lv"
}
]
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:41
ok: [openshift] => {
"changed": false,
"checksum": "9008efd9a8892dcc42c28c6dfb6708527880a6d8",
"dest": "/tmp/openshift-logging-ansible-hsGLSj/curator.yml",
"gid": 0,
"group": "root",
"md5sum": "5498c5fd98f3dd06e34b20eb1f55dc12",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 320,
"src": "/root/.ansible/tmp/ansible-tmp-1496937900.05-252327708126502/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:47
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Set Curator configmap] ***********************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:53
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get configmap logging-curator -o json -n logging",
"results": [
{
"apiVersion": "v1",
"data": {
"config.yaml": "# Logging example curator config file\n\n# uncomment and use this to override the defaults from env vars\n#.defaults:\n# delete:\n# days: 30\n# runhour: 0\n# runminute: 0\n\n# to keep ops logs for a different duration:\n#.operations:\n# delete:\n# weeks: 8\n\n# example for a normal project\n#myapp:\n# delete:\n# weeks: 1\n"
},
"kind": "ConfigMap",
"metadata": {
"creationTimestamp": "2017-06-08T16:05:00Z",
"name": "logging-curator",
"namespace": "logging",
"resourceVersion": "1472",
"selfLink": "/api/v1/namespaces/logging/configmaps/logging-curator",
"uid": "39ced5bd-4c64-11e7-bcff-0eb8998b3e80"
}
}
],
"returncode": 0
},
"state": "present"
}
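The stored config.yaml is entirely commented out, so the CURATOR_* environment defaults in the DC below apply. To override retention, edit the ConfigMap and uncomment a stanza, for example keeping one project's logs for a week:

    # oc edit configmap/logging-curator -n logging
    myapp:
      delete:
        weeks: 1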
TASK [openshift_logging_curator : Set Curator secret] **************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:62
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc secrets new logging-curator ca=/etc/origin/logging/ca.crt key=/etc/origin/logging/system.logging.curator.key cert=/etc/origin/logging/system.logging.curator.crt -n logging",
"results": "",
"returncode": 0
},
"state": "present"
}
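oc secrets new builds the secret from the named files; in later OpenShift releases it was deprecated in favor of the generic form, which should be equivalent here:

    oc create secret generic logging-curator \
      --from-file=ca=/etc/origin/logging/ca.crt \
      --from-file=key=/etc/origin/logging/system.logging.curator.key \
      --from-file=cert=/etc/origin/logging/system.logging.curator.crt \
      -n logging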
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:75
ok: [openshift] => {
"ansible_facts": {
"curator_component": "curator",
"curator_name": "logging-curator"
},
"changed": false
}
TASK [openshift_logging_curator : Generate Curator deploymentconfig] ***********
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:81
ok: [openshift] => {
"changed": false,
"checksum": "d5b8bef35ab9349986634609e00a1105eb61a79b",
"dest": "/tmp/openshift-logging-ansible-hsGLSj/templates/curator-dc.yaml",
"gid": 0,
"group": "root",
"md5sum": "c2fc0198f13270311212ec0c37d05c6e",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 2341,
"src": "/root/.ansible/tmp/ansible-tmp-1496937901.73-38939031371016/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_curator : Set Curator DC] ******************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:99
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get dc logging-curator -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "DeploymentConfig",
"metadata": {
"creationTimestamp": "2017-06-08T16:05:02Z",
"generation": 2,
"labels": {
"component": "curator",
"logging-infra": "curator",
"provider": "openshift"
},
"name": "logging-curator",
"namespace": "logging",
"resourceVersion": "1497",
"selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-curator",
"uid": "3ad55f57-4c64-11e7-bcff-0eb8998b3e80"
},
"spec": {
"replicas": 1,
"selector": {
"component": "curator",
"logging-infra": "curator",
"provider": "openshift"
},
"strategy": {
"activeDeadlineSeconds": 21600,
"recreateParams": {
"timeoutSeconds": 600
},
"resources": {},
"rollingParams": {
"intervalSeconds": 1,
"maxSurge": "25%",
"maxUnavailable": "25%",
"timeoutSeconds": 600,
"updatePeriodSeconds": 1
},
"type": "Recreate"
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"component": "curator",
"logging-infra": "curator",
"provider": "openshift"
},
"name": "logging-curator"
},
"spec": {
"containers": [
{
"env": [
{
"name": "K8S_HOST_URL",
"value": "https://kubernetes.default.svc.cluster.local"
},
{
"name": "ES_HOST",
"value": "logging-es"
},
{
"name": "ES_PORT",
"value": "9200"
},
{
"name": "ES_CLIENT_CERT",
"value": "/etc/curator/keys/cert"
},
{
"name": "ES_CLIENT_KEY",
"value": "/etc/curator/keys/key"
},
{
"name": "ES_CA",
"value": "/etc/curator/keys/ca"
},
{
"name": "CURATOR_DEFAULT_DAYS",
"value": "30"
},
{
"name": "CURATOR_RUN_HOUR",
"value": "0"
},
{
"name": "CURATOR_RUN_MINUTE",
"value": "0"
},
{
"name": "CURATOR_RUN_TIMEZONE",
"value": "UTC"
},
{
"name": "CURATOR_SCRIPT_LOG_LEVEL",
"value": "INFO"
},
{
"name": "CURATOR_LOG_LEVEL",
"value": "ERROR"
}
],
"image": "172.30.177.244:5000/logging/logging-curator:latest",
"imagePullPolicy": "Always",
"name": "curator",
"resources": {
"limits": {
"cpu": "100m"
}
},
"terminationMessagePath": "/dev/termination-log",
"terminationMessagePolicy": "File",
"volumeMounts": [
{
"mountPath": "/etc/curator/keys",
"name": "certs",
"readOnly": true
},
{
"mountPath": "/etc/curator/settings",
"name": "config",
"readOnly": true
}
]
}
],
"dnsPolicy": "ClusterFirst",
"restartPolicy": "Always",
"schedulerName": "default-scheduler",
"securityContext": {},
"serviceAccount": "aggregated-logging-curator",
"serviceAccountName": "aggregated-logging-curator",
"terminationGracePeriodSeconds": 30,
"volumes": [
{
"name": "certs",
"secret": {
"defaultMode": 420,
"secretName": "logging-curator"
}
},
{
"configMap": {
"defaultMode": 420,
"name": "logging-curator"
},
"name": "config"
}
]
}
},
"test": false,
"triggers": [
{
"type": "ConfigChange"
}
]
},
"status": {
"availableReplicas": 0,
"conditions": [
{
"lastTransitionTime": "2017-06-08T16:05:02Z",
"lastUpdateTime": "2017-06-08T16:05:02Z",
"message": "Deployment config does not have minimum availability.",
"status": "False",
"type": "Available"
},
{
"lastTransitionTime": "2017-06-08T16:05:02Z",
"lastUpdateTime": "2017-06-08T16:05:02Z",
"message": "replication controller \"logging-curator-1\" is waiting for pod \"logging-curator-1-deploy\" to run",
"status": "Unknown",
"type": "Progressing"
}
],
"details": {
"causes": [
{
"type": "ConfigChange"
}
],
"message": "config change"
},
"latestVersion": 1,
"observedGeneration": 2,
"replicas": 0,
"unavailableReplicas": 0,
"updatedReplicas": 0
}
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_curator : Delete temp directory] ***********************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:109
ok: [openshift] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-hsGLSj",
"state": "absent"
}
TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:207
statically included: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml
TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:3
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:7
ok: [openshift] => {
"ansible_facts": {
"curator_version": "3_5"
},
"changed": false
}
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:12
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:15
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Create temp directory for doing work in] *****
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:5
ok: [openshift] => {
"changed": false,
"cmd": [
"mktemp",
"-d",
"/tmp/openshift-logging-ansible-XXXXXX"
],
"delta": "0:00:00.002037",
"end": "2017-06-08 12:05:05.156100",
"rc": 0,
"start": "2017-06-08 12:05:05.154063"
}
STDOUT:
/tmp/openshift-logging-ansible-YvSIoe
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:10
ok: [openshift] => {
"ansible_facts": {
"tempdir": "/tmp/openshift-logging-ansible-YvSIoe"
},
"changed": false
}
TASK [openshift_logging_curator : Create templates subdirectory] ***************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:14
ok: [openshift] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/tmp/openshift-logging-ansible-YvSIoe/templates",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:24
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:32
ok: [openshift] => {
"changed": false,
"results": {
"cmd": "/bin/oc get sa aggregated-logging-curator -o json -n logging",
"results": [
{
"apiVersion": "v1",
"imagePullSecrets": [
{
"name": "aggregated-logging-curator-dockercfg-569lv"
}
],
"kind": "ServiceAccount",
"metadata": {
"creationTimestamp": "2017-06-08T16:04:59Z",
"name": "aggregated-logging-curator",
"namespace": "logging",
"resourceVersion": "1457",
"selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-curator",
"uid": "3919a3e9-4c64-11e7-bcff-0eb8998b3e80"
},
"secrets": [
{
"name": "aggregated-logging-curator-token-1fhsb"
},
{
"name": "aggregated-logging-curator-dockercfg-569lv"
}
]
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:41
ok: [openshift] => {
"changed": false,
"checksum": "9008efd9a8892dcc42c28c6dfb6708527880a6d8",
"dest": "/tmp/openshift-logging-ansible-YvSIoe/curator.yml",
"gid": 0,
"group": "root",
"md5sum": "5498c5fd98f3dd06e34b20eb1f55dc12",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 320,
"src": "/root/.ansible/tmp/ansible-tmp-1496937905.9-131043884572858/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:47
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Set Curator configmap] ***********************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:53
ok: [openshift] => {
"changed": false,
"results": {
"cmd": "/bin/oc get configmap logging-curator -o json -n logging",
"results": [
{
"apiVersion": "v1",
"data": {
"config.yaml": "# Logging example curator config file\n\n# uncomment and use this to override the defaults from env vars\n#.defaults:\n# delete:\n# days: 30\n# runhour: 0\n# runminute: 0\n\n# to keep ops logs for a different duration:\n#.operations:\n# delete:\n# weeks: 8\n\n# example for a normal project\n#myapp:\n# delete:\n# weeks: 1\n"
},
"kind": "ConfigMap",
"metadata": {
"creationTimestamp": "2017-06-08T16:05:00Z",
"name": "logging-curator",
"namespace": "logging",
"resourceVersion": "1472",
"selfLink": "/api/v1/namespaces/logging/configmaps/logging-curator",
"uid": "39ced5bd-4c64-11e7-bcff-0eb8998b3e80"
}
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_curator : Set Curator secret] **************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:62
ok: [openshift] => {
"changed": false,
"results": {
"apiVersion": "v1",
"data": {
"ca": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUyTURNMU5Gb1hEVEl5TURZd056RTJNRE0xTlZvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQU8vdzcra0pnNU43TW5UbzBFMGd6YTNRLzB4alpUQ2Vpalk1ZmlUWUhVKzAKR2ZNaXFWbHUxUW5NZE13aDFjU245VkhkOGV0V0hMMVpaQVBCcEVIbUYzTmZEYmV0RTdweWhkRm9RVmE5QytNRQppMDhjd3FVeSttQnpuSUJjQy9vU3NIZUdCVDk1YVF6ckNnbXRCdzVkenJnOWZnMDljM3YwSi8vRUxoYXJjRWNCCjRBT3RCejVrVHBjUnFLUDN2MDJVb3RtZVcwZ3FleXg4ZW5CM0VmZWN3K29McENjeXVXNGYvb0RzVHZnQTBMbUsKQWFDaUZDczlzTFNQUS8rVG1NbS91bW9OazZMa0JLSUlGeFFxN3BhYlB5R1lOdnlrVDl5TDNZbk5lN21VNlpVZApQSnBqdS9BOXM0Z3o1TmUvdWpPa0J1OVZEUkZndXBsaE80cnY5TzN5YjE4Q0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFHa1MKcVpkeXd5cGxxK3FTNVVJMUhRWjg3MUhDRGdaMmM5YXcydytqalRxemVzUHhmekV6VTlwV2dJNWJFdTJ4akxvKwpVQUM0VzZoZUhaQWFiQk9nUFRYazVFM2ZTVTZYRmdLV1VEeDR0NDF3TGdFdU1QWmJhZkVVdkl5QjlHTjN3SGV0CjRDOUp0Q213V3RZbzJuQ2czMmt3MmdNbzU5VlExajZDbHZOdUdtVVFiTGRCMzQ5Rm1VbXh4bjkvRExTaURZNGgKQnJ4YlRiZFM5NHk5bHNMdmM1MWRiMFJYRytoclF1b3ZMeDdXaWltOE1kc3JaUDEwM2IyT2hYZ3A5MUdkQjA5bQpmRTRVS3l6VG1kTXZPVHlzOG50RzhFOFFXNjRDOVp5cjEyOVRLRUF0cS91QTNlRmZoQzJqaytQTEtXR3ZBSW0xCnFob1UwVEVBRlptVGtWcDUyMGc9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K",
"cert": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURSakNDQWk2Z0F3SUJBZ0lCQkRBTkJna3Foa2lHOXcwQkFRVUZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUyTURRd01Wb1hEVEU1TURZd09ERTJNRFF3TVZvdwpSekVRTUE0R0ExVUVDZ3dIVEc5bloybHVaekVTTUJBR0ExVUVDd3dKVDNCbGJsTm9hV1owTVI4d0hRWURWUVFECkRCWnplWE4wWlcwdWJHOW5aMmx1Wnk1amRYSmhkRzl5TUlJQklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEEKTUlJQkNnS0NBUUVBdDJkNmpWZ25RYTJ3d05yUjZXemROd2EyVnh4MHZFN2d0ZDdoeFBadkYzbVc0TTh6RERtSQo1UVcvTTdYQWhBbTN0ai9sRVhGZFMwc0xtZ3RyM2FFWXFEVzBNVTMwOTlROEo0SG1KaUsyQ3JPVy9aUVN6OXc3CkoxVzdxTFFTYXhyYk9YQjYwOHJOVGdXcEVOVHloZzRSdFNrVFRyYzlUaHp5NmRVUG84SjVEbFNnSFp6MHVkZUoKMkhTQzIrR1RkRTJwVnkzQmlCVFFkZy9heWI0YXlXWXJsV0VXbENkYVgwc1M1SERlQmdpemVuWG5mcGpJaEw4VwppL2hxNUlqclB1OXFMN2dxRnYrQXN4RUJyUGJaYWl3ckNjV0NKR0hMYTB0dkRTUTNoK2wwZEt6dlNHMTNaQVFBCnorUVZxdUFIdWN3NnEzaUFIVEpDV3Y5MHZpREVnQ1JZMHdJREFRQUJvMll3WkRBT0JnTlZIUThCQWY4RUJBTUMKQmFBd0NRWURWUjBUQkFJd0FEQWRCZ05WSFNVRUZqQVVCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3SFFZRApWUjBPQkJZRUZCOTl5c2pJWU1KajhCQWh5UXhFZWMvZUJWZWlNQWtHQTFVZEl3UUNNQUF3RFFZSktvWklodmNOCkFRRUZCUUFEZ2dFQkFDQWJJOVk3anNWSERWTklDcXpxUTFhTkZDL0h2WkQzQmtWUmZlR2wvQ1NmamQ5MGc3cXYKVGdBVmVSWU80dzNpRVo3ckRET1pYUy9qbUgveGc1SjRMUzdDUUpETW5OdWQ3c3l6QytsMll0RGJ0azk4bmJKKwo2SW5BYlFubWprekxBYTc5YWhCTUt2cXBYTmNRWi9sdTgyeHp5dEdzVkVsKzRJK1JXMy84RzR6bktzWHN6T3JzClYvdlk2MCt4M0syNUVrSExtc1gyeGVrMlJWN3ZXWTZvVDJWSDRyd1MwUVRyVWwxazJJaEpkVzkzK3c3NllBQzEKUjYyN1locXZuaWhlL3RTdlA0ZUJpV2VUUmwzTjIvY3lnems3NFZ2WThXT2VlVnNOVzFtbFJURm5oY04ya3UrOApjdDlZNUpKcVFIQkZHZ0szRXBmbmVzTmZpclltTmRWNlh4UT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"key": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQzNaM3FOV0NkQnJiREEKMnRIcGJOMDNCclpYSEhTOFR1QzEzdUhFOW04WGVaYmd6ek1NT1lqbEJiOHp0Y0NFQ2JlMlArVVJjVjFMU3d1YQpDMnZkb1Jpb05iUXhUZlQzMUR3bmdlWW1JcllLczViOWxCTFAzRHNuVmJ1b3RCSnJHdHM1Y0hyVHlzMU9CYWtRCjFQS0dEaEcxS1JOT3R6MU9IUExwMVErandua09WS0FkblBTNTE0bllkSUxiNFpOMFRhbFhMY0dJRk5CMkQ5ckoKdmhySlppdVZZUmFVSjFwZlN4TGtjTjRHQ0xONmRlZCttTWlFdnhhTCtHcmtpT3MrNzJvdnVDb1cvNEN6RVFHcwo5dGxxTENzSnhZSWtZY3RyUzI4TkpEZUg2WFIwck85SWJYZGtCQURQNUJXcTRBZTV6RHFyZUlBZE1rSmEvM1MrCklNU0FKRmpUQWdNQkFBRUNnZ0VBUUxOd3VJeHd6T3JKZ0QreDdmZEoweSs3QStBMkRuaUZMYVFEWWNqT1NDbk0KNEdJNFVBZThFNCtEZEdxYUQvcWRwTjNGb2ZHc1IvT2hsQ2FUVnd5TjlrbDcyQ2o0VXJXOHd2TkFqaE9ReFJXMgpIcDh1UThZZDJES1drVjExWWZPSGd5djdLTnhTc1VPdDhSSXV2THdaNHg4cWdJcUVSaHBRVjgyaVJ0TkVnWmtnCkRJVkNDejdoa1k2S3dWellteDNkb0VuZkF5VllYaElueURjaDZNS1VHMGkzdExIdmJrTlRLb2NRUGUzZTh2dG4KUkNnUE1TOU5JUGN1TTZxS0UzQnJaWktQWFUwWDBXRjNhQUpBTlJlMmFhNi9KWFd6OXVWTlFKTndVdDZVd0lELwpkVzlCVG44bEk2Q1h4N2sya0xqMy9hMUdSRmRwZ2xReWJxTXBzY2NQZ1FLQmdRRGtNZm96Vm1QTGxkS0JaelZqCnBtcCtwYWlRTngxWnZxMzgvL2s4TkpqVGdRU3hDZmhXY2RMOGovVEVML0twd2dRV1ZqS1VxOEt0SGRBd0pnVGgKYTFsNnVxRGVtWlR5TEVaU3FFTU1pZ1FxUE1xUW5SNzloZTZKK1NMOXdnckxyTUROYzJwK256OEduY0JMNDBuWgp2Rk43WEM1VzRTUWo0M0wxT09GbUVsenNaUUtCZ1FETndGbHREYkgyYW41Q2VibmxJVys4c2lWYSs0YXBLM3dGCmlQQ0VPc0Q2TlA1M281YWNHNjJURTRlc3BoaU5oU0ZGanZ2ZUxQcE45NVp2RUhIK0ovbmVOTTIzdk9FeTgwb2sKdWdLTDgra3BBRXpmTytuRHFINVZLREdVdzhHRkxtK1dWWGdBYVZrNDRYZWpFT3ZFcFQvMWNxbjJYdnRkRXg5OQppbExEdGcyUTF3S0JnUUM3eER2aUhrWERjU1pqZ0M4NExla1hDT3lvcmhFaWxYbkk0aitNd0tLMzlrT01BVzRzCmcwQmV2WDFrS1NmOWN6MjF1M3VuUmFYTW1PbURsT1VUTHlmVGl0bHdFT08wV2FZTHdaUmFMdjY2V29jN2MyM2EKb21mamMvTE01dXZIS0FFRmpPQ21EQmJxTk1mVUtkUE1xOS90YzY1Vm9yQUFMOW1aMmhIdDBWSlEyUUtCZ0NmRgpVb1BkN2RSdjZVLzRMNUsyejk1dG9RTGhNN3BuaVdXWWZzVlFoaFRYUVlqbXVpb2F2NFo2Yy9OUGQ5ZURNRmM5CnVEbk4vK1FxUTQzY1kreDQxYmU2QlRuSzdNem9jaUIvUkJiQll3Umw1bllRSXJPTFVrQXJzTW5NdlE5OVZseDgKME9GamU3ZWUvWDBydkNjYXVpNDdwOWJUclVya2MyWFU0d0twb2gycEFvR0FROUdvNjJ6MjlDaFZzOHFIU0xaKwo2SmRrekgyTjdXY3htaTNub1ZYeFNGMU9qZzA5RXR2cVo2VHVjSkxGaVFCVzhLS09pYW1ub3ZQN1J4MVBFMlRVClhwSy9halN4NVRxVXJVaEQ0MWJEZ1pBcDRlQ2w0b0VRYURNamVNZ2Z2aFR2SDBuK21OSmNxakVpUGQwN04wQ2IKTTZ2cEtRRjFBWmw0ck4wMDh0d3ptdkk9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K"
},
"kind": "Secret",
"metadata": {
"creationTimestamp": null,
"name": "logging-curator"
},
"type": "Opaque"
},
"state": "present"
}
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:75
ok: [openshift] => {
"ansible_facts": {
"curator_component": "curator-ops",
"curator_name": "logging-curator-ops"
},
"changed": false
}
TASK [openshift_logging_curator : Generate Curator deploymentconfig] ***********
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:81
ok: [openshift] => {
"changed": false,
"checksum": "cc1d5d57c9f49e2a5355004a3b5e53c499566f7b",
"dest": "/tmp/openshift-logging-ansible-YvSIoe/templates/curator-dc.yaml",
"gid": 0,
"group": "root",
"md5sum": "b0ed67398ba6d5d6832142f7b1f9680f",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 2365,
"src": "/root/.ansible/tmp/ansible-tmp-1496937907.24-48190385580853/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_curator : Set Curator DC] ******************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:99
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get dc logging-curator-ops -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "DeploymentConfig",
"metadata": {
"creationTimestamp": "2017-06-08T16:05:08Z",
"generation": 2,
"labels": {
"component": "curator-ops",
"logging-infra": "curator",
"provider": "openshift"
},
"name": "logging-curator-ops",
"namespace": "logging",
"resourceVersion": "1513",
"selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-curator-ops",
"uid": "3e196576-4c64-11e7-bcff-0eb8998b3e80"
},
"spec": {
"replicas": 1,
"selector": {
"component": "curator-ops",
"logging-infra": "curator",
"provider": "openshift"
},
"strategy": {
"activeDeadlineSeconds": 21600,
"recreateParams": {
"timeoutSeconds": 600
},
"resources": {},
"rollingParams": {
"intervalSeconds": 1,
"maxSurge": "25%",
"maxUnavailable": "25%",
"timeoutSeconds": 600,
"updatePeriodSeconds": 1
},
"type": "Recreate"
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"component": "curator-ops",
"logging-infra": "curator",
"provider": "openshift"
},
"name": "logging-curator-ops"
},
"spec": {
"containers": [
{
"env": [
{
"name": "K8S_HOST_URL",
"value": "https://kubernetes.default.svc.cluster.local"
},
{
"name": "ES_HOST",
"value": "logging-es-ops"
},
{
"name": "ES_PORT",
"value": "9200"
},
{
"name": "ES_CLIENT_CERT",
"value": "/etc/curator/keys/cert"
},
{
"name": "ES_CLIENT_KEY",
"value": "/etc/curator/keys/key"
},
{
"name": "ES_CA",
"value": "/etc/curator/keys/ca"
},
{
"name": "CURATOR_DEFAULT_DAYS",
"value": "30"
},
{
"name": "CURATOR_RUN_HOUR",
"value": "0"
},
{
"name": "CURATOR_RUN_MINUTE",
"value": "0"
},
{
"name": "CURATOR_RUN_TIMEZONE",
"value": "UTC"
},
{
"name": "CURATOR_SCRIPT_LOG_LEVEL",
"value": "INFO"
},
{
"name": "CURATOR_LOG_LEVEL",
"value": "ERROR"
}
],
"image": "172.30.177.244:5000/logging/logging-curator:latest",
"imagePullPolicy": "Always",
"name": "curator",
"resources": {
"limits": {
"cpu": "100m"
}
},
"terminationMessagePath": "/dev/termination-log",
"terminationMessagePolicy": "File",
"volumeMounts": [
{
"mountPath": "/etc/curator/keys",
"name": "certs",
"readOnly": true
},
{
"mountPath": "/etc/curator/settings",
"name": "config",
"readOnly": true
}
]
}
],
"dnsPolicy": "ClusterFirst",
"restartPolicy": "Always",
"schedulerName": "default-scheduler",
"securityContext": {},
"serviceAccount": "aggregated-logging-curator",
"serviceAccountName": "aggregated-logging-curator",
"terminationGracePeriodSeconds": 30,
"volumes": [
{
"name": "certs",
"secret": {
"defaultMode": 420,
"secretName": "logging-curator"
}
},
{
"configMap": {
"defaultMode": 420,
"name": "logging-curator"
},
"name": "config"
}
]
}
},
"test": false,
"triggers": [
{
"type": "ConfigChange"
}
]
},
"status": {
"availableReplicas": 0,
"conditions": [
{
"lastTransitionTime": "2017-06-08T16:05:08Z",
"lastUpdateTime": "2017-06-08T16:05:08Z",
"message": "Deployment config does not have minimum availability.",
"status": "False",
"type": "Available"
},
{
"lastTransitionTime": "2017-06-08T16:05:08Z",
"lastUpdateTime": "2017-06-08T16:05:08Z",
"message": "replication controller \"logging-curator-ops-1\" is waiting for pod \"logging-curator-ops-1-deploy\" to run",
"status": "Unknown",
"type": "Progressing"
}
],
"details": {
"causes": [
{
"type": "ConfigChange"
}
],
"message": "config change"
},
"latestVersion": 1,
"observedGeneration": 2,
"replicas": 0,
"unavailableReplicas": 0,
"updatedReplicas": 0
}
}
],
"returncode": 0
},
"state": "present"
}
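[NOTE] The new DC reports "Deployment config does not have minimum availability." only because the first replication controller is still waiting for its deploy pod; that is normal immediately after creation. A manual way to watch the rollout (a suggested follow-up, not something the playbook runs):

  # Block until the latest curator-ops rollout completes or fails
  oc rollout status dc/logging-curator-ops -n logging
  # Or list the curator-ops pods directly
  oc get pods -l component=curator-ops -n logging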
TASK [openshift_logging_curator : Delete temp directory] ***********************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:109
ok: [openshift] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-YvSIoe",
"state": "absent"
}
TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:226
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:241
statically included: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:2
[WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_fluentd_nodeselector.keys()
| count }} > 1
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
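[NOTE] The warning above is Ansible flagging that the role wraps its when: condition in templating delimiters. The conditional still evaluates, but the clean form is the bare expression, e.g. when: openshift_logging_fluentd_nodeselector.keys() | count > 1, with no {{ }} around it.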
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:6
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:10
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:14
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:3
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:7
ok: [openshift] => {
"ansible_facts": {
"fluentd_version": "3_5"
},
"changed": false
}
TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:12
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:15
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:20
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:26
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : Create temp directory for doing work in] *****
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:33
ok: [openshift] => {
"changed": false,
"cmd": [
"mktemp",
"-d",
"/tmp/openshift-logging-ansible-XXXXXX"
],
"delta": "0:00:00.002102",
"end": "2017-06-08 12:05:11.387477",
"rc": 0,
"start": "2017-06-08 12:05:11.385375"
}
STDOUT:
/tmp/openshift-logging-ansible-zpioXD
TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:38
ok: [openshift] => {
"ansible_facts": {
"tempdir": "/tmp/openshift-logging-ansible-zpioXD"
},
"changed": false
}
TASK [openshift_logging_fluentd : Create templates subdirectory] ***************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:41
ok: [openshift] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/tmp/openshift-logging-ansible-zpioXD/templates",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [openshift_logging_fluentd : Create Fluentd service account] **************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:51
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : Create Fluentd service account] **************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:59
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get sa aggregated-logging-fluentd -o json -n logging",
"results": [
{
"apiVersion": "v1",
"imagePullSecrets": [
{
"name": "aggregated-logging-fluentd-dockercfg-cdlfm"
}
],
"kind": "ServiceAccount",
"metadata": {
"creationTimestamp": "2017-06-08T16:05:12Z",
"name": "aggregated-logging-fluentd",
"namespace": "logging",
"resourceVersion": "1522",
"selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-fluentd",
"uid": "408de88e-4c64-11e7-bcff-0eb8998b3e80"
},
"secrets": [
{
"name": "aggregated-logging-fluentd-token-dc6xv"
},
{
"name": "aggregated-logging-fluentd-dockercfg-cdlfm"
}
]
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_fluentd : Set privileged permissions for Fluentd] ******
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:68
changed: [openshift] => {
"changed": true,
"present": "present",
"results": {
"cmd": "/bin/oc adm policy add-scc-to-user privileged system:serviceaccount:logging:aggregated-logging-fluentd -n logging",
"results": "",
"returncode": 0
}
}
TASK [openshift_logging_fluentd : Set cluster-reader permissions for Fluentd] ***
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:77
changed: [openshift] => {
"changed": true,
"present": "present",
"results": {
"cmd": "/bin/oc adm policy add-cluster-role-to-user cluster-reader system:serviceaccount:logging:aggregated-logging-fluentd -n logging",
"results": "",
"returncode": 0
}
}
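[NOTE] The two tasks above wrap plain oc adm policy calls: the privileged SCC lets the fluentd pods run with hostPath mounts and privileged: true (visible in the daemonset below), and cluster-reader lets the collector read metadata from every namespace. A quick way to confirm the grants (suggested commands, not run by the playbook):

  # The privileged SCC's user list should now include the fluentd service account
  oc get scc privileged -o jsonpath='{.users}'
  # Spot-check that the service account appears
  oc describe scc privileged | grep aggregated-logging-fluentd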
TASK [openshift_logging_fluentd : template] ************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:86
ok: [openshift] => {
"changed": false,
"checksum": "a8c8596f5fc2c5dd7c8d33d244af17a2555be086",
"dest": "/tmp/openshift-logging-ansible-zpioXD/fluent.conf",
"gid": 0,
"group": "root",
"md5sum": "579698b48ffce6276ee0e8d5ac71a338",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 1301,
"src": "/root/.ansible/tmp/ansible-tmp-1496937913.68-155271357239350/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:94
ok: [openshift] => {
"changed": false,
"checksum": "b3e75eddc4a0765edc77da092384c0c6f95440e1",
"dest": "/tmp/openshift-logging-ansible-zpioXD/fluentd-throttle-config.yaml",
"gid": 0,
"group": "root",
"md5sum": "25871b8e0a9bedc166a6029872a6c336",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 133,
"src": "/root/.ansible/tmp/ansible-tmp-1496937914.04-6911054529736/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:100
ok: [openshift] => {
"changed": false,
"checksum": "a3aa36da13f3108aa4ad5b98d4866007b44e9798",
"dest": "/tmp/openshift-logging-ansible-zpioXD/secure-forward.conf",
"gid": 0,
"group": "root",
"md5sum": "1084b00c427f4fa48dfc66d6ad6555d4",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 563,
"src": "/root/.ansible/tmp/ansible-tmp-1496937914.31-256915462039424/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:107
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:113
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:119
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : Set Fluentd configmap] ***********************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:125
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get configmap logging-fluentd -o json -n logging",
"results": [
{
"apiVersion": "v1",
"data": {
"fluent.conf": "# This file is the fluentd configuration entrypoint. Edit with care.\n\n@include configs.d/openshift/system.conf\n\n# In each section below, pre- and post- includes don't include anything initially;\n# they exist to enable future additions to openshift conf as needed.\n\n## sources\n## ordered so that syslog always runs last...\n@include configs.d/openshift/input-pre-*.conf\n@include configs.d/dynamic/input-docker-*.conf\n@include configs.d/dynamic/input-syslog-*.conf\n@include configs.d/openshift/input-post-*.conf\n##\n\n<label @INGRESS>\n## filters\n @include configs.d/openshift/filter-pre-*.conf\n @include configs.d/openshift/filter-retag-journal.conf\n @include configs.d/openshift/filter-k8s-meta.conf\n @include configs.d/openshift/filter-kibana-transform.conf\n @include configs.d/openshift/filter-k8s-flatten-hash.conf\n @include configs.d/openshift/filter-k8s-record-transform.conf\n @include configs.d/openshift/filter-syslog-record-transform.conf\n @include configs.d/openshift/filter-viaq-data-model.conf\n @include configs.d/openshift/filter-post-*.conf\n##\n\n## matches\n @include configs.d/openshift/output-pre-*.conf\n @include configs.d/openshift/output-operations.conf\n @include configs.d/openshift/output-applications.conf\n # no post - applications.conf matches everything left\n##\n</label>\n",
"secure-forward.conf": "# @type secure_forward\n\n# self_hostname ${HOSTNAME}\n# shared_key <SECRET_STRING>\n\n# secure yes\n# enable_strict_verification yes\n\n# ca_cert_path /etc/fluent/keys/your_ca_cert\n# ca_private_key_path /etc/fluent/keys/your_private_key\n # for private CA secret key\n# ca_private_key_passphrase passphrase\n\n# <server>\n # or IP\n# host server.fqdn.example.com\n# port 24284\n# </server>\n# <server>\n # ip address to connect\n# host 203.0.113.8\n # specify hostlabel for FQDN verification if ipaddress is used for host\n# hostlabel server.fqdn.example.com\n# </server>\n",
"throttle-config.yaml": "# Logging example fluentd throttling config file\n\n#example-project:\n# read_lines_limit: 10\n#\n#.operations:\n# read_lines_limit: 100\n"
},
"kind": "ConfigMap",
"metadata": {
"creationTimestamp": "2017-06-08T16:05:15Z",
"name": "logging-fluentd",
"namespace": "logging",
"resourceVersion": "1534",
"selfLink": "/api/v1/namespaces/logging/configmaps/logging-fluentd",
"uid": "424c0c96-4c64-11e7-bcff-0eb8998b3e80"
}
}
],
"returncode": 0
},
"state": "present"
}
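[NOTE] The configmap above carries three files: fluent.conf (the pipeline entrypoint), secure-forward.conf (a commented-out template for forwarding to an external fluentd), and throttle-config.yaml (per-project read limits). The daemonset created below mounts it at /etc/fluent/configs.d/user. To inspect or adjust the rendered config after the fact (suggested commands):

  # Dump the rendered fluentd configuration
  oc get configmap logging-fluentd -n logging -o yaml
  # Edit in place; fluentd only rereads its config at startup,
  # so delete the fluentd pods afterwards to pick up changes
  oc edit configmap logging-fluentd -n logging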
TASK [openshift_logging_fluentd : Set logging-fluentd secret] ******************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:137
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc secrets new logging-fluentd ca=/etc/origin/logging/ca.crt key=/etc/origin/logging/system.logging.fluentd.key cert=/etc/origin/logging/system.logging.fluentd.crt -n logging",
"results": "",
"returncode": 0
},
"state": "present"
}
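[NOTE] "oc secrets new" with key=path arguments is legacy syntax from this playbook generation and has since been deprecated. Assuming a current oc client, the equivalent command would be:

  # Same secret, expressed with the newer generic-secret syntax
  oc create secret generic logging-fluentd \
      --from-file=ca=/etc/origin/logging/ca.crt \
      --from-file=key=/etc/origin/logging/system.logging.fluentd.key \
      --from-file=cert=/etc/origin/logging/system.logging.fluentd.crt \
      -n logging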
TASK [openshift_logging_fluentd : Generate logging-fluentd daemonset definition] ***
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:154
ok: [openshift] => {
"changed": false,
"checksum": "cbd6d76ec1c9c3fc6d3ad139d3dc8e0a8794efe2",
"dest": "/tmp/openshift-logging-ansible-zpioXD/templates/logging-fluentd.yaml",
"gid": 0,
"group": "root",
"md5sum": "d27d3acbe8e6a4632ad4a38d47ace69b",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 3415,
"src": "/root/.ansible/tmp/ansible-tmp-1496937915.92-256422526990607/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_fluentd : Set logging-fluentd daemonset] ***************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:172
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get daemonset logging-fluentd -o json -n logging",
"results": [
{
"apiVersion": "extensions/v1beta1",
"kind": "DaemonSet",
"metadata": {
"creationTimestamp": "2017-06-08T16:05:16Z",
"generation": 1,
"labels": {
"component": "fluentd",
"logging-infra": "fluentd",
"provider": "openshift"
},
"name": "logging-fluentd",
"namespace": "logging",
"resourceVersion": "1542",
"selfLink": "/apis/extensions/v1beta1/namespaces/logging/daemonsets/logging-fluentd",
"uid": "43429c1b-4c64-11e7-bcff-0eb8998b3e80"
},
"spec": {
"selector": {
"matchLabels": {
"component": "fluentd",
"provider": "openshift"
}
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"component": "fluentd",
"logging-infra": "fluentd",
"provider": "openshift"
},
"name": "fluentd-elasticsearch"
},
"spec": {
"containers": [
{
"env": [
{
"name": "K8S_HOST_URL",
"value": "https://kubernetes.default.svc.cluster.local"
},
{
"name": "ES_HOST",
"value": "logging-es"
},
{
"name": "ES_PORT",
"value": "9200"
},
{
"name": "ES_CLIENT_CERT",
"value": "/etc/fluent/keys/cert"
},
{
"name": "ES_CLIENT_KEY",
"value": "/etc/fluent/keys/key"
},
{
"name": "ES_CA",
"value": "/etc/fluent/keys/ca"
},
{
"name": "OPS_HOST",
"value": "logging-es-ops"
},
{
"name": "OPS_PORT",
"value": "9200"
},
{
"name": "OPS_CLIENT_CERT",
"value": "/etc/fluent/keys/cert"
},
{
"name": "OPS_CLIENT_KEY",
"value": "/etc/fluent/keys/key"
},
{
"name": "OPS_CA",
"value": "/etc/fluent/keys/ca"
},
{
"name": "ES_COPY",
"value": "false"
},
{
"name": "USE_JOURNAL",
"value": "true"
},
{
"name": "JOURNAL_SOURCE"
},
{
"name": "JOURNAL_READ_FROM_HEAD",
"value": "false"
}
],
"image": "172.30.177.244:5000/logging/logging-fluentd:latest",
"imagePullPolicy": "Always",
"name": "fluentd-elasticsearch",
"resources": {
"limits": {
"cpu": "100m",
"memory": "512Mi"
}
},
"securityContext": {
"privileged": true
},
"terminationMessagePath": "/dev/termination-log",
"terminationMessagePolicy": "File",
"volumeMounts": [
{
"mountPath": "/run/log/journal",
"name": "runlogjournal"
},
{
"mountPath": "/var/log",
"name": "varlog"
},
{
"mountPath": "/var/lib/docker/containers",
"name": "varlibdockercontainers",
"readOnly": true
},
{
"mountPath": "/etc/fluent/configs.d/user",
"name": "config",
"readOnly": true
},
{
"mountPath": "/etc/fluent/keys",
"name": "certs",
"readOnly": true
},
{
"mountPath": "/etc/docker-hostname",
"name": "dockerhostname",
"readOnly": true
},
{
"mountPath": "/etc/localtime",
"name": "localtime",
"readOnly": true
},
{
"mountPath": "/etc/sysconfig/docker",
"name": "dockercfg",
"readOnly": true
},
{
"mountPath": "/etc/docker",
"name": "dockerdaemoncfg",
"readOnly": true
}
]
}
],
"dnsPolicy": "ClusterFirst",
"nodeSelector": {
"logging-infra-fluentd": "true"
},
"restartPolicy": "Always",
"schedulerName": "default-scheduler",
"securityContext": {},
"serviceAccount": "aggregated-logging-fluentd",
"serviceAccountName": "aggregated-logging-fluentd",
"terminationGracePeriodSeconds": 30,
"volumes": [
{
"hostPath": {
"path": "/run/log/journal"
},
"name": "runlogjournal"
},
{
"hostPath": {
"path": "/var/log"
},
"name": "varlog"
},
{
"hostPath": {
"path": "/var/lib/docker/containers"
},
"name": "varlibdockercontainers"
},
{
"configMap": {
"defaultMode": 420,
"name": "logging-fluentd"
},
"name": "config"
},
{
"name": "certs",
"secret": {
"defaultMode": 420,
"secretName": "logging-fluentd"
}
},
{
"hostPath": {
"path": "/etc/hostname"
},
"name": "dockerhostname"
},
{
"hostPath": {
"path": "/etc/localtime"
},
"name": "localtime"
},
{
"hostPath": {
"path": "/etc/sysconfig/docker"
},
"name": "dockercfg"
},
{
"hostPath": {
"path": "/etc/docker"
},
"name": "dockerdaemoncfg"
}
]
}
},
"templateGeneration": 1,
"updateStrategy": {
"rollingUpdate": {
"maxUnavailable": 1
},
"type": "RollingUpdate"
}
},
"status": {
"currentNumberScheduled": 0,
"desiredNumberScheduled": 0,
"numberMisscheduled": 0,
"numberReady": 0,
"observedGeneration": 1
}
}
],
"returncode": 0
},
"state": "present"
}
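[NOTE] desiredNumberScheduled and currentNumberScheduled are both 0 here because the daemonset's nodeSelector (logging-infra-fluentd=true) does not match any node yet; the labeling task further below fixes that. Suggested checks:

  # DESIRED stays 0 until at least one node carries the selector label
  oc get daemonset logging-fluentd -n logging
  # List the nodes the daemonset will target once labeled
  oc get nodes -l logging-infra-fluentd=true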
TASK [openshift_logging_fluentd : Retrieve list of Fluentd hosts] **************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:183
ok: [openshift] => {
"changed": false,
"results": {
"cmd": "/bin/oc get node -o json -n default",
"results": [
{
"apiVersion": "v1",
"items": [
{
"apiVersion": "v1",
"kind": "Node",
"metadata": {
"annotations": {
"volumes.kubernetes.io/controller-managed-attach-detach": "true"
},
"creationTimestamp": "2017-06-08T15:47:29Z",
"labels": {
"beta.kubernetes.io/arch": "amd64",
"beta.kubernetes.io/os": "linux",
"kubernetes.io/hostname": "172.18.15.100"
},
"name": "172.18.15.100",
"namespace": "",
"resourceVersion": "1524",
"selfLink": "/api/v1/nodes/172.18.15.100",
"uid": "c7191a4f-4c61-11e7-bcff-0eb8998b3e80"
},
"spec": {
"externalID": "172.18.15.100",
"providerID": "aws:////i-07c3bd3167a9d0006"
},
"status": {
"addresses": [
{
"address": "172.18.15.100",
"type": "LegacyHostIP"
},
{
"address": "172.18.15.100",
"type": "InternalIP"
},
{
"address": "172.18.15.100",
"type": "Hostname"
}
],
"allocatable": {
"cpu": "4",
"memory": "7129288Ki",
"pods": "40"
},
"capacity": {
"cpu": "4",
"memory": "7231688Ki",
"pods": "40"
},
"conditions": [
{
"lastHeartbeatTime": "2017-06-08T16:05:13Z",
"lastTransitionTime": "2017-06-08T15:47:29Z",
"message": "kubelet has sufficient disk space available",
"reason": "KubeletHasSufficientDisk",
"status": "False",
"type": "OutOfDisk"
},
{
"lastHeartbeatTime": "2017-06-08T16:05:13Z",
"lastTransitionTime": "2017-06-08T15:47:29Z",
"message": "kubelet has sufficient memory available",
"reason": "KubeletHasSufficientMemory",
"status": "False",
"type": "MemoryPressure"
},
{
"lastHeartbeatTime": "2017-06-08T16:05:13Z",
"lastTransitionTime": "2017-06-08T15:47:29Z",
"message": "kubelet has no disk pressure",
"reason": "KubeletHasNoDiskPressure",
"status": "False",
"type": "DiskPressure"
},
{
"lastHeartbeatTime": "2017-06-08T16:05:13Z",
"lastTransitionTime": "2017-06-08T15:47:29Z",
"message": "kubelet is posting ready status",
"reason": "KubeletReady",
"status": "True",
"type": "Ready"
}
],
"daemonEndpoints": {
"kubeletEndpoint": {
"Port": 10250
}
},
"images": [
{
"names": [
"openshift/origin-federation:6acabdc",
"openshift/origin-federation:latest"
],
"sizeBytes": 1205885664
},
{
"names": [
"docker.io/openshift/origin-docker-registry@sha256:54f022c67562440fb5cc73421f32624747cd7836d45b9bb1f3e144eec437be12",
"docker.io/openshift/origin-docker-registry:latest"
],
"sizeBytes": 1100553091
},
{
"names": [
"openshift/origin-docker-registry:latest"
],
"sizeBytes": 1100164272
},
{
"names": [
"openshift/origin-gitserver:6acabdc",
"openshift/origin-gitserver:latest"
],
"sizeBytes": 1086520226
},
{
"names": [
"openshift/node:6acabdc",
"openshift/node:latest"
],
"sizeBytes": 1051721928
},
{
"names": [
"openshift/origin-keepalived-ipfailover:6acabdc",
"openshift/origin-keepalived-ipfailover:latest"
],
"sizeBytes": 1028529711
},
{
"names": [
"openshift/origin-haproxy-router:6acabdc",
"openshift/origin-haproxy-router:latest"
],
"sizeBytes": 1022758742
},
{
"names": [
"docker.io/openshift/origin-deployer@sha256:c05ebb0b18b04c9273781a4fc464ef266c9ff7923fc938af2b21f41ad320ed04",
"docker.io/openshift/origin-deployer:latest"
],
"sizeBytes": 1002342434
},
{
"names": [
"openshift/origin-recycler:6acabdc",
"openshift/origin-recycler:latest"
],
"sizeBytes": 1001728427
},
{
"names": [
"openshift/origin:6acabdc",
"openshift/origin:latest"
],
"sizeBytes": 1001728427
},
{
"names": [
"openshift/origin-f5-router:6acabdc",
"openshift/origin-f5-router:latest"
],
"sizeBytes": 1001728427
},
{
"names": [
"openshift/origin-docker-builder:latest"
],
"sizeBytes": 1001728427
},
{
"names": [
"openshift/origin-sti-builder:6acabdc",
"openshift/origin-sti-builder:latest"
],
"sizeBytes": 1001728427
},
{
"names": [
"rhel7.1:latest"
],
"sizeBytes": 765301508
},
{
"names": [
"openshift/dind-master:latest"
],
"sizeBytes": 731456758
},
{
"names": [
"openshift/dind-node:latest"
],
"sizeBytes": 731453034
},
{
"names": [
"172.30.177.244:5000/logging/logging-auth-proxy@sha256:4c57755cfd876d0bb7101dcfa7453c72a1e74ec1041486a8af7cecbda55f2ceb",
"172.30.177.244:5000/logging/logging-auth-proxy:latest"
],
"sizeBytes": 715535989
},
{
"names": [
"<none>@<none>",
"<none>:<none>"
],
"sizeBytes": 709532011
},
{
"names": [
"docker.io/node@sha256:46db0dd19955beb87b841c30a6b9812ba626473283e84117d1c016deee5949a9",
"docker.io/node:0.10.36"
],
"sizeBytes": 697128386
},
{
"names": [
"docker.io/openshift/origin-logging-kibana@sha256:70ead525ed596b73301e8df3ac229e33dd7f8431ec1233b37e96544c556530e9",
"docker.io/openshift/origin-logging-kibana:latest"
],
"sizeBytes": 682851528
},
{
"names": [
"172.30.177.244:5000/logging/logging-kibana@sha256:f02156feeaf400679442dba34bea131273f8f7815c2baa668528b5fdb6ab412b",
"172.30.177.244:5000/logging/logging-kibana:latest"
],
"sizeBytes": 682851513
},
{
"names": [
"openshift/dind:latest"
],
"sizeBytes": 640650210
},
{
"names": [
"172.30.177.244:5000/logging/logging-elasticsearch@sha256:ecf3fd5516359a8540b75266a46d3f446e0213f76c62dff14169aea8f3a0aa00",
"172.30.177.244:5000/logging/logging-elasticsearch:latest"
],
"sizeBytes": 623379762
},
{
"names": [
"172.30.177.244:5000/logging/logging-fluentd@sha256:e6212cea9d547520f83fb98e3eeb26b49a792c9571aa6287cbd5da7487eb2cc9",
"172.30.177.244:5000/logging/logging-fluentd:latest"
],
"sizeBytes": 472183172
},
{
"names": [
"172.30.177.244:5000/logging/logging-curator@sha256:238a72c8194d625df478aa6df6016023ee45686663d9cb6a4085008be4baaab9",
"172.30.177.244:5000/logging/logging-curator:latest"
],
"sizeBytes": 418288251
},
{
"names": [
"docker.io/openshift/base-centos7@sha256:aea292a3bddba020cde0ee83e6a45807931eb607c164ec6a3674f67039d8cd7c",
"docker.io/openshift/base-centos7:latest"
],
"sizeBytes": 383049978
},
{
"names": [
"rhel7.2:latest"
],
"sizeBytes": 377493597
},
{
"names": [
"openshift/origin-egress-router:6acabdc",
"openshift/origin-egress-router:latest"
],
"sizeBytes": 364745713
},
{
"names": [
"openshift/origin-base:latest"
],
"sizeBytes": 363070172
},
{
"names": [
"<none>@<none>",
"<none>:<none>"
],
"sizeBytes": 363024702
},
{
"names": [
"docker.io/fedora@sha256:69281ddd7b2600e5f2b17f1e12d7fba25207f459204fb2d15884f8432c479136",
"docker.io/fedora:25"
],
"sizeBytes": 230864375
},
{
"names": [
"docker.io/openshift/origin-logging-curator@sha256:e820338ca7fb0addfaec25d80d40a49f5ea25b24ff056ab6adbb42dd9eec94b4",
"docker.io/openshift/origin-logging-curator:latest"
],
"sizeBytes": 224977691
},
{
"names": [
"rhel7.3:latest",
"rhel7:latest"
],
"sizeBytes": 219121266
},
{
"names": [
"openshift/origin-pod:latest"
],
"sizeBytes": 213199843
},
{
"names": [
"registry.access.redhat.com/rhel7.2@sha256:98e6ca5d226c26e31a95cd67716afe22833c943e1926a21daf1a030906a02249",
"registry.access.redhat.com/rhel7.2:latest"
],
"sizeBytes": 201376319
},
{
"names": [
"registry.access.redhat.com/rhel7.3@sha256:1e232401d8e0ba53b36b757b4712fbcbd1dab9c21db039c45a84871a74e89e68",
"registry.access.redhat.com/rhel7.3:latest"
],
"sizeBytes": 192693772
},
{
"names": [
"docker.io/centos@sha256:bba1de7c9d900a898e3cadbae040dfe8a633c06bc104a0df76ae24483e03c077"
],
"sizeBytes": 192548999
},
{
"names": [
"openshift/origin-source:latest"
],
"sizeBytes": 192548894
},
{
"names": [
"docker.io/centos@sha256:aebf12af704307dfa0079b3babdca8d7e8ff6564696882bcb5d11f1d461f9ee9",
"docker.io/centos:7",
"docker.io/centos:centos7"
],
"sizeBytes": 192548537
},
{
"names": [
"registry.access.redhat.com/rhel7.1@sha256:1bc5a4c43bbb29a5a96a61896ff696933be3502e2f5fdc4cde02d9e101731fdd",
"registry.access.redhat.com/rhel7.1:latest"
],
"sizeBytes": 158229901
},
{
"names": [
"openshift/hello-openshift:6acabdc",
"openshift/hello-openshift:latest"
],
"sizeBytes": 5643318
}
],
"nodeInfo": {
"architecture": "amd64",
"bootID": "f356f0aa-5fd2-40b2-b778-184f4281b072",
"containerRuntimeVersion": "docker://1.12.6",
"kernelVersion": "3.10.0-327.22.2.el7.x86_64",
"kubeProxyVersion": "v1.6.1+5115d708d7",
"kubeletVersion": "v1.6.1+5115d708d7",
"machineID": "f9370ed252a14f73b014c1301a9b6d1b",
"operatingSystem": "linux",
"osImage": "Red Hat Enterprise Linux Server 7.3 (Maipo)",
"systemUUID": "EC2FC043-C186-0CEF-630D-6E601003422F"
}
}
}
],
"kind": "List",
"metadata": {},
"resourceVersion": "",
"selfLink": ""
}
],
"returncode": 0
},
"state": "list"
}
TASK [openshift_logging_fluentd : Set openshift_logging_fluentd_hosts] *********
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:190
ok: [openshift] => {
"ansible_facts": {
"openshift_logging_fluentd_hosts": [
"172.18.15.100"
]
},
"changed": false
}
TASK [openshift_logging_fluentd : include] *************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:195
included: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml for openshift
TASK [openshift_logging_fluentd : Label 172.18.15.100 for Fluentd deployment] ***
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml:2
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc label node 172.18.15.100 logging-infra-fluentd=true --overwrite",
"results": "",
"returncode": 0
},
"state": "add"
}
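[NOTE] The label applied above matches the daemonset's nodeSelector, so the scheduler should now place a fluentd pod on 172.18.15.100; the 0.5 s sleep that follows just gives the controller time to react. To verify placement, or to take a node back out of collection (suggested commands):

  # The fluentd pod should be scheduled onto the labeled node
  oc get pods -l component=fluentd -n logging -o wide
  # Removing the label (trailing dash) unschedules fluentd from that node
  oc label node 172.18.15.100 logging-infra-fluentd-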
TASK [openshift_logging_fluentd : command] *************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml:10
changed: [openshift -> 127.0.0.1] => {
"changed": true,
"cmd": [
"sleep",
"0.5"
],
"delta": "0:00:00.502166",
"end": "2017-06-08 12:05:18.862995",
"rc": 0,
"start": "2017-06-08 12:05:18.360829"
}
TASK [openshift_logging_fluentd : Delete temp directory] ***********************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:202
ok: [openshift] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-zpioXD",
"state": "absent"
}
TASK [openshift_logging : include] *********************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:253
included: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging/tasks/update_master_config.yaml for openshift
TASK [openshift_logging : include] *********************************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging/tasks/main.yaml:36
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging : Cleaning up local temp dir] **************************
task path: /tmp/tmp.0IAaIPAewh/openhift-ansible/roles/openshift_logging/tasks/main.yaml:40
ok: [openshift -> 127.0.0.1] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-BJYZzU",
"state": "absent"
}
META: ran handlers
META: ran handlers
PLAY [Update Master configs] ***************************************************
skipping: no hosts matched
PLAY RECAP *********************************************************************
localhost : ok=2 changed=0 unreachable=0 failed=0
openshift : ok=207 changed=70 unreachable=0 failed=0
/data/src/github.com/openshift/origin-aggregated-logging
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:170: executing 'oc get pods -l component=es' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
FAILURE after 179.696s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:170: executing 'oc get pods -l component=es' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s: the command timed out
Standard output from the command:
NAME                                      READY     STATUS              RESTARTS   AGE
logging-es-data-master-ofjzdurb-1-vbfhs   0/1       Pending             0          1s
... Pending, repeated through AGE 3s
logging-es-data-master-ofjzdurb-1-vbfhs   0/1       ContainerCreating   0          3s
... ContainerCreating, repeated every second through AGE 27s
Standard error from the command:
No resources found.
... repeated 372 times
[ERROR] PID 4258: hack/lib/cmd.sh:617: `return "${return_code}"` exited with status 1.
[INFO] Stack Trace:
[INFO] 1: hack/lib/cmd.sh:617: `return "${return_code}"`
[INFO] 2: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:170: os::cmd::try_until_text
[INFO] Exiting with code 1.
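[NOTE] The PLAY RECAP above shows failed=0, so the Ansible install itself succeeded; the failure is the harness timing out while the Elasticsearch pod sat in ContainerCreating for the whole 180 s window. A pod stuck in ContainerCreating is typically blocked on image pulls or volume setup. Suggested triage on a live cluster:

  # The Events section at the bottom usually names the blocking condition
  oc describe pod logging-es-data-master-ofjzdurb-1-vbfhs -n logging
  # Cluster events around the failure, most recent last
  oc get events -n logging --sort-by='.lastTimestamp'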
/data/src/github.com/openshift/origin-aggregated-logging/hack/lib/log/system.sh: line 31: 4609 Terminated sar -A -o "${binary_logfile}" 1 86400 > /dev/null 2> "${stderr_logfile}"
[INFO] [CLEANUP] Beginning cleanup routines...
[INFO] [CLEANUP] Dumping cluster events to /tmp/origin-aggregated-logging/artifacts/events.txt
[INFO] [CLEANUP] Dumping etcd contents to /tmp/origin-aggregated-logging/artifacts/etcd
[WARNING] No compiled `etcdhelper` binary was found. Attempting to build one using:
[WARNING] $ hack/build-go.sh tools/etcdhelper
++ Building go targets for linux/amd64: tools/etcdhelper
/data/src/github.com/openshift/origin-aggregated-logging/../origin/hack/build-go.sh took 250 seconds
2017-06-08 12:12:38.240927 I | warning: ignoring ServerName for user-provided CA for backwards compatibility is deprecated
[INFO] [CLEANUP] Dumping container logs to /tmp/origin-aggregated-logging/logs/containers
[INFO] [CLEANUP] Truncating log files over 200M
[INFO] [CLEANUP] Stopping docker containers
[INFO] [CLEANUP] Removing docker containers
Error: No such image, container or task: 6a18a3bd1afd
json: cannot unmarshal array into Go value of type types.ContainerJSON
Error: No such image, container or task: c79521163af0
json: cannot unmarshal array into Go value of type types.ContainerJSON
Error: No such image, container or task: ead9ed415ef1
json: cannot unmarshal array into Go value of type types.ContainerJSON
Error response from daemon: You cannot remove a running container 86f0909959e88a0d78a4ffebfa608923077a4a4b480a9d7d832546297c78ef34. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container 4fd23c956ee091cf9fb25dca7a9f48d60a875163c6ab09d77e6868626adc63cd. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container a12441c374e9ce35237ef6b06763a539ef10ff5b26a9546b750a73f2f77a4954. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container 02c84a117245337cf01f5c157a94f63ce5ec366d97fde24f23f0b6ee3348dd74. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container 6cfad8a3e02b59711330e6b383f8335ae0e9cd88c0bed94937fa06acbcdb2b7d. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container 55f0e08822e86132009b7ef3fc466d6f78ecdd27b95b8662c061f0ced7851c35. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container af9308eef318fc01548bee79a53441749e4e573aa443ce4a4f0090fdf81d8222. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container 26986ebb74c6bc5d3229d096a011e846203e5b449fb7afa34f3e5f7140764563. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container ef7d9139cf00a10f9b4d35d582a3c60852c02dc67871ae26c198496a088b5122. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container b2470a1e9cb02173c258c4a23510d06241b511376cab8243f7c24464481c9b42. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container 5b67a2e5391c12315c545a19ec9bf607e416a9b0493720541661c12ab6ff5f52. Stop the container before attempting removal or use -f
Error response from daemon: You cannot remove a running container 189c9a31e676d2a0ea712a2bafc166a7a50bf73150a17d8935c7e3c2dc7b31c8. Stop the container before attempting removal or use -f
[INFO] [CLEANUP] Killing child processes
[INFO] [CLEANUP] Pruning etcd data directory
[ERROR] /data/src/github.com/openshift/origin-aggregated-logging/logging.sh exited with code 1 after 00h 34m 00s
Error while running ssh/sudo command:
set -e
pushd /data/src/github.com/openshift//origin-aggregated-logging/hack/testing >/dev/null
export PATH=$GOPATH/bin:$PATH
echo '***************************************************'
echo 'Running GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh...'
time GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh
echo 'Finished GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh'
echo '***************************************************'
popd >/dev/null
The SSH command responded with a non-zero exit status. Vagrant assumes that this
means the command failed. The output for this command should be in the log above.
Please read the output to determine what went wrong.
==> openshiftdev: Downloading logs
==> openshiftdev: Downloading artifacts from '/var/log/yum.log' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace@3/origin/artifacts/yum.log'
==> openshiftdev: Downloading artifacts from '/var/log/secure' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace@3/origin/artifacts/secure'
==> openshiftdev: Downloading artifacts from '/var/log/audit/audit.log' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace@3/origin/artifacts/audit.log'
==> openshiftdev: Downloading artifacts from '/tmp/origin-aggregated-logging/' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace@3/origin/artifacts'
Build step 'Execute shell' marked build as failure
[description-setter] Could not determine description.
[PostBuildScript] - Execution post build scripts.
[workspace@3] $ /bin/sh -xe /tmp/hudson5871445125180850656.sh
+ INSTANCE_NAME=origin_logging-rhel7-1629
+ pushd origin
~/jobs/test-origin-aggregated-logging/workspace@3/origin ~/jobs/test-origin-aggregated-logging/workspace@3
+ rc=0
+ '[' -f .vagrant-openshift.json ']'
++ /usr/bin/vagrant ssh -c 'sudo ausearch -m avc'
+ ausearchresult='<no matches>'
+ rc=1
+ '[' '<no matches>' = '<no matches>' ']'
+ rc=0
+ /usr/bin/vagrant destroy -f
==> openshiftdev: Terminating the instance...
==> openshiftdev: Running cleanup tasks for 'shell' provisioner...
+ popd
~/jobs/test-origin-aggregated-logging/workspace@3
+ exit 0
[BFA] Scanning build for known causes...
[BFA] Found failure cause(s):
[BFA] Command Failure from category failure
[BFA] Done. 0s
Finished: FAILURE