Console Output
[... earlier console output truncated; 273 KB skipped ...]
"unavailableReplicas": 0,
"updatedReplicas": 0
}
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_kibana : Delete temp directory] ************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:228
ok: [openshift] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-HUyAQM",
"state": "absent"
}
TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:166
statically included: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/determine_version.yaml
TASK [openshift_logging_kibana : fail] *****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/determine_version.yaml:3
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/determine_version.yaml:7
ok: [openshift] => {
"ansible_facts": {
"kibana_version": "3_5"
},
"changed": false
}
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/determine_version.yaml:12
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : fail] *****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/determine_version.yaml:15
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : Create temp directory for doing work in] ******
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:7
ok: [openshift] => {
"changed": false,
"cmd": [
"mktemp",
"-d",
"/tmp/openshift-logging-ansible-XXXXXX"
],
"delta": "0:00:00.004224",
"end": "2017-06-08 12:00:59.209370",
"rc": 0,
"start": "2017-06-08 12:00:59.205146"
}
STDOUT:
/tmp/openshift-logging-ansible-7vUEC3
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:12
ok: [openshift] => {
"ansible_facts": {
"tempdir": "/tmp/openshift-logging-ansible-7vUEC3"
},
"changed": false
}
TASK [openshift_logging_kibana : Create templates subdirectory] ****************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:16
ok: [openshift] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/tmp/openshift-logging-ansible-7vUEC3/templates",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [openshift_logging_kibana : Create Kibana service account] ****************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:26
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : Create Kibana service account] ****************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:34
ok: [openshift] => {
"changed": false,
"results": {
"cmd": "/bin/oc get sa aggregated-logging-kibana -o json -n logging",
"results": [
{
"apiVersion": "v1",
"imagePullSecrets": [
{
"name": "aggregated-logging-kibana-dockercfg-sx7gd"
}
],
"kind": "ServiceAccount",
"metadata": {
"creationTimestamp": "2017-06-08T16:00:48Z",
"name": "aggregated-logging-kibana",
"namespace": "logging",
"resourceVersion": "1525",
"selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-kibana",
"uid": "a3b2f554-4c63-11e7-94aa-0e1649350dc2"
},
"secrets": [
{
"name": "aggregated-logging-kibana-token-q016z"
},
{
"name": "aggregated-logging-kibana-dockercfg-sx7gd"
}
]
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:42
ok: [openshift] => {
"ansible_facts": {
"kibana_component": "kibana-ops",
"kibana_name": "logging-kibana-ops"
},
"changed": false
}
TASK [openshift_logging_kibana : Retrieving the cert to use when generating secrets for the logging components] ***
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:46
ok: [openshift] => (item={u'name': u'ca_file', u'file': u'ca.crt'}) => {
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUxTlRrMU1Gb1hEVEl5TURZd056RTFOVGsxTVZvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQU9relNqcnBVaVRaRytrUUh5UFNtOFFVcnBJTzFMaE1XRUdiZjF5bERXZ00KdDdJVkFKZ05FQXcwRy8wK1pVSFpVQUN1T1M2NEtaZkE1NnhHMTIrL24vYjRtR2Z1QXhjakxBZ2xhNmRadmVqdApEZTdLSnducVBVNXordysrQWRhc3BBS3kyNlJyZCtWVHllbk5hZiswMjdRdDR4bjNETnhLcjFuOXFJcjN3cnZWCnZYZGZVU215RmJjY0N2ZnBpRjZqZE1aL1BhR2VEYXhFelpSUDJrL2liVEIvSTlxTmhESE1xcUxsQXZERmkzZlMKU0JxMTdWbEh4WHJPeENyQm4vNzVyOE9DZVk5a25pTXB1N0NmVFY3Nm85eU81SnlnVWg3TldTQmY5a2w0cHVIMQpLV1htcmVZSWdyR3FxakVFQ2Qxa0cyRVZDMVgvZ1FWZEJHbjBqMkk4d1BjQ0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFBaE8KbE5uS1RJNjRhWE8yOEVYa1pwcTZtMkdwY2xQTnpzM0dDVGE0RWYvMkJCa3pSd0ttNlgvN2NpZzh1S0lCNDE3Ugp1V1Jia2kvaHBGVGJYakhaRTJDOHEwSEpMVlVJKzdPQkcvS0cyOG5venpPNzMwZ0p4Q1o5NWZZNldsUjV4SURLCkJVeE9xYWhhRWlRTEx6MEtCUDBudUNxNlZCZWRIVEJPY1JGRDJlSXp6RlMvTFZqS2lqZ1MwZFZsb1VSV3JnZGYKdG1LSnhZYlZDOW9sK0MvdzFqYnp1cnFHZ1dRQ24wZGlOZ05HZ3I4TXVYRTZmQ0hMcnJSMVVGUTNXN0R6OUZ2SwpHTGNRcHJEQ3hUbUo0eTk5cy85aUEyUDhDbGQvWXMrVlB6WlB6UmVZcVRkdkVjQXpHb0drdVRiQXhQbmUyUEJICm5wMDM3WkY0UGFwZFdudGdxR3M9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K",
"encoding": "base64",
"item": {
"file": "ca.crt",
"name": "ca_file"
},
"source": "/etc/origin/logging/ca.crt"
}
ok: [openshift] => (item={u'name': u'kibana_internal_key', u'file': u'kibana-internal.key'}) => {
"changed": false,
"content": "LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBd0I5Y20zaG1ONENzMzVHL2pLZVBiS3hESk5ZUG1xSTQ2MmJWUXJQb3VvWGduSVpTCis3aHlyMGp6bTJMcDhTRjVCcEduaHYzWkcweXlwYkdIN0ZOVG8zNWRacnMvSEE5YnB5ZWxob1F5V0ZuTEw0U0UKSE1PN1pxNVVsdXBqQmducUk5YUVkK1krRjN5ZC9SQ3FuTFJhdDY1MDlFVzRLa3lFa1pTMitXeFVCYkF1R1pudwpqcTJaZm93emEvUFpvYXVWWDFGeCtJWnZHbE0yWDU1eHNsRnBiWHFMa3poSXlIOFZwMnl0d2ZlcXZkOGFRdFBJCmh1ZWhLUDArMXBqMDJIWVFJUDA3bnVqU3hPK0NMU2lrVVJkMHdWYlZuQUk0SHovcFU3MUNZUThsRHFzTmFpT2YKTzZnd0o5SC9NYVZ5ZVVSamtBTzJWMkFjYVBlY2NxK2lDN2FVV3dJREFRQUJBb0lCQUREaFNjeDhhM1UvbGJ3ago5dG93WDN0RXNLaVVsYysvNmo5cUlHUWlKZG9lNmJDcE5EKzdBK0s0NnRIajdxVmM0TS9kQ3dSN1hWdG12aVVOCjhBa2VnaThjbldMZnpRUzBtNXNCcVVsNkpOejVxNHBoYXNOdXdTVTB3V3pNSVhtTjJEWmFBOFlGbkZLWmNCRE0KeHJ1cjMxRFFZQTB1RjljYk5MZGRZTmhBeVhmUWxEQUo4Z2piWWN4ZmVudERJSU13SjFDSENYSWExdklvUXdabwpTWjdHOUN4VFFsNHBlUFVHbjF4ZjJPaEY2ZUlXM1E3WEYvV1FZME9iQmY1SjhrQ2pjcThDbldnMGxoamlVaFJBCmxlaFg3L1lHbmdjMzJPdjZKd1hzOEJyckM3c3k2bjlsMy9TOEU4a1pJWnRhNDlmSW1pSHZRTEQ0U1YvWEN4UjgKZTdrZi95RUNnWUVBejI2ajgwcDlzOEN1Tk1Ca0J2TzhvdnVQQ1dqa3ViWVgzU21KOWpVRXduR0xPUUFNUUUvdwp0L05BcmhSQU1pSDlKbVAvTmdCWWNFaURpUEExcXdhblNWczJaY3J4SUtQUmQ5dGdDa1cwVHZJVlZqMFBZS29ZCkZBV2QyWkY0OWhPNEV4T0xIYndXVXp4a2NEdUNpc2I0NXY2TEU3dTg0akpLWjVLOG9uUGpNSU1DZ1lFQTdSc1IKZUhMTytrbS9EMnl1VVdaNEd1R08rYmxMeHBKbFZqanlQb1ZPRkxXZ0xtbGFLdkpZTVovcThWWTRwcU5OV0tkWApab05jVEN6T3JuckROUVcwenlYOXE2K2tVck9rYllYNWRBdlpxNGx6ZktQNmkvdlpOWkdkdkVYNUkvdk1pYnVWCmFoY3lxalNHcWwvNVZ6NG45ZUdmNWpDRXN5b2NlUVVNb3BiUkZVa0NnWUEvQkhmZWc3VG9sUkxYaDlOYm9WU2YKbHhqL1hOU1A3dGdWSW5kOVN1SWxTR1ZwYmJCTElYNGFCRmFVRENic2xCTGFST3JWdHdrbkk0Q0NhNmVDUzhVcQpyZ0U2cjRyTnhiYnZXTUEybnJLR2dWa21GK3JDRFNxL2VtMVlHYS9MNG5XN3BlWlBwRUtNQ3Y3Z2NkUFk0VlhnCnAxZ05LSzNiY2pmVWUybS9XTUdlalFLQmdRRGhWalJJVUhROGtoR3VTdzl2OVA1NExaMS8zNFlRZGRreEZIWEUKelZQamdxbDA4bExyTmQ1emF4UVJ3R3Vla3R4VFFOWmphcnd3K1BTRUJjKzNlSERaM2JVemtYMk55NFNkUWhKTgpJMlgvREdaaE1rWk8rMDczQmlqdVlXSGh2TkFxcGNmZVI2V3k5TEIzQXpjb25yM0RoR1kra2lYTFVGNDI5WUdJCmsrU3BZUUtCZ0ZIRkJ4VDVxOW9XbUtBd0VMa1d6Z05XU0NqYlFrSjQ0RVROYjlqeEFsMGlyMHlwVGtXVmNDZDMKSFdxMVlzOEJ5bFdNaWEzcTEvSEhaUEQrNGRBSUs2UitSVEMvK1MvNitpUTBTcm1aU2R5S05rT3RZbjdETlBicAordWxTTk1TV2xuUmtsKy8xWTRpeVNrWVE4Y1plUFdtbVc3RStpYm9jU1RRQjZFS0hVQndrCi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg==",
"encoding": "base64",
"item": {
"file": "kibana-internal.key",
"name": "kibana_internal_key"
},
"source": "/etc/origin/logging/kibana-internal.key"
}
ok: [openshift] => (item={u'name': u'kibana_internal_cert', u'file': u'kibana-internal.crt'}) => {
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURUakNDQWphZ0F3SUJBZ0lCQWpBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUxTlRrMU1sb1hEVEU1TURZd09ERTFOVGsxTTFvdwpGakVVTUJJR0ExVUVBeE1MSUd0cFltRnVZUzF2Y0hNd2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUJEd0F3CmdnRUtBb0lCQVFEQUgxeWJlR1kzZ0t6ZmtiK01wNDlzckVNazFnK2FvampyWnRWQ3MraTZoZUNjaGxMN3VIS3YKU1BPYll1bnhJWGtHa2FlRy9ka2JUTEtsc1lmc1UxT2pmbDFtdXo4Y0QxdW5KNldHaERKWVdjc3ZoSVFjdzd0bQpybFNXNm1NR0Nlb2oxb1IzNWo0WGZKMzlFS3FjdEZxM3JuVDBSYmdxVElTUmxMYjViRlFGc0M0Wm1mQ09yWmwrCmpETnI4OW1ocTVWZlVYSDRobThhVXpaZm5uR3lVV2x0ZW91VE9FaklmeFduYkszQjk2cTkzeHBDMDhpRzU2RW8KL1Q3V21QVFlkaEFnL1R1ZTZOTEU3NEl0S0tSUkYzVEJWdFdjQWpnZlArbFR2VUpoRHlVT3F3MXFJNTg3cURBbgowZjh4cFhKNVJHT1FBN1pYWUJ4bzk1eHlyNklMdHBSYkFnTUJBQUdqZ1o0d2dac3dEZ1lEVlIwUEFRSC9CQVFECkFnV2dNQk1HQTFVZEpRUU1NQW9HQ0NzR0FRVUZCd01CTUF3R0ExVWRFd0VCL3dRQ01BQXdaZ1lEVlIwUkJGOHcKWFlJTElHdHBZbUZ1WVMxdmNIT0NMQ0JyYVdKaGJtRXRiM0J6TG5KdmRYUmxjaTVrWldaaGRXeDBMbk4yWXk1agpiSFZ6ZEdWeUxteHZZMkZzZ2hnZ2EybGlZVzVoTGpFeU55NHdMakF1TVM1NGFYQXVhVytDQm10cFltRnVZVEFOCkJna3Foa2lHOXcwQkFRc0ZBQU9DQVFFQVZla3NNSVBmSGZhbElUTk5FL2NvYzM3a2lCc2xkMTh6Tzc5cXZOZTIKZnV1WHhjOHd0WmpWcGNkZlAwOVRSS1hSSUhyNC9xSDJUVWpTWTNYajc0STRQQmNobURZMnpwK3pIME1oZTR6ZAo3bjEza0c1aTd5aFFiQlRuMTlQaWNpS3lRdGNidnM2TElGVjF4MVplWHh1WW5MdGRMS0s0MnkzbXlHcmFMRmd4CjYxcVYvbFZGTVpwcGVRQTFSVWFrWGEvNTFNcnlCd2pQY3lnR2ovSjNoc29vcUh0WUVvVVR0R1NjaWRCQyt4aW8KbkplckNRVm1LbTRZaXZIcm9PaFpVYzJmeEVVOGR6RXJkbFZaeWF0TjNxNXFWM0hkTjZYUGQ3dDhhOEx5ckNqbwpibmRMeVJ5bnNHVW1PMFFSZTVrUUdjQkI1UVpMNngyK2Y1cnRSOGFJUVkwaTNRPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQotLS0tLUJFR0lOIENFUlRJRklDQVRFLS0tLS0KTUlJQzJqQ0NBY0tnQXdJQkFnSUJBVEFOQmdrcWhraUc5dzBCQVFzRkFEQWVNUnd3R2dZRFZRUURFeE5zYjJkbgphVzVuTFhOcFoyNWxjaTEwWlhOME1CNFhEVEUzTURZd09ERTFOVGsxTUZvWERUSXlNRFl3TnpFMU5UazFNVm93CkhqRWNNQm9HQTFVRUF4TVRiRzluWjJsdVp5MXphV2R1WlhJdGRHVnpkRENDQVNJd0RRWUpLb1pJaHZjTkFRRUIKQlFBRGdnRVBBRENDQVFvQ2dnRUJBT2t6U2pycFVpVFpHK2tRSHlQU204UVVycElPMUxoTVdFR2JmMXlsRFdnTQp0N0lWQUpnTkVBdzBHLzArWlVIWlVBQ3VPUzY0S1pmQTU2eEcxMisvbi9iNG1HZnVBeGNqTEFnbGE2ZFp2ZWp0CkRlN0tKd25xUFU1eit3KytBZGFzcEFLeTI2UnJkK1ZUeWVuTmFmKzAyN1F0NHhuM0ROeEtyMW45cUlyM3dydlYKdlhkZlVTbXlGYmNjQ3ZmcGlGNmpkTVovUGFHZURheEV6WlJQMmsvaWJUQi9JOXFOaERITXFxTGxBdkRGaTNmUwpTQnExN1ZsSHhYck94Q3JCbi83NXI4T0NlWTlrbmlNcHU3Q2ZUVjc2bzl5TzVKeWdVaDdOV1NCZjlrbDRwdUgxCktXWG1yZVlJZ3JHcXFqRUVDZDFrRzJFVkMxWC9nUVZkQkduMGoySTh3UGNDQXdFQUFhTWpNQ0V3RGdZRFZSMFAKQVFIL0JBUURBZ0trTUE4R0ExVWRFd0VCL3dRRk1BTUJBZjh3RFFZSktvWklodmNOQVFFTEJRQURnZ0VCQUFoTwpsTm5LVEk2NGFYTzI4RVhrWnBxNm0yR3BjbFBOenMzR0NUYTRFZi8yQkJrelJ3S202WC83Y2lnOHVLSUI0MTdSCnVXUmJraS9ocEZUYlhqSFpFMkM4cTBISkxWVUkrN09CRy9LRzI4bm96ek83MzBnSnhDWjk1Zlk2V2xSNXhJREsKQlV4T3FhaGFFaVFMTHowS0JQMG51Q3E2VkJlZEhUQk9jUkZEMmVJenpGUy9MVmpLaWpnUzBkVmxvVVJXcmdkZgp0bUtKeFliVkM5b2wrQy93MWpienVycUdnV1FDbjBkaU5nTkdncjhNdVhFNmZDSExyclIxVUZRM1c3RHo5RnZLCkdMY1FwckRDeFRtSjR5OTlzLzlpQTJQOENsZC9ZcytWUHpaUHpSZVlxVGR2RWNBekdvR2t1VGJBeFBuZTJQQkgKbnAwMzdaRjRQYXBkV250Z3FHcz0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": {
"file": "kibana-internal.crt",
"name": "kibana_internal_cert"
},
"source": "/etc/origin/logging/kibana-internal.crt"
}
ok: [openshift] => (item={u'name': u'server_tls', u'file': u'server-tls.json'}) => {
"changed": false,
"content": "Ly8gU2VlIGZvciBhdmFpbGFibGUgb3B0aW9uczogaHR0cHM6Ly9ub2RlanMub3JnL2FwaS90bHMuaHRtbCN0bHNfdGxzX2NyZWF0ZXNlcnZlcl9vcHRpb25zX3NlY3VyZWNvbm5lY3Rpb25saXN0ZW5lcgp0bHNfb3B0aW9ucyA9IHsKCWNpcGhlcnM6ICdrRUVDREg6K2tFRUNESCtTSEE6a0VESDora0VESCtTSEE6K2tFREgrQ0FNRUxMSUE6a0VDREg6K2tFQ0RIK1NIQTprUlNBOitrUlNBK1NIQTora1JTQStDQU1FTExJQTohYU5VTEw6IWVOVUxMOiFTU0x2MjohUkM0OiFERVM6IUVYUDohU0VFRDohSURFQTorM0RFUycsCglob25vckNpcGhlck9yZGVyOiB0cnVlCn0K",
"encoding": "base64",
"item": {
"file": "server-tls.json",
"name": "server_tls"
},
"source": "/etc/origin/logging/server-tls.json"
}
TASK [openshift_logging_kibana : Set logging-kibana-ops service] ***************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:57
changed: [openshift] => {
"changed": true,
"results": {
"clusterip": "172.30.27.219",
"cmd": "/bin/oc get service logging-kibana-ops -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "Service",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:02Z",
"name": "logging-kibana-ops",
"namespace": "logging",
"resourceVersion": "1601",
"selfLink": "/api/v1/namespaces/logging/services/logging-kibana-ops",
"uid": "abe9555f-4c63-11e7-94aa-0e1649350dc2"
},
"spec": {
"clusterIP": "172.30.27.219",
"ports": [
{
"port": 443,
"protocol": "TCP",
"targetPort": "oaproxy"
}
],
"selector": {
"component": "kibana-ops",
"provider": "openshift"
},
"sessionAffinity": "None",
"type": "ClusterIP"
},
"status": {
"loadBalancer": {}
}
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:74
[WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_kibana_key | trim | length
> 0 }}
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
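This warning (repeated for the cert and CA variants in the next two tasks) comes from Ansible's conditional handling: when: is already evaluated as a raw Jinja2 expression, so wrapping it in {{ }} is redundant, and newer Ansible releases reject it outright. A minimal sketch of the fix, assuming a set_fact task shaped like the others in this role (only the variable name is taken from the warning):

    # Triggers the warning: templating delimiters inside when
    #   when: "{{ openshift_logging_kibana_key | trim | length > 0 }}"
    # Fixed: a bare Jinja2 expression, same semantics, no warning
    - set_fact:
        kibana_key: "{{ openshift_logging_kibana_key }}"
      when: openshift_logging_kibana_key | trim | length > 0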
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:79
[WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_kibana_cert | trim | length
> 0 }}
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:84
[WARNING]: when statements should not include jinja2 templating delimiters
such as {{ }} or {% %}. Found: {{ openshift_logging_kibana_ca | trim | length >
0 }}
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : set_fact] *************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:89
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_kibana : Generating Kibana route template] *************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:94
ok: [openshift] => {
"changed": false,
"checksum": "81d91cb23f49736a59b4e35ceff52ac59387f178",
"dest": "/tmp/openshift-logging-ansible-7vUEC3/templates/kibana-route.yaml",
"gid": 0,
"group": "root",
"md5sum": "3b5f21c8d154735dfc0edc061bec7979",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 2726,
"src": "/root/.ansible/tmp/ansible-tmp-1496937663.63-174574035422648/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_kibana : Setting Kibana route] *************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:114
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get route logging-kibana-ops -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "Route",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:04Z",
"labels": {
"component": "support",
"logging-infra": "support",
"provider": "openshift"
},
"name": "logging-kibana-ops",
"namespace": "logging",
"resourceVersion": "1605",
"selfLink": "/oapi/v1/namespaces/logging/routes/logging-kibana-ops",
"uid": "ad2f58c1-4c63-11e7-94aa-0e1649350dc2"
},
"spec": {
"host": "kibana-ops.router.default.svc.cluster.local",
"tls": {
"caCertificate": "-----BEGIN CERTIFICATE-----\nMIIC2jCCAcKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAeMRwwGgYDVQQDExNsb2dn\naW5nLXNpZ25lci10ZXN0MB4XDTE3MDYwODE1NTk1MFoXDTIyMDYwNzE1NTk1MVow\nHjEcMBoGA1UEAxMTbG9nZ2luZy1zaWduZXItdGVzdDCCASIwDQYJKoZIhvcNAQEB\nBQADggEPADCCAQoCggEBAOkzSjrpUiTZG+kQHyPSm8QUrpIO1LhMWEGbf1ylDWgM\nt7IVAJgNEAw0G/0+ZUHZUACuOS64KZfA56xG12+/n/b4mGfuAxcjLAgla6dZvejt\nDe7KJwnqPU5z+w++AdaspAKy26Rrd+VTyenNaf+027Qt4xn3DNxKr1n9qIr3wrvV\nvXdfUSmyFbccCvfpiF6jdMZ/PaGeDaxEzZRP2k/ibTB/I9qNhDHMqqLlAvDFi3fS\nSBq17VlHxXrOxCrBn/75r8OCeY9kniMpu7CfTV76o9yO5JygUh7NWSBf9kl4puH1\nKWXmreYIgrGqqjEECd1kG2EVC1X/gQVdBGn0j2I8wPcCAwEAAaMjMCEwDgYDVR0P\nAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAAhO\nlNnKTI64aXO28EXkZpq6m2GpclPNzs3GCTa4Ef/2BBkzRwKm6X/7cig8uKIB417R\nuWRbki/hpFTbXjHZE2C8q0HJLVUI+7OBG/KG28nozzO730gJxCZ95fY6WlR5xIDK\nBUxOqahaEiQLLz0KBP0nuCq6VBedHTBOcRFD2eIzzFS/LVjKijgS0dVloURWrgdf\ntmKJxYbVC9ol+C/w1jbzurqGgWQCn0diNgNGgr8MuXE6fCHLrrR1UFQ3W7Dz9FvK\nGLcQprDCxTmJ4y99s/9iA2P8Cld/Ys+VPzZPzReYqTdvEcAzGoGkuTbAxPne2PBH\nnp037ZF4PapdWntgqGs=\n-----END CERTIFICATE-----\n",
"destinationCACertificate": "-----BEGIN CERTIFICATE-----\nMIIC2jCCAcKgAwIBAgIBATANBgkqhkiG9w0BAQsFADAeMRwwGgYDVQQDExNsb2dn\naW5nLXNpZ25lci10ZXN0MB4XDTE3MDYwODE1NTk1MFoXDTIyMDYwNzE1NTk1MVow\nHjEcMBoGA1UEAxMTbG9nZ2luZy1zaWduZXItdGVzdDCCASIwDQYJKoZIhvcNAQEB\nBQADggEPADCCAQoCggEBAOkzSjrpUiTZG+kQHyPSm8QUrpIO1LhMWEGbf1ylDWgM\nt7IVAJgNEAw0G/0+ZUHZUACuOS64KZfA56xG12+/n/b4mGfuAxcjLAgla6dZvejt\nDe7KJwnqPU5z+w++AdaspAKy26Rrd+VTyenNaf+027Qt4xn3DNxKr1n9qIr3wrvV\nvXdfUSmyFbccCvfpiF6jdMZ/PaGeDaxEzZRP2k/ibTB/I9qNhDHMqqLlAvDFi3fS\nSBq17VlHxXrOxCrBn/75r8OCeY9kniMpu7CfTV76o9yO5JygUh7NWSBf9kl4puH1\nKWXmreYIgrGqqjEECd1kG2EVC1X/gQVdBGn0j2I8wPcCAwEAAaMjMCEwDgYDVR0P\nAQH/BAQDAgKkMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAAhO\nlNnKTI64aXO28EXkZpq6m2GpclPNzs3GCTa4Ef/2BBkzRwKm6X/7cig8uKIB417R\nuWRbki/hpFTbXjHZE2C8q0HJLVUI+7OBG/KG28nozzO730gJxCZ95fY6WlR5xIDK\nBUxOqahaEiQLLz0KBP0nuCq6VBedHTBOcRFD2eIzzFS/LVjKijgS0dVloURWrgdf\ntmKJxYbVC9ol+C/w1jbzurqGgWQCn0diNgNGgr8MuXE6fCHLrrR1UFQ3W7Dz9FvK\nGLcQprDCxTmJ4y99s/9iA2P8Cld/Ys+VPzZPzReYqTdvEcAzGoGkuTbAxPne2PBH\nnp037ZF4PapdWntgqGs=\n-----END CERTIFICATE-----\n",
"insecureEdgeTerminationPolicy": "Redirect",
"termination": "reencrypt"
},
"to": {
"kind": "Service",
"name": "logging-kibana-ops",
"weight": 100
},
"wildcardPolicy": "None"
},
"status": {
"ingress": [
{
"conditions": [
{
"lastTransitionTime": "2017-06-08T16:01:04Z",
"status": "True",
"type": "Admitted"
}
],
"host": "kibana-ops.router.default.svc.cluster.local",
"routerName": "router",
"wildcardPolicy": "None"
}
]
}
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_kibana : Generate proxy session] ***********************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:125
ok: [openshift] => {
"ansible_facts": {
"session_secret": "v3vq4cjwi4UAStY0PtEfneh2uHZ30gleCbNmByaCGDLXmIf0MwgymMybgDiPDJkLlrbar5XijzIA3jSyVL7saGaN6sz10QMSnaHsWRHiZXohkeSKMBgxXkD7GucqxmFLnfikQonsIDtun0G61TehLdHaALhGEoT1kblFJDekl9v3tHJ5G1rXG4cmKPfRQI0loBnEu7QF"
},
"changed": false
}
TASK [openshift_logging_kibana : Generate oauth client secret] *****************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:132
ok: [openshift] => {
"ansible_facts": {
"oauth_secret": "WrPK47rl8TwdDyaEglgNKBeVgCo9b6qs1pCGezBpUDdEkPOfancf2FyNDFrzZpmz"
},
"changed": false
}
TASK [openshift_logging_kibana : Create oauth-client template] *****************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:138
changed: [openshift] => {
"changed": true,
"checksum": "9e92537f04adfdeda260a24bd81d14af4bee0703",
"dest": "/tmp/openshift-logging-ansible-7vUEC3/templates/oauth-client.yml",
"gid": 0,
"group": "root",
"md5sum": "7fb80f5dc90ee3c9cfab6cacf5a0f7fa",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 332,
"src": "/root/.ansible/tmp/ansible-tmp-1496937665.32-29059572341737/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_kibana : Set kibana-proxy oauth-client] ****************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:146
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get oauthclient kibana-proxy -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "OAuthClient",
"metadata": {
"creationTimestamp": "2017-06-08T16:00:54Z",
"labels": {
"logging-infra": "support"
},
"name": "kibana-proxy",
"resourceVersion": "1609",
"selfLink": "/oapi/v1/oauthclients/kibana-proxy",
"uid": "a6ba2ddf-4c63-11e7-94aa-0e1649350dc2"
},
"redirectURIs": [
"https://kibana-ops.router.default.svc.cluster.local"
],
"scopeRestrictions": [
{
"literals": [
"user:info",
"user:check-access",
"user:list-projects"
]
}
],
"secret": "WrPK47rl8TwdDyaEglgNKBeVgCo9b6qs1pCGezBpUDdEkPOfancf2FyNDFrzZpmz"
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_kibana : Set Kibana secret] ****************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:157
ok: [openshift] => {
"changed": false,
"results": {
"apiVersion": "v1",
"data": {
"ca": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUxTlRrMU1Gb1hEVEl5TURZd056RTFOVGsxTVZvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQU9relNqcnBVaVRaRytrUUh5UFNtOFFVcnBJTzFMaE1XRUdiZjF5bERXZ00KdDdJVkFKZ05FQXcwRy8wK1pVSFpVQUN1T1M2NEtaZkE1NnhHMTIrL24vYjRtR2Z1QXhjakxBZ2xhNmRadmVqdApEZTdLSnducVBVNXordysrQWRhc3BBS3kyNlJyZCtWVHllbk5hZiswMjdRdDR4bjNETnhLcjFuOXFJcjN3cnZWCnZYZGZVU215RmJjY0N2ZnBpRjZqZE1aL1BhR2VEYXhFelpSUDJrL2liVEIvSTlxTmhESE1xcUxsQXZERmkzZlMKU0JxMTdWbEh4WHJPeENyQm4vNzVyOE9DZVk5a25pTXB1N0NmVFY3Nm85eU81SnlnVWg3TldTQmY5a2w0cHVIMQpLV1htcmVZSWdyR3FxakVFQ2Qxa0cyRVZDMVgvZ1FWZEJHbjBqMkk4d1BjQ0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFBaE8KbE5uS1RJNjRhWE8yOEVYa1pwcTZtMkdwY2xQTnpzM0dDVGE0RWYvMkJCa3pSd0ttNlgvN2NpZzh1S0lCNDE3Ugp1V1Jia2kvaHBGVGJYakhaRTJDOHEwSEpMVlVJKzdPQkcvS0cyOG5venpPNzMwZ0p4Q1o5NWZZNldsUjV4SURLCkJVeE9xYWhhRWlRTEx6MEtCUDBudUNxNlZCZWRIVEJPY1JGRDJlSXp6RlMvTFZqS2lqZ1MwZFZsb1VSV3JnZGYKdG1LSnhZYlZDOW9sK0MvdzFqYnp1cnFHZ1dRQ24wZGlOZ05HZ3I4TXVYRTZmQ0hMcnJSMVVGUTNXN0R6OUZ2SwpHTGNRcHJEQ3hUbUo0eTk5cy85aUEyUDhDbGQvWXMrVlB6WlB6UmVZcVRkdkVjQXpHb0drdVRiQXhQbmUyUEJICm5wMDM3WkY0UGFwZFdudGdxR3M9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K",
"cert": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURSVENDQWkyZ0F3SUJBZ0lCQXpBTkJna3Foa2lHOXcwQkFRVUZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUxTlRrMU5sb1hEVEU1TURZd09ERTFOVGsxTmxvdwpSakVRTUE0R0ExVUVDZ3dIVEc5bloybHVaekVTTUJBR0ExVUVDd3dKVDNCbGJsTm9hV1owTVI0d0hBWURWUVFECkRCVnplWE4wWlcwdWJHOW5aMmx1Wnk1cmFXSmhibUV3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXcKZ2dFS0FvSUJBUUNsWDBpZTJ0WWZRNnNPL1d6ZE5ONXFPd2F0aHlSK2ZuOThORlVvaFhLcVBQMkdJeit4WW1YcwpvcWhXbHd3WHQ5NTlVQzZtZnJ2b2g3Y0lzZUlnRjhzRjdLMk5QaVJ1VWQzNTdQS3lEYnZyelB0aVhWakhBeklyCnN2MWd1R24vb1RIbkUvZVhDam5kbjIzbUE5OTRMMEFZWUVjV0E4Rm95dUZkZXJYOERNWnFOaDE0TnAvVXlvYmUKUmpqR3I1eWpaeXN2dmhvQkNVL2RldUJ0Y1R2MnpyMUVKOVlEQlhYM3YrZDdsWks1a3BQRnVxUWNNN3ZXQ24zcAp1MFVnUExoS2J1MnI1d0FwR1I0aG1FS1o2bUVPUXVkQ2E2NkIzSmptNTUzOXBTOHZSUXdaZGFzVTVLUTl6T2ozCjNHRDBUYkYwQVRua01XTVl0dkJabENnYXB0aTVEN2VaQWdNQkFBR2paakJrTUE0R0ExVWREd0VCL3dRRUF3SUYKb0RBSkJnTlZIUk1FQWpBQU1CMEdBMVVkSlFRV01CUUdDQ3NHQVFVRkJ3TUJCZ2dyQmdFRkJRY0RBakFkQmdOVgpIUTRFRmdRVVlVWmQvSTgzUnpDTWF6bzlGTGJLZ0puZGJVVXdDUVlEVlIwakJBSXdBREFOQmdrcWhraUc5dzBCCkFRVUZBQU9DQVFFQWJFRkhFY056dHVzaGJ4TzY4QUVnbkxBREdyclgxTU9jbTVqanpBRkliSGttdnNnSTduZ0sKYlpkZGFWYWVaTTFTUWJGSFVLblhKck12U2RFY1hmckZIZC84SjNuQ29jOEJKaVkyUjV3aTVPQXVkVy9IQnhpaQo1WmZFOW9wTDNla0xVcnhZQmJwNVdwb2ZUNkhvV2lDQWtDWlZaRTV2QzV2R0liM1lRZmNpdm9HSUpEUEdXdExiClE1VjdlNFVjUDlpNC9UVEhkOTAvU2lKdWl0MkVNQWo1Sk1NcHlVYVpMNjUzd2FuWDloRUtQZTZqQ1BpYm0vemgKWTZxaWVRYjU1bXE4TGM4d0lPNkIrTTlLOFljVkluQWh0TmpHeFQzMUVwSGNuQ005THNwYzRlTy90RzljKzdXTAptRVpsbWhBTkh6Y2pQZlNocXdMK255OFluSVdFRDlsZlBBPT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"key": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQ2xYMGllMnRZZlE2c08KL1d6ZE5ONXFPd2F0aHlSK2ZuOThORlVvaFhLcVBQMkdJeit4WW1Yc29xaFdsd3dYdDk1OVVDNm1mcnZvaDdjSQpzZUlnRjhzRjdLMk5QaVJ1VWQzNTdQS3lEYnZyelB0aVhWakhBeklyc3YxZ3VHbi9vVEhuRS9lWENqbmRuMjNtCkE5OTRMMEFZWUVjV0E4Rm95dUZkZXJYOERNWnFOaDE0TnAvVXlvYmVSampHcjV5alp5c3Z2aG9CQ1UvZGV1QnQKY1R2MnpyMUVKOVlEQlhYM3YrZDdsWks1a3BQRnVxUWNNN3ZXQ24zcHUwVWdQTGhLYnUycjV3QXBHUjRobUVLWgo2bUVPUXVkQ2E2NkIzSmptNTUzOXBTOHZSUXdaZGFzVTVLUTl6T2ozM0dEMFRiRjBBVG5rTVdNWXR2QlpsQ2dhCnB0aTVEN2VaQWdNQkFBRUNnZ0VBWDlsZlBQdDN1YjA5eXoxbHVMeW80UWQvTWxxdTgwWWNXLy8xRDd4eEhteWwKUVBiek5ydmllWW81YVo2NG1wS3V1UkVkU05FUjFvKzZyYVowZXNkdm0rcDNHUE5ZcUR4NUhSK1I5MU53aFJaVQo2Y2xTNkY1WTJPMHdZTHZpYkJDZzNOODgxT1ZQYnIyMk0rZ3d3UGNaL2tiblRNSTJlcWVFZ2VyYXlkRVp1M3JkClZOVTU5L0hFZVQwVTY3UWpSallETWdmNVVVRHRSdlo4R0xGWnRUT2d2bU5nYWlSNnJsQlZQWlFoODZLQlUzWXcKd0x3aC9FR2RPTkQxdkh4MUtsOWVZb05BejFmQ1hOdzgyMkpPSU9IOUlKSmlzVDY5QXFET3FyR0tmTnJ5VFRPQQpxVzVsN0xZd1NRL0E3cnlNTjBrVW1lUjRoNVlhK2ZkdG9BM1FWVmQzZ1FLQmdRRFd0bkRwUlZFTWhOUTJXTHdkCnhYSHRhSUJudVRhcUdYZU56eDM3d25QM1QrbVVmdm92aXJVSWVSaXZzSkNhYm84dzhDWXYxc3VxRS9BT3RjSlEKZitIMzlXMUFVS3VETVJOaDl4TjJCZ3RLc0tnNjFJWG1INlI3cktHU28xTWxRdWF6VFMrUFRjcXhwcDI3clpETwpPajg2UXlKU1FnMkIzMVVZRXJsSDJwTGhhUUtCZ1FERksvdXRleThFWDlaZmdwMDBBSlZuSWE4Wmc4VGh3dSt1CjlJYjBISjllUjdzNG1rajlheGFCcXc5ODJINWlyZHo2L0s2RmtsbnF4RXBERG1VZjJ0dExjS2dpblpUck1qSzkKQTMxWXZrSFVJRVlISjBSUHIxdkQwMFJldzFJTDRKOHlrYms1WkFrQXRsU0swOFB6YjJ3bXl1V3VzTlhocXlwbwpwdlVDcTlrdXNRS0JnR3FoY2xPMjgvaFdveGxXV2g0aTUyQnk3SW9XaGxwVmlYVW9yZ2hRMnN5d3FCenlMb2ViCnlDb3NFYUYyKzJsbWpNQk9FM2pnb0lhWG5qbC85TCtMc3dvMG5ZdzZRK05FWlE3YTZKUk5qaUFLdVpGMTZBV1EKSTF5ME1BMm1CTzNWV3NNakN3S05MS09yVGx4ZFp6T3o0NkNvcEl2YmQ5L09yUERtbzVOV3JtazVBb0dCQUtUQQphSzcvdER5NmU5MFl2WlNiUER1TnFNcndFTTMzM2VEWnovNGdBSVo5OTVHSFVaLzNJRG8vSGxUYWJWaTFJR1hVClIxdXkrMUV3clVDMHdZakpqZDNPaDU5TS93Yzd6YXVrUTlPb1BrY3FwSGtMdFlmRDVqQ04wcDVBSk1scDZud04KeWJDTHh2NENYRWdZUks2ZmxzWWZXYVlMZXR2eTh4KzVDaGN1VXU0eEFvR0FYNnNMdUt1c1VZdlVvb0xGdEZ1cwphTUhnMVpIRnc2WE0wVWZ4ZVFMM2hNazVxTUgyOCtUTTFOcWdaOURaSithSUhiUjlBd25mYnpOK1gzMi9XN0FyCmM3S2tncTNVMzRyQXgzdG55NHd0OUgweC9tcEhXVk1wYy9CbXJyWEFJZFlWeDM0QVFjdkQ2aHUwNHFSODlzT3YKUHdEUUMrT3dSSkpvNHB5eHhsVDVaM289Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K"
},
"kind": "Secret",
"metadata": {
"creationTimestamp": null,
"name": "logging-kibana"
},
"type": "Opaque"
},
"state": "present"
}
TASK [openshift_logging_kibana : Set Kibana Proxy secret] **********************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:171
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc replace -f /tmp/logging-kibana-proxy -n logging",
"results": "",
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_kibana : Generate Kibana DC template] ******************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:197
changed: [openshift] => {
"changed": true,
"checksum": "f2d980813716ecbcb8511ef64f2f90ff65ff483a",
"dest": "/tmp/openshift-logging-ansible-7vUEC3/templates/kibana-dc.yaml",
"gid": 0,
"group": "root",
"md5sum": "0f2814ec7663be90532db4729d9eaf69",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 3761,
"src": "/root/.ansible/tmp/ansible-tmp-1496937668.7-272576019342324/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_kibana : Set Kibana DC] ********************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:216
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get dc logging-kibana-ops -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "DeploymentConfig",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:09Z",
"generation": 2,
"labels": {
"component": "kibana-ops",
"logging-infra": "kibana",
"provider": "openshift"
},
"name": "logging-kibana-ops",
"namespace": "logging",
"resourceVersion": "1624",
"selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-kibana-ops",
"uid": "aff4f5fb-4c63-11e7-94aa-0e1649350dc2"
},
"spec": {
"replicas": 1,
"selector": {
"component": "kibana-ops",
"logging-infra": "kibana",
"provider": "openshift"
},
"strategy": {
"activeDeadlineSeconds": 21600,
"resources": {},
"rollingParams": {
"intervalSeconds": 1,
"maxSurge": "25%",
"maxUnavailable": "25%",
"timeoutSeconds": 600,
"updatePeriodSeconds": 1
},
"type": "Rolling"
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"component": "kibana-ops",
"logging-infra": "kibana",
"provider": "openshift"
},
"name": "logging-kibana-ops"
},
"spec": {
"containers": [
{
"env": [
{
"name": "ES_HOST",
"value": "logging-es-ops"
},
{
"name": "ES_PORT",
"value": "9200"
},
{
"name": "KIBANA_MEMORY_LIMIT",
"valueFrom": {
"resourceFieldRef": {
"containerName": "kibana",
"divisor": "0",
"resource": "limits.memory"
}
}
}
],
"image": "172.30.255.47:5000/logging/logging-kibana:latest",
"imagePullPolicy": "Always",
"name": "kibana",
"readinessProbe": {
"exec": {
"command": [
"/usr/share/kibana/probe/readiness.sh"
]
},
"failureThreshold": 3,
"initialDelaySeconds": 5,
"periodSeconds": 5,
"successThreshold": 1,
"timeoutSeconds": 4
},
"resources": {
"limits": {
"memory": "736Mi"
}
},
"terminationMessagePath": "/dev/termination-log",
"terminationMessagePolicy": "File",
"volumeMounts": [
{
"mountPath": "/etc/kibana/keys",
"name": "kibana",
"readOnly": true
}
]
},
{
"env": [
{
"name": "OAP_BACKEND_URL",
"value": "http://localhost:5601"
},
{
"name": "OAP_AUTH_MODE",
"value": "oauth2"
},
{
"name": "OAP_TRANSFORM",
"value": "user_header,token_header"
},
{
"name": "OAP_OAUTH_ID",
"value": "kibana-proxy"
},
{
"name": "OAP_MASTER_URL",
"value": "https://kubernetes.default.svc.cluster.local"
},
{
"name": "OAP_PUBLIC_MASTER_URL",
"value": "https://172.18.3.237:8443"
},
{
"name": "OAP_LOGOUT_REDIRECT",
"value": "https://172.18.3.237:8443/console/logout"
},
{
"name": "OAP_MASTER_CA_FILE",
"value": "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt"
},
{
"name": "OAP_DEBUG",
"value": "False"
},
{
"name": "OAP_OAUTH_SECRET_FILE",
"value": "/secret/oauth-secret"
},
{
"name": "OAP_SERVER_CERT_FILE",
"value": "/secret/server-cert"
},
{
"name": "OAP_SERVER_KEY_FILE",
"value": "/secret/server-key"
},
{
"name": "OAP_SERVER_TLS_FILE",
"value": "/secret/server-tls.json"
},
{
"name": "OAP_SESSION_SECRET_FILE",
"value": "/secret/session-secret"
},
{
"name": "OCP_AUTH_PROXY_MEMORY_LIMIT",
"valueFrom": {
"resourceFieldRef": {
"containerName": "kibana-proxy",
"divisor": "0",
"resource": "limits.memory"
}
}
}
],
"image": "172.30.255.47:5000/logging/logging-auth-proxy:latest",
"imagePullPolicy": "Always",
"name": "kibana-proxy",
"ports": [
{
"containerPort": 3000,
"name": "oaproxy",
"protocol": "TCP"
}
],
"resources": {
"limits": {
"memory": "96Mi"
}
},
"terminationMessagePath": "/dev/termination-log",
"terminationMessagePolicy": "File",
"volumeMounts": [
{
"mountPath": "/secret",
"name": "kibana-proxy",
"readOnly": true
}
]
}
],
"dnsPolicy": "ClusterFirst",
"restartPolicy": "Always",
"schedulerName": "default-scheduler",
"securityContext": {},
"serviceAccount": "aggregated-logging-kibana",
"serviceAccountName": "aggregated-logging-kibana",
"terminationGracePeriodSeconds": 30,
"volumes": [
{
"name": "kibana",
"secret": {
"defaultMode": 420,
"secretName": "logging-kibana"
}
},
{
"name": "kibana-proxy",
"secret": {
"defaultMode": 420,
"secretName": "logging-kibana-proxy"
}
}
]
}
},
"test": false,
"triggers": [
{
"type": "ConfigChange"
}
]
},
"status": {
"availableReplicas": 0,
"conditions": [
{
"lastTransitionTime": "2017-06-08T16:01:09Z",
"lastUpdateTime": "2017-06-08T16:01:09Z",
"message": "Deployment config does not have minimum availability.",
"status": "False",
"type": "Available"
},
{
"lastTransitionTime": "2017-06-08T16:01:09Z",
"lastUpdateTime": "2017-06-08T16:01:09Z",
"message": "replication controller \"logging-kibana-ops-1\" is waiting for pod \"logging-kibana-ops-1-deploy\" to run",
"status": "Unknown",
"type": "Progressing"
}
],
"details": {
"causes": [
{
"type": "ConfigChange"
}
],
"message": "config change"
},
"latestVersion": 1,
"observedGeneration": 2,
"replicas": 0,
"unavailableReplicas": 0,
"updatedReplicas": 0
}
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_kibana : Delete temp directory] ************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_kibana/tasks/main.yaml:228
ok: [openshift] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-7vUEC3",
"state": "absent"
}
TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:195
statically included: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml
TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:3
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:7
ok: [openshift] => {
"ansible_facts": {
"curator_version": "3_5"
},
"changed": false
}
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:12
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:15
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Create temp directory for doing work in] *****
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:5
ok: [openshift] => {
"changed": false,
"cmd": [
"mktemp",
"-d",
"/tmp/openshift-logging-ansible-XXXXXX"
],
"delta": "0:00:00.003915",
"end": "2017-06-08 12:01:11.153327",
"rc": 0,
"start": "2017-06-08 12:01:11.149412"
}
STDOUT:
/tmp/openshift-logging-ansible-mt8iQS
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:10
ok: [openshift] => {
"ansible_facts": {
"tempdir": "/tmp/openshift-logging-ansible-mt8iQS"
},
"changed": false
}
TASK [openshift_logging_curator : Create templates subdirectory] ***************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:14
ok: [openshift] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/tmp/openshift-logging-ansible-mt8iQS/templates",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:24
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:32
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get sa aggregated-logging-curator -o json -n logging",
"results": [
{
"apiVersion": "v1",
"imagePullSecrets": [
{
"name": "aggregated-logging-curator-dockercfg-s1xqh"
}
],
"kind": "ServiceAccount",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:12Z",
"name": "aggregated-logging-curator",
"namespace": "logging",
"resourceVersion": "1638",
"selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-curator",
"uid": "b19692e4-4c63-11e7-94aa-0e1649350dc2"
},
"secrets": [
{
"name": "aggregated-logging-curator-token-2hxcd"
},
{
"name": "aggregated-logging-curator-dockercfg-s1xqh"
}
]
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:41
ok: [openshift] => {
"changed": false,
"checksum": "9008efd9a8892dcc42c28c6dfb6708527880a6d8",
"dest": "/tmp/openshift-logging-ansible-mt8iQS/curator.yml",
"gid": 0,
"group": "root",
"md5sum": "5498c5fd98f3dd06e34b20eb1f55dc12",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 320,
"src": "/root/.ansible/tmp/ansible-tmp-1496937672.72-127375036698555/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:47
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Set Curator configmap] ***********************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:53
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get configmap logging-curator -o json -n logging",
"results": [
{
"apiVersion": "v1",
"data": {
"config.yaml": "# Logging example curator config file\n\n# uncomment and use this to override the defaults from env vars\n#.defaults:\n# delete:\n# days: 30\n# runhour: 0\n# runminute: 0\n\n# to keep ops logs for a different duration:\n#.operations:\n# delete:\n# weeks: 8\n\n# example for a normal project\n#myapp:\n# delete:\n# weeks: 1\n"
},
"kind": "ConfigMap",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:13Z",
"name": "logging-curator",
"namespace": "logging",
"resourceVersion": "1640",
"selfLink": "/api/v1/namespaces/logging/configmaps/logging-curator",
"uid": "b25aefb9-4c63-11e7-94aa-0e1649350dc2"
}
}
],
"returncode": 0
},
"state": "present"
}
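The config.yaml stored in this configmap ships entirely commented out, and the JSON escaping above has flattened its indentation. Reconstructed from those comments, the override examples it describes look roughly like this (the placement of runhour/runminute under .defaults is an assumption, not taken from the log):

    .defaults:
      delete:
        days: 30
      runhour: 0
      runminute: 0

    # keep ops logs for a different duration
    .operations:
      delete:
        weeks: 8

    # a normal project
    myapp:
      delete:
        weeks: 1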
TASK [openshift_logging_curator : Set Curator secret] **************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:62
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc secrets new logging-curator ca=/etc/origin/logging/ca.crt key=/etc/origin/logging/system.logging.curator.key cert=/etc/origin/logging/system.logging.curator.crt -n logging",
"results": "",
"returncode": 0
},
"state": "present"
}
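oc secrets new creates an Opaque secret whose data keys are the left-hand names of each key=path pair, with each file's contents base64-encoded as the value. As a sketch, the command above should produce an object of this shape (consistent with what the second pass reports further down when it finds the secret already present; the <...> values are placeholders, not real data):

    apiVersion: v1
    kind: Secret
    type: Opaque
    metadata:
      name: logging-curator
    data:
      ca: <base64 of /etc/origin/logging/ca.crt>
      cert: <base64 of /etc/origin/logging/system.logging.curator.crt>
      key: <base64 of /etc/origin/logging/system.logging.curator.key>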
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:75
ok: [openshift] => {
"ansible_facts": {
"curator_component": "curator",
"curator_name": "logging-curator"
},
"changed": false
}
TASK [openshift_logging_curator : Generate Curator deploymentconfig] ***********
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:81
ok: [openshift] => {
"changed": false,
"checksum": "e3059c2899f1563c9f223ad22fed52a02d791752",
"dest": "/tmp/openshift-logging-ansible-mt8iQS/templates/curator-dc.yaml",
"gid": 0,
"group": "root",
"md5sum": "4935281471701d3b596a2daa02727c3d",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 2340,
"src": "/root/.ansible/tmp/ansible-tmp-1496937674.74-227565484127174/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_curator : Set Curator DC] ******************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:99
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get dc logging-curator -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "DeploymentConfig",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:15Z",
"generation": 2,
"labels": {
"component": "curator",
"logging-infra": "curator",
"provider": "openshift"
},
"name": "logging-curator",
"namespace": "logging",
"resourceVersion": "1656",
"selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-curator",
"uid": "b3b807c7-4c63-11e7-94aa-0e1649350dc2"
},
"spec": {
"replicas": 1,
"selector": {
"component": "curator",
"logging-infra": "curator",
"provider": "openshift"
},
"strategy": {
"activeDeadlineSeconds": 21600,
"recreateParams": {
"timeoutSeconds": 600
},
"resources": {},
"rollingParams": {
"intervalSeconds": 1,
"maxSurge": "25%",
"maxUnavailable": "25%",
"timeoutSeconds": 600,
"updatePeriodSeconds": 1
},
"type": "Recreate"
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"component": "curator",
"logging-infra": "curator",
"provider": "openshift"
},
"name": "logging-curator"
},
"spec": {
"containers": [
{
"env": [
{
"name": "K8S_HOST_URL",
"value": "https://kubernetes.default.svc.cluster.local"
},
{
"name": "ES_HOST",
"value": "logging-es"
},
{
"name": "ES_PORT",
"value": "9200"
},
{
"name": "ES_CLIENT_CERT",
"value": "/etc/curator/keys/cert"
},
{
"name": "ES_CLIENT_KEY",
"value": "/etc/curator/keys/key"
},
{
"name": "ES_CA",
"value": "/etc/curator/keys/ca"
},
{
"name": "CURATOR_DEFAULT_DAYS",
"value": "30"
},
{
"name": "CURATOR_RUN_HOUR",
"value": "0"
},
{
"name": "CURATOR_RUN_MINUTE",
"value": "0"
},
{
"name": "CURATOR_RUN_TIMEZONE",
"value": "UTC"
},
{
"name": "CURATOR_SCRIPT_LOG_LEVEL",
"value": "INFO"
},
{
"name": "CURATOR_LOG_LEVEL",
"value": "ERROR"
}
],
"image": "172.30.255.47:5000/logging/logging-curator:latest",
"imagePullPolicy": "Always",
"name": "curator",
"resources": {
"limits": {
"cpu": "100m"
}
},
"terminationMessagePath": "/dev/termination-log",
"terminationMessagePolicy": "File",
"volumeMounts": [
{
"mountPath": "/etc/curator/keys",
"name": "certs",
"readOnly": true
},
{
"mountPath": "/etc/curator/settings",
"name": "config",
"readOnly": true
}
]
}
],
"dnsPolicy": "ClusterFirst",
"restartPolicy": "Always",
"schedulerName": "default-scheduler",
"securityContext": {},
"serviceAccount": "aggregated-logging-curator",
"serviceAccountName": "aggregated-logging-curator",
"terminationGracePeriodSeconds": 30,
"volumes": [
{
"name": "certs",
"secret": {
"defaultMode": 420,
"secretName": "logging-curator"
}
},
{
"configMap": {
"defaultMode": 420,
"name": "logging-curator"
},
"name": "config"
}
]
}
},
"test": false,
"triggers": [
{
"type": "ConfigChange"
}
]
},
"status": {
"availableReplicas": 0,
"conditions": [
{
"lastTransitionTime": "2017-06-08T16:01:15Z",
"lastUpdateTime": "2017-06-08T16:01:15Z",
"message": "Deployment config does not have minimum availability.",
"status": "False",
"type": "Available"
},
{
"lastTransitionTime": "2017-06-08T16:01:15Z",
"lastUpdateTime": "2017-06-08T16:01:15Z",
"message": "replication controller \"logging-curator-1\" is waiting for pod \"logging-curator-1-deploy\" to run",
"status": "Unknown",
"type": "Progressing"
}
],
"details": {
"causes": [
{
"type": "ConfigChange"
}
],
"message": "config change"
},
"latestVersion": 1,
"observedGeneration": 2,
"replicas": 0,
"unavailableReplicas": 0,
"updatedReplicas": 0
}
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_curator : Delete temp directory] ***********************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:109
ok: [openshift] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-mt8iQS",
"state": "absent"
}
TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:207
statically included: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml
TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:3
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:7
ok: [openshift] => {
"ansible_facts": {
"curator_version": "3_5"
},
"changed": false
}
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:12
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : fail] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/determine_version.yaml:15
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Create temp directory for doing work in] *****
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:5
ok: [openshift] => {
"changed": false,
"cmd": [
"mktemp",
"-d",
"/tmp/openshift-logging-ansible-XXXXXX"
],
"delta": "0:00:01.003785",
"end": "2017-06-08 12:01:19.817458",
"rc": 0,
"start": "2017-06-08 12:01:18.813673"
}
STDOUT:
/tmp/openshift-logging-ansible-rrg5iz
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:10
ok: [openshift] => {
"ansible_facts": {
"tempdir": "/tmp/openshift-logging-ansible-rrg5iz"
},
"changed": false
}
TASK [openshift_logging_curator : Create templates subdirectory] ***************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:14
ok: [openshift] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/tmp/openshift-logging-ansible-rrg5iz/templates",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:24
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Create Curator service account] **************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:32
ok: [openshift] => {
"changed": false,
"results": {
"cmd": "/bin/oc get sa aggregated-logging-curator -o json -n logging",
"results": [
{
"apiVersion": "v1",
"imagePullSecrets": [
{
"name": "aggregated-logging-curator-dockercfg-s1xqh"
}
],
"kind": "ServiceAccount",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:12Z",
"name": "aggregated-logging-curator",
"namespace": "logging",
"resourceVersion": "1638",
"selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-curator",
"uid": "b19692e4-4c63-11e7-94aa-0e1649350dc2"
},
"secrets": [
{
"name": "aggregated-logging-curator-token-2hxcd"
},
{
"name": "aggregated-logging-curator-dockercfg-s1xqh"
}
]
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:41
ok: [openshift] => {
"changed": false,
"checksum": "9008efd9a8892dcc42c28c6dfb6708527880a6d8",
"dest": "/tmp/openshift-logging-ansible-rrg5iz/curator.yml",
"gid": 0,
"group": "root",
"md5sum": "5498c5fd98f3dd06e34b20eb1f55dc12",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 320,
"src": "/root/.ansible/tmp/ansible-tmp-1496937680.63-62289953895467/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_curator : copy] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:47
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_curator : Set Curator configmap] ***********************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:53
ok: [openshift] => {
"changed": false,
"results": {
"cmd": "/bin/oc get configmap logging-curator -o json -n logging",
"results": [
{
"apiVersion": "v1",
"data": {
"config.yaml": "# Logging example curator config file\n\n# uncomment and use this to override the defaults from env vars\n#.defaults:\n# delete:\n# days: 30\n# runhour: 0\n# runminute: 0\n\n# to keep ops logs for a different duration:\n#.operations:\n# delete:\n# weeks: 8\n\n# example for a normal project\n#myapp:\n# delete:\n# weeks: 1\n"
},
"kind": "ConfigMap",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:13Z",
"name": "logging-curator",
"namespace": "logging",
"resourceVersion": "1640",
"selfLink": "/api/v1/namespaces/logging/configmaps/logging-curator",
"uid": "b25aefb9-4c63-11e7-94aa-0e1649350dc2"
}
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_curator : Set Curator secret] **************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:62
ok: [openshift] => {
"changed": false,
"results": {
"apiVersion": "v1",
"data": {
"ca": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUMyakNDQWNLZ0F3SUJBZ0lCQVRBTkJna3Foa2lHOXcwQkFRc0ZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUxTlRrMU1Gb1hEVEl5TURZd056RTFOVGsxTVZvdwpIakVjTUJvR0ExVUVBeE1UYkc5bloybHVaeTF6YVdkdVpYSXRkR1Z6ZERDQ0FTSXdEUVlKS29aSWh2Y05BUUVCCkJRQURnZ0VQQURDQ0FRb0NnZ0VCQU9relNqcnBVaVRaRytrUUh5UFNtOFFVcnBJTzFMaE1XRUdiZjF5bERXZ00KdDdJVkFKZ05FQXcwRy8wK1pVSFpVQUN1T1M2NEtaZkE1NnhHMTIrL24vYjRtR2Z1QXhjakxBZ2xhNmRadmVqdApEZTdLSnducVBVNXordysrQWRhc3BBS3kyNlJyZCtWVHllbk5hZiswMjdRdDR4bjNETnhLcjFuOXFJcjN3cnZWCnZYZGZVU215RmJjY0N2ZnBpRjZqZE1aL1BhR2VEYXhFelpSUDJrL2liVEIvSTlxTmhESE1xcUxsQXZERmkzZlMKU0JxMTdWbEh4WHJPeENyQm4vNzVyOE9DZVk5a25pTXB1N0NmVFY3Nm85eU81SnlnVWg3TldTQmY5a2w0cHVIMQpLV1htcmVZSWdyR3FxakVFQ2Qxa0cyRVZDMVgvZ1FWZEJHbjBqMkk4d1BjQ0F3RUFBYU1qTUNFd0RnWURWUjBQCkFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFBaE8KbE5uS1RJNjRhWE8yOEVYa1pwcTZtMkdwY2xQTnpzM0dDVGE0RWYvMkJCa3pSd0ttNlgvN2NpZzh1S0lCNDE3Ugp1V1Jia2kvaHBGVGJYakhaRTJDOHEwSEpMVlVJKzdPQkcvS0cyOG5venpPNzMwZ0p4Q1o5NWZZNldsUjV4SURLCkJVeE9xYWhhRWlRTEx6MEtCUDBudUNxNlZCZWRIVEJPY1JGRDJlSXp6RlMvTFZqS2lqZ1MwZFZsb1VSV3JnZGYKdG1LSnhZYlZDOW9sK0MvdzFqYnp1cnFHZ1dRQ24wZGlOZ05HZ3I4TXVYRTZmQ0hMcnJSMVVGUTNXN0R6OUZ2SwpHTGNRcHJEQ3hUbUo0eTk5cy85aUEyUDhDbGQvWXMrVlB6WlB6UmVZcVRkdkVjQXpHb0drdVRiQXhQbmUyUEJICm5wMDM3WkY0UGFwZFdudGdxR3M9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K",
"cert": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURSakNDQWk2Z0F3SUJBZ0lCQkRBTkJna3Foa2lHOXcwQkFRVUZBREFlTVJ3d0dnWURWUVFERXhOc2IyZG4KYVc1bkxYTnBaMjVsY2kxMFpYTjBNQjRYRFRFM01EWXdPREUxTlRrMU5sb1hEVEU1TURZd09ERTFOVGsxTmxvdwpSekVRTUE0R0ExVUVDZ3dIVEc5bloybHVaekVTTUJBR0ExVUVDd3dKVDNCbGJsTm9hV1owTVI4d0hRWURWUVFECkRCWnplWE4wWlcwdWJHOW5aMmx1Wnk1amRYSmhkRzl5TUlJQklqQU5CZ2txaGtpRzl3MEJBUUVGQUFPQ0FROEEKTUlJQkNnS0NBUUVBcHArV3VxeFVzZUp6c040RnVZamZxOVNQSXZUSWY4Yy81NHJLZU5pZTJJUU8zb2MzSDVxdQo2SFdlWklyUElZeVcxNGJZSXNVVDRqSW4wOVdjVU1sME9nZkpGN1ovT3JYL0FhRGNRMzJncGFVUzhQem1iajJ0Cm03c3dUelV3QzhFYjQ2UXhWaE5XakowWmlJaGlJN3V1UW5ZVzZsSTJqcnpCWHNPTEhtdzB2ZEk0N3hpcDVNZVUKZmwrU1hyN2VWYURqT24zYXBSZDc4ajN1VVBRaGk4MUFlNmFzNk90YlMrNkgwbndxODREcVFSNlNGRVdqVzh5dApVQkhkQzU2WWV2RzN2TURwTkFZMm9SV0ZzMjNuejdzMDN5SnJPMmRxRnR2N0Q4ZXdvUTZMcG4yT3dIcy9nOFpzCjd3aW1jMW5ZKzA2MExzZmFpbzkwSHlUc0k5RnJobVlqU1FJREFRQUJvMll3WkRBT0JnTlZIUThCQWY4RUJBTUMKQmFBd0NRWURWUjBUQkFJd0FEQWRCZ05WSFNVRUZqQVVCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3SFFZRApWUjBPQkJZRUZFajVNaTZBK3dVcE5RSXJibGR3UVhaVHdFaTNNQWtHQTFVZEl3UUNNQUF3RFFZSktvWklodmNOCkFRRUZCUUFEZ2dFQkFDZUJnVHRsVnpNdjNoZnRvVGJ1VDBRTmtteDhBNHRJN0xaYkYwS254RHRkZGlkSTJGSUkKcEszV3JKMW5pcTI5dGkvaml3OXE5VTB3RWU4Z0pXYVhoVlB4Z0lsWG9jVHYwMFB6U1RWZUJNT1k0OTVwbmREeQptSGxqNGRkeUp1cVZ2a3JzMU5rR3VYVjM2SnluWmthUGpxSU1DekY4cmZMQlFMMHlicitLdkxDWFNYRlJZcFM3Cm9FTGlCUk5RU0c3amdQUzF5dFBxYndhTE5mVnFvdUZOVHFNdG8wUzdYRUZZLzFhdGV0NnBmTTJHNXZCMjA4U1oKUll3bC9wV3h4NDlBVDZEaGJFWkFrcW5yVTlxWTFqcFI2TGUzTG5Pa1R4aUtLN2doOHAwbEY4eWU2V25JSnVZNQowQ25SZE1OajNmME1DWTFUV1Badm8yckdxTE5YSXRsRWxrRT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"key": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2d0lCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktrd2dnU2xBZ0VBQW9JQkFRQ21uNWE2ckZTeDRuT3cKM2dXNWlOK3IxSThpOU1oL3h6L25pc3A0Mko3WWhBN2VoemNmbXE3b2RaNWtpczhoakpiWGh0Z2l4UlBpTWlmVAoxWnhReVhRNkI4a1h0bjg2dGY4Qm9OeERmYUNscFJMdy9PWnVQYTJidXpCUE5UQUx3UnZqcERGV0UxYU1uUm1JCmlHSWp1NjVDZGhicVVqYU92TUZldzRzZWJEUzkwamp2R0tua3g1UitYNUpldnQ1Vm9PTTZmZHFsRjN2eVBlNVEKOUNHTHpVQjdwcXpvNjF0TDdvZlNmQ3J6Z09wQkhwSVVSYU5ieksxUUVkMExucGg2OGJlOHdPazBCamFoRllXegpiZWZQdXpUZkltczdaMm9XMi9zUHg3Q2hEb3VtZlk3QWV6K0R4bXp2Q0taeldkajdUclF1eDlxS2ozUWZKT3dqCjBXdUdaaU5KQWdNQkFBRUNnZ0VCQUpEYVFHRThtOHloUDA1TlZkQzd5eWRJZkw4NDZtMGJQTlQvOVpFbFVNS04KMjVkZEdYRGlPcGhnV0RpejYrb1FuTG4xd2tSSDdFZENyeGticE84ZWsybzNobnlVN1BxUGFZZHkzc25WbHlrcAozdE9lS0gxQ0pZRXpOSVpIaU9OUEYvM0lxaE5ZY0crQnk4YkVPZjB5bGdXMVA2cXBta0J3bS9MVU1FZGNibVZ0Cnl0Z0d5aGRGWjAzU1AwR28yYXJ3aE4wUGxGK0V1YjlKdmVwVmdRR3dZNVlMT0FpbS95SnRlTnlyWExCTlB1SEUKcU5IQmNqdmE1WnNORW1TRnpxU2gxN3d0Y3djN2NGTmdrdEtTdnFMNDhsNDVBS042TFBLbEpnWG96ZWtvemg3dAp6MDRKRSs3RGY5RlkrdlRGSm1XbUl5UVFzTHF3Q3o5Q2ZEcFU3M3B1RExrQ2dZRUEydWUrNXJma1M4djJ2NTFjCnVWMFJJY0RRQVJkZVozZDYzU3dqRlJuSjRCV25BcWRKN01BZWQxcU9nWlErMHJIaDF6WFlreGxWNmxQa3UvbFAKQWtmWm53ZlJhT3V6dEhrUzB0Q3RJVDhnUXNLZlViMm1YWmF5ZUZnLzVmZkQrM3lHWWNOWlMrUE5GZ1BReUpyaApkTG12QjUvRzZ4QXlyMjZMa3dFWmNOVlJvRHNDZ1lFQXd0dlNSOU5iWjVpZXZOMVV3ZW5BT0p5cGFicWRaNXJvCndFem9nYWtmR0NkUVdKUUJkbzVGdURGaUJieklFNFVlMUR4c21LU0p3SzlMRXl2UVYwRjZ1elhudjlCOEJ6NVcKalNuOXpFTjhDUE12am9NU0N2RTR1TXNTamNsYUlZOEFia3QzZWhMT2htUVh3Wm9hWldCR1EyV2pxUkkwYVBWTgpiRThyZ0pYVGRrc0NnWUVBenVDVkdaWlF1eEZYY0Y5WGNoYnlTZVBlc0NsVm5wTGNHb25MM2ZVeFJBVXlnTjk0CmpiWkRGS2tRWklXbG1abGl4ZkN0Yk5kVXlzL2VLNGZCazNZenhJZXU4R2xRdkE1d0s2dnE4ckNsM0hIeC8xNHMKQythUFpBeUMxdU1BNUhzYXhPbkpTbDlQUXE3NGNaMXQxTkpuQjkySU1ENXVxRHpneTEwT25nUDcwR0VDZ1lFQQpwZlNoK3hvL1Z0UGRIZTFES29QeWVrU3k1S0ZUUGRIcE9SNUhSMmJLWEwvZTVoSG56UmtPdC9rWEN0dmxhdnFOCmJ5U25PTG1wdUxtU3J4azVyNWJNK0hUSWs4ejBWUmI2aisrYmdFUlpkeVhtOHZFZWhNTTlFK1dnUDdHbFdiOTUKRm5hZm94QXhaTFFLcDVDZnVvZHNVQ3BZWUw1b2RKdTIvTy9RbFFzRnRsRUNnWUJJUExqWnpkdmhyc09aNjZLZwpyVlBkYThyV1Y5aEdBam1DUkFHdkVFUHcxcjJLOEdSd2dPSFU3djV0OVg5Y0Y5bTVBT1NkYmJMV2FxMXI5SnVvClkyMVF5c2pBaGJMdGVHclR4L1c1M3BCNXdOL0Z1Z2tSVWE1ejE3U1ViRmRRVHFpNkI0dUJFVFV0ZGZjclZEcmYKZXBTMTBBTjIvUnJRWjNvYUVaSHZtUWNvTnc9PQotLS0tLUVORCBQUklWQVRFIEtFWS0tLS0tCg=="
},
"kind": "Secret",
"metadata": {
"creationTimestamp": null,
"name": "logging-curator"
},
"type": "Opaque"
},
"state": "present"
}
TASK [openshift_logging_curator : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:75
ok: [openshift] => {
"ansible_facts": {
"curator_component": "curator-ops",
"curator_name": "logging-curator-ops"
},
"changed": false
}
TASK [openshift_logging_curator : Generate Curator deploymentconfig] ***********
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:81
ok: [openshift] => {
"changed": false,
"checksum": "96c349b7f8a16c0be141e65654f5dd2401db0d04",
"dest": "/tmp/openshift-logging-ansible-rrg5iz/templates/curator-dc.yaml",
"gid": 0,
"group": "root",
"md5sum": "0241f1ecfa3fe4ffd914d6eccd650565",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 2364,
"src": "/root/.ansible/tmp/ansible-tmp-1496937682.27-137109705271937/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_curator : Set Curator DC] ******************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:99
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get dc logging-curator-ops -o json -n logging",
"results": [
{
"apiVersion": "v1",
"kind": "DeploymentConfig",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:23Z",
"generation": 2,
"labels": {
"component": "curator-ops",
"logging-infra": "curator",
"provider": "openshift"
},
"name": "logging-curator-ops",
"namespace": "logging",
"resourceVersion": "1690",
"selfLink": "/oapi/v1/namespaces/logging/deploymentconfigs/logging-curator-ops",
"uid": "b810e577-4c63-11e7-94aa-0e1649350dc2"
},
"spec": {
"replicas": 1,
"selector": {
"component": "curator-ops",
"logging-infra": "curator",
"provider": "openshift"
},
"strategy": {
"activeDeadlineSeconds": 21600,
"recreateParams": {
"timeoutSeconds": 600
},
"resources": {},
"rollingParams": {
"intervalSeconds": 1,
"maxSurge": "25%",
"maxUnavailable": "25%",
"timeoutSeconds": 600,
"updatePeriodSeconds": 1
},
"type": "Recreate"
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"component": "curator-ops",
"logging-infra": "curator",
"provider": "openshift"
},
"name": "logging-curator-ops"
},
"spec": {
"containers": [
{
"env": [
{
"name": "K8S_HOST_URL",
"value": "https://kubernetes.default.svc.cluster.local"
},
{
"name": "ES_HOST",
"value": "logging-es-ops"
},
{
"name": "ES_PORT",
"value": "9200"
},
{
"name": "ES_CLIENT_CERT",
"value": "/etc/curator/keys/cert"
},
{
"name": "ES_CLIENT_KEY",
"value": "/etc/curator/keys/key"
},
{
"name": "ES_CA",
"value": "/etc/curator/keys/ca"
},
{
"name": "CURATOR_DEFAULT_DAYS",
"value": "30"
},
{
"name": "CURATOR_RUN_HOUR",
"value": "0"
},
{
"name": "CURATOR_RUN_MINUTE",
"value": "0"
},
{
"name": "CURATOR_RUN_TIMEZONE",
"value": "UTC"
},
{
"name": "CURATOR_SCRIPT_LOG_LEVEL",
"value": "INFO"
},
{
"name": "CURATOR_LOG_LEVEL",
"value": "ERROR"
}
],
"image": "172.30.255.47:5000/logging/logging-curator:latest",
"imagePullPolicy": "Always",
"name": "curator",
"resources": {
"limits": {
"cpu": "100m"
}
},
"terminationMessagePath": "/dev/termination-log",
"terminationMessagePolicy": "File",
"volumeMounts": [
{
"mountPath": "/etc/curator/keys",
"name": "certs",
"readOnly": true
},
{
"mountPath": "/etc/curator/settings",
"name": "config",
"readOnly": true
}
]
}
],
"dnsPolicy": "ClusterFirst",
"restartPolicy": "Always",
"schedulerName": "default-scheduler",
"securityContext": {},
"serviceAccount": "aggregated-logging-curator",
"serviceAccountName": "aggregated-logging-curator",
"terminationGracePeriodSeconds": 30,
"volumes": [
{
"name": "certs",
"secret": {
"defaultMode": 420,
"secretName": "logging-curator"
}
},
{
"configMap": {
"defaultMode": 420,
"name": "logging-curator"
},
"name": "config"
}
]
}
},
"test": false,
"triggers": [
{
"type": "ConfigChange"
}
]
},
"status": {
"availableReplicas": 0,
"conditions": [
{
"lastTransitionTime": "2017-06-08T16:01:23Z",
"lastUpdateTime": "2017-06-08T16:01:23Z",
"message": "Deployment config does not have minimum availability.",
"status": "False",
"type": "Available"
},
{
"lastTransitionTime": "2017-06-08T16:01:23Z",
"lastUpdateTime": "2017-06-08T16:01:23Z",
"message": "replication controller \"logging-curator-ops-1\" is waiting for pod \"logging-curator-ops-1-deploy\" to run",
"status": "Unknown",
"type": "Progressing"
}
],
"details": {
"causes": [
{
"type": "ConfigChange"
}
],
"message": "config change"
},
"latestVersion": 1,
"observedGeneration": 2,
"replicas": 0,
"unavailableReplicas": 0,
"updatedReplicas": 0
}
}
],
"returncode": 0
},
"state": "present"
}
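Everything that controls Curator's behaviour lives in the container env of the DC just printed: CURATOR_DEFAULT_DAYS=30 is the default index retention, and CURATOR_RUN_HOUR/CURATOR_RUN_MINUTE/CURATOR_RUN_TIMEZONE pin the daily run to 00:00 UTC. A sketch for confirming the rollout and inspecting those values on a live cluster (assumes the logging project used throughout this run):
  $ oc rollout status dc/logging-curator-ops -n logging
  $ oc set env dc/logging-curator-ops --list -n logging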
TASK [openshift_logging_curator : Delete temp directory] ***********************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_curator/tasks/main.yaml:109
ok: [openshift] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-rrg5iz",
"state": "absent"
}
TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:226
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging : include_role] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:241
statically included: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:2
[WARNING]: when statements should not include jinja2 templating delimiters such as {{ }} or {% %}. Found: {{ openshift_logging_fluentd_nodeselector.keys() | count }} > 1
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
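The [WARNING] above is Ansible pointing out that 'when:' expressions are already implicitly templated, so wrapping the condition in {{ }} is redundant; the fix in the role would be to write the condition bare, e.g. when: openshift_logging_fluentd_nodeselector.keys() | count > 1 (a sketch of the corrected line, not something this run changed).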
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:6
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:10
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:14
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:3
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:7
ok: [openshift] => {
"ansible_facts": {
"fluentd_version": "3_5"
},
"changed": false
}
TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:12
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : fail] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/determine_version.yaml:15
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:20
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:26
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : Create temp directory for doing work in] *****
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:33
ok: [openshift] => {
"changed": false,
"cmd": [
"mktemp",
"-d",
"/tmp/openshift-logging-ansible-XXXXXX"
],
"delta": "0:00:00.002034",
"end": "2017-06-08 12:01:26.895004",
"rc": 0,
"start": "2017-06-08 12:01:26.892970"
}
STDOUT:
/tmp/openshift-logging-ansible-93vigU
TASK [openshift_logging_fluentd : set_fact] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:38
ok: [openshift] => {
"ansible_facts": {
"tempdir": "/tmp/openshift-logging-ansible-93vigU"
},
"changed": false
}
TASK [openshift_logging_fluentd : Create templates subdirectory] ***************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:41
ok: [openshift] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/tmp/openshift-logging-ansible-93vigU/templates",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [openshift_logging_fluentd : Create Fluentd service account] **************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:51
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : Create Fluentd service account] **************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:59
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get sa aggregated-logging-fluentd -o json -n logging",
"results": [
{
"apiVersion": "v1",
"imagePullSecrets": [
{
"name": "aggregated-logging-fluentd-dockercfg-l7k3x"
}
],
"kind": "ServiceAccount",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:27Z",
"name": "aggregated-logging-fluentd",
"namespace": "logging",
"resourceVersion": "1704",
"selfLink": "/api/v1/namespaces/logging/serviceaccounts/aggregated-logging-fluentd",
"uid": "bac2706a-4c63-11e7-94aa-0e1649350dc2"
},
"secrets": [
{
"name": "aggregated-logging-fluentd-dockercfg-l7k3x"
},
{
"name": "aggregated-logging-fluentd-token-zc0vw"
}
]
}
],
"returncode": 0
},
"state": "present"
}
TASK [openshift_logging_fluentd : Set privileged permissions for Fluentd] ******
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:68
changed: [openshift] => {
"changed": true,
"present": "present",
"results": {
"cmd": "/bin/oc adm policy add-scc-to-user privileged system:serviceaccount:logging:aggregated-logging-fluentd -n logging",
"results": "",
"returncode": 0
}
}
TASK [openshift_logging_fluentd : Set cluster-reader permissions for Fluentd] ***
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:77
changed: [openshift] => {
"changed": true,
"present": "present",
"results": {
"cmd": "/bin/oc adm policy add-cluster-role-to-user cluster-reader system:serviceaccount:logging:aggregated-logging-fluentd -n logging",
"results": "",
"returncode": 0
}
}
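These two policy commands are what let Fluentd run as a node agent: membership in the privileged SCC allows its pods to run privileged and mount host paths such as /var/log and /run/log/journal, while cluster-reader lets the service account query the API server for the pod and namespace metadata it attaches to each log record. A quick after-the-fact check, sketched here rather than taken from this run:
  $ oc get scc privileged -o jsonpath='{.users}'
  $ oc policy who-can get pods -n logging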
TASK [openshift_logging_fluentd : template] ************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:86
ok: [openshift] => {
"changed": false,
"checksum": "a8c8596f5fc2c5dd7c8d33d244af17a2555be086",
"dest": "/tmp/openshift-logging-ansible-93vigU/fluent.conf",
"gid": 0,
"group": "root",
"md5sum": "579698b48ffce6276ee0e8d5ac71a338",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 1301,
"src": "/root/.ansible/tmp/ansible-tmp-1496937689.43-239761296344115/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:94
ok: [openshift] => {
"changed": false,
"checksum": "b3e75eddc4a0765edc77da092384c0c6f95440e1",
"dest": "/tmp/openshift-logging-ansible-93vigU/fluentd-throttle-config.yaml",
"gid": 0,
"group": "root",
"md5sum": "25871b8e0a9bedc166a6029872a6c336",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 133,
"src": "/root/.ansible/tmp/ansible-tmp-1496937689.91-23262744748646/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:100
ok: [openshift] => {
"changed": false,
"checksum": "a3aa36da13f3108aa4ad5b98d4866007b44e9798",
"dest": "/tmp/openshift-logging-ansible-93vigU/secure-forward.conf",
"gid": 0,
"group": "root",
"md5sum": "1084b00c427f4fa48dfc66d6ad6555d4",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 563,
"src": "/root/.ansible/tmp/ansible-tmp-1496937690.23-193864224121332/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:107
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:113
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : copy] ****************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:119
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging_fluentd : Set Fluentd configmap] ***********************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:125
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get configmap logging-fluentd -o json -n logging",
"results": [
{
"apiVersion": "v1",
"data": {
"fluent.conf": "# This file is the fluentd configuration entrypoint. Edit with care.\n\n@include configs.d/openshift/system.conf\n\n# In each section below, pre- and post- includes don't include anything initially;\n# they exist to enable future additions to openshift conf as needed.\n\n## sources\n## ordered so that syslog always runs last...\n@include configs.d/openshift/input-pre-*.conf\n@include configs.d/dynamic/input-docker-*.conf\n@include configs.d/dynamic/input-syslog-*.conf\n@include configs.d/openshift/input-post-*.conf\n##\n\n<label @INGRESS>\n## filters\n @include configs.d/openshift/filter-pre-*.conf\n @include configs.d/openshift/filter-retag-journal.conf\n @include configs.d/openshift/filter-k8s-meta.conf\n @include configs.d/openshift/filter-kibana-transform.conf\n @include configs.d/openshift/filter-k8s-flatten-hash.conf\n @include configs.d/openshift/filter-k8s-record-transform.conf\n @include configs.d/openshift/filter-syslog-record-transform.conf\n @include configs.d/openshift/filter-viaq-data-model.conf\n @include configs.d/openshift/filter-post-*.conf\n##\n\n## matches\n @include configs.d/openshift/output-pre-*.conf\n @include configs.d/openshift/output-operations.conf\n @include configs.d/openshift/output-applications.conf\n # no post - applications.conf matches everything left\n##\n</label>\n",
"secure-forward.conf": "# @type secure_forward\n\n# self_hostname ${HOSTNAME}\n# shared_key <SECRET_STRING>\n\n# secure yes\n# enable_strict_verification yes\n\n# ca_cert_path /etc/fluent/keys/your_ca_cert\n# ca_private_key_path /etc/fluent/keys/your_private_key\n # for private CA secret key\n# ca_private_key_passphrase passphrase\n\n# <server>\n # or IP\n# host server.fqdn.example.com\n# port 24284\n# </server>\n# <server>\n # ip address to connect\n# host 203.0.113.8\n # specify hostlabel for FQDN verification if ipaddress is used for host\n# hostlabel server.fqdn.example.com\n# </server>\n",
"throttle-config.yaml": "# Logging example fluentd throttling config file\n\n#example-project:\n# read_lines_limit: 10\n#\n#.operations:\n# read_lines_limit: 100\n"
},
"kind": "ConfigMap",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:31Z",
"name": "logging-fluentd",
"namespace": "logging",
"resourceVersion": "1728",
"selfLink": "/api/v1/namespaces/logging/configmaps/logging-fluentd",
"uid": "bccc79f7-4c63-11e7-94aa-0e1649350dc2"
}
}
],
"returncode": 0
},
"state": "present"
}
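The configmap above carries the three files each Fluentd pod mounts at /etc/fluent/configs.d/user: fluent.conf (the include-driven entrypoint), secure-forward.conf (an entirely commented-out template for forwarding to an external aggregator), and throttle-config.yaml (commented per-project read_lines_limit examples). To change any of them on a running cluster, a sketch: edit the configmap and let the daemonset replace the pods:
  $ oc edit configmap/logging-fluentd -n logging
  $ oc delete pod -l component=fluentd -n logging    # daemonset pods restart with the new config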
TASK [openshift_logging_fluentd : Set logging-fluentd secret] ******************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:137
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc secrets new logging-fluentd ca=/etc/origin/logging/ca.crt key=/etc/origin/logging/system.logging.fluentd.key cert=/etc/origin/logging/system.logging.fluentd.crt -n logging",
"results": "",
"returncode": 0
},
"state": "present"
}
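'oc secrets new' as invoked here is the older client syntax; later oc releases deprecate it in favor of 'oc create secret generic'. An equivalent of the command above in the newer form (same files, same key names) would be:
  $ oc create secret generic logging-fluentd \
      --from-file=ca=/etc/origin/logging/ca.crt \
      --from-file=key=/etc/origin/logging/system.logging.fluentd.key \
      --from-file=cert=/etc/origin/logging/system.logging.fluentd.crt \
      -n logging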
TASK [openshift_logging_fluentd : Generate logging-fluentd daemonset definition] ***
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:154
ok: [openshift] => {
"changed": false,
"checksum": "3d22736cd2f7d6f32c9aff1922f56b8d588ca2db",
"dest": "/tmp/openshift-logging-ansible-93vigU/templates/logging-fluentd.yaml",
"gid": 0,
"group": "root",
"md5sum": "6c0a7f6ddcbde86e304d77e889833a87",
"mode": "0644",
"owner": "root",
"secontext": "unconfined_u:object_r:admin_home_t:s0",
"size": 3414,
"src": "/root/.ansible/tmp/ansible-tmp-1496937692.0-213176700497659/source",
"state": "file",
"uid": 0
}
TASK [openshift_logging_fluentd : Set logging-fluentd daemonset] ***************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:172
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc get daemonset logging-fluentd -o json -n logging",
"results": [
{
"apiVersion": "extensions/v1beta1",
"kind": "DaemonSet",
"metadata": {
"creationTimestamp": "2017-06-08T16:01:33Z",
"generation": 1,
"labels": {
"component": "fluentd",
"logging-infra": "fluentd",
"provider": "openshift"
},
"name": "logging-fluentd",
"namespace": "logging",
"resourceVersion": "1738",
"selfLink": "/apis/extensions/v1beta1/namespaces/logging/daemonsets/logging-fluentd",
"uid": "be081477-4c63-11e7-94aa-0e1649350dc2"
},
"spec": {
"selector": {
"matchLabels": {
"component": "fluentd",
"provider": "openshift"
}
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"component": "fluentd",
"logging-infra": "fluentd",
"provider": "openshift"
},
"name": "fluentd-elasticsearch"
},
"spec": {
"containers": [
{
"env": [
{
"name": "K8S_HOST_URL",
"value": "https://kubernetes.default.svc.cluster.local"
},
{
"name": "ES_HOST",
"value": "logging-es"
},
{
"name": "ES_PORT",
"value": "9200"
},
{
"name": "ES_CLIENT_CERT",
"value": "/etc/fluent/keys/cert"
},
{
"name": "ES_CLIENT_KEY",
"value": "/etc/fluent/keys/key"
},
{
"name": "ES_CA",
"value": "/etc/fluent/keys/ca"
},
{
"name": "OPS_HOST",
"value": "logging-es-ops"
},
{
"name": "OPS_PORT",
"value": "9200"
},
{
"name": "OPS_CLIENT_CERT",
"value": "/etc/fluent/keys/cert"
},
{
"name": "OPS_CLIENT_KEY",
"value": "/etc/fluent/keys/key"
},
{
"name": "OPS_CA",
"value": "/etc/fluent/keys/ca"
},
{
"name": "ES_COPY",
"value": "false"
},
{
"name": "USE_JOURNAL",
"value": "true"
},
{
"name": "JOURNAL_SOURCE"
},
{
"name": "JOURNAL_READ_FROM_HEAD",
"value": "false"
}
],
"image": "172.30.255.47:5000/logging/logging-fluentd:latest",
"imagePullPolicy": "Always",
"name": "fluentd-elasticsearch",
"resources": {
"limits": {
"cpu": "100m",
"memory": "512Mi"
}
},
"securityContext": {
"privileged": true
},
"terminationMessagePath": "/dev/termination-log",
"terminationMessagePolicy": "File",
"volumeMounts": [
{
"mountPath": "/run/log/journal",
"name": "runlogjournal"
},
{
"mountPath": "/var/log",
"name": "varlog"
},
{
"mountPath": "/var/lib/docker/containers",
"name": "varlibdockercontainers",
"readOnly": true
},
{
"mountPath": "/etc/fluent/configs.d/user",
"name": "config",
"readOnly": true
},
{
"mountPath": "/etc/fluent/keys",
"name": "certs",
"readOnly": true
},
{
"mountPath": "/etc/docker-hostname",
"name": "dockerhostname",
"readOnly": true
},
{
"mountPath": "/etc/localtime",
"name": "localtime",
"readOnly": true
},
{
"mountPath": "/etc/sysconfig/docker",
"name": "dockercfg",
"readOnly": true
},
{
"mountPath": "/etc/docker",
"name": "dockerdaemoncfg",
"readOnly": true
}
]
}
],
"dnsPolicy": "ClusterFirst",
"nodeSelector": {
"logging-infra-fluentd": "true"
},
"restartPolicy": "Always",
"schedulerName": "default-scheduler",
"securityContext": {},
"serviceAccount": "aggregated-logging-fluentd",
"serviceAccountName": "aggregated-logging-fluentd",
"terminationGracePeriodSeconds": 30,
"volumes": [
{
"hostPath": {
"path": "/run/log/journal"
},
"name": "runlogjournal"
},
{
"hostPath": {
"path": "/var/log"
},
"name": "varlog"
},
{
"hostPath": {
"path": "/var/lib/docker/containers"
},
"name": "varlibdockercontainers"
},
{
"configMap": {
"defaultMode": 420,
"name": "logging-fluentd"
},
"name": "config"
},
{
"name": "certs",
"secret": {
"defaultMode": 420,
"secretName": "logging-fluentd"
}
},
{
"hostPath": {
"path": "/etc/hostname"
},
"name": "dockerhostname"
},
{
"hostPath": {
"path": "/etc/localtime"
},
"name": "localtime"
},
{
"hostPath": {
"path": "/etc/sysconfig/docker"
},
"name": "dockercfg"
},
{
"hostPath": {
"path": "/etc/docker"
},
"name": "dockerdaemoncfg"
}
]
}
},
"templateGeneration": 1,
"updateStrategy": {
"rollingUpdate": {
"maxUnavailable": 1
},
"type": "RollingUpdate"
}
},
"status": {
"currentNumberScheduled": 0,
"desiredNumberScheduled": 0,
"numberMisscheduled": 0,
"numberReady": 0,
"observedGeneration": 1
}
}
],
"returncode": 0
},
"state": "present"
}
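Note the status block: desiredNumberScheduled is 0 because the daemonset's nodeSelector (logging-infra-fluentd=true) matches no nodes yet; pods appear only once nodes carry that label, which the playbook takes care of a few tasks below. Done by hand it would be:
  $ oc label node <node-name> logging-infra-fluentd=true --overwrite
  $ oc get daemonset logging-fluentd -n logging
(<node-name> is a placeholder; this run labels 172.18.3.237.)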
TASK [openshift_logging_fluentd : Retrieve list of Fluentd hosts] **************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:183
ok: [openshift] => {
"changed": false,
"results": {
"cmd": "/bin/oc get node -o json -n default",
"results": [
{
"apiVersion": "v1",
"items": [
{
"apiVersion": "v1",
"kind": "Node",
"metadata": {
"annotations": {
"volumes.kubernetes.io/controller-managed-attach-detach": "true"
},
"creationTimestamp": "2017-06-08T15:38:33Z",
"labels": {
"beta.kubernetes.io/arch": "amd64",
"beta.kubernetes.io/os": "linux",
"kubernetes.io/hostname": "172.18.3.237"
},
"name": "172.18.3.237",
"namespace": "",
"resourceVersion": "1705",
"selfLink": "/api/v1/nodes/172.18.3.237",
"uid": "8768a5bc-4c60-11e7-94aa-0e1649350dc2"
},
"spec": {
"externalID": "172.18.3.237",
"providerID": "aws:////i-06878b3e9e9644cee"
},
"status": {
"addresses": [
{
"address": "172.18.3.237",
"type": "LegacyHostIP"
},
{
"address": "172.18.3.237",
"type": "InternalIP"
},
{
"address": "172.18.3.237",
"type": "Hostname"
}
],
"allocatable": {
"cpu": "4",
"memory": "7129288Ki",
"pods": "40"
},
"capacity": {
"cpu": "4",
"memory": "7231688Ki",
"pods": "40"
},
"conditions": [
{
"lastHeartbeatTime": "2017-06-08T16:01:28Z",
"lastTransitionTime": "2017-06-08T15:38:33Z",
"message": "kubelet has sufficient disk space available",
"reason": "KubeletHasSufficientDisk",
"status": "False",
"type": "OutOfDisk"
},
{
"lastHeartbeatTime": "2017-06-08T16:01:28Z",
"lastTransitionTime": "2017-06-08T15:38:33Z",
"message": "kubelet has sufficient memory available",
"reason": "KubeletHasSufficientMemory",
"status": "False",
"type": "MemoryPressure"
},
{
"lastHeartbeatTime": "2017-06-08T16:01:28Z",
"lastTransitionTime": "2017-06-08T15:38:33Z",
"message": "kubelet has no disk pressure",
"reason": "KubeletHasNoDiskPressure",
"status": "False",
"type": "DiskPressure"
},
{
"lastHeartbeatTime": "2017-06-08T16:01:28Z",
"lastTransitionTime": "2017-06-08T15:38:33Z",
"message": "kubelet is posting ready status",
"reason": "KubeletReady",
"status": "True",
"type": "Ready"
}
],
"daemonEndpoints": {
"kubeletEndpoint": {
"Port": 10250
}
},
"images": [
{
"names": [
"openshift/origin-federation:6acabdc",
"openshift/origin-federation:latest"
],
"sizeBytes": 1205885664
},
{
"names": [
"docker.io/openshift/origin-docker-registry@sha256:54f022c67562440fb5cc73421f32624747cd7836d45b9bb1f3e144eec437be12",
"docker.io/openshift/origin-docker-registry:latest"
],
"sizeBytes": 1100553091
},
{
"names": [
"openshift/origin-docker-registry:latest"
],
"sizeBytes": 1100164272
},
{
"names": [
"openshift/node:6acabdc",
"openshift/node:latest"
],
"sizeBytes": 1051721928
},
{
"names": [
"openshift/origin-haproxy-router:6acabdc",
"openshift/origin-haproxy-router:latest"
],
"sizeBytes": 1022758742
},
{
"names": [
"openshift/origin-docker-builder:latest"
],
"sizeBytes": 1001728427
},
{
"names": [
"openshift/origin-recycler:6acabdc",
"openshift/origin-recycler:latest"
],
"sizeBytes": 1001728427
},
{
"names": [
"openshift/origin-deployer:6acabdc",
"openshift/origin-deployer:latest"
],
"sizeBytes": 1001728427
},
{
"names": [
"openshift/origin:6acabdc",
"openshift/origin:latest"
],
"sizeBytes": 1001728427
},
{
"names": [
"openshift/origin-cluster-capacity:6acabdc",
"openshift/origin-cluster-capacity:latest"
],
"sizeBytes": 962455026
},
{
"names": [
"openshift/dind-master:latest"
],
"sizeBytes": 731456758
},
{
"names": [
"openshift/dind-node:latest"
],
"sizeBytes": 731453034
},
{
"names": [
"172.30.255.47:5000/logging/logging-auth-proxy@sha256:6ae7f9f4986fcacfc0397cb05b2c52234dfc214e82c1dfb3ed04fee27e471935",
"172.30.255.47:5000/logging/logging-auth-proxy:latest"
],
"sizeBytes": 715535980
},
{
"names": [
"<none>@<none>",
"<none>:<none>"
],
"sizeBytes": 709532011
},
{
"names": [
"docker.io/node@sha256:46db0dd19955beb87b841c30a6b9812ba626473283e84117d1c016deee5949a9",
"docker.io/node:0.10.36"
],
"sizeBytes": 697128386
},
{
"names": [
"docker.io/openshift/origin-logging-kibana@sha256:70ead525ed596b73301e8df3ac229e33dd7f8431ec1233b37e96544c556530e9",
"docker.io/openshift/origin-logging-kibana:latest"
],
"sizeBytes": 682851528
},
{
"names": [
"172.30.255.47:5000/logging/logging-kibana@sha256:56e34b1a2e934ab614299d4818546e7b0ad61fb03987188c80c880c86a59577a",
"172.30.255.47:5000/logging/logging-kibana:latest"
],
"sizeBytes": 682851513
},
{
"names": [
"openshift/dind:latest"
],
"sizeBytes": 640650210
},
{
"names": [
"172.30.255.47:5000/logging/logging-elasticsearch@sha256:a5bceb422ca90819dc7ac8847a8908fa1e033257961e387fb6e0967f9756bb7f",
"172.30.255.47:5000/logging/logging-elasticsearch:latest"
],
"sizeBytes": 623379762
},
{
"names": [
"172.30.255.47:5000/logging/logging-fluentd@sha256:e79d465f5fceb5b2fc70e3a3bf3d75dfb6d276919035f112cdf12f64d1653388",
"172.30.255.47:5000/logging/logging-fluentd:latest"
],
"sizeBytes": 472183180
},
{
"names": [
"172.30.255.47:5000/logging/logging-curator@sha256:4762bcdb87e470fc1246e0e2f320e5aa27bc077593da9c59f6dab4562f073fdf",
"172.30.255.47:5000/logging/logging-curator:latest"
],
"sizeBytes": 418288220
},
{
"names": [
"docker.io/openshift/base-centos7@sha256:aea292a3bddba020cde0ee83e6a45807931eb607c164ec6a3674f67039d8cd7c",
"docker.io/openshift/base-centos7:latest"
],
"sizeBytes": 383049978
},
{
"names": [
"rhel7.2:latest"
],
"sizeBytes": 377493597
},
{
"names": [
"openshift/origin-egress-router:6acabdc",
"openshift/origin-egress-router:latest"
],
"sizeBytes": 364745713
},
{
"names": [
"openshift/origin-base:latest"
],
"sizeBytes": 363070172
},
{
"names": [
"docker.io/openshift/origin-logging-fluentd@sha256:bc70848086a50bad58a2f41e166098e8ed351bf4dbe7af83caeb7a29f35b4395",
"docker.io/openshift/origin-logging-fluentd:latest"
],
"sizeBytes": 359217371
},
{
"names": [
"docker.io/fedora@sha256:69281ddd7b2600e5f2b17f1e12d7fba25207f459204fb2d15884f8432c479136",
"docker.io/fedora:25"
],
"sizeBytes": 230864375
},
{
"names": [
"docker.io/openshift/origin-logging-curator@sha256:e820338ca7fb0addfaec25d80d40a49f5ea25b24ff056ab6adbb42dd9eec94b4",
"docker.io/openshift/origin-logging-curator:latest"
],
"sizeBytes": 224977691
},
{
"names": [
"rhel7.3:latest",
"rhel7:latest"
],
"sizeBytes": 219121266
},
{
"names": [
"openshift/origin-pod:6acabdc",
"openshift/origin-pod:latest"
],
"sizeBytes": 213199843
},
{
"names": [
"registry.access.redhat.com/rhel7.2@sha256:98e6ca5d226c26e31a95cd67716afe22833c943e1926a21daf1a030906a02249",
"registry.access.redhat.com/rhel7.2:latest"
],
"sizeBytes": 201376319
},
{
"names": [
"registry.access.redhat.com/rhel7.3@sha256:1e232401d8e0ba53b36b757b4712fbcbd1dab9c21db039c45a84871a74e89e68",
"registry.access.redhat.com/rhel7.3:latest"
],
"sizeBytes": 192693772
},
{
"names": [
"docker.io/centos@sha256:bba1de7c9d900a898e3cadbae040dfe8a633c06bc104a0df76ae24483e03c077"
],
"sizeBytes": 192548999
},
{
"names": [
"openshift/origin-source:latest"
],
"sizeBytes": 192548894
},
{
"names": [
"docker.io/centos@sha256:aebf12af704307dfa0079b3babdca8d7e8ff6564696882bcb5d11f1d461f9ee9",
"docker.io/centos:7",
"docker.io/centos:centos7"
],
"sizeBytes": 192548537
},
{
"names": [
"registry.access.redhat.com/rhel7.1@sha256:1bc5a4c43bbb29a5a96a61896ff696933be3502e2f5fdc4cde02d9e101731fdd",
"registry.access.redhat.com/rhel7.1:latest"
],
"sizeBytes": 158229901
}
],
"nodeInfo": {
"architecture": "amd64",
"bootID": "c2cfc298-5593-4726-958d-742f09f4df0d",
"containerRuntimeVersion": "docker://1.12.6",
"kernelVersion": "3.10.0-327.22.2.el7.x86_64",
"kubeProxyVersion": "v1.6.1+5115d708d7",
"kubeletVersion": "v1.6.1+5115d708d7",
"machineID": "f9370ed252a14f73b014c1301a9b6d1b",
"operatingSystem": "linux",
"osImage": "Red Hat Enterprise Linux Server 7.3 (Maipo)",
"systemUUID": "EC2388AB-03B5-9846-ECC4-052DA3A164CF"
}
}
}
],
"kind": "List",
"metadata": {},
"resourceVersion": "",
"selfLink": ""
}
],
"returncode": 0
},
"state": "list"
}
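The task above fetches the full node objects as JSON only to harvest the node names for openshift_logging_fluentd_hosts (set in the next task). Outside the playbook the same list can be pulled in one line, e.g.:
  $ oc get nodes -o jsonpath='{.items[*].metadata.name}'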
TASK [openshift_logging_fluentd : Set openshift_logging_fluentd_hosts] *********
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:190
ok: [openshift] => {
"ansible_facts": {
"openshift_logging_fluentd_hosts": [
"172.18.3.237"
]
},
"changed": false
}
TASK [openshift_logging_fluentd : include] *************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:195
included: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml for openshift
TASK [openshift_logging_fluentd : Label 172.18.3.237 for Fluentd deployment] ***
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml:2
changed: [openshift] => {
"changed": true,
"results": {
"cmd": "/bin/oc label node 172.18.3.237 logging-infra-fluentd=true --overwrite",
"results": "",
"returncode": 0
},
"state": "add"
}
TASK [openshift_logging_fluentd : command] *************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/label_and_wait.yaml:10
changed: [openshift -> 127.0.0.1] => {
"changed": true,
"cmd": [
"sleep",
"0.5"
],
"delta": "0:00:00.502260",
"end": "2017-06-08 12:01:35.810608",
"rc": 0,
"start": "2017-06-08 12:01:35.308348"
}
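label_and_wait labels the node and then sleeps 0.5 s, apparently to stagger pod start-up when several nodes are labeled in sequence. To watch the daemonset react as labels land, a sketch:
  $ oc get pods -l component=fluentd -n logging -w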
TASK [openshift_logging_fluentd : Delete temp directory] ***********************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging_fluentd/tasks/main.yaml:202
ok: [openshift] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-93vigU",
"state": "absent"
}
TASK [openshift_logging : include] *********************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging/tasks/install_logging.yaml:253
included: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging/tasks/update_master_config.yaml for openshift
TASK [openshift_logging : include] *********************************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging/tasks/main.yaml:36
skipping: [openshift] => {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
TASK [openshift_logging : Cleaning up local temp dir] **************************
task path: /tmp/tmp.giX4WBVEkB/openhift-ansible/roles/openshift_logging/tasks/main.yaml:40
ok: [openshift -> 127.0.0.1] => {
"changed": false,
"path": "/tmp/openshift-logging-ansible-BE0YYi",
"state": "absent"
}
META: ran handlers
META: ran handlers
PLAY [Update Master configs] ***************************************************
skipping: no hosts matched
PLAY RECAP *********************************************************************
localhost : ok=2 changed=0 unreachable=0 failed=0
openshift : ok=207 changed=70 unreachable=0 failed=0
/data/src/github.com/openshift/origin-aggregated-logging
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:170: executing 'oc get pods -l component=es' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 0.291s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:170: executing 'oc get pods -l component=es' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME READY STATUS RESTARTS AGE
logging-es-data-master-9s2p0i8l-1-4pfln 1/1 Running 0 1m
There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:171: executing 'oc get pods -l component=kibana' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 0.232s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:171: executing 'oc get pods -l component=kibana' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME READY STATUS RESTARTS AGE
logging-kibana-1-btlws 2/2 Running 0 36s
There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:172: executing 'oc get pods -l component=curator' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 13.883s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:172: executing 'oc get pods -l component=curator' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 9s
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 10s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 11s
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 12s
... repeated 3 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 13s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 14s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 15s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 16s
... repeated 3 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 17s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 18s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 19s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 20s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 21s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 0/1 ContainerCreating 0 22s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-1-f45s0 1/1 Running 0 23s
Standard error from the command:
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:175: executing 'oc get pods -l component=es-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 0.270s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:175: executing 'oc get pods -l component=es-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME READY STATUS RESTARTS AGE
logging-es-ops-data-master-obwim1kt-1-5j1nm 1/1 Running 0 1m
There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:176: executing 'oc get pods -l component=kibana-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 0.267s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:176: executing 'oc get pods -l component=kibana-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME READY STATUS RESTARTS AGE
logging-kibana-ops-1-8g75g 1/2 Running 0 30s
There was no error output from the command.
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:177: executing 'oc get pods -l component=curator-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s...
SUCCESS after 1.120s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:177: executing 'oc get pods -l component=curator-ops' expecting any result and text 'Running'; re-trying every 0.2s until completion or 180.000s
Standard output from the command:
NAME READY STATUS RESTARTS AGE
logging-curator-ops-1-38441 0/1 ContainerCreating 0 13s
... repeated 2 times
NAME READY STATUS RESTARTS AGE
logging-curator-ops-1-38441 1/1 Running 0 14s
Standard error from the command:
Running /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:185: executing 'oc project logging > /dev/null' expecting success...
SUCCESS after 0.267s: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:185: executing 'oc project logging > /dev/null' expecting success
There was no output from the command.
There was no error output from the command.
/data/src/github.com/openshift/origin-aggregated-logging/hack/testing /data/src/github.com/openshift/origin-aggregated-logging
--> Deploying template "logging/logging-fluentd-template-maker" for "-" to project logging
logging-fluentd-template-maker
---------
Template to create template for fluentd
* With parameters:
* MASTER_URL=https://kubernetes.default.svc.cluster.local
* ES_HOST=logging-es
* ES_PORT=9200
* ES_CLIENT_CERT=/etc/fluent/keys/cert
* ES_CLIENT_KEY=/etc/fluent/keys/key
* ES_CA=/etc/fluent/keys/ca
* OPS_HOST=logging-es-ops
* OPS_PORT=9200
* OPS_CLIENT_CERT=/etc/fluent/keys/cert
* OPS_CLIENT_KEY=/etc/fluent/keys/key
* OPS_CA=/etc/fluent/keys/ca
* ES_COPY=false
* ES_COPY_HOST=
* ES_COPY_PORT=
* ES_COPY_SCHEME=https
* ES_COPY_CLIENT_CERT=
* ES_COPY_CLIENT_KEY=
* ES_COPY_CA=
* ES_COPY_USERNAME=
* ES_COPY_PASSWORD=
* OPS_COPY_HOST=
* OPS_COPY_PORT=
* OPS_COPY_SCHEME=https
* OPS_COPY_CLIENT_CERT=
* OPS_COPY_CLIENT_KEY=
* OPS_COPY_CA=
* OPS_COPY_USERNAME=
* OPS_COPY_PASSWORD=
* IMAGE_PREFIX_DEFAULT=172.30.255.47:5000/logging/
* IMAGE_VERSION_DEFAULT=latest
* USE_JOURNAL=
* JOURNAL_SOURCE=
* JOURNAL_READ_FROM_HEAD=false
* USE_MUX=false
* USE_MUX_CLIENT=false
* MUX_ALLOW_EXTERNAL=false
* BUFFER_QUEUE_LIMIT=1024
* BUFFER_SIZE_LIMIT=16777216
--> Creating resources ...
template "logging-fluentd-template" created
--> Success
Run 'oc status' to view your app.
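The template banner above is consistent with oc new-app processing a template: the parameter list shows every knob the generated fluentd template accepts, with USE_JOURNAL left empty so the installer's detected default applies. A hypothetical equivalent invocation (names from the log, flags assumed):
  $ oc new-app logging-fluentd-template-maker \
      -p USE_MUX=false -p BUFFER_QUEUE_LIMIT=1024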
WARNING: bridge-nf-call-ip6tables is disabled
Error: timed out waiting for /var/log/journal.pos - check Fluentd pod log
[ERROR] PID 4246: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:615: `return 1` exited with status 1.
[INFO] Stack Trace:
[INFO] 1: /data/src/github.com/openshift/origin-aggregated-logging/logging.sh:615: `return 1`
[INFO] Exiting with code 1.
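On the failure itself: logging.sh waits for Fluentd to write its journald position file (/var/log/journal.pos on the node) and gives up after a timeout, which usually means the Fluentd pod never started consuming the journal. The first diagnostic steps on a live cluster would be a sketch like:
  $ oc get pods -l component=fluentd -n logging -o wide
  $ oc logs <fluentd-pod> -n logging        # pod name from the previous command
  $ oc set env daemonset/logging-fluentd --list -n logging | grep JOURNAL
Here the job was aborted during cleanup (see below), so the actual root cause is not recoverable from this log.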
/data/src/github.com/openshift/origin-aggregated-logging/hack/lib/log/system.sh: line 31: 4649 Terminated sar -A -o "${binary_logfile}" 1 86400 > /dev/null 2> "${stderr_logfile}" (wd: /data/src/github.com/openshift/origin-aggregated-logging)
[INFO] [CLEANUP] Beginning cleanup routines...
[INFO] [CLEANUP] Dumping cluster events to /tmp/origin-aggregated-logging/artifacts/events.txt
[INFO] [CLEANUP] Dumping etcd contents to /tmp/origin-aggregated-logging/artifacts/etcd
[WARNING] No compiled `etcdhelper` binary was found. Attempting to build one using:
[WARNING] $ hack/build-go.sh tools/etcdhelper
Error while running ssh/sudo command:
set -e
pushd /data/src/github.com/openshift//origin-aggregated-logging/hack/testing >/dev/null
export PATH=$GOPATH/bin:$PATH
echo '***************************************************'
echo 'Running GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh...'
time GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh
echo 'Finished GIT_URL=https://github.com/openshift/origin-aggregated-logging GIT_BRANCH=master O_A_L_DIR=/data/src/github.com/openshift/origin-aggregated-logging OS_ROOT=/data/src/github.com/openshift/origin ENABLE_OPS_CLUSTER=true USE_LOCAL_SOURCE=true TEST_PERF=false VERBOSE=1 OS_ANSIBLE_REPO=https://github.com/openshift/openshift-ansible OS_ANSIBLE_BRANCH=master ./logging.sh'
echo '***************************************************'
popd >/dev/null
SIGTERM
==> openshiftdev: Downloading logs
Build was aborted
Aborted by Rich Megginson
Publish artifacts to S3 Bucket
Skipping publishing on S3 because build aborted
[description-setter] Could not determine description.
[PostBuildScript] - Execution post build scripts.
[workspace] $ /bin/sh -xe /tmp/hudson25446529649546822.sh
+ INSTANCE_NAME=origin_logging-rhel7-1628
+ pushd origin
~/jobs/test-origin-aggregated-logging/workspace/origin ~/jobs/test-origin-aggregated-logging/workspace
+ rc=0
+ '[' -f .vagrant-openshift.json ']'
++ /usr/bin/vagrant ssh -c 'sudo ausearch -m avc'
==> openshiftdev: Downloading artifacts from '/var/log/yum.log' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace/origin/artifacts/yum.log'
==> openshiftdev: Downloading artifacts from '/var/log/secure' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace/origin/artifacts/secure'
==> openshiftdev: Downloading artifacts from '/var/log/audit/audit.log' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace/origin/artifacts/audit.log'
==> openshiftdev: Downloading artifacts from '/tmp/origin-aggregated-logging/' to '/var/lib/jenkins/jobs/test-origin-aggregated-logging/workspace/origin/artifacts'
+ ausearchresult='<no matches>'
+ rc=1
+ '[' '<no matches>' = '<no matches>' ']'
+ rc=0
+ /usr/bin/vagrant destroy -f
==> openshiftdev: Terminating the instance...
==> openshiftdev: Running cleanup tasks for 'shell' provisioner...
+ popd
~/jobs/test-origin-aggregated-logging/workspace
+ exit 0
[BFA] Scanning build for known causes...
[BFA] Found failure cause(s):
[BFA] Command Failure from category failure
[BFA] Done. 0s
Finished: ABORTED