journalctl -f -u kuryr-kubernetes.service
May 28 21:06:09 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:09.737 13937 ERROR kuryr_kubernetes.controller.drivers.lbaasv2 [-] Error when creating listener: {"debuginfo": null, "faultcode": "Client", "faultstring": "Another Listener on this Load Balancer is already using protocol_port 53"}
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging [-] Failed to handle event {u'object': {u'kind': u'Endpoints', u'subsets': [{u'ports': [{u'protocol': u'UDP', u'name': u'dns', u'port': 53}, {u'protocol': u'TCP', u'name': u'dns-tcp', u'port': 53}], u'addresses': [{u'ip': u'10.1.0.148', u'targetRef': {u'kind': u'Pod', u'resourceVersion': u'52208', u'namespace': u'kube-system', u'name': u'coredns-6c868f76bc-qdhd7', u'uid': u'41ad9eb5-8167-11e9-9b1c-525400e3dab3'}, u'nodeName': u'zu-kuryr-kubernetes-worker0'}], u'notReadyAddresses': [{u'ip': u'10.1.3.207', u'targetRef': {u'kind': u'Pod', u'resourceVersion': u'51809', u'namespace': u'kube-system', u'name': u'coredns-6c868f76bc-t76dr', u'uid': u'41b15414-8167-11e9-9b1c-525400e3dab3'}, u'nodeName': u'zu-kuryr-kubernetes-worker1'}]}], u'apiVersion': u'v1', u'metadata': {u'name': u'kube-dns', u'labels': {u'k8s-app': u'kube-dns', u'kubernetes.io/name': u'CoreDNS', u'kubernetes.io/cluster-service': u'true'}, u'namespace': u'kube-system', u'resourceVersion': u'52209', u'creationTimestamp': u'2019-05-28T16:40:09Z', u'annotations': {u'openstack.org/kuryr-lbaas-spec': u'{"versioned_object.data": {"ip": "10.2.0.10", "lb_ip": null, "ports": [{"versioned_object.data": {"name": "dns", "port": 53, "protocol": "UDP", "targetPort": "53"}, "versioned_object.name": "LBaaSPortSpec", "versioned_object.namespace": "kuryr_kubernetes", "versioned_object.version": "1.1"}, {"versioned_object.data": {"name": "dns-tcp", "port": 53, "protocol": "TCP", "targetPort": "53"}, "versioned_object.name": "LBaaSPortSpec", "versioned_object.namespace": "kuryr_kubernetes", "versioned_object.version": "1.1"}], "project_id": "14cdf813e739448388638ca8a5ce2475", "security_groups_ids": ["6cb8b9bb-defd-4e30-bc59-973cd865e305"], "subnet_id": "82d904ed-5129-4098-865b-f5222fc0167d", "type": "ClusterIP"}, "versioned_object.name": "LBaaSServiceSpec", "versioned_object.namespace": "kuryr_kubernetes", "versioned_object.version": "1.0"}'}, u'selfLink': u'/api/v1/namespaces/kube-system/endpoints/kube-dns', u'uid': u'41ad82fe-8167-11e9-9b1c-525400e3dab3'}}, u'type': u'MODIFIED'}: ResourceNotReady: Resource not ready: LBaaSListener(id=<?>,loadbalancer_id=4c7ea0ed-752e-4bda-b13b-cffd2e700682,name='kube-system/kube-dns:TCP:53',port=53,project_id='14cdf813e739448388638ca8a5ce2475',protocol='TCP')
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging Traceback (most recent call last):
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/handlers/logging.py", line 37, in __call__
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging self._handler(event)
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/handlers/retry.py", line 68, in __call__
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging self._handler.set_liveness(alive=False)
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/usr/local/lib/python2.7/dist-packages/oslo_utils/excutils.py", line 220, in __exit__
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging self.force_reraise()
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/usr/local/lib/python2.7/dist-packages/oslo_utils/excutils.py", line 196, in force_reraise
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging six.reraise(self.type_, self.value, self.tb)
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/handlers/retry.py", line 56, in __call__
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging self._handler(event)
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/handlers/k8s_base.py", line 72, in __call__
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging self.on_present(obj)
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/controller/handlers/lbaas.py", line 184, in on_present
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging if self._sync_lbaas_members(endpoints, lbaas_state, lbaas_spec):
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/controller/handlers/lbaas.py", line 271, in _sync_lbaas_members
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging if self._sync_lbaas_pools(endpoints, lbaas_state, lbaas_spec):
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/controller/handlers/lbaas.py", line 438, in _sync_lbaas_pools
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging if self._sync_lbaas_listeners(endpoints, lbaas_state, lbaas_spec):
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/controller/handlers/lbaas.py", line 498, in _sync_lbaas_listeners
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging if self._add_new_listeners(endpoints, lbaas_spec, lbaas_state):
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/controller/handlers/lbaas.py", line 517, in _add_new_listeners
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging service_type=lbaas_spec.type)
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/controller/drivers/lbaasv2.py", line 416, in ensure_listener
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging self._find_listener, _LB_STS_POLL_SLOW_INTERVAL)
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging File "/root/kuryr-k8s-controller/kuryr-kubernetes/kuryr_kubernetes/controller/drivers/lbaasv2.py", line 715, in _ensure_provisioned
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging raise k_exc.ResourceNotReady(obj)
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging ResourceNotReady: Resource not ready: LBaaSListener(id=<?>,loadbalancer_id=4c7ea0ed-752e-4bda-b13b-cffd2e700682,name='kube-system/kube-dns:TCP:53',port=53,project_id='14cdf813e739448388638ca8a5ce2475',protocol='TCP')
May 28 21:06:11 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:06:11.867 13937 ERROR kuryr_kubernetes.handlers.logging
May 28 21:09:46 zu-kuryr-kubernetes-master kuryr-k8s-controller[13937]: 2019-05-28 21:09:46.192 13937 ERROR kuryr_kubernetes.controller.drivers.lbaasv2 [-] Error when creating loadbalancer: {"debuginfo": null, "faultcode": "Server", "faultstring": "Provider 'amphora' reports error: "}
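The two faults above point at the Octavia side: a conflicting listener on protocol_port 53 and an amphora provider failure while creating a load balancer. Assuming the python-octaviaclient CLI is available (run it wherever the OpenStack credentials are sourced), the load balancer from the traceback can be inspected directly; the ID below is the loadbalancer_id reported by kuryr-k8s-controller:

openstack loadbalancer show 4c7ea0ed-752e-4bda-b13b-cffd2e700682
openstack loadbalancer status show 4c7ea0ed-752e-4bda-b13b-cffd2e700682

If the status tree already contains a UDP listener on port 53, the TCP:53 listener is presumably being rejected because this Octavia deployment does not accept two listeners on the same port, even with different protocols.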
root@zu-kuryr-kubernetes-master:~# kubectl get pod --all-namespaces
NAMESPACE     NAME                       READY   STATUS    RESTARTS   AGE
kube-system   coredns-6c868f76bc-qdhd7   1/1     Running   39         151m
kube-system   coredns-6c868f76bc-t76dr   1/1     Running   38         151m
root@zu-kuryr-kubernetes-master:~# kubectl -n kube-system describe pod coredns-6c868f76bc-qdhd7
Warning Unhealthy 2m41s (x191 over 152m) kubelet, zu-kuryr-kubernetes-worker0 Liveness probe failed: HTTP probe failed with statuscode: 503
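The kubelet reports the CoreDNS liveness probe returning 503. With the stock CoreDNS deployment the probe hits the health plugin, which by default listens on port 8080 and serves /health; assuming that default, and that the pod IP shown in the Endpoints object above (10.1.0.148) is reachable from the worker node, it can be queried directly:

root@zu-kuryr-kubernetes-worker0:~# curl -v http://10.1.0.148:8080/health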
root@zu-kuryr-kubernetes-worker0:~# docker logs 74981eebf1a0
.:53
2019/05/28 19:11:30 [INFO] CoreDNS-1.2.5
2019/05/28 19:11:30 [INFO] linux/amd64, go1.11.1, 204537b
CoreDNS-1.2.5
linux/amd64, go1.11.1, 204537b
2019/05/28 19:11:30 [INFO] plugin/reload: Running configuration MD5 = 331dd1443a59e2f683750cb10bf35971
127.0.0.1:51258 - [28/May/2019:19:11:30 +0000] 40971 "HINFO IN 5864675094937834239.3946692726340985353. udp 57 false 512" NXDOMAIN qr,rd,ra 133 0.014322304s
E0528 19:11:55.795329 1 reflector.go:205] github.com/coredns/coredns/plugin/kubernetes/controller.go:318: Failed to list *v1.Namespace: Get https://10.2.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0: dial tcp 10.2.0.1:443: i/o timeout
E0528 19:11:55.795644 1 reflector.go:205] github.com/coredns/coredns/plugin/kubernetes/controller.go:311: Failed to list *v1.Service: Get https://10.2.0.1:443/api/v1/services?limit=500&resourceVersion=0: dial tcp 10.2.0.1:443: i/o timeout
E0528 19:11:55.796306 1 reflector.go:205] github.com/coredns/coredns/plugin/kubernetes/controller.go:313: Failed to list *v1.Endpoints: Get https://10.2.0.1:443/api/v1/endpoints?limit=500&resourceVersion=0: dial tcp 10.2.0.1:443: i/o timeout
E0528 19:12:26.795877 1 reflector.go:205] github.com/coredns/coredns/plugin/kubernetes/controller.go:318: Failed to list *v1.Namespace: Get https://10.2.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0: dial tcp 10.2.0.1:443: i/o timeout
E0528 19:12:26.806288 1 reflector.go:205] github.com/coredns/coredns/plugin/kubernetes/controller.go:311: Failed to list *v1.Service: Get https://10.2.0.1:443/api/v1/services?limit=500&resourceVersion=0: dial tcp 10.2.0.1:443: i/o timeout
E0528 19:12:26.806305 1 reflector.go:205] github.com/coredns/coredns/plugin/kubernetes/controller.go:313: Failed to list *v1.Endpoints: Get https://10.2.0.1:443/api/v1/endpoints?limit=500&resourceVersion=0: dial tcp 10.2.0.1:443: i/o timeout
E0528 19:12:57.796376 1 reflector.go:205] github.com/coredns/coredns/plugin/kubernetes/controller.go:318: Failed to list *v1.Namespace: Get https://10.2.0.1:443/api/v1/namespaces?limit=500&resourceVersion=0: dial tcp 10.2.0.1:443: i/o timeout
E0528 19:12:57.807019 1 reflector.go:205] github.com/coredns/coredns/plugin/kubernetes/controller.go:311: Failed to list *v1.Service: Get https://10.2.0.1:443/api/v1/services?limit=500&resourceVersion=0: dial tcp 10.2.0.1:443: i/o timeout
E0528 19:12:57.808053 1 reflector.go:205] github.com/coredns/coredns/plugin/kubernetes/controller.go:313: Failed to list *v1.Endpoints: Get https://10.2.0.1:443/api/v1/endpoints?limit=500&resourceVersion=0: dial tcp 10.2.0.1:443: i/o timeout
2019/05/28 19:13:05 [INFO] SIGTERM: Shutting down servers then terminating
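The repeated "dial tcp 10.2.0.1:443: i/o timeout" lines mean the pod never reaches the kubernetes ClusterIP Service before the kubelet sends SIGTERM, and in a Kuryr setup that ClusterIP is typically also mapped onto an Octavia load balancer. A few sanity checks from the master (plus openstack loadbalancer list wherever the OpenStack credentials live; the exact load balancer name depends on how Kuryr names it, typically <namespace>/<service>):

root@zu-kuryr-kubernetes-master:~# kubectl get svc kubernetes
root@zu-kuryr-kubernetes-master:~# kubectl get endpoints kubernetes
openstack loadbalancer list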
root@zu-kuryr-kubernetes-worker0:~# docker ps -a
CONTAINER ID   IMAGE          COMMAND                  CREATED         STATUS                          PORTS   NAMES
74981eebf1a0   bd254cf72111   "/coredns -conf /etc…"   3 minutes ago   Exited (0) About a minute ago           k8s_coredns_coredns-6c868f76bc-qdhd7_kube-system_41ad9eb5-8167-11e9-9b1c-525400e3dab3_39
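The same crash-loop logs can also be pulled from the master without SSHing into the worker; --previous shows the log of the last terminated container instance:

root@zu-kuryr-kubernetes-master:~# kubectl -n kube-system logs coredns-6c868f76bc-qdhd7 --previous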
root@zu-kuryr-kubernetes-master:~# curl https://10.2.0.1:443/api/v1/namespaces -k
{
  "kind": "NamespaceList",
  "apiVersion": "v1",
  "metadata": {
    "selfLink": "/api/v1/namespaces",
    "resourceVersion": "53461"
  },
  "items": [
    {
      "metadata": {
        "name": "default",
        "selfLink": "/api/v1/namespaces/default",
        "uid": "984b676f-8129-11e9-a446-525400e3dab3",
        "resourceVersion": "13",
        "creationTimestamp": "2019-05-28T09:18:46Z"
      },
      "spec": {
        "finalizers": [
          "kubernetes"
        ]
      }