
@activeshadow
Created October 23, 2017 19:03
kops GCE error logs
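
The command below references $ZONES and ${PROJECT}. Their actual values were not captured in this gist, but a plausible setup, inferred from the cluster spec printed later in the log (project dark-cubed-researcher-v001, masters in us-central1-a/b/c, state store gs://darkcubed-kops), would be:

# Hypothetical environment setup, inferred from the spec output below; not recorded in the original session.
export PROJECT=dark-cubed-researcher-v001
export ZONES=us-central1-a,us-central1-b,us-central1-c
export KOPS_STATE_STORE=gs://darkcubed-kops
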
➜ ~ kops create cluster k8s.darkcubed.io --zones $ZONES --master-zones $ZONES --node-count 10 --project ${PROJECT} --authorization RBAC --cloud gce --ssh-public-key ~/.ssh/id_ed25519.pub -v 10 --yes
I1020 15:58:33.349589 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/config"
I1020 15:58:34.513984 22722 channel.go:92] resolving "stable" against default channel location "https://raw.githubusercontent.com/kubernetes/kops/master/channels/"
I1020 15:58:34.514059 22722 channel.go:97] Loading channel from "https://raw.githubusercontent.com/kubernetes/kops/master/channels/stable"
I1020 15:58:34.514086 22722 context.go:132] Performing HTTP request: GET https://raw.githubusercontent.com/kubernetes/kops/master/channels/stable
I1020 15:58:34.898258 22722 channel.go:106] Channel contents: spec:
images:
# We put the "legacy" version first, for kops versions that don't support versions ( < 1.5.0 )
- name: kope.io/k8s-1.4-debian-jessie-amd64-hvm-ebs-2017-07-28
providerID: aws
kubernetesVersion: ">=1.4.0 <1.5.0"
- name: kope.io/k8s-1.5-debian-jessie-amd64-hvm-ebs-2017-07-28
providerID: aws
kubernetesVersion: ">=1.5.0 <1.6.0"
- name: kope.io/k8s-1.6-debian-jessie-amd64-hvm-ebs-2017-07-28
providerID: aws
kubernetesVersion: ">=1.6.0 <1.7.0"
- name: kope.io/k8s-1.7-debian-jessie-amd64-hvm-ebs-2017-07-28
providerID: aws
kubernetesVersion: ">=1.7.0"
- providerID: gce
name: "cos-cloud/cos-stable-60-9592-90-0"
cluster:
kubernetesVersion: v1.5.7
networking:
kubenet: {}
kubernetesVersions:
- range: ">=1.7.0"
recommendedVersion: 1.7.4
requiredVersion: 1.7.0
- range: ">=1.6.0"
recommendedVersion: 1.6.7
requiredVersion: 1.6.0
- range: ">=1.5.0"
recommendedVersion: 1.5.7
requiredVersion: 1.5.1
- range: "<1.5.0"
recommendedVersion: 1.4.12
requiredVersion: 1.4.2
kopsVersions:
- range: ">=1.7.0-alpha.1"
recommendedVersion: 1.7.0
#requiredVersion: 1.7.0
kubernetesVersion: 1.7.4
- range: ">=1.6.0-alpha.1"
#recommendedVersion: 1.6.0
#requiredVersion: 1.6.0
kubernetesVersion: 1.6.7
- range: ">=1.5.0-alpha1"
recommendedVersion: 1.5.1
#requiredVersion: 1.5.1
kubernetesVersion: 1.5.7
- range: "<1.5.0"
recommendedVersion: 1.4.4
#requiredVersion: 1.4.4
kubernetesVersion: 1.4.12
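
The channel file dumped above can be fetched directly for inspection, using the same default location the log resolved:

curl -s https://raw.githubusercontent.com/kubernetes/kops/master/channels/stable

With kops 1.8.0-alpha.1 (reported further down), the ">=1.7.0-alpha.1" kopsVersions entry applies, which appears to be why this run defaults to kubernetesVersion 1.7.4.
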
I1020 15:58:34.899190 22722 create_cluster.go:389] networking mode=kubenet => {"kubenet":{}}
I1020 15:58:34.899469 22722 create_cluster.go:913] Using SSH public key: /home/btr/.ssh/id_ed25519.pub
I1020 15:58:34.899701 22722 defaults.go:203] Not setting up Proxy Excludes
I1020 15:58:34.903700 22722 populate_cluster_spec.go:380] Defaulted KubeControllerManager.ClusterCIDR to 100.96.0.0/11
I1020 15:58:34.903735 22722 populate_cluster_spec.go:387] Defaulted ServiceClusterIPRange to 100.64.0.0/13
I1020 15:58:34.903763 22722 defaults.go:203] Not setting up Proxy Excludes
I1020 15:58:34.903998 22722 clouddns.go:86] Using DefaultTokenSource &oauth2.reuseTokenSource{new:(*oauth2.tokenRefresher)(0xc420f324b0), mu:sync.Mutex{state:0, sema:0x0}, t:(*oauth2.Token)(0xc420edb7a0)}
I1020 15:58:34.904074 22722 clouddns.go:100] Successfully got DNS service: &{0xc420f32510 https://www.googleapis.com/dns/v1/projects/ 0xc42000ef50 0xc42000ef58 0xc42000ef60 0xc42000ef68}
I1020 15:58:34.904168 22722 utils.go:140] Querying for all DNS zones to find match for "k8s.darkcubed.io"
I1020 15:58:35.127029 22722 populate_cluster_spec.go:261] Defaulting DNS zone to: 1767471579328732963
I1020 15:58:35.127119 22722 tagbuilder.go:109] tags: [_gce _k8s_1_6 _networking_kubenet]
I1020 15:58:35.127308 22722 tree_walker.go:97] visit "config/components"
I1020 15:58:35.127396 22722 tree_walker.go:97] visit "config/components/docker"
I1020 15:58:35.127450 22722 tree_walker.go:97] visit "config/components/docker/_e2e_storage_test_environment"
I1020 15:58:35.127492 22722 tree_walker.go:120] Skipping directory "config/components/docker/_e2e_storage_test_environment" as tag "_e2e_storage_test_environment" not present
I1020 15:58:35.127528 22722 tree_walker.go:97] visit "config/components/docker/_networking_cni"
I1020 15:58:35.127563 22722 tree_walker.go:120] Skipping directory "config/components/docker/_networking_cni" as tag "_networking_cni" not present
I1020 15:58:35.127591 22722 tree_walker.go:97] visit "config/components/docker/_networking_kubenet"
I1020 15:58:35.127629 22722 tree_walker.go:124] Descending into directory, as tag is present: "config/components/docker/_networking_kubenet"
I1020 15:58:35.127658 22722 tree_walker.go:97] visit "config/components/docker/_networking_kubenet/kubenet.options"
I1020 15:58:35.127849 22722 tree_walker.go:97] visit "config/components/docker/docker.options"
I1020 15:58:35.128446 22722 options_loader.go:102] executing template components/docker/docker.options (tags=[])
I1020 15:58:35.128717 22722 options_loader.go:102] executing template components/docker/_networking_kubenet/kubenet.options (tags=[_networking_kubenet])
I1020 15:58:35.129979 22722 options_loader.go:129] executing builder *components.DefaultsOptionsBuilder
I1020 15:58:35.130081 22722 options_loader.go:129] executing builder *components.EtcdOptionsBuilder
I1020 15:58:35.130134 22722 options_loader.go:129] executing builder *components.KubeAPIServerOptionsBuilder
I1020 15:58:35.130339 22722 apiserver.go:203] Enabling apiserver insecure port, for healthchecks (issue #43784)
I1020 15:58:35.130392 22722 options_loader.go:129] executing builder *components.DockerOptionsBuilder
I1020 15:58:35.130453 22722 options_loader.go:129] executing builder *components.NetworkingOptionsBuilder
I1020 15:58:35.130508 22722 options_loader.go:129] executing builder *components.KubeDnsOptionsBuilder
I1020 15:58:35.130570 22722 options_loader.go:129] executing builder *components.KubeletOptionsBuilder
I1020 15:58:35.130649 22722 kubelet.go:140] Cloud Provider: gce
I1020 15:58:35.130741 22722 options_loader.go:129] executing builder *components.KubeControllerManagerOptionsBuilder
I1020 15:58:35.130806 22722 kubecontrollermanager.go:74] Kubernetes version "1.7.4" supports AttachDetachReconcileSyncPeriod; will configure
I1020 15:58:35.130882 22722 kubecontrollermanager.go:79] AttachDetachReconcileSyncPeriod is not set; will set to default 1m0s
I1020 15:58:35.130977 22722 options_loader.go:129] executing builder *components.KubeSchedulerOptionsBuilder
I1020 15:58:35.131048 22722 options_loader.go:129] executing builder *components.KubeProxyOptionsBuilder
I1020 15:58:35.133719 22722 options_loader.go:102] executing template components/docker/docker.options (tags=[])
I1020 15:58:35.134420 22722 options_loader.go:102] executing template components/docker/_networking_kubenet/kubenet.options (tags=[_networking_kubenet])
I1020 15:58:35.134790 22722 options_loader.go:129] executing builder *components.DefaultsOptionsBuilder
I1020 15:58:35.134854 22722 options_loader.go:129] executing builder *components.EtcdOptionsBuilder
I1020 15:58:35.134902 22722 options_loader.go:129] executing builder *components.KubeAPIServerOptionsBuilder
I1020 15:58:35.135092 22722 apiserver.go:203] Enabling apiserver insecure port, for healthchecks (issue #43784)
I1020 15:58:35.135142 22722 options_loader.go:129] executing builder *components.DockerOptionsBuilder
I1020 15:58:35.135196 22722 options_loader.go:129] executing builder *components.NetworkingOptionsBuilder
I1020 15:58:35.135249 22722 options_loader.go:129] executing builder *components.KubeDnsOptionsBuilder
I1020 15:58:35.135310 22722 options_loader.go:129] executing builder *components.KubeletOptionsBuilder
I1020 15:58:35.135389 22722 kubelet.go:140] Cloud Provider: gce
I1020 15:58:35.135456 22722 options_loader.go:129] executing builder *components.KubeControllerManagerOptionsBuilder
I1020 15:58:35.135524 22722 kubecontrollermanager.go:74] Kubernetes version "1.7.4" supports AttachDetachReconcileSyncPeriod; will configure
I1020 15:58:35.135618 22722 options_loader.go:129] executing builder *components.KubeSchedulerOptionsBuilder
I1020 15:58:35.135677 22722 options_loader.go:129] executing builder *components.KubeProxyOptionsBuilder
I1020 15:58:35.138265 22722 spec_builder.go:68] options: {
"channel": "stable",
"configBase": "gs://darkcubed-kops/k8s.darkcubed.io",
"cloudProvider": "gce",
"kubernetesVersion": "1.7.4",
"subnets": [
{
"name": "us-central1",
"region": "us-central1",
"type": "Public"
}
],
"project": "dark-cubed-researcher-v001",
"masterPublicName": "api.k8s.darkcubed.io",
"masterInternalName": "api.internal.k8s.darkcubed.io",
"topology": {
"masters": "public",
"nodes": "public",
"dns": {
"type": "Public"
}
},
"secretStore": "gs://darkcubed-kops/k8s.darkcubed.io/secrets",
"keyStore": "gs://darkcubed-kops/k8s.darkcubed.io/pki",
"configStore": "gs://darkcubed-kops/k8s.darkcubed.io",
"dnsZone": "1767471579328732963",
"clusterDNSDomain": "cluster.local",
"serviceClusterIPRange": "100.64.0.0/13",
"nonMasqueradeCIDR": "100.64.0.0/10",
"sshAccess": [
"0.0.0.0/0"
],
"kubernetesApiAccess": [
"0.0.0.0/0"
],
"etcdClusters": [
{
"name": "main",
"etcdMembers": [
{
"name": "a",
"instanceGroup": "master-us-central1-a"
},
{
"name": "b",
"instanceGroup": "master-us-central1-b"
},
{
"name": "c",
"instanceGroup": "master-us-central1-c"
}
],
"version": "2.2.1"
},
{
"name": "events",
"etcdMembers": [
{
"name": "a",
"instanceGroup": "master-us-central1-a"
},
{
"name": "b",
"instanceGroup": "master-us-central1-b"
},
{
"name": "c",
"instanceGroup": "master-us-central1-c"
}
],
"version": "2.2.1"
}
],
"docker": {
"bridge": "",
"ipMasq": false,
"ipTables": false,
"logDriver": "json-file",
"logLevel": "warn",
"logOpt": [
"max-size=10m",
"max-file=5"
],
"storage": "overlay,aufs",
"version": "1.12.6"
},
"kubeDNS": {
"image": "gcr.io/google_containers/kubedns-amd64:1.3",
"replicas": 2,
"domain": "cluster.local",
"serverIP": "100.64.0.10"
},
"kubeAPIServer": {
"image": "gcr.io/google_containers/kube-apiserver:v1.7.4",
"logLevel": 2,
"cloudProvider": "gce",
"securePort": 443,
"insecurePort": 8080,
"address": "127.0.0.1",
"admissionControl": [
"Initializers",
"NamespaceLifecycle",
"LimitRanger",
"ServiceAccount",
"PersistentVolumeLabel",
"DefaultStorageClass",
"DefaultTolerationSeconds",
"NodeRestriction",
"ResourceQuota"
],
"serviceClusterIPRange": "100.64.0.0/13",
"etcdServers": [
"http://127.0.0.1:4001"
],
"etcdServersOverrides": [
"/events#http://127.0.0.1:4002"
],
"allowPrivileged": true,
"apiServerCount": 3,
"anonymousAuth": false,
"kubeletPreferredAddressTypes": [
"InternalIP",
"Hostname",
"ExternalIP"
],
"storageBackend": "etcd2",
"authorizationMode": "RBAC"
},
"kubeControllerManager": {
"logLevel": 2,
"image": "gcr.io/google_containers/kube-controller-manager:v1.7.4",
"cloudProvider": "gce",
"clusterName": "k8s-darkcubed-io",
"clusterCIDR": "100.96.0.0/11",
"allocateNodeCIDRs": true,
"configureCloudRoutes": true,
"leaderElection": {
"leaderElect": true
},
"attachDetachReconcileSyncPeriod": "1m0s",
"useServiceAccountCredentials": true
},
"kubeScheduler": {
"logLevel": 2,
"image": "gcr.io/google_containers/kube-scheduler:v1.7.4",
"leaderElection": {
"leaderElect": true
}
},
"kubeProxy": {
"image": "gcr.io/google_containers/kube-proxy:v1.7.4",
"cpuRequest": "100m",
"logLevel": 2,
"clusterCIDR": "100.96.0.0/11",
"featureGates": null
},
"kubelet": {
"kubeconfigPath": "/var/lib/kubelet/kubeconfig",
"requireKubeconfig": true,
"logLevel": 2,
"podManifestPath": "/etc/kubernetes/manifests",
"podInfraContainerImage": "gcr.io/google_containers/pause-amd64:3.0",
"allowPrivileged": true,
"enableDebuggingHandlers": true,
"clusterDomain": "cluster.local",
"clusterDNS": "100.64.0.10",
"networkPluginName": "kubenet",
"cloudProvider": "gce",
"cgroupRoot": "/",
"hairpinMode": "promiscuous-bridge",
"nonMasqueradeCIDR": "100.64.0.0/10",
"networkPluginMTU": 9001,
"evictionHard": "memory.available\u003c100Mi,nodefs.available\u003c10%,nodefs.inodesFree\u003c5%,imagefs.available\u003c10%,imagefs.inodesFree\u003c5%",
"featureGates": {
"ExperimentalCriticalPodAnnotation": "true"
}
},
"masterKubelet": {
"kubeconfigPath": "/var/lib/kubelet/kubeconfig",
"requireKubeconfig": true,
"logLevel": 2,
"podManifestPath": "/etc/kubernetes/manifests",
"podInfraContainerImage": "gcr.io/google_containers/pause-amd64:3.0",
"allowPrivileged": true,
"enableDebuggingHandlers": true,
"clusterDomain": "cluster.local",
"clusterDNS": "100.64.0.10",
"networkPluginName": "kubenet",
"cloudProvider": "gce",
"cgroupRoot": "/",
"hairpinMode": "promiscuous-bridge",
"registerSchedulable": false,
"nonMasqueradeCIDR": "100.64.0.0/10",
"networkPluginMTU": 9001,
"evictionHard": "memory.available\u003c100Mi,nodefs.available\u003c10%,nodefs.inodesFree\u003c5%,imagefs.available\u003c10%,imagefs.inodesFree\u003c5%",
"featureGates": {
"ExperimentalCriticalPodAnnotation": "true"
}
},
"cloudConfig": {
"multizone": true,
"nodeTags": "k8s-darkcubed-io-k8s-io-role-node"
},
"networking": {
"kubenet": {}
},
"api": {
"dns": {}
},
"authorization": {
"rbac": {}
},
"iam": {
"legacy": false
}
}
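
The spec above was entirely defaulted by kops and written to the state store; since --yes was passed it is applied in the same run, but it can still be reviewed or adjusted afterwards, for example (assuming the same state store bucket):

export KOPS_STATE_STORE=gs://darkcubed-kops
kops get cluster k8s.darkcubed.io -o yaml
kops edit cluster k8s.darkcubed.io
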
I1020 15:58:35.148488 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/config"
I1020 15:58:35.237251 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/config"
I1020 15:58:35.383754 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-a"
I1020 15:58:35.466478 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-a"
I1020 15:58:35.592153 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-b"
I1020 15:58:35.675665 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-b"
I1020 15:58:35.974963 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-c"
I1020 15:58:36.064274 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-c"
I1020 15:58:36.204235 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/nodes"
I1020 15:58:36.292513 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/nodes"
I1020 15:58:36.416246 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/cluster.spec"
I1020 15:58:36.537365 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/ssh/public/admin/9b65f733d7f9047606b4ad15ad0c4ea2"
I1020 15:58:36.698281 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/config"
I1020 15:58:36.895429 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/instancegroup: [gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-a gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-b gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-c gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/nodes]
I1020 15:58:36.895531 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-a"
I1020 15:58:37.005292 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-b"
I1020 15:58:37.100137 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-c"
I1020 15:58:37.207433 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/nodes"
I1020 15:58:37.324630 22722 channel.go:92] resolving "stable" against default channel location "https://raw.githubusercontent.com/kubernetes/kops/master/channels/"
I1020 15:58:37.324668 22722 channel.go:97] Loading channel from "https://raw.githubusercontent.com/kubernetes/kops/master/channels/stable"
I1020 15:58:37.324687 22722 context.go:132] Performing HTTP request: GET https://raw.githubusercontent.com/kubernetes/kops/master/channels/stable
I1020 15:58:37.349621 22722 channel.go:106] Channel contents: spec:
images:
# We put the "legacy" version first, for kops versions that don't support versions ( < 1.5.0 )
- name: kope.io/k8s-1.4-debian-jessie-amd64-hvm-ebs-2017-07-28
providerID: aws
kubernetesVersion: ">=1.4.0 <1.5.0"
- name: kope.io/k8s-1.5-debian-jessie-amd64-hvm-ebs-2017-07-28
providerID: aws
kubernetesVersion: ">=1.5.0 <1.6.0"
- name: kope.io/k8s-1.6-debian-jessie-amd64-hvm-ebs-2017-07-28
providerID: aws
kubernetesVersion: ">=1.6.0 <1.7.0"
- name: kope.io/k8s-1.7-debian-jessie-amd64-hvm-ebs-2017-07-28
providerID: aws
kubernetesVersion: ">=1.7.0"
- providerID: gce
name: "cos-cloud/cos-stable-60-9592-90-0"
cluster:
kubernetesVersion: v1.5.7
networking:
kubenet: {}
kubernetesVersions:
- range: ">=1.7.0"
recommendedVersion: 1.7.4
requiredVersion: 1.7.0
- range: ">=1.6.0"
recommendedVersion: 1.6.7
requiredVersion: 1.6.0
- range: ">=1.5.0"
recommendedVersion: 1.5.7
requiredVersion: 1.5.1
- range: "<1.5.0"
recommendedVersion: 1.4.12
requiredVersion: 1.4.2
kopsVersions:
- range: ">=1.7.0-alpha.1"
recommendedVersion: 1.7.0
#requiredVersion: 1.7.0
kubernetesVersion: 1.7.4
- range: ">=1.6.0-alpha.1"
#recommendedVersion: 1.6.0
#requiredVersion: 1.6.0
kubernetesVersion: 1.6.7
- range: ">=1.5.0-alpha1"
recommendedVersion: 1.5.1
#requiredVersion: 1.5.1
kubernetesVersion: 1.5.7
- range: "<1.5.0"
recommendedVersion: 1.4.4
#requiredVersion: 1.4.4
kubernetesVersion: 1.4.12
I1020 15:58:37.349800 22722 populate_cluster_spec.go:380] Defaulted KubeControllerManager.ClusterCIDR to 100.96.0.0/11
I1020 15:58:37.349808 22722 populate_cluster_spec.go:387] Defaulted ServiceClusterIPRange to 100.64.0.0/13
I1020 15:58:37.349817 22722 defaults.go:203] Not setting up Proxy Excludes
I1020 15:58:37.349876 22722 clouddns.go:86] Using DefaultTokenSource &oauth2.reuseTokenSource{new:(*oauth2.tokenRefresher)(0xc420091da0), mu:sync.Mutex{state:0, sema:0x0}, t:(*oauth2.Token)(0xc420c269c0)}
I1020 15:58:37.349890 22722 clouddns.go:100] Successfully got DNS service: &{0xc420091f50 https://www.googleapis.com/dns/v1/projects/ 0xc420284160 0xc420284170 0xc420284178 0xc420284180}
I1020 15:58:37.349899 22722 utils.go:140] Querying for all DNS zones to find match for "k8s.darkcubed.io"
I1020 15:58:37.584768 22722 populate_cluster_spec.go:261] Defaulting DNS zone to: 1767471579328732963
I1020 15:58:37.584867 22722 tagbuilder.go:109] tags: [_gce _k8s_1_6 _networking_kubenet]
I1020 15:58:37.585037 22722 tree_walker.go:97] visit "config/components"
I1020 15:58:37.585108 22722 tree_walker.go:97] visit "config/components/docker"
I1020 15:58:37.585183 22722 tree_walker.go:97] visit "config/components/docker/_networking_kubenet"
I1020 15:58:37.585235 22722 tree_walker.go:124] Descending into directory, as tag is present: "config/components/docker/_networking_kubenet"
I1020 15:58:37.585299 22722 tree_walker.go:97] visit "config/components/docker/_networking_kubenet/kubenet.options"
I1020 15:58:37.585612 22722 tree_walker.go:97] visit "config/components/docker/docker.options"
I1020 15:58:37.585866 22722 tree_walker.go:97] visit "config/components/docker/_e2e_storage_test_environment"
I1020 15:58:37.585936 22722 tree_walker.go:120] Skipping directory "config/components/docker/_e2e_storage_test_environment" as tag "_e2e_storage_test_environment" not present
I1020 15:58:37.585982 22722 tree_walker.go:97] visit "config/components/docker/_networking_cni"
I1020 15:58:37.586037 22722 tree_walker.go:120] Skipping directory "config/components/docker/_networking_cni" as tag "_networking_cni" not present
I1020 15:58:37.586499 22722 options_loader.go:102] executing template components/docker/docker.options (tags=[])
I1020 15:58:37.586796 22722 options_loader.go:102] executing template components/docker/_networking_kubenet/kubenet.options (tags=[_networking_kubenet])
I1020 15:58:37.586961 22722 options_loader.go:129] executing builder *components.DefaultsOptionsBuilder
I1020 15:58:37.586999 22722 options_loader.go:129] executing builder *components.EtcdOptionsBuilder
I1020 15:58:37.587028 22722 options_loader.go:129] executing builder *components.KubeAPIServerOptionsBuilder
I1020 15:58:37.587162 22722 apiserver.go:203] Enabling apiserver insecure port, for healthchecks (issue #43784)
I1020 15:58:37.587194 22722 options_loader.go:129] executing builder *components.DockerOptionsBuilder
I1020 15:58:37.587230 22722 options_loader.go:129] executing builder *components.NetworkingOptionsBuilder
I1020 15:58:37.587266 22722 options_loader.go:129] executing builder *components.KubeDnsOptionsBuilder
I1020 15:58:37.587309 22722 options_loader.go:129] executing builder *components.KubeletOptionsBuilder
I1020 15:58:37.587366 22722 kubelet.go:140] Cloud Provider: gce
I1020 15:58:37.587418 22722 options_loader.go:129] executing builder *components.KubeControllerManagerOptionsBuilder
I1020 15:58:37.587461 22722 kubecontrollermanager.go:74] Kubernetes version "1.7.4" supports AttachDetachReconcileSyncPeriod; will configure
I1020 15:58:37.587496 22722 kubecontrollermanager.go:79] AttachDetachReconcileSyncPeriod is not set; will set to default 1m0s
I1020 15:58:37.587547 22722 options_loader.go:129] executing builder *components.KubeSchedulerOptionsBuilder
I1020 15:58:37.587584 22722 options_loader.go:129] executing builder *components.KubeProxyOptionsBuilder
I1020 15:58:37.588793 22722 options_loader.go:102] executing template components/docker/docker.options (tags=[])
I1020 15:58:37.589077 22722 options_loader.go:102] executing template components/docker/_networking_kubenet/kubenet.options (tags=[_networking_kubenet])
I1020 15:58:37.589224 22722 options_loader.go:129] executing builder *components.DefaultsOptionsBuilder
I1020 15:58:37.589259 22722 options_loader.go:129] executing builder *components.EtcdOptionsBuilder
I1020 15:58:37.589287 22722 options_loader.go:129] executing builder *components.KubeAPIServerOptionsBuilder
I1020 15:58:37.589394 22722 apiserver.go:203] Enabling apiserver insecure port, for healthchecks (issue #43784)
I1020 15:58:37.589442 22722 options_loader.go:129] executing builder *components.DockerOptionsBuilder
I1020 15:58:37.589475 22722 options_loader.go:129] executing builder *components.NetworkingOptionsBuilder
I1020 15:58:37.589511 22722 options_loader.go:129] executing builder *components.KubeDnsOptionsBuilder
I1020 15:58:37.589548 22722 options_loader.go:129] executing builder *components.KubeletOptionsBuilder
I1020 15:58:37.589596 22722 kubelet.go:140] Cloud Provider: gce
I1020 15:58:37.589635 22722 options_loader.go:129] executing builder *components.KubeControllerManagerOptionsBuilder
I1020 15:58:37.589676 22722 kubecontrollermanager.go:74] Kubernetes version "1.7.4" supports AttachDetachReconcileSyncPeriod; will configure
I1020 15:58:37.589725 22722 options_loader.go:129] executing builder *components.KubeSchedulerOptionsBuilder
I1020 15:58:37.589759 22722 options_loader.go:129] executing builder *components.KubeProxyOptionsBuilder
I1020 15:58:37.591218 22722 spec_builder.go:68] options: {
"channel": "stable",
"configBase": "gs://darkcubed-kops/k8s.darkcubed.io",
"cloudProvider": "gce",
"kubernetesVersion": "1.7.4",
"subnets": [
{
"name": "us-central1",
"region": "us-central1",
"type": "Public"
}
],
"project": "dark-cubed-researcher-v001",
"masterPublicName": "api.k8s.darkcubed.io",
"masterInternalName": "api.internal.k8s.darkcubed.io",
"topology": {
"masters": "public",
"nodes": "public",
"dns": {
"type": "Public"
}
},
"secretStore": "gs://darkcubed-kops/k8s.darkcubed.io/secrets",
"keyStore": "gs://darkcubed-kops/k8s.darkcubed.io/pki",
"configStore": "gs://darkcubed-kops/k8s.darkcubed.io",
"dnsZone": "1767471579328732963",
"clusterDNSDomain": "cluster.local",
"serviceClusterIPRange": "100.64.0.0/13",
"nonMasqueradeCIDR": "100.64.0.0/10",
"sshAccess": [
"0.0.0.0/0"
],
"kubernetesApiAccess": [
"0.0.0.0/0"
],
"etcdClusters": [
{
"name": "main",
"etcdMembers": [
{
"name": "a",
"instanceGroup": "master-us-central1-a"
},
{
"name": "b",
"instanceGroup": "master-us-central1-b"
},
{
"name": "c",
"instanceGroup": "master-us-central1-c"
}
],
"version": "2.2.1"
},
{
"name": "events",
"etcdMembers": [
{
"name": "a",
"instanceGroup": "master-us-central1-a"
},
{
"name": "b",
"instanceGroup": "master-us-central1-b"
},
{
"name": "c",
"instanceGroup": "master-us-central1-c"
}
],
"version": "2.2.1"
}
],
"docker": {
"bridge": "",
"ipMasq": false,
"ipTables": false,
"logDriver": "json-file",
"logLevel": "warn",
"logOpt": [
"max-size=10m",
"max-file=5"
],
"storage": "overlay,aufs",
"version": "1.12.6"
},
"kubeDNS": {
"image": "gcr.io/google_containers/kubedns-amd64:1.3",
"replicas": 2,
"domain": "cluster.local",
"serverIP": "100.64.0.10"
},
"kubeAPIServer": {
"image": "gcr.io/google_containers/kube-apiserver:v1.7.4",
"logLevel": 2,
"cloudProvider": "gce",
"securePort": 443,
"insecurePort": 8080,
"address": "127.0.0.1",
"admissionControl": [
"Initializers",
"NamespaceLifecycle",
"LimitRanger",
"ServiceAccount",
"PersistentVolumeLabel",
"DefaultStorageClass",
"DefaultTolerationSeconds",
"NodeRestriction",
"ResourceQuota"
],
"serviceClusterIPRange": "100.64.0.0/13",
"etcdServers": [
"http://127.0.0.1:4001"
],
"etcdServersOverrides": [
"/events#http://127.0.0.1:4002"
],
"allowPrivileged": true,
"apiServerCount": 3,
"anonymousAuth": false,
"kubeletPreferredAddressTypes": [
"InternalIP",
"Hostname",
"ExternalIP"
],
"storageBackend": "etcd2",
"authorizationMode": "RBAC"
},
"kubeControllerManager": {
"logLevel": 2,
"image": "gcr.io/google_containers/kube-controller-manager:v1.7.4",
"cloudProvider": "gce",
"clusterName": "k8s-darkcubed-io",
"clusterCIDR": "100.96.0.0/11",
"allocateNodeCIDRs": true,
"configureCloudRoutes": true,
"leaderElection": {
"leaderElect": true
},
"attachDetachReconcileSyncPeriod": "1m0s",
"useServiceAccountCredentials": true
},
"kubeScheduler": {
"logLevel": 2,
"image": "gcr.io/google_containers/kube-scheduler:v1.7.4",
"leaderElection": {
"leaderElect": true
}
},
"kubeProxy": {
"image": "gcr.io/google_containers/kube-proxy:v1.7.4",
"cpuRequest": "100m",
"logLevel": 2,
"clusterCIDR": "100.96.0.0/11",
"featureGates": null
},
"kubelet": {
"kubeconfigPath": "/var/lib/kubelet/kubeconfig",
"requireKubeconfig": true,
"logLevel": 2,
"podManifestPath": "/etc/kubernetes/manifests",
"podInfraContainerImage": "gcr.io/google_containers/pause-amd64:3.0",
"allowPrivileged": true,
"enableDebuggingHandlers": true,
"clusterDomain": "cluster.local",
"clusterDNS": "100.64.0.10",
"networkPluginName": "kubenet",
"cloudProvider": "gce",
"cgroupRoot": "/",
"hairpinMode": "promiscuous-bridge",
"nonMasqueradeCIDR": "100.64.0.0/10",
"networkPluginMTU": 9001,
"evictionHard": "memory.available\u003c100Mi,nodefs.available\u003c10%,nodefs.inodesFree\u003c5%,imagefs.available\u003c10%,imagefs.inodesFree\u003c5%",
"featureGates": {
"ExperimentalCriticalPodAnnotation": "true"
}
},
"masterKubelet": {
"kubeconfigPath": "/var/lib/kubelet/kubeconfig",
"requireKubeconfig": true,
"logLevel": 2,
"podManifestPath": "/etc/kubernetes/manifests",
"podInfraContainerImage": "gcr.io/google_containers/pause-amd64:3.0",
"allowPrivileged": true,
"enableDebuggingHandlers": true,
"clusterDomain": "cluster.local",
"clusterDNS": "100.64.0.10",
"networkPluginName": "kubenet",
"cloudProvider": "gce",
"cgroupRoot": "/",
"hairpinMode": "promiscuous-bridge",
"registerSchedulable": false,
"nonMasqueradeCIDR": "100.64.0.0/10",
"networkPluginMTU": 9001,
"evictionHard": "memory.available\u003c100Mi,nodefs.available\u003c10%,nodefs.inodesFree\u003c5%,imagefs.available\u003c10%,imagefs.inodesFree\u003c5%",
"featureGates": {
"ExperimentalCriticalPodAnnotation": "true"
}
},
"cloudConfig": {
"multizone": true,
"nodeTags": "k8s-darkcubed-io-k8s-io-role-node"
},
"networking": {
"kubenet": {}
},
"api": {
"dns": {}
},
"authorization": {
"rbac": {}
},
"iam": {
"legacy": false
}
}
I1020 15:58:37.592836 22722 channel.go:157] RecommendedVersion="1.7.0", Have="1.8.0-alpha.1". No upgrade needed.
I1020 15:58:37.592906 22722 channel.go:185] VersionRecommendationSpec does not specify RequiredVersion
I1020 15:58:37.592952 22722 channel.go:137] RecommendedVersion="1.7.4", Have="1.7.4". No upgrade needed.
I1020 15:58:37.592990 22722 channel.go:177] RequiredVersion="1.7.0", Have="1.7.4". No upgrade needed.
I1020 15:58:37.593203 22722 apply_cluster.go:229] Adding default kubelet release asset: https://storage.googleapis.com/kubernetes-release/release/v1.7.4/bin/linux/amd64/kubelet
I1020 15:58:37.593249 22722 context.go:132] Performing HTTP request: GET https://storage.googleapis.com/kubernetes-release/release/v1.7.4/bin/linux/amd64/kubelet.sha1
I1020 15:58:38.008942 22722 apply_cluster.go:857] Found hash "7bf3fda43bb8d0a55622ca68dcbfaf3cc7f2dddc" for "https://storage.googleapis.com/kubernetes-release/release/v1.7.4/bin/linux/amd64/kubelet"
I1020 15:58:38.009070 22722 apply_cluster.go:240] Adding default kubectl release asset: https://storage.googleapis.com/kubernetes-release/release/v1.7.4/bin/linux/amd64/kubectl
I1020 15:58:38.009128 22722 context.go:132] Performing HTTP request: GET https://storage.googleapis.com/kubernetes-release/release/v1.7.4/bin/linux/amd64/kubectl.sha1
I1020 15:58:38.035454 22722 apply_cluster.go:857] Found hash "819010a7a028b165f5e6df37b1bb7713ff6d070f" for "https://storage.googleapis.com/kubernetes-release/release/v1.7.4/bin/linux/amd64/kubectl"
I1020 15:58:38.035502 22722 networking.go:125] Adding default CNI asset: https://storage.googleapis.com/kubernetes-release/network-plugins/cni-0799f5732f2a11b329d9e3d51b9c8f2e3759f2ff.tar.gz
I1020 15:58:38.035519 22722 urls.go:40] Using default base url: "https://kubeupv2.s3.amazonaws.com/kops/1.8.0-alpha.1/"
I1020 15:58:38.035537 22722 apply_cluster.go:266] Using default utils.tar.gz location: "https://kubeupv2.s3.amazonaws.com/kops/1.8.0-alpha.1/linux/amd64/utils.tar.gz"
I1020 15:58:38.035563 22722 context.go:132] Performing HTTP request: GET https://kubeupv2.s3.amazonaws.com/kops/1.8.0-alpha.1/linux/amd64/utils.tar.gz.sha1
I1020 15:58:38.374873 22722 apply_cluster.go:857] Found hash "abd9afeea37cc34b5f3044baee33753bd1008caa" for "https://kubeupv2.s3.amazonaws.com/kops/1.8.0-alpha.1/linux/amd64/utils.tar.gz"
I1020 15:58:38.374979 22722 apply_cluster.go:277] Adding default kubernetes manifests asset: https://storage.googleapis.com/kubernetes-release/release/v1.7.4/kubernetes-manifests.tar.gz
I1020 15:58:38.375016 22722 context.go:132] Performing HTTP request: GET https://storage.googleapis.com/kubernetes-release/release/v1.7.4/kubernetes-manifests.tar.gz.sha1
I1020 15:58:38.454441 22722 apply_cluster.go:857] Found hash "4cd1a59918382bb8e23cd2c68db8eabaac76fbc6" for "https://storage.googleapis.com/kubernetes-release/release/v1.7.4/kubernetes-manifests.tar.gz"
I1020 15:58:38.454545 22722 urls.go:64] Using default nodeup location: "https://kubeupv2.s3.amazonaws.com/kops/1.8.0-alpha.1/linux/amd64/nodeup"
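
kops verifies each release asset against a published .sha1 file, as the requests above show; the same check can be reproduced by hand, for example for the kubectl binary and hash found above (assuming curl and sha1sum are available):

curl -sLO https://storage.googleapis.com/kubernetes-release/release/v1.7.4/bin/linux/amd64/kubectl
echo "819010a7a028b165f5e6df37b1bb7713ff6d070f  kubectl" | sha1sum -c -
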
I1020 15:58:38.550668 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/ssh/public/admin: [gs://darkcubed-kops/k8s.darkcubed.io/pki/ssh/public/admin/9b65f733d7f9047606b4ad15ad0c4ea2]
I1020 15:58:38.550686 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/ssh/public/admin/9b65f733d7f9047606b4ad15ad0c4ea2"
I1020 15:58:38.650516 22722 clouddns.go:86] Using DefaultTokenSource &oauth2.reuseTokenSource{new:(*oauth2.tokenRefresher)(0xc420775590), mu:sync.Mutex{state:0, sema:0x0}, t:(*oauth2.Token)(0xc420b22ae0)}
I1020 15:58:38.650539 22722 clouddns.go:100] Successfully got DNS service: &{0xc4207756b0 https://www.googleapis.com/dns/v1/projects/ 0xc42008e1f8 0xc42008e200 0xc42008e208 0xc42008e210}
I1020 15:58:38.885353 22722 dns.go:101] Doing DNS lookup to verify NS records for "darkcubed.io"
I1020 15:58:38.886633 22722 dns.go:118] Found NS records for "darkcubed.io": [ns-cloud-e4.googledomains.com. ns-cloud-e3.googledomains.com. ns-cloud-e2.googledomains.com. ns-cloud-e1.googledomains.com.]
I1020 15:58:38.886715 22722 tagbuilder.go:109] tags: [_gce _k8s_1_6 _networking_kubenet]
I1020 15:58:38.887206 22722 templates.go:79] loading (templated) resource "addons/dns-controller.addons.k8s.io/k8s-1.6.yaml"
I1020 15:58:38.887377 22722 templates.go:79] loading (templated) resource "addons/dns-controller.addons.k8s.io/pre-k8s-1.6.yaml"
I1020 15:58:38.887527 22722 templates.go:87] loading resource "addons/networking.kope.io/pre-k8s-1.6.yaml"
I1020 15:58:38.887701 22722 templates.go:87] loading resource "addons/networking.kope.io/k8s-1.6.yaml"
I1020 15:58:38.887970 22722 templates.go:79] loading (templated) resource "addons/networking.romana/k8s-1.6.yaml"
I1020 15:58:38.888206 22722 templates.go:79] loading (templated) resource "addons/networking.weave/k8s-1.6.yaml"
I1020 15:58:38.888434 22722 templates.go:79] loading (templated) resource "addons/networking.weave/k8s-1.7.yaml"
I1020 15:58:38.888647 22722 templates.go:79] loading (templated) resource "addons/networking.weave/pre-k8s-1.6.yaml"
I1020 15:58:38.888755 22722 templates.go:87] loading resource "addons/storage-gce.addons.k8s.io/v1.6.0.yaml"
I1020 15:58:38.888848 22722 templates.go:87] loading resource "addons/core.addons.k8s.io/addon.yaml"
I1020 15:58:38.888943 22722 templates.go:87] loading resource "addons/core.addons.k8s.io/v1.4.0.yaml"
I1020 15:58:38.889338 22722 templates.go:79] loading (templated) resource "addons/kube-dns.addons.k8s.io/k8s-1.6.yaml"
I1020 15:58:38.889787 22722 templates.go:79] loading (templated) resource "addons/kube-dns.addons.k8s.io/pre-k8s-1.6.yaml"
I1020 15:58:38.889990 22722 templates.go:79] loading (templated) resource "addons/networking.flannel/pre-k8s-1.6.yaml"
I1020 15:58:38.890246 22722 templates.go:79] loading (templated) resource "addons/networking.flannel/k8s-1.6.yaml"
I1020 15:58:38.890688 22722 templates.go:79] loading (templated) resource "addons/networking.projectcalico.org/k8s-1.6.yaml"
I1020 15:58:38.891093 22722 templates.go:79] loading (templated) resource "addons/networking.projectcalico.org/pre-k8s-1.6.yaml"
I1020 15:58:38.891513 22722 templates.go:79] loading (templated) resource "addons/networking.projectcalico.org.canal/k8s-1.6.yaml"
I1020 15:58:38.891961 22722 templates.go:79] loading (templated) resource "addons/networking.projectcalico.org.canal/k8s-1.8.yaml"
I1020 15:58:38.892291 22722 templates.go:79] loading (templated) resource "addons/networking.projectcalico.org.canal/pre-k8s-1.6.yaml"
I1020 15:58:38.892459 22722 templates.go:79] loading (templated) resource "addons/external-dns.addons.k8s.io/k8s-1.6.yaml"
I1020 15:58:38.892587 22722 templates.go:79] loading (templated) resource "addons/external-dns.addons.k8s.io/pre-k8s-1.6.yaml"
I1020 15:58:38.892723 22722 templates.go:87] loading resource "addons/external-dns.addons.k8s.io/README.md"
I1020 15:58:38.892802 22722 templates.go:87] loading resource "addons/limit-range.addons.k8s.io/addon.yaml"
I1020 15:58:38.892877 22722 templates.go:87] loading resource "addons/limit-range.addons.k8s.io/v1.5.0.yaml"
I1020 15:58:38.893084 22722 templates.go:79] loading (templated) resource "addons/networking.kuberouter/k8s-1.6.yaml"
I1020 15:58:38.893165 22722 templates.go:87] loading resource "addons/storage-aws.addons.k8s.io/v1.6.0.yaml"
I1020 15:58:38.893319 22722 templates.go:87] loading resource "addons/authentication.kope.io/k8s-1.6.yaml"
I1020 15:58:38.893400 22722 tree_walker.go:97] visit "config/components"
I1020 15:58:38.893447 22722 tree_walker.go:97] visit "config/components/docker"
I1020 15:58:38.893459 22722 tree_walker.go:97] visit "config/components/docker/_networking_cni"
I1020 15:58:38.893468 22722 tree_walker.go:120] Skipping directory "config/components/docker/_networking_cni" as tag "_networking_cni" not present
I1020 15:58:38.893473 22722 tree_walker.go:97] visit "config/components/docker/_networking_kubenet"
I1020 15:58:38.893481 22722 tree_walker.go:124] Descending into directory, as tag is present: "config/components/docker/_networking_kubenet"
I1020 15:58:38.893487 22722 tree_walker.go:97] visit "config/components/docker/_networking_kubenet/kubenet.options"
I1020 15:58:38.893495 22722 tree_walker.go:97] visit "config/components/docker/docker.options"
I1020 15:58:38.893501 22722 tree_walker.go:97] visit "config/components/docker/_e2e_storage_test_environment"
I1020 15:58:38.893510 22722 tree_walker.go:120] Skipping directory "config/components/docker/_e2e_storage_test_environment" as tag "_e2e_storage_test_environment" not present
I1020 15:58:38.893520 22722 tree_walker.go:97] visit "cloudup/resources"
I1020 15:58:38.893527 22722 tree_walker.go:97] visit "cloudup/resources/addons"
I1020 15:58:38.893543 22722 tree_walker.go:97] visit "cloudup/resources/addons/external-dns.addons.k8s.io"
I1020 15:58:38.893554 22722 tree_walker.go:97] visit "cloudup/resources/addons/external-dns.addons.k8s.io/README.md"
I1020 15:58:38.893595 22722 loader.go:322] loading resource "addons/external-dns.addons.k8s.io/README.md"
I1020 15:58:38.893601 22722 tree_walker.go:97] visit "cloudup/resources/addons/external-dns.addons.k8s.io/k8s-1.6.yaml.template"
I1020 15:58:38.893657 22722 loader.go:314] loading (templated) resource "addons/external-dns.addons.k8s.io/k8s-1.6.yaml"
I1020 15:58:38.893663 22722 tree_walker.go:97] visit "cloudup/resources/addons/external-dns.addons.k8s.io/pre-k8s-1.6.yaml.template"
I1020 15:58:38.893701 22722 loader.go:314] loading (templated) resource "addons/external-dns.addons.k8s.io/pre-k8s-1.6.yaml"
I1020 15:58:38.893707 22722 tree_walker.go:97] visit "cloudup/resources/addons/limit-range.addons.k8s.io"
I1020 15:58:38.893717 22722 tree_walker.go:97] visit "cloudup/resources/addons/limit-range.addons.k8s.io/addon.yaml"
I1020 15:58:38.893738 22722 loader.go:322] loading resource "addons/limit-range.addons.k8s.io/addon.yaml"
I1020 15:58:38.893744 22722 tree_walker.go:97] visit "cloudup/resources/addons/limit-range.addons.k8s.io/v1.5.0.yaml"
I1020 15:58:38.893764 22722 loader.go:322] loading resource "addons/limit-range.addons.k8s.io/v1.5.0.yaml"
I1020 15:58:38.893770 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.kuberouter"
I1020 15:58:38.893778 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.kuberouter/k8s-1.6.yaml.template"
I1020 15:58:38.893835 22722 loader.go:314] loading (templated) resource "addons/networking.kuberouter/k8s-1.6.yaml"
I1020 15:58:38.893842 22722 tree_walker.go:97] visit "cloudup/resources/addons/storage-aws.addons.k8s.io"
I1020 15:58:38.893850 22722 tree_walker.go:97] visit "cloudup/resources/addons/storage-aws.addons.k8s.io/v1.6.0.yaml"
I1020 15:58:38.893874 22722 loader.go:322] loading resource "addons/storage-aws.addons.k8s.io/v1.6.0.yaml"
I1020 15:58:38.893880 22722 tree_walker.go:97] visit "cloudup/resources/addons/authentication.kope.io"
I1020 15:58:38.893888 22722 tree_walker.go:97] visit "cloudup/resources/addons/authentication.kope.io/k8s-1.6.yaml"
I1020 15:58:38.893929 22722 loader.go:322] loading resource "addons/authentication.kope.io/k8s-1.6.yaml"
I1020 15:58:38.893936 22722 tree_walker.go:97] visit "cloudup/resources/addons/dns-controller.addons.k8s.io"
I1020 15:58:38.893946 22722 tree_walker.go:97] visit "cloudup/resources/addons/dns-controller.addons.k8s.io/pre-k8s-1.6.yaml.template"
I1020 15:58:38.893981 22722 loader.go:314] loading (templated) resource "addons/dns-controller.addons.k8s.io/pre-k8s-1.6.yaml"
I1020 15:58:38.893990 22722 tree_walker.go:97] visit "cloudup/resources/addons/dns-controller.addons.k8s.io/k8s-1.6.yaml.template"
I1020 15:58:38.894036 22722 loader.go:314] loading (templated) resource "addons/dns-controller.addons.k8s.io/k8s-1.6.yaml"
I1020 15:58:38.894042 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.kope.io"
I1020 15:58:38.894052 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.kope.io/k8s-1.6.yaml"
I1020 15:58:38.894101 22722 loader.go:322] loading resource "addons/networking.kope.io/k8s-1.6.yaml"
I1020 15:58:38.894107 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.kope.io/pre-k8s-1.6.yaml"
I1020 15:58:38.894138 22722 loader.go:322] loading resource "addons/networking.kope.io/pre-k8s-1.6.yaml"
I1020 15:58:38.894143 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.romana"
I1020 15:58:38.894152 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.romana/k8s-1.6.yaml.template"
I1020 15:58:38.894222 22722 loader.go:314] loading (templated) resource "addons/networking.romana/k8s-1.6.yaml"
I1020 15:58:38.894230 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.weave"
I1020 15:58:38.894241 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.weave/k8s-1.6.yaml.template"
I1020 15:58:38.894293 22722 loader.go:314] loading (templated) resource "addons/networking.weave/k8s-1.6.yaml"
I1020 15:58:38.894302 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.weave/k8s-1.7.yaml.template"
I1020 15:58:38.894356 22722 loader.go:314] loading (templated) resource "addons/networking.weave/k8s-1.7.yaml"
I1020 15:58:38.894362 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.weave/pre-k8s-1.6.yaml.template"
I1020 15:58:38.894410 22722 loader.go:314] loading (templated) resource "addons/networking.weave/pre-k8s-1.6.yaml"
I1020 15:58:38.894416 22722 tree_walker.go:97] visit "cloudup/resources/addons/storage-gce.addons.k8s.io"
I1020 15:58:38.894426 22722 tree_walker.go:97] visit "cloudup/resources/addons/storage-gce.addons.k8s.io/v1.6.0.yaml"
I1020 15:58:38.894452 22722 loader.go:322] loading resource "addons/storage-gce.addons.k8s.io/v1.6.0.yaml"
I1020 15:58:38.894458 22722 tree_walker.go:97] visit "cloudup/resources/addons/core.addons.k8s.io"
I1020 15:58:38.894467 22722 tree_walker.go:97] visit "cloudup/resources/addons/core.addons.k8s.io/addon.yaml"
I1020 15:58:38.894492 22722 loader.go:322] loading resource "addons/core.addons.k8s.io/addon.yaml"
I1020 15:58:38.894498 22722 tree_walker.go:97] visit "cloudup/resources/addons/core.addons.k8s.io/v1.4.0.yaml"
I1020 15:58:38.894520 22722 loader.go:322] loading resource "addons/core.addons.k8s.io/v1.4.0.yaml"
I1020 15:58:38.894526 22722 tree_walker.go:97] visit "cloudup/resources/addons/kube-dns.addons.k8s.io"
I1020 15:58:38.894536 22722 tree_walker.go:97] visit "cloudup/resources/addons/kube-dns.addons.k8s.io/pre-k8s-1.6.yaml.template"
I1020 15:58:38.894620 22722 loader.go:314] loading (templated) resource "addons/kube-dns.addons.k8s.io/pre-k8s-1.6.yaml"
I1020 15:58:38.894638 22722 tree_walker.go:97] visit "cloudup/resources/addons/kube-dns.addons.k8s.io/k8s-1.6.yaml.template"
I1020 15:58:38.894733 22722 loader.go:314] loading (templated) resource "addons/kube-dns.addons.k8s.io/k8s-1.6.yaml"
I1020 15:58:38.894741 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.flannel"
I1020 15:58:38.894751 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.flannel/k8s-1.6.yaml.template"
I1020 15:58:38.894799 22722 loader.go:314] loading (templated) resource "addons/networking.flannel/k8s-1.6.yaml"
I1020 15:58:38.894805 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.flannel/pre-k8s-1.6.yaml.template"
I1020 15:58:38.894859 22722 loader.go:314] loading (templated) resource "addons/networking.flannel/pre-k8s-1.6.yaml"
I1020 15:58:38.894865 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.projectcalico.org"
I1020 15:58:38.894875 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.projectcalico.org/k8s-1.6.yaml.template"
I1020 15:58:38.894970 22722 loader.go:314] loading (templated) resource "addons/networking.projectcalico.org/k8s-1.6.yaml"
I1020 15:58:38.894977 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.projectcalico.org/pre-k8s-1.6.yaml.template"
I1020 15:58:38.895071 22722 loader.go:314] loading (templated) resource "addons/networking.projectcalico.org/pre-k8s-1.6.yaml"
I1020 15:58:38.895078 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.projectcalico.org.canal"
I1020 15:58:38.895089 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.projectcalico.org.canal/k8s-1.6.yaml.template"
I1020 15:58:38.895185 22722 loader.go:314] loading (templated) resource "addons/networking.projectcalico.org.canal/k8s-1.6.yaml"
I1020 15:58:38.895192 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.projectcalico.org.canal/k8s-1.8.yaml.template"
I1020 15:58:38.895300 22722 loader.go:314] loading (templated) resource "addons/networking.projectcalico.org.canal/k8s-1.8.yaml"
I1020 15:58:38.895309 22722 tree_walker.go:97] visit "cloudup/resources/addons/networking.projectcalico.org.canal/pre-k8s-1.6.yaml.template"
I1020 15:58:38.895400 22722 loader.go:314] loading (templated) resource "addons/networking.projectcalico.org.canal/pre-k8s-1.6.yaml"
I1020 15:58:38.895946 22722 template_functions.go:145] watch-ingress=false set on dns-controller
I1020 15:58:38.896363 22722 visitor.go:39] float64 value at spec.replicas: %!s(float64=1)
I1020 15:58:38.896380 22722 images.go:56] Consider image for re-mapping: "kope/dns-controller:1.7.1"
I1020 15:58:38.896387 22722 visitor.go:34] string value at spec.template.spec.hostNetwork: %!s(bool=true)
I1020 15:58:38.898013 22722 images.go:56] Consider image for re-mapping: "gcr.io/google_containers/cluster-proportional-autoscaler-amd64:1.0.0"
I1020 15:58:38.898241 22722 visitor.go:39] float64 value at spec.strategy.rollingUpdate.maxUnavailable: %!s(float64=0)
I1020 15:58:38.898251 22722 images.go:56] Consider image for re-mapping: "gcr.io/google_containers/kubedns-amd64:1.9"
I1020 15:58:38.898256 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].livenessProbe.httpGet.port: %!s(float64=8080)
I1020 15:58:38.898263 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].livenessProbe.initialDelaySeconds: %!s(float64=60)
I1020 15:58:38.898270 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].livenessProbe.successThreshold: %!s(float64=1)
I1020 15:58:38.898276 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].livenessProbe.timeoutSeconds: %!s(float64=5)
I1020 15:58:38.898281 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].livenessProbe.failureThreshold: %!s(float64=5)
I1020 15:58:38.898287 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].ports.[0].containerPort: %!s(float64=10053)
I1020 15:58:38.898293 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].ports.[1].containerPort: %!s(float64=10053)
I1020 15:58:38.898299 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].ports.[2].containerPort: %!s(float64=10055)
I1020 15:58:38.898304 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].readinessProbe.initialDelaySeconds: %!s(float64=3)
I1020 15:58:38.898309 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].readinessProbe.timeoutSeconds: %!s(float64=5)
I1020 15:58:38.898315 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].readinessProbe.httpGet.port: %!s(float64=8081)
I1020 15:58:38.898323 22722 images.go:56] Consider image for re-mapping: "gcr.io/google_containers/k8s-dns-dnsmasq-amd64:1.14.5"
I1020 15:58:38.898329 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].livenessProbe.httpGet.port: %!s(float64=8080)
I1020 15:58:38.898334 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].livenessProbe.initialDelaySeconds: %!s(float64=60)
I1020 15:58:38.898339 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].livenessProbe.successThreshold: %!s(float64=1)
I1020 15:58:38.898344 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].livenessProbe.timeoutSeconds: %!s(float64=5)
I1020 15:58:38.898348 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].livenessProbe.failureThreshold: %!s(float64=5)
I1020 15:58:38.898354 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].ports.[0].containerPort: %!s(float64=53)
I1020 15:58:38.898359 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].ports.[1].containerPort: %!s(float64=53)
I1020 15:58:38.898366 22722 images.go:56] Consider image for re-mapping: "gcr.io/google_containers/dnsmasq-metrics-amd64:1.0"
I1020 15:58:38.898371 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].livenessProbe.failureThreshold: %!s(float64=5)
I1020 15:58:38.898376 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].livenessProbe.httpGet.port: %!s(float64=10054)
I1020 15:58:38.898382 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].livenessProbe.initialDelaySeconds: %!s(float64=60)
I1020 15:58:38.898386 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].livenessProbe.successThreshold: %!s(float64=1)
I1020 15:58:38.898391 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].livenessProbe.timeoutSeconds: %!s(float64=5)
I1020 15:58:38.898397 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].ports.[0].containerPort: %!s(float64=10054)
I1020 15:58:38.898404 22722 images.go:56] Consider image for re-mapping: "gcr.io/google_containers/exechealthz-amd64:1.2"
I1020 15:58:38.898410 22722 visitor.go:39] float64 value at spec.template.spec.containers.[3].ports.[0].containerPort: %!s(float64=8080)
I1020 15:58:38.899129 22722 visitor.go:39] float64 value at spec.ports.[0].port: %!s(float64=53)
I1020 15:58:38.899138 22722 visitor.go:39] float64 value at spec.ports.[1].port: %!s(float64=53)
I1020 15:58:38.900376 22722 images.go:56] Consider image for re-mapping: "gcr.io/google_containers/cluster-proportional-autoscaler-amd64:1.1.2-r2"
I1020 15:58:38.900622 22722 visitor.go:39] float64 value at spec.strategy.rollingUpdate.maxUnavailable: %!s(float64=0)
I1020 15:58:38.900634 22722 visitor.go:34] string value at spec.template.spec.volumes.[0].configMap.optional: %!s(bool=true)
I1020 15:58:38.900643 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].livenessProbe.successThreshold: %!s(float64=1)
I1020 15:58:38.900649 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].livenessProbe.timeoutSeconds: %!s(float64=5)
I1020 15:58:38.900654 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].livenessProbe.failureThreshold: %!s(float64=5)
I1020 15:58:38.900659 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].livenessProbe.httpGet.port: %!s(float64=10054)
I1020 15:58:38.900665 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].livenessProbe.initialDelaySeconds: %!s(float64=60)
I1020 15:58:38.900670 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].readinessProbe.httpGet.port: %!s(float64=8081)
I1020 15:58:38.900675 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].readinessProbe.initialDelaySeconds: %!s(float64=3)
I1020 15:58:38.900680 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].readinessProbe.timeoutSeconds: %!s(float64=5)
I1020 15:58:38.900686 22722 images.go:56] Consider image for re-mapping: "gcr.io/google_containers/k8s-dns-kube-dns-amd64:1.14.5"
I1020 15:58:38.900693 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].ports.[0].containerPort: %!s(float64=10053)
I1020 15:58:38.900699 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].ports.[1].containerPort: %!s(float64=10053)
I1020 15:58:38.900704 22722 visitor.go:39] float64 value at spec.template.spec.containers.[0].ports.[2].containerPort: %!s(float64=10055)
I1020 15:58:38.900713 22722 images.go:56] Consider image for re-mapping: "gcr.io/google_containers/k8s-dns-dnsmasq-nanny-amd64:1.14.5"
I1020 15:58:38.900719 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].livenessProbe.failureThreshold: %!s(float64=5)
I1020 15:58:38.900724 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].livenessProbe.httpGet.port: %!s(float64=10054)
I1020 15:58:38.900729 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].livenessProbe.initialDelaySeconds: %!s(float64=60)
I1020 15:58:38.900733 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].livenessProbe.successThreshold: %!s(float64=1)
I1020 15:58:38.900738 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].livenessProbe.timeoutSeconds: %!s(float64=5)
I1020 15:58:38.900744 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].ports.[0].containerPort: %!s(float64=53)
I1020 15:58:38.900749 22722 visitor.go:39] float64 value at spec.template.spec.containers.[1].ports.[1].containerPort: %!s(float64=53)
I1020 15:58:38.900756 22722 images.go:56] Consider image for re-mapping: "gcr.io/google_containers/k8s-dns-sidecar-amd64:1.14.5"
I1020 15:58:38.900761 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].livenessProbe.timeoutSeconds: %!s(float64=5)
I1020 15:58:38.900766 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].livenessProbe.failureThreshold: %!s(float64=5)
I1020 15:58:38.900771 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].livenessProbe.httpGet.port: %!s(float64=10054)
I1020 15:58:38.900776 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].livenessProbe.initialDelaySeconds: %!s(float64=60)
I1020 15:58:38.900781 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].livenessProbe.successThreshold: %!s(float64=1)
I1020 15:58:38.900786 22722 visitor.go:39] float64 value at spec.template.spec.containers.[2].ports.[0].containerPort: %!s(float64=10054)
I1020 15:58:38.901560 22722 visitor.go:39] float64 value at spec.ports.[0].port: %!s(float64=53)
I1020 15:58:38.901571 22722 visitor.go:39] float64 value at spec.ports.[1].port: %!s(float64=53)
I1020 15:58:38.902184 22722 template_functions.go:145] watch-ingress=false set on dns-controller
I1020 15:58:38.902378 22722 images.go:56] Consider image for re-mapping: "kope/dns-controller:1.7.1"
I1020 15:58:38.902391 22722 visitor.go:34] string value at spec.template.spec.hostNetwork: %!s(bool=true)
I1020 15:58:38.902398 22722 visitor.go:39] float64 value at spec.replicas: %!s(float64=1)
W1020 15:58:38.902710 22722 external_access.go:36] TODO: Harmonize gcemodel ExternalAccessModelBuilder with awsmodel
W1020 15:58:38.902729 22722 firewall.go:35] TODO: Harmonize gcemodel with awsmodel for firewall - GCE model is way too open
W1020 15:58:38.902737 22722 firewall.go:63] Adding overlay network for X -> node rule - HACK
W1020 15:58:38.902742 22722 firewall.go:64] We should probably use subnets?
W1020 15:58:38.902754 22722 firewall.go:118] Adding overlay network for X -> master rule - HACK
I1020 15:58:38.904093 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/cluster.spec"
I1020 15:58:39.057870 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-a"
I1020 15:58:39.150138 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-a"
I1020 15:58:39.266422 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-a"
I1020 15:58:39.435157 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-b"
I1020 15:58:39.529974 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-b"
I1020 15:58:39.669369 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-b"
I1020 15:58:39.799798 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-c"
I1020 15:58:39.892896 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-c"
I1020 15:58:40.192266 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/master-us-central1-c"
I1020 15:58:40.341369 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/nodes"
I1020 15:58:40.447555 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/nodes"
I1020 15:58:40.596110 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/instancegroup/nodes"
I1020 15:58:40.734651 22722 topological_sort.go:62] Dependencies:
I1020 15:58:40.734672 22722 topological_sort.go:64] k8s.darkcubed.io-addons-dns-controller.addons.k8s.io-pre-k8s-1.6: []
I1020 15:58:40.734682 22722 topological_sort.go:64] InstanceTemplate/master-us-central1-a-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.734693 22722 topological_sort.go:64] InstanceGroupManager/b-master-us-central1-b-k8s-darkcubed-io: [InstanceTemplate/master-us-central1-b-k8s-darkcubed-io]
I1020 15:58:40.734705 22722 topological_sort.go:64] Secret/system:logging: []
I1020 15:58:40.734716 22722 topological_sort.go:64] Disk/b-etcd-main-k8s-darkcubed-io: []
I1020 15:58:40.734725 22722 topological_sort.go:64] InstanceGroupManager/c-master-us-central1-c-k8s-darkcubed-io: [InstanceTemplate/master-us-central1-c-k8s-darkcubed-io]
I1020 15:58:40.734738 22722 topological_sort.go:64] k8s.darkcubed.io-addons-storage-gce.addons.k8s.io: []
I1020 15:58:40.734751 22722 topological_sort.go:64] FirewallRule/nodeport-external-to-node-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.734764 22722 topological_sort.go:64] FirewallRule/master-to-node-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.734777 22722 topological_sort.go:64] k8s.darkcubed.io-addons-kube-dns.addons.k8s.io-k8s-1.6: []
I1020 15:58:40.734788 22722 topological_sort.go:64] Secret/system:scheduler: []
I1020 15:58:40.734799 22722 topological_sort.go:64] Keypair/kubelet: []
I1020 15:58:40.734811 22722 topological_sort.go:64] InstanceGroupManager/b-nodes-k8s-darkcubed-io: [InstanceTemplate/nodes-k8s-darkcubed-io]
I1020 15:58:40.734824 22722 topological_sort.go:64] FirewallRule/node-to-master-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.734838 22722 topological_sort.go:64] InstanceTemplate/master-us-central1-c-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.734852 22722 topological_sort.go:64] Keypair/kubecfg: []
I1020 15:58:40.734864 22722 topological_sort.go:64] FirewallRule/node-to-node-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.734876 22722 topological_sort.go:64] FirewallRule/cidr-to-node-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.734889 22722 topological_sort.go:64] Secret/system:controller_manager: []
I1020 15:58:40.734902 22722 topological_sort.go:64] FirewallRule/cidr-to-master-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.734916 22722 topological_sort.go:64] k8s.darkcubed.io-addons-core.addons.k8s.io: []
I1020 15:58:40.734928 22722 topological_sort.go:64] Keypair/kube-controller-manager: []
I1020 15:58:40.734942 22722 topological_sort.go:64] Keypair/kops: []
I1020 15:58:40.734955 22722 topological_sort.go:64] FirewallRule/kubernetes-master-https-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.734968 22722 topological_sort.go:64] Secret/kube-proxy: []
I1020 15:58:40.734981 22722 topological_sort.go:64] FirewallRule/ssh-external-to-node-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.734995 22722 topological_sort.go:64] Keypair/kube-scheduler: []
I1020 15:58:40.735008 22722 topological_sort.go:64] Secret/system:monitoring: []
I1020 15:58:40.735021 22722 topological_sort.go:64] MirrorKeystore/mirror-keystore: [Secret/kube-proxy Secret/system:monitoring Secret/admin Secret/system:dns Secret/system:scheduler Secret/kubelet Secret/system:logging Secret/kube Secret/system:controller_manager]
I1020 15:58:40.735044 22722 topological_sort.go:64] k8s.darkcubed.io-addons-bootstrap: []
I1020 15:58:40.735060 22722 topological_sort.go:64] k8s.darkcubed.io-addons-dns-controller.addons.k8s.io-k8s-1.6: []
I1020 15:58:40.735073 22722 topological_sort.go:64] Keypair/master: []
I1020 15:58:40.735086 22722 topological_sort.go:64] Disk/c-etcd-main-k8s-darkcubed-io: []
I1020 15:58:40.735099 22722 topological_sort.go:64] Disk/a-etcd-main-k8s-darkcubed-io: []
I1020 15:58:40.735111 22722 topological_sort.go:64] Keypair/apiserver-proxy-client: []
I1020 15:58:40.735123 22722 topological_sort.go:64] Disk/a-etcd-events-k8s-darkcubed-io: []
I1020 15:58:40.735135 22722 topological_sort.go:64] MirrorSecrets/mirror-secrets: [Secret/kubelet Secret/system:logging Secret/kube Secret/system:controller_manager Secret/kube-proxy Secret/system:monitoring Secret/system:dns Secret/admin Secret/system:scheduler]
I1020 15:58:40.735159 22722 topological_sort.go:64] FirewallRule/master-to-master-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.735175 22722 topological_sort.go:64] Keypair/kube-proxy: []
I1020 15:58:40.735188 22722 topological_sort.go:64] k8s.darkcubed.io-addons-kube-dns.addons.k8s.io-pre-k8s-1.6: []
I1020 15:58:40.735200 22722 topological_sort.go:64] Secret/system:dns: []
I1020 15:58:40.735212 22722 topological_sort.go:64] FirewallRule/ssh-external-to-master-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.735226 22722 topological_sort.go:64] Network/default: []
I1020 15:58:40.735239 22722 topological_sort.go:64] InstanceGroupManager/a-master-us-central1-a-k8s-darkcubed-io: [InstanceTemplate/master-us-central1-a-k8s-darkcubed-io]
I1020 15:58:40.735253 22722 topological_sort.go:64] InstanceTemplate/nodes-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.735266 22722 topological_sort.go:64] Secret/admin: []
I1020 15:58:40.735279 22722 topological_sort.go:64] Secret/kubelet: []
I1020 15:58:40.735292 22722 topological_sort.go:64] Secret/kube: []
I1020 15:58:40.735304 22722 topological_sort.go:64] Keypair/kubelet-api: []
I1020 15:58:40.735316 22722 topological_sort.go:64] k8s.darkcubed.io-addons-limit-range.addons.k8s.io: []
I1020 15:58:40.735330 22722 topological_sort.go:64] InstanceGroupManager/a-nodes-k8s-darkcubed-io: [InstanceTemplate/nodes-k8s-darkcubed-io]
I1020 15:58:40.735344 22722 topological_sort.go:64] Disk/c-etcd-events-k8s-darkcubed-io: []
I1020 15:58:40.735358 22722 topological_sort.go:64] InstanceGroupManager/c-nodes-k8s-darkcubed-io: [InstanceTemplate/nodes-k8s-darkcubed-io]
I1020 15:58:40.735372 22722 topological_sort.go:64] InstanceTemplate/master-us-central1-b-k8s-darkcubed-io: [Network/default]
I1020 15:58:40.735387 22722 topological_sort.go:64] Disk/b-etcd-events-k8s-darkcubed-io: []
I1020 15:58:40.735512 22722 executor.go:91] Tasks: 0 done / 55 total; 33 can run
I1020 15:58:40.735581 22722 executor.go:157] Executing task "Keypair/apiserver-proxy-client": *fitasks.Keypair {"Name":"apiserver-proxy-client","Lifecycle":"Sync","subject":"cn=apiserver-proxy-client","type":"client","alternateNames":null,"alternateNameTasks":null}
I1020 15:58:40.735652 22722 executor.go:157] Executing task "Disk/b-etcd-main-k8s-darkcubed-io": *gcetasks.Disk {"Name":"b-etcd-main-k8s-darkcubed-io","Lifecycle":"Sync","VolumeType":"pd-ssd","SizeGB":20,"Zone":"us-central1-b","Labels":{"k8s-io-cluster-name":"k8s-darkcubed-io","k8s-io-etcd-main":"b-2fa-2cb-2cc","k8s-io-role-master":"master"}}
I1020 15:58:40.735615 22722 executor.go:157] Executing task "Disk/c-etcd-events-k8s-darkcubed-io": *gcetasks.Disk {"Name":"c-etcd-events-k8s-darkcubed-io","Lifecycle":"Sync","VolumeType":"pd-ssd","SizeGB":20,"Zone":"us-central1-c","Labels":{"k8s-io-cluster-name":"k8s-darkcubed-io","k8s-io-etcd-events":"c-2fa-2cb-2cc","k8s-io-role-master":"master"}}
I1020 15:58:40.736161 22722 executor.go:157] Executing task "Disk/b-etcd-events-k8s-darkcubed-io": *gcetasks.Disk {"Name":"b-etcd-events-k8s-darkcubed-io","Lifecycle":"Sync","VolumeType":"pd-ssd","SizeGB":20,"Zone":"us-central1-b","Labels":{"k8s-io-cluster-name":"k8s-darkcubed-io","k8s-io-etcd-events":"b-2fa-2cb-2cc","k8s-io-role-master":"master"}}
I1020 15:58:40.736186 22722 executor.go:157] Executing task "Secret/system:dns": *fitasks.Secret {"Name":"system:dns","Lifecycle":"Sync"}
I1020 15:58:40.736367 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:dns"
I1020 15:58:40.736299 22722 executor.go:157] Executing task "k8s.darkcubed.io-addons-storage-gce.addons.k8s.io": *fitasks.ManagedFile {"Name":"k8s.darkcubed.io-addons-storage-gce.addons.k8s.io","Lifecycle":"Sync","Location":"addons/storage-gce.addons.k8s.io/v1.6.0.yaml","Contents":{"Name":"","Resource":{}}}
I1020 15:58:40.736471 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/addons/storage-gce.addons.k8s.io/v1.6.0.yaml"
I1020 15:58:40.736568 22722 executor.go:157] Executing task "Secret/kubelet": *fitasks.Secret {"Name":"kubelet","Lifecycle":"Sync"}
I1020 15:58:40.736620 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kubelet"
I1020 15:58:40.736772 22722 executor.go:157] Executing task "Keypair/kube-scheduler": *fitasks.Keypair {"Name":"kube-scheduler","Lifecycle":"Sync","subject":"cn=system:kube-scheduler","type":"client","alternateNames":null,"alternateNameTasks":null}
I1020 15:58:40.736690 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 15:58:40.736955 22722 executor.go:157] Executing task "k8s.darkcubed.io-addons-kube-dns.addons.k8s.io-pre-k8s-1.6": *fitasks.ManagedFile {"Name":"k8s.darkcubed.io-addons-kube-dns.addons.k8s.io-pre-k8s-1.6","Lifecycle":"Sync","Location":"addons/kube-dns.addons.k8s.io/pre-k8s-1.6.yaml","Contents":{"Name":"","Resource":{}}}
I1020 15:58:40.736953 22722 executor.go:157] Executing task "k8s.darkcubed.io-addons-kube-dns.addons.k8s.io-k8s-1.6": *fitasks.ManagedFile {"Name":"k8s.darkcubed.io-addons-kube-dns.addons.k8s.io-k8s-1.6","Lifecycle":"Sync","Location":"addons/kube-dns.addons.k8s.io/k8s-1.6.yaml","Contents":{"Name":"","Resource":{}}}
I1020 15:58:40.737008 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/addons/kube-dns.addons.k8s.io/pre-k8s-1.6.yaml"
I1020 15:58:40.737032 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/addons/kube-dns.addons.k8s.io/k8s-1.6.yaml"
I1020 15:58:40.737148 22722 executor.go:157] Executing task "Secret/system:controller_manager": *fitasks.Secret {"Name":"system:controller_manager","Lifecycle":"Sync"}
I1020 15:58:40.737209 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:controller_manager"
I1020 15:58:40.737232 22722 executor.go:157] Executing task "Keypair/kube-controller-manager": *fitasks.Keypair {"Name":"kube-controller-manager","Lifecycle":"Sync","subject":"cn=system:kube-controller-manager","type":"client","alternateNames":null,"alternateNameTasks":null}
I1020 15:58:40.737343 22722 executor.go:157] Executing task "Secret/kube": *fitasks.Secret {"Name":"kube","Lifecycle":"Sync"}
I1020 15:58:40.737382 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kube"
I1020 15:58:40.737582 22722 executor.go:157] Executing task "Keypair/kubelet-api": *fitasks.Keypair {"Name":"kubelet-api","Lifecycle":"Sync","subject":"cn=kubelet-api","type":"client","alternateNames":null,"alternateNameTasks":null}
I1020 15:58:40.737745 22722 executor.go:157] Executing task "Disk/a-etcd-events-k8s-darkcubed-io": *gcetasks.Disk {"Name":"a-etcd-events-k8s-darkcubed-io","Lifecycle":"Sync","VolumeType":"pd-ssd","SizeGB":20,"Zone":"us-central1-a","Labels":{"k8s-io-cluster-name":"k8s-darkcubed-io","k8s-io-etcd-events":"a-2fa-2cb-2cc","k8s-io-role-master":"master"}}
I1020 15:58:40.737891 22722 executor.go:157] Executing task "Secret/system:monitoring": *fitasks.Secret {"Name":"system:monitoring","Lifecycle":"Sync"}
I1020 15:58:40.737927 22722 executor.go:157] Executing task "Keypair/kubelet": *fitasks.Keypair {"Name":"kubelet","Lifecycle":"Sync","subject":"o=system:nodes,cn=kubelet","type":"client","alternateNames":null,"alternateNameTasks":null}
I1020 15:58:40.737991 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:monitoring"
I1020 15:58:40.738168 22722 executor.go:157] Executing task "Secret/admin": *fitasks.Secret {"Name":"admin","Lifecycle":"Sync"}
I1020 15:58:40.738254 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/admin"
I1020 15:58:40.738280 22722 executor.go:157] Executing task "k8s.darkcubed.io-addons-dns-controller.addons.k8s.io-pre-k8s-1.6": *fitasks.ManagedFile {"Name":"k8s.darkcubed.io-addons-dns-controller.addons.k8s.io-pre-k8s-1.6","Lifecycle":"Sync","Location":"addons/dns-controller.addons.k8s.io/pre-k8s-1.6.yaml","Contents":{"Name":"","Resource":{}}}
I1020 15:58:40.738360 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/addons/dns-controller.addons.k8s.io/pre-k8s-1.6.yaml"
I1020 15:58:40.738493 22722 executor.go:157] Executing task "Disk/c-etcd-main-k8s-darkcubed-io": *gcetasks.Disk {"Name":"c-etcd-main-k8s-darkcubed-io","Lifecycle":"Sync","VolumeType":"pd-ssd","SizeGB":20,"Zone":"us-central1-c","Labels":{"k8s-io-cluster-name":"k8s-darkcubed-io","k8s-io-etcd-main":"c-2fa-2cb-2cc","k8s-io-role-master":"master"}}
I1020 15:58:40.738652 22722 executor.go:157] Executing task "k8s.darkcubed.io-addons-bootstrap": *fitasks.ManagedFile {"Name":"k8s.darkcubed.io-addons-bootstrap","Lifecycle":"Sync","Location":"addons/bootstrap-channel.yaml","Contents":{"Name":"","Resource":{}}}
I1020 15:58:40.738719 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/addons/bootstrap-channel.yaml"
I1020 15:58:40.738723 22722 executor.go:157] Executing task "k8s.darkcubed.io-addons-dns-controller.addons.k8s.io-k8s-1.6": *fitasks.ManagedFile {"Name":"k8s.darkcubed.io-addons-dns-controller.addons.k8s.io-k8s-1.6","Lifecycle":"Sync","Location":"addons/dns-controller.addons.k8s.io/k8s-1.6.yaml","Contents":{"Name":"","Resource":{}}}
I1020 15:58:40.738841 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/addons/dns-controller.addons.k8s.io/k8s-1.6.yaml"
I1020 15:58:40.738667 22722 executor.go:157] Executing task "Keypair/kubecfg": *fitasks.Keypair {"Name":"kubecfg","Lifecycle":"Sync","subject":"o=system:masters,cn=kubecfg","type":"client","alternateNames":null,"alternateNameTasks":null}
I1020 15:58:40.739045 22722 executor.go:157] Executing task "Secret/system:scheduler": *fitasks.Secret {"Name":"system:scheduler","Lifecycle":"Sync"}
I1020 15:58:40.739108 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:scheduler"
I1020 15:58:40.739207 22722 executor.go:157] Executing task "Keypair/master": *fitasks.Keypair {"Name":"master","Lifecycle":"Sync","subject":"cn=kubernetes-master","type":"server","alternateNames":["kubernetes","kubernetes.default","kubernetes.default.svc","kubernetes.default.svc.cluster.local","api.k8s.darkcubed.io","api.internal.k8s.darkcubed.io","100.64.0.1","127.0.0.1"],"alternateNameTasks":null}
I1020 15:58:40.739352 22722 executor.go:157] Executing task "Disk/a-etcd-main-k8s-darkcubed-io": *gcetasks.Disk {"Name":"a-etcd-main-k8s-darkcubed-io","Lifecycle":"Sync","VolumeType":"pd-ssd","SizeGB":20,"Zone":"us-central1-a","Labels":{"k8s-io-cluster-name":"k8s-darkcubed-io","k8s-io-etcd-main":"a-2fa-2cb-2cc","k8s-io-role-master":"master"}}
I1020 15:58:40.739431 22722 executor.go:157] Executing task "k8s.darkcubed.io-addons-core.addons.k8s.io": *fitasks.ManagedFile {"Name":"k8s.darkcubed.io-addons-core.addons.k8s.io","Lifecycle":"Sync","Location":"addons/core.addons.k8s.io/v1.4.0.yaml","Contents":{"Name":"","Resource":{}}}
I1020 15:58:40.739510 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/addons/core.addons.k8s.io/v1.4.0.yaml"
I1020 15:58:40.739636 22722 executor.go:157] Executing task "Secret/kube-proxy": *fitasks.Secret {"Name":"kube-proxy","Lifecycle":"Sync"}
I1020 15:58:40.739697 22722 executor.go:157] Executing task "Keypair/kube-proxy": *fitasks.Keypair {"Name":"kube-proxy","Lifecycle":"Sync","subject":"cn=system:kube-proxy","type":"client","alternateNames":null,"alternateNameTasks":null}
I1020 15:58:40.739722 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kube-proxy"
I1020 15:58:40.739890 22722 executor.go:157] Executing task "Keypair/kops": *fitasks.Keypair {"Name":"kops","Lifecycle":"Sync","subject":"o=system:masters,cn=kops","type":"client","alternateNames":null,"alternateNameTasks":null}
I1020 15:58:40.739983 22722 executor.go:157] Executing task "k8s.darkcubed.io-addons-limit-range.addons.k8s.io": *fitasks.ManagedFile {"Name":"k8s.darkcubed.io-addons-limit-range.addons.k8s.io","Lifecycle":"Sync","Location":"addons/limit-range.addons.k8s.io/v1.5.0.yaml","Contents":{"Name":"","Resource":{}}}
I1020 15:58:40.740055 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/addons/limit-range.addons.k8s.io/v1.5.0.yaml"
I1020 15:58:40.736277 22722 executor.go:157] Executing task "Secret/system:logging": *fitasks.Secret {"Name":"system:logging","Lifecycle":"Sync"}
I1020 15:58:40.740198 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:logging"
I1020 15:58:40.825505 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/apiserver-proxy-client: []
I1020 15:58:40.869308 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:dns"
I1020 15:58:40.870751 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kubelet"
I1020 15:58:40.876768 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/addons/kube-dns.addons.k8s.io/k8s-1.6.yaml"
I1020 15:58:40.881549 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/addons/storage-gce.addons.k8s.io/v1.6.0.yaml"
I1020 15:58:40.881603 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:monitoring"
I1020 15:58:40.885835 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:controller_manager"
I1020 15:58:40.887100 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kube"
I1020 15:58:40.908908 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-scheduler: []
I1020 15:58:40.908971 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-controller-manager: []
I1020 15:58:40.909267 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/addons/bootstrap-channel.yaml"
I1020 15:58:40.911917 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/addons/limit-range.addons.k8s.io/v1.5.0.yaml"
I1020 15:58:40.909307 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/addons/core.addons.k8s.io/v1.4.0.yaml"
I1020 15:58:40.914165 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/addons/kube-dns.addons.k8s.io/pre-k8s-1.6.yaml"
I1020 15:58:40.916234 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubelet-api: []
I1020 15:58:40.920517 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubelet: []
I1020 15:58:40.933013 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kops: []
I1020 15:58:40.933041 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/addons/dns-controller.addons.k8s.io/pre-k8s-1.6.yaml"
I1020 15:58:40.936364 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-proxy: []
I1020 15:58:40.936813 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/master: []
I1020 15:58:40.936858 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:logging"
I1020 15:58:40.937127 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubecfg: []
I1020 15:58:40.937360 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/apiserver-proxy-client: []
I1020 15:58:40.937713 22722 keypair.go:170] Creating PKI keypair "apiserver-proxy-client"
I1020 15:58:40.939238 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:scheduler"
I1020 15:58:40.939630 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/admin"
I1020 15:58:40.939972 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kube-proxy"
I1020 15:58:40.940291 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/addons/dns-controller.addons.k8s.io/k8s-1.6.yaml"
I1020 15:58:40.955782 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:dns"
I1020 15:58:41.025167 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kube-scheduler: []
I1020 15:58:41.025375 22722 keypair.go:170] Creating PKI keypair "kube-scheduler"
I1020 15:58:41.037309 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kubelet-api: []
I1020 15:58:41.037545 22722 keypair.go:170] Creating PKI keypair "kubelet-api"
I1020 15:58:41.037679 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
I1020 15:58:41.041922 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kube-controller-manager: []
I1020 15:58:41.042067 22722 keypair.go:170] Creating PKI keypair "kube-controller-manager"
I1020 15:58:41.061882 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/master: []
I1020 15:58:41.062120 22722 keypair.go:170] Creating PKI keypair "master"
I1020 15:58:41.063715 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kops: []
I1020 15:58:41.063775 22722 keypair.go:170] Creating PKI keypair "kops"
I1020 15:58:41.065530 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kube-proxy: []
I1020 15:58:41.065573 22722 keypair.go:170] Creating PKI keypair "kube-proxy"
I1020 15:58:41.068618 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kubelet: []
I1020 15:58:41.068756 22722 keypair.go:170] Creating PKI keypair "kubelet"
I1020 15:58:41.074043 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/apiserver-proxy-client: []
I1020 15:58:41.074049 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kubecfg: []
I1020 15:58:41.074309 22722 keypair.go:170] Creating PKI keypair "kubecfg"
I1020 15:58:41.075243 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:dns"
I1020 15:58:41.100657 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-scheduler: []
I1020 15:58:41.120576 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubelet-api: []
I1020 15:58:41.131572 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-controller-manager: []
I1020 15:58:41.145880 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubelet: []
I1020 15:58:41.147454 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/master: []
I1020 15:58:41.151604 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kops: []
I1020 15:58:41.154307 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-proxy: []
I1020 15:58:41.160986 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/apiserver-proxy-client: []
I1020 15:58:41.164902 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubecfg: []
I1020 15:58:41.180985 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:dns"
I1020 15:58:41.181150 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kubelet"
I1020 15:58:41.188576 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kube-scheduler: []
I1020 15:58:41.212863 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kubelet-api: []
I1020 15:58:41.219238 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kube-controller-manager: []
I1020 15:58:41.235809 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kubelet: []
I1020 15:58:41.276121 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kops: []
I1020 15:58:41.296125 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/master: []
I1020 15:58:41.306328 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kube-proxy: []
I1020 15:58:41.326303 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kubelet"
I1020 15:58:41.328186 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kubecfg: []
I1020 15:58:41.477312 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kubelet"
I1020 15:58:41.477447 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:monitoring"
I1020 15:58:41.509266 22722 vfs_castore.go:418] Issuing new certificate: "kube-controller-manager"
I1020 15:58:41.587719 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:monitoring"
I1020 15:58:41.648066 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/ca: []
I1020 15:58:41.708670 22722 disk.go:148] Setting labels on disk "c-etcd-main-k8s-darkcubed-io": map[k8s-io-role-master:master k8s-io-etcd-main:c-2fa-2cb-2cc k8s-io-cluster-name:k8s-darkcubed-io]
I1020 15:58:41.728807 22722 disk.go:148] Setting labels on disk "a-etcd-main-k8s-darkcubed-io": map[k8s-io-cluster-name:k8s-darkcubed-io k8s-io-role-master:master k8s-io-etcd-main:a-2fa-2cb-2cc]
I1020 15:58:41.748877 22722 disk.go:148] Setting labels on disk "c-etcd-events-k8s-darkcubed-io": map[k8s-io-cluster-name:k8s-darkcubed-io k8s-io-role-master:master k8s-io-etcd-events:c-2fa-2cb-2cc]
I1020 15:58:41.760678 22722 vfs_castore.go:418] Issuing new certificate: "kube-proxy"
I1020 15:58:41.774105 22722 vfs_castore.go:418] Issuing new certificate: "kubecfg"
I1020 15:58:41.809229 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:monitoring"
I1020 15:58:41.809466 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:controller_manager"
I1020 15:58:41.829403 22722 disk.go:148] Setting labels on disk "b-etcd-main-k8s-darkcubed-io": map[k8s-io-cluster-name:k8s-darkcubed-io k8s-io-role-master:master k8s-io-etcd-main:b-2fa-2cb-2cc]
I1020 15:58:41.889783 22722 disk.go:148] Setting labels on disk "b-etcd-events-k8s-darkcubed-io": map[k8s-io-cluster-name:k8s-darkcubed-io k8s-io-role-master:master k8s-io-etcd-events:b-2fa-2cb-2cc]
I1020 15:58:41.896742 22722 vfs_castore.go:418] Issuing new certificate: "apiserver-proxy-client"
I1020 15:58:41.903831 22722 vfs_castore.go:418] Issuing new certificate: "kube-scheduler"
I1020 15:58:41.909691 22722 disk.go:148] Setting labels on disk "a-etcd-events-k8s-darkcubed-io": map[k8s-io-role-master:master k8s-io-etcd-events:a-2fa-2cb-2cc k8s-io-cluster-name:k8s-darkcubed-io]
I1020 15:58:41.914039 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/ca/6479115885435641389351351019.key"
I1020 15:58:41.922075 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:controller_manager"
I1020 15:58:41.928173 22722 vfs_castore.go:418] Issuing new certificate: "kops"
I1020 15:58:41.962339 22722 vfs_castore.go:418] Issuing new certificate: "master"
I1020 15:58:41.985788 22722 vfs_castore.go:418] Issuing new certificate: "kubelet"
I1020 15:58:41.995019 22722 vfs_castore.go:418] Issuing new certificate: "kubelet-api"
I1020 15:58:42.044051 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:controller_manager"
I1020 15:58:42.044132 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kube"
I1020 15:58:42.128586 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/private/ca: [gs://darkcubed-kops/k8s.darkcubed.io/pki/private/ca/6479115885435641389351351019.key]
I1020 15:58:42.128636 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/ca/6479115885435641389351351019.key"
I1020 15:58:42.130839 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kube"
I1020 15:58:42.235266 22722 privatekey.go:156] Parsing pem block: "RSA PRIVATE KEY"
I1020 15:58:42.236055 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/ca/6479115885435641389351351019.crt"
I1020 15:58:42.240715 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kube"
I1020 15:58:42.240848 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:logging"
I1020 15:58:42.318849 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:logging"
I1020 15:58:42.442421 22722 gsfs.go:239] Listed files in gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/ca: [gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/ca/6479115885435641389351351019.crt]
I1020 15:58:42.442458 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/ca/6479115885435641389351351019.crt"
I1020 15:58:42.450467 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:logging"
I1020 15:58:42.450537 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:scheduler"
I1020 15:58:42.530338 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:scheduler"
I1020 15:58:42.635604 22722 certificate.go:102] Parsing pem block: "CERTIFICATE"
I1020 15:58:42.646087 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kube-proxy/6479115884777097114491606002.key"
I1020 15:58:42.646261 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kubecfg/6479115884834759379688714366.key"
I1020 15:58:42.646337 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/apiserver-proxy-client/6479115885361516706142852039.key"
I1020 15:58:42.648133 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kube-controller-manager/6479115883697302479862172086.key"
I1020 15:58:42.650884 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/system:scheduler"
I1020 15:58:42.651378 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kube-proxy"
I1020 15:58:42.653527 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kube-scheduler/6479115885391971939161718762.key"
I1020 15:58:42.656376 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kops/6479115885496496623756037966.key"
I1020 15:58:42.657865 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/master/6479115885643237590643361714.key"
I1020 15:58:42.659878 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kubelet/6479115885743949374479545010.key"
I1020 15:58:42.662904 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/private/kubelet-api/6479115885783618022204688166.key"
I1020 15:58:42.739224 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kube-proxy"
I1020 15:58:42.770491 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubecfg/6479115884834759379688714366.crt"
I1020 15:58:42.815164 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubelet/6479115885743949374479545010.crt"
I1020 15:58:42.815567 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-controller-manager/6479115883697302479862172086.crt"
I1020 15:58:42.817078 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/master/6479115885643237590643361714.crt"
I1020 15:58:42.821829 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-proxy/6479115884777097114491606002.crt"
I1020 15:58:42.822357 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubelet-api/6479115885783618022204688166.crt"
I1020 15:58:42.823786 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/apiserver-proxy-client/6479115885361516706142852039.crt"
I1020 15:58:42.824911 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-scheduler/6479115885391971939161718762.crt"
I1020 15:58:42.830269 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kops/6479115885496496623756037966.crt"
I1020 15:58:42.855696 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/kube-proxy"
I1020 15:58:42.855780 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/admin"
I1020 15:58:42.886533 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubecfg/6479115884834759379688714366.crt"
I1020 15:58:42.942233 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-controller-manager/6479115883697302479862172086.crt"
I1020 15:58:42.944114 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubelet/6479115885743949374479545010.crt"
I1020 15:58:42.944593 22722 gsfs.go:123] Writing file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/admin"
I1020 15:58:42.977881 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/master/6479115885643237590643361714.crt"
I1020 15:58:42.977903 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/apiserver-proxy-client/6479115885361516706142852039.crt"
I1020 15:58:42.982664 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-proxy/6479115884777097114491606002.crt"
I1020 15:58:42.985846 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kubelet-api/6479115885783618022204688166.crt"
I1020 15:58:42.991660 22722 certificate.go:102] Parsing pem block: "CERTIFICATE"
I1020 15:58:42.991810 22722 keypair.go:192] created certificate &{{[] [system:masters] [] [] [] [] [] kubecfg [{2.5.4.10 system:masters} {2.5.4.3 kubecfg}] []} false 0xc420ad3900 0xc420c6cac0}
I1020 15:58:42.997813 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kube-scheduler/6479115885391971939161718762.crt"
I1020 15:58:42.999170 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/pki/issued/kops/6479115885496496623756037966.crt"
I1020 15:58:43.038473 22722 certificate.go:102] Parsing pem block: "CERTIFICATE"
I1020 15:58:43.038582 22722 keypair.go:192] created certificate &{{[] [] [] [] [] [] [] system:kube-controller-manager [{2.5.4.3 system:kube-controller-manager}] []} false 0xc420c5c500 0xc420351600}
I1020 15:58:43.039942 22722 certificate.go:102] Parsing pem block: "CERTIFICATE"
I1020 15:58:43.040006 22722 keypair.go:192] created certificate &{{[] [system:nodes] [] [] [] [] [] kubelet [{2.5.4.10 system:nodes} {2.5.4.3 kubelet}] []} false 0xc420dba000 0xc420c6d4e0}
I1020 15:58:43.065188 22722 gsfs.go:179] Reading file "gs://darkcubed-kops/k8s.darkcubed.io/secrets/admin"
I1020 15:58:43.083423 22722 certificate.go:102] Parsing pem block: "CERTIFICATE"
I1020 15:58:43.083657 22722 keypair.go:192] created certificate &{{[] [] [] [] [] [] [] kubelet-api [{2.5.4.3 kubelet-api}] []} false 0xc420ee0a00 0xc42047d1b0}
I1020 15:58:43.085671 22722 certificate.go:102] Parsing pem block: "CERTIFICATE"
I1020 15:58:43.085778 22722 certificate.go:102] Parsing pem block: "CERTIFICATE"
I1020 15:58:43.085964 22722 keypair.go:192] created certificate &{{[] [] [] [] [] [] [] apiserver-proxy-client [{2.5.4.3 apiserver-proxy-client}] []} false 0xc420c5ca00 0xc420351a70}
I1020 15:58:43.085989 22722 keypair.go:192] created certificate &{{[] [] [] [] [] [] [] kubernetes-master [{2.5.4.3 kubernetes-master}] []} false 0xc420dbaf00 0xc420c6dfc0}
I1020 15:58:43.088805 22722 certificate.go:102] Parsing pem block: "CERTIFICATE"
I1020 15:58:43.088908 22722 keypair.go:192] created certificate &{{[] [system:masters] [] [] [] [] [] kops [{2.5.4.10 system:masters} {2.5.4.3 kops}] []} false 0xc420dbb400 0xc4203f6590}
I1020 15:58:43.106241 22722 certificate.go:102] Parsing pem block: "CERTIFICATE"
I1020 15:58:43.106435 22722 keypair.go:192] created certificate &{{[] [] [] [] [] [] [] system:kube-proxy [{2.5.4.3 system:kube-proxy}] []} false 0xc420c2a000 0xc4203f6ce0}
I1020 15:58:43.107733 22722 certificate.go:102] Parsing pem block: "CERTIFICATE"
I1020 15:58:43.107917 22722 keypair.go:192] created certificate &{{[] [] [] [] [] [] [] system:kube-scheduler [{2.5.4.3 system:kube-scheduler}] []} false 0xc420c5d400 0xc42026e010}
W1020 15:58:43.152299 22722 executor.go:109] error running task "Network/default" (9m57s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 15:58:43.152351 22722 executor.go:91] Tasks: 32 done / 55 total; 3 can run
I1020 15:58:43.152517 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 15:58:43.152399 22722 executor.go:157] Executing task "MirrorKeystore/mirror-keystore": *fitasks.MirrorKeystore {"Name":"mirror-keystore","Lifecycle":null,"MirrorPath":{}}
I1020 15:58:43.152402 22722 executor.go:157] Executing task "MirrorSecrets/mirror-secrets": *fitasks.MirrorSecrets {"Name":"mirror-secrets","Lifecycle":null,"MirrorPath":{}}
I1020 15:58:43.335214 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 15:58:43.335437 22722 executor.go:109] error running task "Network/default" (9m57s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 15:58:43.335491 22722 executor.go:91] Tasks: 34 done / 55 total; 1 can run
I1020 15:58:43.335518 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 15:58:43.522245 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 15:58:43.522530 22722 executor.go:109] error running task "Network/default" (9m57s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 15:58:43.522578 22722 executor.go:124] No progress made, sleeping before retrying 1 failed task(s)
I1020 15:58:53.522874 22722 executor.go:91] Tasks: 34 done / 55 total; 1 can run
I1020 15:58:53.522961 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 15:58:53.680487 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 15:58:53.680694 22722 executor.go:109] error running task "Network/default" (9m47s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 15:58:53.680737 22722 executor.go:124] No progress made, sleeping before retrying 1 failed task(s)
I1020 15:59:03.680952 22722 executor.go:91] Tasks: 34 done / 55 total; 1 can run
I1020 15:59:03.681028 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 15:59:03.823395 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 15:59:03.823586 22722 executor.go:109] error running task "Network/default" (9m36s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 15:59:03.823618 22722 executor.go:124] No progress made, sleeping before retrying 1 failed task(s)
I1020 15:59:13.824024 22722 executor.go:91] Tasks: 34 done / 55 total; 1 can run
I1020 15:59:13.824205 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 15:59:13.979558 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 15:59:13.979772 22722 executor.go:109] error running task "Network/default" (9m26s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 15:59:13.979815 22722 executor.go:124] No progress made, sleeping before retrying 1 failed task(s)
I1020 15:59:23.980274 22722 executor.go:91] Tasks: 34 done / 55 total; 1 can run
I1020 15:59:23.980445 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 15:59:24.150847 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 15:59:24.151122 22722 executor.go:109] error running task "Network/default" (9m16s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 15:59:24.151183 22722 executor.go:124] No progress made, sleeping before retrying 1 failed task(s)
I1020 15:59:34.151444 22722 executor.go:91] Tasks: 34 done / 55 total; 1 can run
I1020 15:59:34.151514 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 15:59:34.335591 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 15:59:34.335779 22722 executor.go:109] error running task "Network/default" (9m6s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 15:59:34.335824 22722 executor.go:124] No progress made, sleeping before retrying 1 failed task(s)
I1020 15:59:44.336007 22722 executor.go:91] Tasks: 34 done / 55 total; 1 can run
I1020 15:59:44.336074 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 15:59:44.503359 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 15:59:44.503651 22722 executor.go:109] error running task "Network/default" (8m56s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 15:59:44.503685 22722 executor.go:124] No progress made, sleeping before retrying 1 failed task(s)
I1020 15:59:54.504076 22722 executor.go:91] Tasks: 34 done / 55 total; 1 can run
I1020 15:59:54.504226 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 15:59:54.646096 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 15:59:54.646374 22722 executor.go:109] error running task "Network/default" (8m46s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 15:59:54.646421 22722 executor.go:124] No progress made, sleeping before retrying 1 failed task(s)
I1020 16:00:04.646890 22722 executor.go:91] Tasks: 34 done / 55 total; 1 can run
I1020 16:00:04.647052 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 16:00:04.811930 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 16:00:04.812205 22722 executor.go:109] error running task "Network/default" (8m35s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 16:00:04.812239 22722 executor.go:124] No progress made, sleeping before retrying 1 failed task(s)
I1020 16:00:14.812669 22722 executor.go:91] Tasks: 34 done / 55 total; 1 can run
I1020 16:00:14.812852 22722 executor.go:157] Executing task "Network/default": *gcetasks.Network {"Name":"default","Lifecycle":"Sync","Mode":"auto","CIDR":null}
I1020 16:00:15.004369 22722 changes.go:80] Field changed "Mode" actual="legacy" expected="auto"
W1020 16:00:15.004477 22722 executor.go:109] error running task "Network/default" (8m25s remaining to succeed): cannot apply changes to Network: *gcetasks.Network {"Name":null,"Lifecycle":null,"Mode":"auto","CIDR":null}
I1020 16:00:15.004496 22722 executor.go:124] No progress made, sleeping before retrying 1 failed task(s)
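The retry loop above never converges: changes.go reports the existing GCE network "default" in legacy mode while the cluster spec expects auto mode, and the executor cannot apply that Mode change, so the Network/default task keeps being retried until its roughly ten-minute deadline ("remaining to succeed") runs out. A minimal way to confirm the mismatch from the same project, assuming the gcloud CLI is installed and pointed at ${PROJECT} (exact output columns vary by gcloud version):

  # List networks; the mode/subnet-mode column for "default" should read LEGACY here rather than AUTO.
  gcloud compute networks list --project ${PROJECT}

  # Describe the network directly: a legacy network carries an IPv4Range field,
  # whereas an auto-mode network reports autoCreateSubnetworks: true.
  gcloud compute networks describe default --project ${PROJECT}

A likely remedy, not shown in this log, is to replace the legacy "default" network with an auto-mode network before re-running the command; as long as the network stays in legacy mode, each retry hits the same "cannot apply changes to Network" error.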