```bash
Kargo on main [!⇡] via usrbinkat@ via 🐍 v3.10.12 took 24s
🐋 ❯ task deploy
task: [pulumi-cancel] pulumi cancel --yes --stack containercraft/kargo/ci 2>/dev/null || true
task: [talos-clean] talosctl cluster destroy --name talos-kargo-docker 2>/dev/null || true
task: [talos-clean] docker rm --force talos-kargo-docker-controlplane-1 2>/dev/null || true
task: [pulumi-cancel] pulumi cancel --yes --stack containercraft/kargo/ci 2>/dev/null || true
task: [pulumi-clean] pulumi down --yes --skip-preview --refresh --stack containercraft/kargo/ci 2>/dev/null || true
Destroying (containercraft/ci)
View in Browser (Ctrl+O): https://app.pulumi.com/containercraft/kargo/ci/updates/48
Resources:
Duration: 1s
The resources in the stack have been deleted, but the history and configuration associated with the stack are still maintained.
If you want to remove the stack completely, run `pulumi stack rm containercraft/ci`.
task: [clean] ssh-keygen -f "/home/vscode/.ssh/known_hosts" -R "[127.0.0.1]:30590" 2>/dev/null || true
task: [clean] rm -rf .talos/manifest/* .kube/config
task: [pulumi-cancel] pulumi cancel --yes --stack containercraft/kargo/ci 2>/dev/null || true
task: [talos-clean] talosctl cluster destroy --name talos-kargo-docker 2>/dev/null || true
task: [talos-clean] docker rm --force talos-kargo-docker-controlplane-1 2>/dev/null || true
task: [pulumi-cancel] pulumi cancel --yes --stack containercraft/kargo/ci 2>/dev/null || true
task: [pulumi-clean] pulumi down --yes --skip-preview --refresh --stack containercraft/kargo/ci 2>/dev/null || true
Destroying (containercraft/ci)
View in Browser (Ctrl+O): https://app.pulumi.com/containercraft/kargo/ci/updates/49
Resources:
Duration: 1s
The resources in the stack have been deleted, but the history and configuration associated with the stack are still maintained.
If you want to remove the stack completely, run `pulumi stack rm containercraft/ci`.
task: [clean] ssh-keygen -f "/home/vscode/.ssh/known_hosts" -R "[127.0.0.1]:30590" 2>/dev/null || true
task: [clean] rm -rf .talos/manifest/* .kube/config
task: [init] direnv allow || true
task: [init] mkdir -p .kube .pulumi .talos
task: [init] touch /workspaces/Kargo/.kube/config /workspaces/Kargo/.talos/manifest/talosconfig
task: [init] chmod 600 /workspaces/Kargo/.kube/config /workspaces/Kargo/.talos/manifest/talosconfig
task: [talos-gen-config] talosctl gen config kargo https://10.0.5.2:6443 --config-patch @.talos/patch/cluster.yaml --force --output .talos/manifest --context talos-kargo-docker
generating PKI and tokens
Created .talos/manifest/controlplane.yaml
Created .talos/manifest/worker.yaml
Created .talos/manifest/talosconfig
task: [init] direnv allow || true
task: [init] mkdir -p .kube .pulumi .talos
task: [init] touch /workspaces/Kargo/.kube/config /workspaces/Kargo/.talos/manifest/talosconfig
task: [init] chmod 600 /workspaces/Kargo/.kube/config /workspaces/Kargo/.talos/manifest/talosconfig
task: [talos-deploy] talosctl cluster create \
--arch=arm64 \
--provisioner docker \
--init-node-as-endpoint \
--config-patch @.talos/patch/cluster.yaml \
--controlplanes 1 \
--memory 8192 \
--exposed-ports 30590:30590/tcp \
--context talos-kargo-docker \
--name talos-kargo-docker \
--workers 0 --crashdump
validating CIDR and reserving IPs
generating PKI and tokens
creating network talos-kargo-docker
creating controlplane nodes
creating worker nodes
waiting for API
bootstrapping cluster
waiting for etcd to be healthy: OK
waiting for etcd members to be consistent across nodes: OK
waiting for etcd members to be control plane nodes: OK
waiting for apid to be ready: OK
waiting for all nodes memory sizes: OK
waiting for all nodes disk sizes: OK
waiting for kubelet to be healthy: OK
waiting for all nodes to finish boot sequence: OK
waiting for all k8s nodes to report: OK
waiting for all k8s nodes to report ready: OK
waiting for all control plane static pods to be running: OK
waiting for all control plane components to be ready: OK
waiting for kube-proxy to report ready: OK
waiting for coredns to report ready: OK
waiting for all k8s nodes to report schedulable: OK
merging kubeconfig into "/workspaces/Kargo/.kube/config"
PROVISIONER docker
NAME talos-kargo-docker
NETWORK NAME talos-kargo-docker
NETWORK CIDR 10.5.0.0/24
NETWORK GATEWAY 10.5.0.1
NETWORK MTU 1500
KUBERNETES ENDPOINT https://127.0.0.1:58223
NODES:
NAME TYPE IP CPU RAM DISK
/talos-kargo-docker-controlplane-1 controlplane 10.5.0.2 2.00 8.6 GB -
task: [talos-ready] bash -c 'until kubectl --kubeconfig /workspaces/Kargo/.kube/config wait --for=condition=Ready pod -l k8s-app=kube-scheduler --namespace=kube-system --timeout=180s; do echo "Waiting for kube-scheduler..."; sleep 5; done' || true
pod/kube-scheduler-talos-kargo-docker-controlplane-1 condition met
task: [talos-ready] bash -c 'until kubectl --kubeconfig /workspaces/Kargo/.kube/config wait --for=condition=Ready pod -l k8s-app=kube-controller-manager --namespace=kube-system --timeout=180s; do echo "Waiting for kube-controller-manager..."; sleep 5; done' || true
pod/kube-controller-manager-talos-kargo-docker-controlplane-1 condition met
task: [talos-ready] bash -c 'until kubectl --kubeconfig /workspaces/Kargo/.kube/config wait --for=condition=Ready pod -l k8s-app=kube-apiserver --namespace=kube-system --timeout=180s; do echo "Waiting for kube-apiserver..."; sleep 5; done' || true
pod/kube-apiserver-talos-kargo-docker-controlplane-1 condition met
task: [all-pods-ready] bash -c 'until [ "$(kubectl get pods --all-namespaces --no-headers | grep -v "Running\\|Completed\\|Succeeded" | wc -l)" -eq 0 ]; do echo "Waiting for pods..."; sleep 5; done'
task: [all-pods-ready] kubectl get pods --all-namespaces --show-labels --kubeconfig /workspaces/Kargo/.kube/config
NAMESPACE NAME READY STATUS RESTARTS AGE LABELS
kube-system coredns-64b67fc8fd-bnz6p 1/1 Running 0 85s k8s-app=kube-dns,pod-template-hash=64b67fc8fd
kube-system coredns-64b67fc8fd-f2bjd 1/1 Running 0 85s k8s-app=kube-dns,pod-template-hash=64b67fc8fd
kube-system kube-apiserver-talos-kargo-docker-controlplane-1 1/1 Running 0 55s k8s-app=kube-apiserver,tier=control-plane
kube-system kube-controller-manager-talos-kargo-docker-controlplane-1 1/1 Running 2 (2m2s ago) 34s k8s-app=kube-controller-manager,tier=control-plane
kube-system kube-flannel-qpprj 1/1 Running 0 81s controller-revision-hash=85686fb59d,k8s-app=flannel,pod-template-generation=1,tier=node
kube-system kube-proxy-txhhj 1/1 Running 0 81s controller-revision-hash=6948f8df77,k8s-app=kube-proxy,pod-template-generation=1,tier=node
kube-system kube-scheduler-talos-kargo-docker-controlplane-1 1/1 Running 2 (2m2s ago) 7s k8s-app=kube-scheduler,tier=control-plane
task: [init] direnv allow || true
task: [init] mkdir -p .kube .pulumi .talos
task: [init] touch /workspaces/Kargo/.kube/config /workspaces/Kargo/.talos/manifest/talosconfig
task: [init] chmod 600 /workspaces/Kargo/.kube/config /workspaces/Kargo/.talos/manifest/talosconfig
task: [pulumi-login] pulumi login
Logged in to pulumi.com as usrbinkat (https://app.pulumi.com/usrbinkat)
task: [pulumi-login] pulumi install
Installing dependencies...
Creating virtual environment...
Finished creating virtual environment
Updating pip, setuptools, and wheel in virtual environment...
Requirement already satisfied: pip in /workspaces/Kargo/venv/lib/python3.10/site-packages (22.0.2)
Collecting pip
Downloading pip-24.2-py3-none-any.whl (1.8 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 1.8/1.8 MB 5.0 MB/s eta 0:00:00
Requirement already satisfied: setuptools in /workspaces/Kargo/venv/lib/python3.10/site-packages (59.6.0)
Collecting setuptools
Downloading setuptools-73.0.1-py3-none-any.whl (2.3 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 2.3/2.3 MB 8.5 MB/s eta 0:00:00
Collecting wheel
Downloading wheel-0.44.0-py3-none-any.whl (67 kB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 67.1/67.1 KB 13.5 MB/s eta 0:00:00
Installing collected packages: wheel, setuptools, pip
Attempting uninstall: setuptools
Found existing installation: setuptools 59.6.0
Uninstalling setuptools-59.6.0:
Successfully uninstalled setuptools-59.6.0
Attempting uninstall: pip
Found existing installation: pip 22.0.2
Uninstalling pip-22.0.2:
Successfully uninstalled pip-22.0.2
Successfully installed pip-24.2 setuptools-73.0.1 wheel-0.44.0
Finished updating
Installing dependencies in virtual environment...
Collecting pulumi>=3 (from -r requirements.txt (line 1))
Downloading pulumi-3.129.0-py3-none-any.whl.metadata (11 kB)
Collecting pulumi_kubernetes>=4.11.0 (from -r requirements.txt (line 2))
Downloading pulumi_kubernetes-4.17.1-py3-none-any.whl.metadata (9.4 kB)
Collecting kubernetes>=4.7.1 (from -r requirements.txt (line 3))
Downloading kubernetes-30.1.0-py2.py3-none-any.whl.metadata (1.5 kB)
Collecting beautifulsoup4 (from -r requirements.txt (line 4))
Downloading beautifulsoup4-4.12.3-py3-none-any.whl.metadata (3.8 kB)
Collecting pyyaml (from -r requirements.txt (line 5))
Downloading PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl.metadata (2.1 kB)
Collecting packaging (from -r requirements.txt (line 6))
Downloading packaging-24.1-py3-none-any.whl.metadata (3.2 kB)
Collecting protobuf~=4.21 (from pulumi>=3->-r requirements.txt (line 1))
Downloading protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl.metadata (541 bytes)
Collecting grpcio~=1.60.1 (from pulumi>=3->-r requirements.txt (line 1))
Downloading grpcio-1.60.2-cp310-cp310-manylinux_2_17_aarch64.whl.metadata (4.0 kB)
Collecting dill~=0.3 (from pulumi>=3->-r requirements.txt (line 1))
Downloading dill-0.3.8-py3-none-any.whl.metadata (10 kB)
Collecting six~=1.12 (from pulumi>=3->-r requirements.txt (line 1))
Downloading six-1.16.0-py2.py3-none-any.whl.metadata (1.8 kB)
Collecting semver~=2.13 (from pulumi>=3->-r requirements.txt (line 1))
Downloading semver-2.13.0-py2.py3-none-any.whl.metadata (5.0 kB)
Collecting parver>=0.2.1 (from pulumi_kubernetes>=4.11.0->-r requirements.txt (line 2))
Downloading parver-0.5-py3-none-any.whl.metadata (2.7 kB)
Collecting requests<3.0,>=2.21 (from pulumi_kubernetes>=4.11.0->-r requirements.txt (line 2))
Downloading requests-2.32.3-py3-none-any.whl.metadata (4.6 kB)
Collecting typing-extensions>=4.11 (from pulumi_kubernetes>=4.11.0->-r requirements.txt (line 2))
Downloading typing_extensions-4.12.2-py3-none-any.whl.metadata (3.0 kB)
Collecting certifi>=14.05.14 (from kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading certifi-2024.7.4-py3-none-any.whl.metadata (2.2 kB)
Collecting python-dateutil>=2.5.3 (from kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading python_dateutil-2.9.0.post0-py2.py3-none-any.whl.metadata (8.4 kB)
Collecting google-auth>=1.0.1 (from kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading google_auth-2.34.0-py2.py3-none-any.whl.metadata (4.7 kB)
Collecting websocket-client!=0.40.0,!=0.41.*,!=0.42.*,>=0.32.0 (from kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading websocket_client-1.8.0-py3-none-any.whl.metadata (8.0 kB)
Collecting requests-oauthlib (from kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading requests_oauthlib-2.0.0-py2.py3-none-any.whl.metadata (11 kB)
Collecting oauthlib>=3.2.2 (from kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading oauthlib-3.2.2-py3-none-any.whl.metadata (7.5 kB)
Collecting urllib3>=1.24.2 (from kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading urllib3-2.2.2-py3-none-any.whl.metadata (6.4 kB)
Collecting soupsieve>1.2 (from beautifulsoup4->-r requirements.txt (line 4))
Downloading soupsieve-2.6-py3-none-any.whl.metadata (4.6 kB)
Collecting cachetools<6.0,>=2.0.0 (from google-auth>=1.0.1->kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading cachetools-5.5.0-py3-none-any.whl.metadata (5.3 kB)
Collecting pyasn1-modules>=0.2.1 (from google-auth>=1.0.1->kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading pyasn1_modules-0.4.0-py3-none-any.whl.metadata (3.4 kB)
Collecting rsa<5,>=3.1.4 (from google-auth>=1.0.1->kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading rsa-4.9-py3-none-any.whl.metadata (4.2 kB)
Collecting arpeggio>=1.7 (from parver>=0.2.1->pulumi_kubernetes>=4.11.0->-r requirements.txt (line 2))
Downloading Arpeggio-2.0.2-py2.py3-none-any.whl.metadata (2.4 kB)
Collecting attrs>=19.2 (from parver>=0.2.1->pulumi_kubernetes>=4.11.0->-r requirements.txt (line 2))
Downloading attrs-24.2.0-py3-none-any.whl.metadata (11 kB)
Collecting charset-normalizer<4,>=2 (from requests<3.0,>=2.21->pulumi_kubernetes>=4.11.0->-r requirements.txt (line 2))
Downloading charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl.metadata (33 kB)
Collecting idna<4,>=2.5 (from requests<3.0,>=2.21->pulumi_kubernetes>=4.11.0->-r requirements.txt (line 2))
Downloading idna-3.7-py3-none-any.whl.metadata (9.9 kB)
Collecting pyasn1<0.7.0,>=0.4.6 (from pyasn1-modules>=0.2.1->google-auth>=1.0.1->kubernetes>=4.7.1->-r requirements.txt (line 3))
Downloading pyasn1-0.6.0-py2.py3-none-any.whl.metadata (8.3 kB)
Downloading pulumi-3.129.0-py3-none-any.whl (270 kB)
Downloading pulumi_kubernetes-4.17.1-py3-none-any.whl (2.5 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 2.5/2.5 MB 17.2 MB/s eta 0:00:00
Downloading kubernetes-30.1.0-py2.py3-none-any.whl (1.7 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 1.7/1.7 MB 20.5 MB/s eta 0:00:00
Downloading beautifulsoup4-4.12.3-py3-none-any.whl (147 kB)
Downloading PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (718 kB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 718.5/718.5 kB 15.2 MB/s eta 0:00:00
Downloading packaging-24.1-py3-none-any.whl (53 kB)
Downloading certifi-2024.7.4-py3-none-any.whl (162 kB)
Downloading dill-0.3.8-py3-none-any.whl (116 kB)
Downloading google_auth-2.34.0-py2.py3-none-any.whl (200 kB)
Downloading grpcio-1.60.2-cp310-cp310-manylinux_2_17_aarch64.whl (5.1 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 5.1/5.1 MB 25.2 MB/s eta 0:00:00
Downloading oauthlib-3.2.2-py3-none-any.whl (151 kB)
Downloading parver-0.5-py3-none-any.whl (15 kB)
Downloading protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl (293 kB)
Downloading python_dateutil-2.9.0.post0-py2.py3-none-any.whl (229 kB)
Downloading requests-2.32.3-py3-none-any.whl (64 kB)
Downloading semver-2.13.0-py2.py3-none-any.whl (12 kB)
Downloading six-1.16.0-py2.py3-none-any.whl (11 kB)
Downloading soupsieve-2.6-py3-none-any.whl (36 kB)
Downloading typing_extensions-4.12.2-py3-none-any.whl (37 kB)
Downloading urllib3-2.2.2-py3-none-any.whl (121 kB)
Downloading websocket_client-1.8.0-py3-none-any.whl (58 kB)
Downloading requests_oauthlib-2.0.0-py2.py3-none-any.whl (24 kB)
Downloading Arpeggio-2.0.2-py2.py3-none-any.whl (55 kB)
Downloading attrs-24.2.0-py3-none-any.whl (63 kB)
Downloading cachetools-5.5.0-py3-none-any.whl (9.5 kB)
Downloading charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (138 kB)
Downloading idna-3.7-py3-none-any.whl (66 kB)
Downloading pyasn1_modules-0.4.0-py3-none-any.whl (181 kB)
Downloading rsa-4.9-py3-none-any.whl (34 kB)
Downloading pyasn1-0.6.0-py2.py3-none-any.whl (85 kB)
Installing collected packages: arpeggio, websocket-client, urllib3, typing-extensions, soupsieve, six, semver, pyyaml, pyasn1, protobuf, packaging, oauthlib, idna, grpcio, dill, charset-normalizer, certifi, cachetools, attrs, rsa, requests, python-dateutil, pyasn1-modules, pulumi, parver, beautifulsoup4, requests-oauthlib, pulumi_kubernetes, google-auth, kubernetes
Successfully installed arpeggio-2.0.2 attrs-24.2.0 beautifulsoup4-4.12.3 cachetools-5.5.0 certifi-2024.7.4 charset-normalizer-3.3.2 dill-0.3.8 google-auth-2.34.0 grpcio-1.60.2 idna-3.7 kubernetes-30.1.0 oauthlib-3.2.2 packaging-24.1 parver-0.5 protobuf-4.25.4 pulumi-3.129.0 pulumi_kubernetes-4.17.1 pyasn1-0.6.0 pyasn1-modules-0.4.0 python-dateutil-2.9.0.post0 pyyaml-6.0.2 requests-2.32.3 requests-oauthlib-2.0.0 rsa-4.9 semver-2.13.0 six-1.16.0 soupsieve-2.6 typing-extensions-4.12.2 urllib3-2.2.2 websocket-client-1.8.0
Finished installing dependencies
Finished installing dependencies
resource plugin kubernetes-4.17.1 installing
Downloading plugin: 57.51 MiB / 57.51 MiB [=========================] 100.00% 2s
task: [pulumi-config] pulumi stack select --create containercraft/kargo/ci || true
task: [pulumi-config] pulumi config set --path kubernetes.distribution talos
task: [pulumi-config] pulumi config set --path kubernetes.context admin@talos-kargo-docker
task: [pulumi-config] pulumi config set --path kubernetes.kubeconfig /workspaces/Kargo/.kube/config
task: [pulumi-config] pulumi config set --path cilium.enabled false
task: [pulumi-config] pulumi config set --path multus.enabled false
task: [pulumi-config] pulumi config set --path vm.enabled false
task: [pulumi-cancel] pulumi cancel --yes --stack containercraft/kargo/ci 2>/dev/null || true
task: [pulumi-deploy] pulumi up --yes --skip-preview --refresh --continue-on-error --stack containercraft/kargo/ci || true
pulumi up --yes --skip-preview --refresh --stack containercraft/kargo/ci
Updating (containercraft/ci)
warning: Your Pulumi organization is on an expired trial or a canceled subscription. Update your subscription to maintain organization access: https://app.pulumi.com/containercraft/settings/billing-usage
View in Browser (Ctrl+O): https://app.pulumi.com/containercraft/kargo/ci/updates/50
Type Name Status Info
+ pulumi:pulumi:Stack kargo-ci **creating failed (77s)** 1 error; 4 messages
+ ├─ pulumi:providers:kubernetes k8sProvider created (0.15s)
+ ├─ kubernetes:yaml:ConfigFile cdi-operator created (18s)
+ │ ├─ kubernetes:core/v1:Namespace cdi created (0.15s)
+ │ ├─ kubernetes:core/v1:ServiceAccount cdi/cdi-operator created (0.49s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRole cdi-operator-cluster created (0.14s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleBinding cdi-operator created (0.28s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:Role cdi/cdi-operator created (0.74s)
+ │ ├─ kubernetes:apiextensions.k8s.io/v1:CustomResourceDefinition cdis.cdi.kubevirt.io created (1s)
+ │ ├─ kubernetes:apps/v1:Deployment cdi/cdi-operator created (7s)
+ │ └─ kubernetes:rbac.authorization.k8s.io/v1:RoleBinding cdi/cdi-operator created (0.61s)
+ ├─ kubernetes:core/v1:Namespace cert-manager created (0.26s)
+ │ └─ kubernetes:helm.sh/v3:Release cert-manager created (20s)
+ │ └─ kubernetes:cert-manager.io/v1:ClusterIssuer cluster-selfsigned-issuer-root created (0.26s)
+ │ └─ kubernetes:cert-manager.io/v1:Certificate cluster-selfsigned-issuer-ca created (0.50s)
+ │ └─ kubernetes:cert-manager.io/v1:ClusterIssuer cluster-selfsigned-issuer created (0.32s)
+ │ └─ kubernetes:core/v1:Secret cluster-selfsigned-issuer-ca-secret created (1s)
+ ├─ kubernetes:core/v1:Namespace kubevirt created (0.45s)
+ │ └─ kubernetes:yaml:ConfigFile kubevirt-operator created (23s)
+ │ ├─ kubernetes:core/v1:ServiceAccount kubevirt/kubevirt-operator created (0.31s)
+ │ ├─ kubernetes:scheduling.k8s.io/v1:PriorityClass kubevirt/kubevirt-cluster-critical created (0.69s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:RoleBinding kubevirt/kubevirt-operator-rolebinding created (1s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRole kubevirt/kubevirt.io:operator created (0.89s)
+ │ ├─ kubernetes:kubevirt.io/v1:KubeVirt kubevirt created (3s)
+ │ ├─ kubernetes:apps/v1:Deployment kubevirt/virt-operator created (22s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRole kubevirt/kubevirt-operator created (1s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:Role kubevirt/kubevirt-operator created (1s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleBinding kubevirt/kubevirt-operator created (1s)
+ │ └─ kubernetes:apiextensions.k8s.io/v1:CustomResourceDefinition kubevirt/kubevirts.kubevirt.io created (2s)
+ ├─ kubernetes:core/v1:Namespace hostpath-provisioner created (0.34s)
+ │ └─ kubernetes:yaml:ConfigFile hostpath-provisioner-webhook created (12s)
+ │ ├─ kubernetes:cert-manager.io/v1:Issuer hostpath-provisioner/selfsigned-issuer created (0.43s)
+ │ ├─ kubernetes:cert-manager.io/v1:Certificate hostpath-provisioner/hostpath-provisioner-operator-webhook-service-cert created (0.78s)
+ │ ├─ kubernetes:core/v1:Service hostpath-provisioner/hostpath-provisioner-operator-webhook-service created (11s)
+ │ ├─ kubernetes:admissionregistration.k8s.io/v1:ValidatingWebhookConfiguration hostpathprovisioner.kubevirt.io created (1s)
+ │ └─ kubernetes:yaml:ConfigFile hostpath-provisioner-operator created (28s)
+ │ ├─ kubernetes:core/v1:ServiceAccount hostpath-provisioner-operator created (0.87s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:Role hostpath-provisioner-operator created (2s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:RoleBinding hostpath-provisioner-operator created (1s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRole hostpath-provisioner-operator created (2s)
+ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleBinding hostpath-provisioner-operator created (1s)
+ │ ├─ kubernetes:apiextensions.k8s.io/v1:CustomResourceDefinition hostpathprovisioners.hostpathprovisioner.kubevirt.io created (2s)
+ │ └─ kubernetes:apps/v1:Deployment hostpath-provisioner-operator created (24s)
└─ kubernetes:apiextensions.k8s.io/v1:CustomResourceDefinition hostpathprovisioners **failed** 1 error
Diagnostics:
pulumi:pulumi:Stack (kargo-ci):
Using helm release version: cert-manager/1.15.1
Setting version to latest stable: kubevirt/1.3.0
Using helm release version: hostpath-provisioner/0.19.0
Using helm release version: cdi/1.59.0
error: update failed
kubernetes:apiextensions.k8s.io/v1:CustomResourceDefinition (hostpathprovisioners):
error: resource 'hostpathprovisioners.hostpathprovisioner.kubevirt.io' does not exist
Outputs:
cert_manager_selfsigned_cert: [secret]
versions : {
cdi : {
enabled: true
version: "1.59.0"
}
cert_manager : {
enabled: true
version: "1.15.1"
}
cilium : {
enabled: false
version: <null>
}
hostpath_provisioner: {
enabled: true
version: "0.19.0"
}
kubevirt : {
enabled: true
version: "1.3.0"
}
}
Resources:
+ 43 created
Duration: 1m23s
Updating (containercraft/ci)
warning: Your Pulumi organization is on an expired trial or a canceled subscription. Update your subscription to maintain organization access: https://app.pulumi.com/containercraft/settings/billing-usage
View in Browser (Ctrl+O): https://app.pulumi.com/containercraft/kargo/ci/updates/51
Type Name Status Info
pulumi:pulumi:Stack kargo-ci 4 messages
├─ kubernetes:yaml:ConfigFile cdi-operator
│ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleBinding cdi-operator
│ ├─ kubernetes:core/v1:Namespace cdi
│ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRole cdi-operator-cluster
│ ├─ kubernetes:core/v1:ServiceAccount cdi/cdi-operator
│ ├─ kubernetes:rbac.authorization.k8s.io/v1:Role cdi/cdi-operator
│ ├─ kubernetes:rbac.authorization.k8s.io/v1:RoleBinding cdi/cdi-operator
│ ├─ kubernetes:apps/v1:Deployment cdi/cdi-operator
│ ├─ kubernetes:apiextensions.k8s.io/v1:CustomResourceDefinition cdis.cdi.kubevirt.io
+ │ └─ kubernetes:cdi.kubevirt.io/v1beta1:CDI cdi created (0.65s)
├─ kubernetes:core/v1:Namespace hostpath-provisioner
│ └─ kubernetes:yaml:ConfigFile hostpath-provisioner-webhook
│ ├─ kubernetes:yaml:ConfigFile hostpath-provisioner-operator
│ │ ├─ kubernetes:core/v1:ServiceAccount hostpath-provisioner-operator
│ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleBinding hostpath-provisioner-operator
│ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:RoleBinding hostpath-provisioner-operator
│ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRole hostpath-provisioner-operator
│ │ ├─ kubernetes:rbac.authorization.k8s.io/v1:Role hostpath-provisioner-operator
│ │ ├─ kubernetes:apps/v1:Deployment hostpath-provisioner-operator
│ │ ├─ kubernetes:apiextensions.k8s.io/v1:CustomResourceDefinition hostpathprovisioners.hostpathprovisioner.kubevirt.io
+ │ │ └─ kubernetes:hostpathprovisioner.kubevirt.io/v1beta1:HostPathProvisioner hostpath-provisioner-hpp created (1s)
+ │ │ └─ kubernetes:storage.k8s.io/v1:StorageClass hostpath-storage-class-ssd created (0.29s)
│ ├─ kubernetes:cert-manager.io/v1:Certificate hostpath-provisioner/hostpath-provisioner-operator-webhook-service-cert
│ ├─ kubernetes:cert-manager.io/v1:Issuer hostpath-provisioner/selfsigned-issuer
│ ├─ kubernetes:admissionregistration.k8s.io/v1:ValidatingWebhookConfiguration hostpathprovisioner.kubevirt.io
│ └─ kubernetes:core/v1:Service hostpath-provisioner/hostpath-provisioner-operator-webhook-service
├─ kubernetes:core/v1:Namespace kubevirt
│ └─ kubernetes:yaml:ConfigFile kubevirt-operator
│ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleBinding kubevirt/kubevirt-operator
│ ├─ kubernetes:rbac.authorization.k8s.io/v1:Role kubevirt/kubevirt-operator
│ ├─ kubernetes:core/v1:ServiceAccount kubevirt/kubevirt-operator
│ ├─ kubernetes:apps/v1:Deployment kubevirt/virt-operator
~ │ ├─ kubernetes:kubevirt.io/v1:KubeVirt kubevirt updated (0.40s) [diff: ~spec]
│ ├─ kubernetes:rbac.authorization.k8s.io/v1:RoleBinding kubevirt/kubevirt-operator-rolebinding
│ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRole kubevirt/kubevirt.io:operator
│ ├─ kubernetes:scheduling.k8s.io/v1:PriorityClass kubevirt/kubevirt-cluster-critical
│ ├─ kubernetes:rbac.authorization.k8s.io/v1:ClusterRole kubevirt/kubevirt-operator
│ └─ kubernetes:apiextensions.k8s.io/v1:CustomResourceDefinition kubevirt/kubevirts.kubevirt.io
├─ pulumi:providers:kubernetes k8sProvider
└─ kubernetes:core/v1:Namespace cert-manager
└─ kubernetes:helm.sh/v3:Release cert-manager
└─ kubernetes:cert-manager.io/v1:ClusterIssuer cluster-selfsigned-issuer-root
└─ kubernetes:cert-manager.io/v1:Certificate cluster-selfsigned-issuer-ca
└─ kubernetes:cert-manager.io/v1:ClusterIssuer cluster-selfsigned-issuer
└─ kubernetes:core/v1:Secret cluster-selfsigned-issuer-ca-secret
Diagnostics:
pulumi:pulumi:Stack (kargo-ci):
Using helm release version: cert-manager/1.15.1
Setting version to latest stable: kubevirt/1.3.0
Using helm release version: hostpath-provisioner/0.19.0
Using helm release version: cdi/1.59.0
Outputs:
cert_manager_selfsigned_cert: [secret]
versions : {
cdi : {
enabled: true
version: "1.59.0"
}
cert_manager : {
enabled: true
version: "1.15.1"
}
cilium : {
enabled: false
version: <null>
}
hostpath_provisioner: {
enabled: true
version: "0.19.0"
}
kubevirt : {
enabled: true
version: "1.3.0"
}
}
Resources:
+ 3 created
~ 1 updated
4 changes. 42 unchanged
Duration: 23s
task: [all-pods-ready] bash -c 'until [ "$(kubectl get pods --all-namespaces --no-headers | grep -v "Running\\|Completed\\|Succeeded" | wc -l)" -eq 0 ]; do echo "Waiting for pods..."; sleep 5; done'
Waiting for pods...
Waiting for pods...
Waiting for pods...
task: [all-pods-ready] kubectl get pods --all-namespaces --show-labels --kubeconfig /workspaces/Kargo/.kube/config
NAMESPACE NAME READY STATUS RESTARTS AGE LABELS
cdi cdi-apiserver-5d565ddb6-x7nhk 1/1 Running 0 16s app.kubernetes.io/component=storage,app.kubernetes.io/managed-by=cdi-operator,app=containerized-data-importer,cdi.kubevirt.io=cdi-apiserver,operator.cdi.kubevirt.io/createVersion=v1.59.0,pod-template-hash=5d565ddb6
cdi cdi-deployment-fb59bcc87-g89kc 1/1 Running 0 16s app.kubernetes.io/component=storage,app.kubernetes.io/managed-by=cdi-operator,app=containerized-data-importer,cdi.kubevirt.io=cdi-deployment,operator.cdi.kubevirt.io/createVersion=v1.59.0,pod-template-hash=fb59bcc87,prometheus.cdi.kubevirt.io=true
cdi cdi-operator-595bfb44cd-pdck9 1/1 Running 0 107s cdi.kubevirt.io=cdi-operator,name=cdi-operator,operator.cdi.kubevirt.io=,pod-template-hash=595bfb44cd,prometheus.cdi.kubevirt.io=true
cdi cdi-uploadproxy-7657d8d89d-rmhwc 1/1 Running 0 16s app.kubernetes.io/component=storage,app.kubernetes.io/managed-by=cdi-operator,app=containerized-data-importer,cdi.kubevirt.io=cdi-uploadproxy,operator.cdi.kubevirt.io/createVersion=v1.59.0,pod-template-hash=7657d8d89d
cert-manager cert-manager-38614ca1-58d4cd5875-h2sxl 1/1 Running 0 115s app.kubernetes.io/component=controller,app.kubernetes.io/instance=cert-manager-38614ca1,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=cert-manager,app.kubernetes.io/version=v1.15.1,app=cert-manager,helm.sh/chart=cert-manager-v1.15.1,pod-template-hash=58d4cd5875
cert-manager cert-manager-38614ca1-cainjector-85ff6484cc-sshh4 1/1 Running 0 115s app.kubernetes.io/component=cainjector,app.kubernetes.io/instance=cert-manager-38614ca1,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=cainjector,app.kubernetes.io/version=v1.15.1,app=cainjector,helm.sh/chart=cert-manager-v1.15.1,pod-template-hash=85ff6484cc
cert-manager cert-manager-38614ca1-webhook-6bbfb456d7-jjpfh 1/1 Running 0 115s app.kubernetes.io/component=webhook,app.kubernetes.io/instance=cert-manager-38614ca1,app.kubernetes.io/managed-by=Helm,app.kubernetes.io/name=webhook,app.kubernetes.io/version=v1.15.1,app=webhook,helm.sh/chart=cert-manager-v1.15.1,pod-template-hash=6bbfb456d7
hostpath-provisioner hostpath-provisioner-operator-84d8988476-qg8ng 1/1 Running 0 63s name=hostpath-provisioner-operator,pod-template-hash=84d8988476,prometheus.hostpathprovisioner.kubevirt.io=true
kube-system coredns-64b67fc8fd-bnz6p 1/1 Running 0 3m51s k8s-app=kube-dns,pod-template-hash=64b67fc8fd
kube-system coredns-64b67fc8fd-f2bjd 1/1 Running 0 3m51s k8s-app=kube-dns,pod-template-hash=64b67fc8fd
kube-system kube-apiserver-talos-kargo-docker-controlplane-1 1/1 Running 0 3m21s k8s-app=kube-apiserver,tier=control-plane
kube-system kube-controller-manager-talos-kargo-docker-controlplane-1 1/1 Running 2 (4m28s ago) 3m k8s-app=kube-controller-manager,tier=control-plane
kube-system kube-flannel-qpprj 1/1 Running 0 3m47s controller-revision-hash=85686fb59d,k8s-app=flannel,pod-template-generation=1,tier=node
kube-system kube-proxy-txhhj 1/1 Running 0 3m47s controller-revision-hash=6948f8df77,k8s-app=kube-proxy,pod-template-generation=1,tier=node
kube-system kube-scheduler-talos-kargo-docker-controlplane-1 1/1 Running 2 (4m28s ago) 2m33s k8s-app=kube-scheduler,tier=control-plane
kubevirt virt-api-7976d99767-298wb 0/1 Running 1 (11s ago) 60s app.kubernetes.io/component=kubevirt,app.kubernetes.io/managed-by=virt-operator,app.kubernetes.io/version=v1.3.0,kubevirt.io=virt-api,pod-template-hash=7976d99767,prometheus.kubevirt.io=true
kubevirt virt-controller-bd5f9c85b-dtvpq 0/1 Running 1 (17s ago) 35s app.kubernetes.io/component=kubevirt,app.kubernetes.io/managed-by=virt-operator,app.kubernetes.io/version=v1.3.0,kubevirt.io=virt-controller,pod-template-hash=bd5f9c85b,prometheus.kubevirt.io=true
kubevirt virt-controller-bd5f9c85b-zdlgv 0/1 Running 1 (17s ago) 35s app.kubernetes.io/component=kubevirt,app.kubernetes.io/managed-by=virt-operator,app.kubernetes.io/version=v1.3.0,kubevirt.io=virt-controller,pod-template-hash=bd5f9c85b,prometheus.kubevirt.io=true
kubevirt virt-handler-tldnt 0/1 Running 0 35s app.kubernetes.io/component=kubevirt,app.kubernetes.io/managed-by=virt-operator,app.kubernetes.io/version=v1.3.0,controller-revision-hash=7d64dbc5bc,kubevirt.io=virt-handler,pod-template-generation=1,prometheus.kubevirt.io=true
kubevirt virt-operator-7c7b6c4d87-hfgw6 1/1 Running 0 92s kubevirt.io=virt-operator,name=virt-operator,pod-template-hash=7c7b6c4d87,prometheus.kubevirt.io=true
kubevirt virt-operator-7c7b6c4d87-qcwrx 1/1 Running 0 92s kubevirt.io=virt-operator,name=virt-operator,pod-template-hash=7c7b6c4d87,prometheus.kubevirt.io=true
direnv: loading /workspaces/Kargo/.envrc
direnv: export +ARCH +KUBECONFIG +OMNICONFIG +PULUMI_AUTOMATION_API_SKIP_VERSION_CHECK +PULUMI_HOME +PULUMI_K8S_DELETE_UNREACHABLE +PULUMI_SKIP_CONFIRMATIONS +PULUMI_SKIP_UPDATE_CHECK +TALOSCONFIG ~BROWSER ~PATH
```