@tmbdev
Created May 1, 2019 18:10
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"snap \"microk8s\" is already installed, see 'snap help refresh'\n"
]
}
],
"source": [
"%use bash\n",
"sudo snap install microk8s --classic"
]
},
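{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optional sanity check (a sketch added for illustration, not part of the original run): confirm the microk8s snap is installed and which revision/channel it came from before restarting the cluster."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"kernel": "Bash"
},
"outputs": [],
"source": [
"# Hypothetical check, not from the original notebook: list the installed microk8s snap.\n",
"snap list microk8s"
]
},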
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Disabling pod scheduling\n",
"node/sedna cordoned\n",
"error: unable to drain node \"sedna\", aborting command...\n",
"\n",
"There are pending nodes to be drained:\n",
" sedna\n",
"error: cannot delete DaemonSet-managed Pods (use --ignore-daemonsets to ignore): default/nginx-ingress-microk8s-controller-mtnd8, kube-system/nvidia-device-plugin-daemonset-4rcjj\n",
"\u001b[0m\u001b[?25h\u001b[KStopped.mon-apiserver microk8s.daemon-apiserver-kicker microk8s.da…\n"
]
}
],
"source": [
"sudo microk8s.stop\n",
"sleep 10"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[0m\u001b[?25h\u001b[KStarted.emon-apiserver microk8s.daemon-apiserver-kicker microk8s.d…\n",
"Enabling pod scheduling\n",
"node/sedna uncordoned\n"
]
}
],
"source": [
"sudo microk8s.start"
]
},
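{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optional check (a sketch, not part of the original run): after `microk8s.start`, the single node should report `Ready` before addons are enabled."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"kernel": "Bash"
},
"outputs": [],
"source": [
"# Hypothetical check, not from the original notebook: confirm the node is back and Ready.\n",
"microk8s.kubectl get nodes"
]
},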
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Enabling dashboard\n",
"secret/kubernetes-dashboard-certs unchanged\n",
"serviceaccount/kubernetes-dashboard unchanged\n",
"deployment.apps/kubernetes-dashboard unchanged\n",
"service/kubernetes-dashboard unchanged\n",
"service/monitoring-grafana unchanged\n",
"service/monitoring-influxdb unchanged\n",
"service/heapster unchanged\n",
"deployment.extensions/monitoring-influxdb-grafana-v4 unchanged\n",
"serviceaccount/heapster unchanged\n",
"configmap/heapster-config unchanged\n",
"configmap/eventer-config unchanged\n",
"deployment.extensions/heapster-v1.5.2 unchanged\n",
"dashboard enabled\n",
"Enabling the private registry\n",
"Enabling default storage class\n",
"deployment.extensions/hostpath-provisioner unchanged\n",
"storageclass.storage.k8s.io/microk8s-hostpath unchanged\n",
"Storage will be available soon\n",
"Applying registry manifest\n",
"namespace/container-registry unchanged\n",
"persistentvolumeclaim/registry-claim unchanged\n",
"deployment.extensions/registry unchanged\n",
"service/registry unchanged\n",
"The registry is enabled\n",
"Enabling NVIDIA GPU\n",
"NVIDIA kernel module detected\n",
"Enabling DNS\n",
"Applying manifest\n",
"service/kube-dns unchanged\n",
"serviceaccount/kube-dns unchanged\n",
"configmap/kube-dns unchanged\n",
"deployment.extensions/kube-dns configured\n",
"Restarting kubelet\n",
"DNS is enabled\n",
"Applying manifest\n",
"daemonset.extensions/nvidia-device-plugin-daemonset unchanged\n",
"NVIDIA is enabled\n",
"Enabling default storage class\n",
"deployment.extensions/hostpath-provisioner unchanged\n",
"storageclass.storage.k8s.io/microk8s-hostpath unchanged\n",
"Storage will be available soon\n",
"Enabling DNS\n",
"Applying manifest\n",
"service/kube-dns unchanged\n",
"serviceaccount/kube-dns unchanged\n",
"configmap/kube-dns unchanged\n",
"deployment.extensions/kube-dns configured\n",
"Restarting kubelet\n",
"DNS is enabled\n"
]
}
],
"source": [
"microk8s.enable dashboard registry gpu storage dns"
]
},
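{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optional check (a sketch, not part of the original run): once the `gpu` addon and the NVIDIA device-plugin daemonset are up, the node's resources should include `nvidia.com/gpu`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"kernel": "Bash"
},
"outputs": [],
"source": [
"# Hypothetical check, not from the original notebook: look for the GPU resource and the device-plugin pod.\n",
"microk8s.kubectl describe node | grep -i \"nvidia.com/gpu\" || echo \"no nvidia.com/gpu resource advertised yet\"\n",
"microk8s.kubectl -n kube-system get pods | grep -i nvidia || true"
]
},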
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"microk8s is running\n",
"addons:\n",
"jaeger: disabled\n",
"fluentd: disabled\n",
"gpu: enabled\n",
"storage: enabled\n",
"registry: enabled\n",
"ingress: enabled\n",
"dns: enabled\n",
"metrics-server: disabled\n",
"prometheus: disabled\n",
"istio: disabled\n",
"dashboard: enabled\n"
]
}
],
"source": [
"sleep 20\n",
"microk8s.status"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"data": {
"text/html": [
"<div class=\"sos_hint\">Cell content saved to <a href=\"nvidia-pod-example.yaml\" target=\"_blank\">nvidia-pod-example.yaml</a>, use option -r to also execute the cell.</div>"
],
"text/plain": [
"Cell content saved to nvidia-pod-example.yaml, use option -r to also execute the cell.\n"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"%save -f nvidia-pod-example.yaml\n",
"apiVersion: v1\n",
"kind: Pod\n",
"metadata:\n",
" name: cuda-vector-add\n",
"spec:\n",
" restartPolicy: OnFailure\n",
" containers:\n",
" - name: cuda-vector-add\n",
" # https://github.com/kubernetes/kubernetes/blob/v1.7.11/test/images/nvidia-cuda/Dockerfile\n",
" image: \"k8s.gcr.io/cuda-vector-add:v0.1\"\n",
" resources:\n",
" limits:\n",
" nvidia.com/gpu: 1 # requesting 1 GPU"
]
},
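{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optional validation step (a sketch, not part of the original run): a client-side dry run catches YAML or schema mistakes before the pod is actually created. The flag spelling depends on the kubectl version; newer releases use `--dry-run=client`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"kernel": "Bash"
},
"outputs": [],
"source": [
"# Hypothetical validation, not from the original notebook.\n",
"# Older kubectl: --dry-run ; newer kubectl: --dry-run=client\n",
"microk8s.kubectl apply --dry-run -f nvidia-pod-example.yaml"
]
},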
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"pod \"cuda-vector-add\" deleted\n"
]
}
],
"source": [
"microk8s.kubectl delete pod cuda-vector-add || true"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"NAME READY STATUS RESTARTS AGE\n",
"default-http-backend-855bc7bc45-kdw9j 1/1 Running 19 48d\n",
"nginx-ingress-microk8s-controller-mtnd8 1/1 Running 19 48d\n"
]
}
],
"source": [
"microk8s.kubectl get pods"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"pod/cuda-vector-add created\n"
]
}
],
"source": [
"microk8s.kubectl apply -f nvidia-pod-example.yaml"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"NAME READY STATUS RESTARTS AGE\n",
"cuda-vector-add 0/1 Pending 0 1s\n",
"default-http-backend-855bc7bc45-kdw9j 1/1 Running 19 48d\n",
"nginx-ingress-microk8s-controller-mtnd8 1/1 Running 19 48d\n"
]
}
],
"source": [
"microk8s.kubectl get pods"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"NAME READY STATUS RESTARTS AGE\n",
"cuda-vector-add 0/1 Pending 0 9s\n",
"default-http-backend-855bc7bc45-kdw9j 1/1 Running 19 48d\n",
"nginx-ingress-microk8s-controller-mtnd8 1/1 Running 19 48d\n"
]
}
],
"source": [
"sleep 5\n",
"microk8s.kubectl get pods"
]
},
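{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optional debugging step (a sketch, not part of the original run): recent cluster events usually say why a pod is stuck in `Pending`, before reaching for `kubectl describe`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"kernel": "Bash"
},
"outputs": [],
"source": [
"# Hypothetical check, not from the original notebook: show the most recent cluster events.\n",
"microk8s.kubectl get events --sort-by=.metadata.creationTimestamp | tail -n 10"
]
},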
{
"cell_type": "code",
"execution_count": 13,
"metadata": {
"kernel": "Bash"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Name: cuda-vector-add\n",
"Namespace: default\n",
"Priority: 0\n",
"PriorityClassName: <none>\n",
"Node: <none>\n",
"Labels: <none>\n",
"Annotations: kubectl.kubernetes.io/last-applied-configuration:\n",
" {\"apiVersion\":\"v1\",\"kind\":\"Pod\",\"metadata\":{\"annotations\":{},\"name\":\"cuda-vector-add\",\"namespace\":\"default\"},\"spec\":{\"containers\":[{\"image...\n",
"Status: Pending\n",
"IP: \n",
"Containers:\n",
" cuda-vector-add:\n",
" Image: k8s.gcr.io/cuda-vector-add:v0.1\n",
" Port: <none>\n",
" Host Port: <none>\n",
" Limits:\n",
" nvidia.com/gpu: 1\n",
" Requests:\n",
" nvidia.com/gpu: 1\n",
" Environment: <none>\n",
" Mounts:\n",
" /var/run/secrets/kubernetes.io/serviceaccount from default-token-g5p4m (ro)\n",
"Conditions:\n",
" Type Status\n",
" PodScheduled False \n",
"Volumes:\n",
" default-token-g5p4m:\n",
" Type: Secret (a volume populated by a Secret)\n",
" SecretName: default-token-g5p4m\n",
" Optional: false\n",
"QoS Class: BestEffort\n",
"Node-Selectors: <none>\n",
"Tolerations: node.kubernetes.io/not-ready:NoExecute for 300s\n",
" node.kubernetes.io/unreachable:NoExecute for 300s\n",
"Events:\n",
" Type Reason Age From Message\n",
" ---- ------ ---- ---- -------\n",
" Warning FailedScheduling 13s (x2 over 13s) default-scheduler 0/1 nodes are available: 1 Insufficient nvidia.com/gpu.\n"
]
}
],
"source": [
"microk8s.kubectl describe pod cuda-vector-add"
]
},
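{
"cell_type": "markdown",
"metadata": {},
"source": [
"The `FailedScheduling` event above reports `Insufficient nvidia.com/gpu`, i.e. the node is not (yet) advertising a GPU to the scheduler. A sketch of follow-up checks (assumed commands, not part of the original run): inspect the NVIDIA device-plugin pod in `kube-system`, and read the pod's logs once it does schedule and run to completion."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"kernel": "Bash"
},
"outputs": [],
"source": [
"# Hypothetical debugging, not from the original notebook.\n",
"# The device-plugin pod name suffix is generated; list it rather than hard-coding it.\n",
"microk8s.kubectl -n kube-system get pods | grep -i nvidia-device-plugin || true\n",
"# Once cuda-vector-add schedules and completes, its logs show the test result.\n",
"microk8s.kubectl logs cuda-vector-add || true"
]
},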
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"kernel": "Bash"
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "SoS",
"language": "sos",
"name": "sos"
},
"language_info": {
"codemirror_mode": "sos",
"file_extension": ".sos",
"mimetype": "text/x-sos",
"name": "sos",
"nbconvert_exporter": "sos_notebook.converter.SoS_Exporter",
"pygments_lexer": "sos"
},
"sos": {
"kernels": [
[
"Bash",
"bash",
"Bash",
"#E6EEFF",
"shell"
]
],
"panel": {
"displayed": true,
"height": 0
},
"version": "0.19.6"
}
},
"nbformat": 4,
"nbformat_minor": 2
}