Install vhs.
$ conda create -n textual-demo python=3.10 -y
$ conda activate textual-demo
$ pip install textual
$ vhs < demo.tape
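The tape needs an app to record. As a stand-in, here is a minimal Textual app (an illustrative sketch, not the original demo script) that a demo.tape could type out and capture:

# demo.py - a tiny Textual app to record with vhs (illustrative only)
from textual.app import App, ComposeResult
from textual.widgets import Header, Footer, Static


class DemoApp(App):
    """A header, a footer, and one line of text."""

    def compose(self) -> ComposeResult:
        yield Header()
        yield Footer()
        yield Static("Hello from Textual!")


if __name__ == "__main__":
    DemoApp().run()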

import os
import subprocess
import time
import socket

# Databricks sets these environment variables on every node in the cluster:
# DB_IS_DRIVER marks the driver node and DB_DRIVER_IP gives the driver's address.
DB_IS_DRIVER = os.getenv('DB_IS_DRIVER')
DB_DRIVER_IP = os.getenv('DB_DRIVER_IP')

if DB_IS_DRIVER == "TRUE":
    print("This node is the Dask scheduler.")

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.runners.dask.dask_runner import DaskRunner
from dask.distributed import Client, performance_report


class NoopDoFn(beam.DoFn):
    """A DoFn that does no real work, just sleeps to simulate per-element cost."""

    def process(self, item):
        import time
        time.sleep(0.1)
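A hedged sketch of the driver code that typically sits below definitions like these: start a local Dask cluster, point Beam's DaskRunner at it, and wrap the run in a Dask performance report. The `--dask_client_address` option name reflects recent Beam releases and the element count is arbitrary; treat both as assumptions.

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.runners.dask.dask_runner import DaskRunner
from dask.distributed import Client, performance_report


class NoopDoFn(beam.DoFn):
    def process(self, item):
        import time
        time.sleep(0.1)


if __name__ == "__main__":
    client = Client()  # local cluster for the benchmark
    options = PipelineOptions(["--dask_client_address", client.scheduler.address])
    with performance_report(filename="beam-dask-report.html"):
        with beam.Pipeline(runner=DaskRunner(), options=options) as p:
            p | beam.Create(list(range(100))) | beam.ParDo(NoopDoFn())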

import warnings
import time
from contextlib import contextmanager

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.runners.dask.dask_runner import DaskRunner
from dask.distributed import Client
from distributed.versions import VersionMismatchWarning
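These imports point at a script that silences client/scheduler version-mismatch noise and times its own stages. A small self-contained sketch of those two helpers (the helper name and output format are assumptions):

import time
import warnings
from contextlib import contextmanager

from distributed.versions import VersionMismatchWarning

# Dask emits VersionMismatchWarning when client, scheduler and workers run
# slightly different package versions; ignore it to keep benchmark output clean.
warnings.simplefilter("ignore", VersionMismatchWarning)


@contextmanager
def timed(label):
    """Print how long the enclosed block of work took."""
    start = time.monotonic()
    yield
    print(f"{label}: {time.monotonic() - start:.2f}s")


if __name__ == "__main__":
    with timed("sleeping"):
        time.sleep(0.5)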

apiVersion: v1
kind: ServiceAccount
metadata:
  name: jovyan
---
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  name: jovyan
rules:

apiVersion: kubernetes.dask.org/v1
kind: DaskCluster
metadata:
  name: demo
spec:
  worker:
    replicas: 2
    spec:
      containers:
      - name: worker
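Once a manifest like this is applied, the operator creates a Service for the scheduler, so a client in the same namespace can connect to it from Python. A hedged sketch; the `demo-scheduler` service name follows the operator's usual `<cluster-name>-scheduler` convention and the port is the Dask default, but verify both against your deployment:

from dask.distributed import Client

# The Dask operator conventionally exposes the scheduler as "<cluster-name>-scheduler".
client = Client("tcp://demo-scheduler:8786")
print(client.scheduler_info())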

# A script that gives a local Dask cluster something to do without stressing hardware.
# Useful for testing the dashboard.
import time

from dask_ctl import get_cluster
from dask.distributed import Client, wait
from dask import delayed
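A self-contained sketch of how such a load generator usually looks; the cluster name passed to get_cluster, the task count and the sleep length are all placeholders:

import time

from dask_ctl import get_cluster
from dask.distributed import Client, wait
from dask import delayed


def snooze(i):
    """A cheap task: sleep briefly and return the input."""
    time.sleep(0.5)
    return i


if __name__ == "__main__":
    # Look up a running cluster by the name dask-ctl knows it by (placeholder name).
    cluster = get_cluster("proxycluster-8786")
    client = Client(cluster)
    while True:
        # Keep a steady trickle of tasks flowing so the dashboard has activity to show.
        futures = client.compute([delayed(snooze)(i) for i in range(100)])
        wait(futures)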