# Dataproc demo workflow: create a cluster, submit a Spark job, tunnel into
# the YARN UI over an SSH SOCKS proxy, then tear the cluster down.

# Create the cluster with one preemptible worker.
# (fixed: "n5-standard-4" is not a valid machine type — n1-standard-4)
$ gcloud dataproc clusters create --master-machine-type "n1-standard-4" --worker-machine-type "n1-standard-4" --num-preemptible-workers 1 mycluster

# Submit the Spark job with dynamic allocation disabled so the fixed worker
# count is used. (fixed: backslash path separators would be eaten by the
# shell — use forward slashes)
$ gcloud dataproc jobs submit spark --properties spark.dynamicAllocation.enabled=false --cluster mycluster --class techconf.example.spark01.Main --jars Documents/NetBeansProjects/example-spark01/target/example-spark01-0.1.jar

# Open a SOCKS proxy on local port 1080 to the master node (-D 1080);
# -N keeps the session open without running a remote command.
$ gcloud compute ssh --zone=us-central1-f --ssh-flag="-D" --ssh-flag="1080" --ssh-flag="-N" mycluster-m

# Browse the YARN ResourceManager UI through the SOCKS proxy.
# (fixed typo: "opne" -> "open")
$ open http://mycluster-m:8088/ # with proxy 1080

# Tear down the cluster when done to stop billing.
$ gcloud dataproc clusters delete mycluster