MikroTik hEX router, dual WAN (AAISP on ether1, Virgin on ether2, LAN on ether5).
Configuration from scratch:
- Unplug.
- Plug in and immediately hold down the reset switch; release as soon as the light starts flashing (holding it longer switches to CAPs/Netinstall mode instead of a config reset).
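- After the reboot it is back on the stock defconf. The next step (assuming the defaults: admin user, 192.168.88.1 on the LAN ports) is to plug into a LAN port and log in before pasting the config below, e.g.:

ssh admin@192.168.88.1   # blank password on older RouterOS; newer units print one on the sticker
/export                  # sanity-check what defconf actually set up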
from pyspark.sql import SparkSession, Row, DataFrame
from pyspark.context import SparkContext
from pyspark import SparkConf
import time
import os
import python_pachyderm
conf = SparkConf()
minio = False
if minio:
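The snippet is cut off at the if minio: branch. A minimal sketch of how the two branches might continue, pointing Spark's s3a connector at either a local MinIO or the Pachyderm S3 gateway — the endpoints, ports and credentials below are assumptions, not values recovered from the original:

if minio:
    # assumption: a local MinIO with its default example credentials
    conf.set("spark.hadoop.fs.s3a.endpoint", "http://localhost:9000")
    conf.set("spark.hadoop.fs.s3a.access.key", "minioadmin")
    conf.set("spark.hadoop.fs.s3a.secret.key", "minioadmin")
else:
    # assumption: pachd's S3 gateway on its default port 30600 (e.g. via
    # `pachctl port-forward`); with Pachyderm auth disabled the credentials
    # only need to be non-empty
    conf.set("spark.hadoop.fs.s3a.endpoint", "http://localhost:30600")
    conf.set("spark.hadoop.fs.s3a.access.key", "anything")
    conf.set("spark.hadoop.fs.s3a.secret.key", "anything")
conf.set("spark.hadoop.fs.s3a.path.style.access", "true")
spark = SparkSession.builder.config(conf=conf).getOrCreate()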
22/08/12 16:24:39 WARN Utils: Your hostname, mind resolves to a loopback address: 127.0.1.1; using 10.1.255.235 instead (on interface enp6s0f0)
22/08/12 16:24:39 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
22/08/12 16:24:40 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
22/08/12 16:24:40 INFO SparkContext: Running Spark version 3.3.0
22/08/12 16:24:40 INFO ResourceUtils: ==============================================================
22/08/12 16:24:40 INFO ResourceUtils: No custom resources configured for spark.driver.
22/08/12 16:24:40 INFO ResourceUtils: ==============================================================
22/08/12 16:24:40 INFO SparkContext: Submitted application: spark.py
22/08/12 16:24:40 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: o
2022-08-12T15:24:47Z INFO {"source":"s3gateway"} http request: HEAD /master.rando2/nonemptyprefix8
2022-08-12T15:24:47Z INFO pfs.API.InspectBranch {"request":{"branch":{"repo":{"name":"rando2","type":"user"},"name":"master"}}}
2022-08-12T15:24:47Z INFO pfs.API.InspectBranch {"duration":0.001245433,"request":{"branch":{"repo":{"name":"rando2","type":"user"},"name":"master"}},"response":{"branch":{"repo":{"name":"rando2","type":"user"},"name":"master"},"head":{"branch":{"repo":{"name":"rando2","type":"user"},"name":"master"},"id":"840126a60c0d4743baa399deedb9d54a"}}}
2022-08-12T15:24:47Z INFO pfs.API.InspectFile {"request":{"file":{"commit":{"branch":{"repo":{"name":"rando2","type":"user"},"name":"master"}},"path":"nonemptyprefix8"}}}
2022-08-12T15:24:47Z ERROR pfs.API.InspectFile {"duration":0.011114386,"error":"file nonemptyprefix8 not found in repo rando2 at commit 840126a60c0d4743baa399deedb9d54a","request":{"file":{"commit":{"branch":{"repo":{"name":"rando2","type":"user"},"name":"master"},"id":"840126a
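These pachd lines are the S3 gateway side: the gateway exposes each Pachyderm branch as a bucket named branch.repo, so HEAD /master.rando2/nonemptyprefix8 is s3a asking whether nonemptyprefix8 exists as a plain object on the master branch of the rando2 repo, and the InspectFile "not found" ERROR is just that probe coming back negative (expected if the path is, as its name suggests, a prefix/directory rather than a file). A hypothetical read of that prefix from the Spark side, assuming an s3a-configured session named spark, would look like:

df = spark.read.text("s3a://master.rando2/nonemptyprefix8/")
df.show()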
package main
// run in the vm
// * continuously set imagePullPolicy -> IfNotPresent because 'Always' makes docker hub
// hate us
// * continuously distribute regcreds secret to new serviceaccounts
// TODO: rewrite as k8s operator & admission controller
import (
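The file is cut off at the import block. A minimal sketch of the second loop the comments describe (keep attaching a regcreds image pull secret to every ServiceAccount), assuming client-go with in-cluster config and a secret named regcreds already present in each namespace; the names, 60s interval and error handling are all assumptions, not the original implementation:

package main

import (
	"context"
	"log"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	// assumption: running inside the cluster with a serviceaccount that can
	// list/update ServiceAccounts cluster-wide
	cfg, err := rest.InClusterConfig()
	if err != nil {
		log.Fatal(err)
	}
	clientset, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	for {
		// list ServiceAccounts in all namespaces and add the regcreds pull
		// secret to any that don't reference it yet
		sas, err := clientset.CoreV1().ServiceAccounts("").List(context.TODO(), metav1.ListOptions{})
		if err != nil {
			log.Printf("list serviceaccounts: %v", err)
		} else {
			for _, sa := range sas.Items {
				if hasPullSecret(&sa, "regcreds") {
					continue
				}
				sa.ImagePullSecrets = append(sa.ImagePullSecrets, corev1.LocalObjectReference{Name: "regcreds"})
				if _, err := clientset.CoreV1().ServiceAccounts(sa.Namespace).Update(context.TODO(), &sa, metav1.UpdateOptions{}); err != nil {
					log.Printf("update %s/%s: %v", sa.Namespace, sa.Name, err)
				}
			}
		}
		time.Sleep(60 * time.Second)
	}
}

// hasPullSecret reports whether the ServiceAccount already references the named pull secret.
func hasPullSecret(sa *corev1.ServiceAccount, name string) bool {
	for _, s := range sa.ImagePullSecrets {
		if s.Name == name {
			return true
		}
	}
	return false
}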
controller-0: 14:18:45 INFO juju.cmd running jujud [2.9-rc7 0 d2ad8833d051366ca4d392fa42da40fba506311e gc go1.14.15]
controller-0: 14:18:45 INFO juju.worker.upgradesteps upgrade steps for 2.9-rc7 have already been run.
controller-0: 14:18:45 INFO juju.state using client-side transactions
controller-0: 14:18:45 INFO juju.state starting standard state workers
controller-0: 14:18:45 INFO juju.state creating cloud image metadata storage
controller-0: 14:18:45 INFO juju.state started state for model-9a364618-ad44-4329-8238-9d9b7644ba21 successfully
controller-0: 14:18:45 INFO juju.cmd.jujud juju database opened
controller-0: 14:18:45 INFO juju.state using client-side transactions
controller-0: 14:18:45 INFO juju.state.allwatcher allwatcher loaded for model "9a364618-ad44-4329-8238-9d9b7644ba21" in 57.421562ms
controller-0: 14:18:45 INFO juju.state starting standard state workers
/interface bridge port | |
add bridge=bridge comment=defconf interface=ether3 | |
add bridge=bridge comment=defconf interface=ether4 | |
add bridge=bridge comment=defconf interface=ether5 | |
/ip address | |
# LAN | |
add address=192.168.88.1/24 comment=defconf interface=ether5 network=\ | |
192.168.88.0 | |
# default route for AAISP - XXX should ether1 be pppoe-out1? |
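# If AAISP is delivered over PPPoE on ether1, the default route would normally
# point at the pppoe-out1 interface (or be installed by add-default-route=yes on
# the PPPoE client) rather than at ether1 itself. A sketch of what the dual-WAN
# defaults might look like -- distances, check-gateway and the Virgin DHCP setup
# are assumptions, not part of the original config:
/ip route
add dst-address=0.0.0.0/0 gateway=pppoe-out1 distance=1 check-gateway=ping
# Virgin as backup: let the DHCP client on ether2 install its own default route,
# at a higher distance so it only wins when AAISP is down
/ip dhcp-client
add interface=ether2 add-default-route=yes default-route-distance=2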