#!/bin/bash
while true
do
    run_interval="5" # how many seconds between checks
    # An active VPN shows up in `nmcli con show` with its UUID and the device it uses (ens160 here).
    active_vpn=$(nmcli con show | grep 4e99cc84-a234-4bc5-bc69-44df89b33fff | grep ens160)
    if ! [ "${active_vpn}" ]
    then
        # Assumed continuation: bring the connection back up when it is not active.
        nmcli con up uuid 4e99cc84-a234-4bc5-bc69-44df89b33fff
    fi
    sleep "${run_interval}"
done
mysql:
  image: mysql:latest
  environment:
    - MYSQL_ROOT_PASSWORD=123
    - MYSQL_DATABASE=test_db
    - MYSQL_USER=myuser
    - MYSQL_PASSWORD=123
  volumes:
    # SQL/shell scripts placed in ./data run once, when the database is first initialized.
    - ./data:/docker-entrypoint-initdb.d
    # Persist the database files on the host.
    - ./MySQL/database:/var/lib/mysql:rw
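Connecting to the service above from code is plain JDBC; a minimal sketch in Scala, assuming the container's port 3306 is published to the host and a MySQL JDBC driver is on the classpath:

import java.sql.DriverManager

object MySqlSmokeTest extends App {
  // Credentials and database name match the compose environment above.
  val url = "jdbc:mysql://localhost:3306/test_db"
  val conn = DriverManager.getConnection(url, "myuser", "123")
  try {
    val rs = conn.createStatement().executeQuery("SELECT 1")
    while (rs.next()) println(rs.getInt(1))
  } finally conn.close()
}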
private static SessionFactory factory;
private static ServiceRegistry serviceRegistry;

public static SessionFactory createSessionFactory() {
    Configuration configuration =
            new Configuration()
                    .configure()
                    .addAnnotatedClass(Employee.class);
    // Assumed continuation: build the registry from the configured properties
    // and create the SessionFactory from it (Hibernate 4.x style).
    serviceRegistry = new ServiceRegistryBuilder()
            .applySettings(configuration.getProperties())
            .buildServiceRegistry();
    factory = configuration.buildSessionFactory(serviceRegistry);
    return factory;
}
khajavi / TestJalali.java
Created September 1, 2016 18:29
Jalali Calendar in Java
import com.ibm.icu.text.DateFormat;
import com.ibm.icu.util.Calendar;

public class TestJalali {
    public static void main(String[] args) {
        // The same instant, first printed with Joda-Time's default ISO chronology.
        org.joda.time.base.AbstractInstant time = new org.joda.time.DateTime(1471590261000L);
        System.out.println(time);

        // Assumed continuation: ICU picks the Persian (Jalali) calendar from the locale keyword.
        com.ibm.icu.util.ULocale locale = new com.ibm.icu.util.ULocale("fa_IR@calendar=persian");
        com.ibm.icu.util.Calendar c = Calendar.getInstance(locale);
        c.setTime(time.toDate());
        System.out.println(DateFormat.getDateInstance(DateFormat.FULL, locale).format(c.getTime()));
    }
}
khajavi / TimeStampExample.scala
Last active July 27, 2016 11:49
Example of using the timestamp data type in an Apache Spark Dataset
import java.sql.Timestamp
import java.text.SimpleDateFormat
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

case class Record(time: java.sql.Timestamp)

object TimeStampExample extends App {
  val sc = new SparkContext(new SparkConf().setAppName("app").setMaster("local"))
  // Assumed continuation: put a Timestamp into a Dataset via the SQLContext implicits.
  val sqlContext = new SQLContext(sc)
  import sqlContext.implicits._
  val format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
  val ds = Seq(Record(new Timestamp(format.parse("2016-07-27 11:49:00").getTime))).toDS()
  ds.show()
}
object monadLaws extends App {
  // To qualify as a monad, a type has to satisfy three laws.
  // Associativity:
  //   (m flatMap f) flatMap g == m flatMap (x => f(x) flatMap g)
  val f = (a: Int) => List(a)
  val g = (a: Int) => List(a - 1, a + 1)
  val m1 = 1 to 2
  val r1 = m1 flatMap f flatMap g           // left-hand side
  val r2 = m1 flatMap (x => f(x) flatMap g) // right-hand side
  assert(r1 == r2)
}
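The preview stops at associativity; the other two monad laws (left and right identity) can be checked the same way. A sketch, assuming List as the monad and x => List(x) as unit:

object monadIdentityLaws extends App {
  val unit = (a: Int) => List(a)
  val f = (a: Int) => List(a - 1, a + 1)
  val x = 3
  val m = List(1, 2, 3)
  // Left identity: unit(x) flatMap f == f(x)
  assert((unit(x) flatMap f) == f(x))
  // Right identity: m flatMap unit == m
  assert((m flatMap unit) == m)
}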
object forTranslationRules extends App {
  val l1 = 1 to 4
  val l2 = 3 to 7
  val l3 = 2 to 8

  // Simple generator: a single generator translates to map.
  val a1 = for (a <- l1) yield a * 2
  val a2 = l1 map (_ * 2)
  assert(a1 == a2)
}
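The preview cuts off after the single-generator rule; the multi-generator and guard rules follow the same translation scheme. A self-contained sketch in the same style:

object moreForTranslationRules extends App {
  val l1 = 1 to 4
  val l2 = 3 to 7

  // Two generators translate to flatMap + map.
  val b1 = for (a <- l1; b <- l2) yield a * b
  val b2 = l1 flatMap (a => l2 map (b => a * b))
  assert(b1 == b2)

  // A generator with a guard translates to withFilter + map.
  val c1 = for (a <- l1 if a % 2 == 0) yield a * 2
  val c2 = l1 withFilter (_ % 2 == 0) map (_ * 2)
  assert(c1 == c2)
}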
object convertMapToFlatMap extends App {
  val list = 1 to 10
  val res1 = list map (_ * 2)

  // map can always be written as flatMap composed with unit (wrap each result in the container).
  val f = (a: Int) => a * 2
  val unit = (a: Int) => List(a)
  val res2 = list flatMap (f andThen unit)
  assert(res1 == res2)
}
khajavi / basetrait.scala
Created June 18, 2016 09:52
Base model trait for Apache Spark Datasets
// Common parent for heterogeneous case classes that end up in the same Dataset.
trait BaseTrait extends Product with Serializable

case class AT(name: String) extends BaseTrait
case class BT(age: Int) extends BaseTrait

val listA = AT("a1") :: AT("a2") :: Nil
val listB = BT(1) :: BT(2) :: Nil
val list = listA ++ listB

// Assumes a spark-shell style environment where sc and the Dataset implicits are in scope.
val res = sc.parallelize(list).toDS
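The reason for extending Product with Serializable is presumably type inference: every case class already mixes those traits in, so without them on the base trait, listA ++ listB is inferred as List[BaseTrait with Product with Serializable] rather than a plain List[BaseTrait]. A small Spark-free sketch of the difference:

object BaseTraitInference extends App {
  sealed trait WithMixin extends Product with Serializable
  case class A1(name: String) extends WithMixin
  case class B1(age: Int) extends WithMixin

  sealed trait WithoutMixin
  case class A2(name: String) extends WithoutMixin
  case class B2(age: Int) extends WithoutMixin

  // Inferred as List[WithMixin]: a clean element type.
  val clean = List(A1("a"), B1(1))
  // Inferred as List[WithoutMixin with Product with Serializable]: a noisier element type.
  val noisy = List(A2("a"), B2(1))

  println(clean)
  println(noisy)
}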
khajavi / kryo.scala
Last active June 18, 2016 10:06
Apache Spark Kryo Encoder
class Bar(i: Int) {
  override def toString = s"bar $i"
  def bar = i
}

object BarEncoders {
  // Bar is not a case class, so derive a binary Kryo-based encoder for it.
  implicit def barEncoder: org.apache.spark.sql.Encoder[Bar] =
    org.apache.spark.sql.Encoders.kryo[Bar]
}

abstract class Foo
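With the implicit Kryo encoder from BarEncoders in scope, a Dataset of the non-case-class Bar can be created directly. A minimal usage sketch, assuming a local SparkSession (Spark 2.x API):

import org.apache.spark.sql.SparkSession

object KryoEncoderUsage extends App {
  val spark = SparkSession.builder().appName("kryo-example").master("local[*]").getOrCreate()

  import BarEncoders._ // brings the implicit Encoder[Bar] into scope

  // The Kryo encoder stores each Bar as a single binary column.
  val ds = spark.createDataset(Seq(new Bar(1), new Bar(2)))
  ds.show()

  spark.stop()
}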