Gremlin traversal examples taken from the excellent DS330: DataStax Enterprise Graph course.
Add a Vertex
Vertex u = graph.addVertex("user");
u.property("userId","u2016");
u.property("age",36);
/** | |
* Generate Case class from DataFrame.schema | |
* | |
* val df:DataFrame = ... | |
* | |
* val s2cc = new Schema2CaseClass | |
* import s2cc.implicit._ | |
* | |
* println(s2cc.schemaToCaseClass(df.schema, "MyClass")) | |
* |
import com.google.common.cache.CacheBuilder; | |
import org.slf4j.Logger; | |
import org.slf4j.LoggerFactory; | |
import org.springframework.cache.CacheManager; | |
import org.springframework.cache.annotation.CachingConfigurer; | |
import org.springframework.cache.annotation.EnableCaching; | |
import org.springframework.cache.guava.GuavaCache; | |
import org.springframework.cache.interceptor.CacheErrorHandler; | |
import org.springframework.cache.interceptor.CacheResolver; | |
import org.springframework.cache.interceptor.KeyGenerator; |
package utils | |
import org.apache.spark.rdd.RDD | |
import org.apache.spark.sql.types.StructType | |
import org.apache.spark.sql.{DataFrame, Row} | |
import org.specs2.matcher.{Expectable, Matcher} | |
import org.specs2.mutable.Specification | |
/** | |
* Utility class to compare DataFrames and Rows inside unit tests |
package com.ossAccounts.ac.server; | |
import java.io.FileInputStream; | |
import org.dbunit.DBTestCase; | |
import org.dbunit.IDatabaseTester; | |
import org.dbunit.dataset.IDataSet; | |
import org.dbunit.dataset.xml.FlatXmlDataSetBuilder; | |
import org.dbunit.ext.mssql.InsertIdentityOperation; | |
import org.dbunit.operation.DatabaseOperation; | |
import org.hibernate.Session; |
public static ExecutorService createDefaultPool() { | |
int threads = Runtime.getRuntime().availableProcessors()*2; | |
int queueSize = threads * 25; | |
return new ThreadPoolExecutor(threads / 2, threads, 30L, TimeUnit.SECONDS, new ArrayBlockingQueue<>(queueSize), | |
new CallerBlocksPolicy()); | |
// new ThreadPoolExecutor.CallerRunsPolicy()); | |
} | |
static class CallerBlocksPolicy implements RejectedExecutionHandler { | |
@Override | |
public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { |
Gremlin traversal examples taken from the excellent DS330: DataStax Enterprise Graph course.
Add a Vertex
Vertex u = graph.addVertex("user");
u.property("userId","u2016");
u.property("age",36);
original on :https://www.webslesson.info/2018/09/insert-tree-view-node-using-php-ajax.html | |
http://www.techsapphire.in/index/grandparent_parent_and_child_hierarchy_sql_server_or_bread_crumb/0-170 | |
https://www.youtube.com/watch?v=RswtHsz4v-0 | |
https://www.ruvictor.com/article/php-recursive-functions | |
<?php | |
//fetch.php | |
include('database_connection.php'); |
The EMR File System (EMRFS) is an implementation of HDFS that all Amazon EMR clusters use for reading and writing regular files from Amazon EMR directly to Amazon S3.
If you are coming from HDFS, switching to EMRFS is very easy:
you just need to pass a URI("s3://<bucket-name>")
object when obtaining the filesystem object.
package com.joe;
// vim: syntax=shell | |
$JAR=/usr/lib/spark/lib/spark-examples.jar | |
$KEY=MoissinB | |
# Create cluster with 1st step | |
aws emr create-cluster --profile $KEY \ | |
--name "Moissinb Cluster" \ | |
--release-label emr-5.10.0 \ | |
--applications Name=Spark \ |