Gremlin traversal examples taken from the excellent DS330: DataStax Enterprise Graph course.
Add a Vertex
Vertex u = graph.addVertex("user");
u.property("userId","u2016");
u.property("age",36);
import com.google.common.cache.CacheBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.CachingConfigurer;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.guava.GuavaCache;
import org.springframework.cache.interceptor.CacheErrorHandler;
import org.springframework.cache.interceptor.CacheResolver;
import org.springframework.cache.interceptor.KeyGenerator;
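The imports above suggest a CachingConfigurer-style cache configuration (hence CacheResolver, KeyGenerator, and CacheErrorHandler). A minimal sketch of the CacheManager wiring alone is shown below; the cache name, size limit, and expiry are illustrative assumptions, and the extra Spring/JDK imports it needs are listed at the top:
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import org.springframework.cache.support.SimpleCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
@EnableCaching
public class CacheConfig {

    // Illustrative: one Guava-backed cache named "users" with a 10-minute TTL
    @Bean
    public CacheManager cacheManager() {
        GuavaCache usersCache = new GuavaCache("users",
                CacheBuilder.newBuilder()
                        .maximumSize(1000)
                        .expireAfterWrite(10, TimeUnit.MINUTES)
                        .build());
        SimpleCacheManager manager = new SimpleCacheManager();
        manager.setCaches(Collections.singletonList(usersCache));
        return manager;
    }
}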
DatabaseMetaData databaseMetaData = connection.getMetaData();
int majorVersion = databaseMetaData.getDatabaseMajorVersion();
int minorVersion = databaseMetaData.getDatabaseMinorVersion();
String productName = databaseMetaData.getDatabaseProductName();
String productVersion = databaseMetaData.getDatabaseProductVersion();
int driverMajorVersion = databaseMetaData.getDriverMajorVersion();
int driverMinorVersion = databaseMetaData.getDriverMinorVersion();
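For context, a minimal sketch of obtaining the Connection and printing the values above; the class name, H2 in-memory URL, and credentials are placeholders for whatever JDBC driver is actually in use:
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.SQLException;

public class MetadataDemo {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL/credentials; swap in the real data source
        try (Connection connection = DriverManager.getConnection("jdbc:h2:mem:demo", "sa", "")) {
            DatabaseMetaData md = connection.getMetaData();
            System.out.printf("%s %s.%s (driver %d.%d)%n",
                    md.getDatabaseProductName(),
                    md.getDatabaseMajorVersion(), md.getDatabaseMinorVersion(),
                    md.getDriverMajorVersion(), md.getDriverMinorVersion());
        }
    }
}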
package utils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{DataFrame, Row}
import org.specs2.matcher.{Expectable, Matcher}
import org.specs2.mutable.Specification
/**
 * Utility class to compare DataFrames and Rows inside unit tests
 */
path="s3://path-to-file/" | |
sc = spark.sparkContext | |
URI = sc._gateway.jvm.java.net.URI | |
Path = sc._gateway.jvm.org.apache.hadoop.fs.Path | |
FileSystem = sc._gateway.jvm.org.apache.hadoop.fs.FileSystem | |
Configuration = sc._gateway.jvm.org.apache.hadoop.conf.Configuration | |
fs = FileSystem.get(URI(path), Configuration()) | |
files = fs.listStatus(Path(path)) |
package com.ossAccounts.ac.server;
import java.io.FileInputStream;
import org.dbunit.DBTestCase;
import org.dbunit.IDatabaseTester;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.xml.FlatXmlDataSetBuilder;
import org.dbunit.ext.mssql.InsertIdentityOperation;
import org.dbunit.operation.DatabaseOperation;
import org.hibernate.Session;
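A minimal DBTestCase sketch showing how these imports usually fit together; the class name and dataset path are illustrative assumptions, and DBTestCase picks up the JDBC connection from the standard DbUnit system properties:
public class AccountDbTest extends DBTestCase {

    // Illustrative path; the flat-XML dataset defines the rows loaded before each test
    @Override
    protected IDataSet getDataSet() throws Exception {
        return new FlatXmlDataSetBuilder()
                .build(new FileInputStream("src/test/resources/accounts-dataset.xml"));
    }

    // CLEAN_INSERT wipes and reloads every table named in the dataset
    @Override
    protected DatabaseOperation getSetUpOperation() {
        return DatabaseOperation.CLEAN_INSERT;
    }

    @Override
    protected DatabaseOperation getTearDownOperation() {
        return DatabaseOperation.NONE;
    }
}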
import org.apache.spark.sql.types._
// Shared schema for both the internal and external correspondence-subject CSVs
val subjectSchema = StructType(Array(
  StructField("correspondence_id", StringType, false),
  StructField("subject", StringType, false),
  StructField("thread_subject", StringType, false)
))
val internal_subjects = spark.read.schema(subjectSchema).csv("internal")
val external_subjects = spark.read.schema(subjectSchema).csv("external")
import re
import sys

def getWords(subject):
    return re.sub(r"\W", " ", subject).split()

def isDigit(ch):
    return ch.isdigit()

def isIdChar(ch):
    # Body missing in the original; assumed completion: letters, digits, and underscores count as identifier characters
    return ch.isalnum() or ch == "_"
public static ExecutorService createDefaultPool() {
    int threads = Runtime.getRuntime().availableProcessors() * 2;
    int queueSize = threads * 25;
    return new ThreadPoolExecutor(threads / 2, threads, 30L, TimeUnit.SECONDS,
            new ArrayBlockingQueue<>(queueSize), new CallerBlocksPolicy());
    // new ThreadPoolExecutor.CallerRunsPolicy());
}
static class CallerBlocksPolicy implements RejectedExecutionHandler {
    @Override
    public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
        // Block the submitting thread until queue space frees up (assumed intent; CallerRunsPolicy would instead run the task on the caller)
        try {
            executor.getQueue().put(r);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RejectedExecutionException("Interrupted while waiting to enqueue task", e);
        }
    }
}
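A short usage sketch for the pool above; the task body is illustrative:
ExecutorService pool = createDefaultPool();
// With CallerBlocksPolicy, submit() blocks the caller when the queue is full instead of throwing
pool.submit(() -> System.out.println("work item"));
pool.shutdown();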
Original: https://www.webslesson.info/2018/09/insert-tree-view-node-using-php-ajax.html
http://www.techsapphire.in/index/grandparent_parent_and_child_hierarchy_sql_server_or_bread_crumb/0-170
https://www.youtube.com/watch?v=RswtHsz4v-0
https://www.ruvictor.com/article/php-recursive-functions
<?php
//fetch.php
include('database_connection.php');