Skip to content

Instantly share code, notes, and snippets.

@agibsonccc
Created August 23, 2016 23:53
Show Gist options
  • Select an option

  • Save agibsonccc/075c7dbb8623a7d88516852d8575828d to your computer and use it in GitHub Desktop.
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven POM for the Skymind Spark (Aug 2016) training examples:
     DL4J/ND4J/DataVec 0.5.0 on Spark 1.6 / Hadoop 2.7 (HDP 2.4).
     Fixed: the XML declaration was missing; it must be the first thing
     in the document so parsers pick up the UTF-8 encoding explicitly. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>io.skymind.examples</groupId>
<artifactId>Skymind_Spark_Aug2016_Examples</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<name>Skymind_Spark_Aug2016_Examples</name>
<url>http://maven.apache.org</url>
<properties>
<!-- Centralized version properties referenced throughout this POM. -->
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<slf4j.version>1.7.5</slf4j.version>
<jackson.version>2.4.4</jackson.version>
<jcommander.version>1.27</jcommander.version>
<!-- HDP 2.4 Version modify for older/newer HDP releases -->
<hdp.version>2.4.0.0-169</hdp.version>
<hadoop.version>2.7.1</hadoop.version>
<spark.version>1.6.0</spark.version>
<!-- NOTE(review): spark.scala.version appears unused; the dependency
     section below references scala.binary.version instead. Confirm
     before removing. -->
<spark.scala.version>2.10</spark.scala.version>
<!-- DL4J Versioning -->
<nd4j.version>0.5.0</nd4j.version>
<dl4j.version>0.5.0</dl4j.version>
<datavec.version>0.5.0</datavec.version>
<!-- cdh spark stuff -->
<scala.binary.version>2.10</scala.binary.version>
<scala.version>2.10.4</scala.version>
<!--
<spark.version>1.3.1</spark.version>
-->
<!-- cdh spark stuff -->
</properties>
<profiles>
<!-- Both profiles set only ${spark.scope}, which the Spark/Scala/Hadoop
     dependencies below use as their <scope>. "provided" keeps those jars
     out of the shaded jar (cluster supplies them); "compile" bundles them. -->
<!-- Default profile - provided scope for spark: "-P sparksubmit" -->
<profile>
<id>sparksubmit</id>
<properties>
<spark.scope>provided</spark.scope>
</properties>
<!-- NOTE(review): activation deliberately commented out, so sparklocal
     below is the effective default. -->
<!--<activation>-->
<!--<activeByDefault>true</activeByDefault>-->
<!--</activation>-->
</profile>
<!-- For building jar for running Spark local: "-P sparklocal"-->
<!-- Active by default: compile scope bundles Spark for local execution. -->
<profile>
<id>sparklocal</id>
<properties>
<spark.scope>compile</spark.scope>
</properties>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
</profile>
</profiles>
<repositories>
<!--
<repository>
<id>org.apache.hadoop</id>
<url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
</repository>
-->
<!-- Hortonworks repository for HDP-specific artifacts (see hdp.version). -->
<repository>
<releases>
<enabled>true</enabled>
<updatePolicy>always</updatePolicy>
<checksumPolicy>warn</checksumPolicy>
</releases>
<snapshots>
<enabled>false</enabled>
<updatePolicy>never</updatePolicy>
<checksumPolicy>fail</checksumPolicy>
</snapshots>
<id>HDPReleases</id>
<name>HDP Releases</name>
<!-- Fixed: the original URL started with "//" (no scheme). Maven does
     not resolve protocol-relative URLs, so this repository was
     effectively unreachable. -->
<url>https://repo.hortonworks.com/content/repositories/releases/</url>
<layout>default</layout>
</repository>
</repositories>
<!-- Deploy targets for "mvn deploy": snapshots and releases to Sonatype. -->
<distributionManagement>
<snapshotRepository>
<id>sonatype-nexus-snapshots</id>
<name>Sonatype Nexus snapshot repository</name>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
</snapshotRepository>
<repository>
<id>nexus-releases</id>
<name>Nexus Release Repository</name>
<!-- Fixed: use https like the snapshot repository above; deploys send
     credentials, which must not travel over plain http. -->
<url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>
</repository>
</distributionManagement>
<!-- Pins versions for ND4J and Scala so transitive resolution cannot
     drift; actual dependency declarations are in <dependencies> below. -->
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.nd4j</groupId>
<artifactId>nd4j-native-platform</artifactId>
<version>${nd4j.version}</version>
</dependency>
<dependency>
<groupId>org.nd4j</groupId>
<artifactId>nd4j-api</artifactId>
<version>${nd4j.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<!-- Spark and Scala Dependencies -->
<!-- ${spark.scope} comes from the active profile: "provided" for
     cluster spark-submit, "compile" for local runs (default). -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-mllib_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
<scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${scala.version}</version>
<scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<scope>${spark.scope}</scope>
</dependency>
<!-- Deeplearning4j Dependencies -->
<dependency>
<groupId>org.deeplearning4j</groupId>
<artifactId>deeplearning4j-core</artifactId>
<version>${dl4j.version}</version>
</dependency>
<dependency>
<groupId>org.deeplearning4j</groupId>
<artifactId>dl4j-spark_${scala.binary.version}</artifactId>
<version>${dl4j.version}</version>
</dependency>
<dependency>
<groupId>org.nd4j</groupId>
<artifactId>nd4j-kryo_${scala.binary.version}</artifactId>
<version>${nd4j.version}</version>
</dependency>
<!-- CPU backend for ND4J (bundles natives for all platforms). -->
<dependency>
<groupId>org.nd4j</groupId>
<artifactId>nd4j-native-platform</artifactId>
<version>${nd4j.version}</version>
</dependency>
<!-- DataVec Dependencies -->
<dependency>
<groupId>org.datavec</groupId>
<artifactId>datavec-api</artifactId>
<version>${datavec.version}</version>
</dependency>
<dependency>
<groupId>org.datavec</groupId>
<artifactId>datavec-spark_${scala.binary.version}</artifactId>
<version>${datavec.version}</version>
</dependency>
<!-- NOTE(review): avro 1.7.1 is hard-coded rather than a property, and
     Hadoop ${hadoop.version} pulls its own avro; confirm the two agree. -->
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
<version>1.7.1</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<scope>${spark.scope}</scope>
</dependency>
<!-- hadoop-mapreduce-client-app -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-app</artifactId>
<version>${hadoop.version}</version>
<scope>${spark.scope}</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-common</artifactId>
<version>${hadoop.version}</version>
<scope>${spark.scope}</scope>
</dependency>
<!-- NOTE(review): test scope deliberately commented out, so junit ships
     at compile scope and ends up in the shaded jar; confirm intended. -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
<!--
<scope>test</scope>
-->
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>2.7</version>
</dependency>
<!-- NOTE(review): mrunit is a testing library but has no test scope
     here; confirm it is meant to ship at compile scope. -->
<dependency>
<groupId>org.apache.mrunit</groupId>
<artifactId>mrunit</artifactId>
<version>1.1.0</version>
<classifier>hadoop2</classifier>
</dependency>
<!-- JCommander for parsing args -->
<dependency>
<groupId>com.beust</groupId>
<artifactId>jcommander</artifactId>
<version>${jcommander.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Compile for Java 7. Fixed: the plugin previously had no <version>,
     so the build floated to whatever plugin version the local Maven
     resolved; pinned for reproducible builds. -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.5.1</version>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<!-- Builds the uber jar for spark-submit, stripping signatures and
     classes the cluster already provides. -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<!-- NOTE(review): shade 1.6 dates to 2011; consider upgrading, but
     re-verify the exclude filters below against the newer plugin. -->
<version>1.6</version>
<configuration>
<createDependencyReducedPom>true</createDependencyReducedPom>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>org/datanucleus/**</exclude>
<!-- Strip jar signatures; leftover signature files make the
     merged jar fail verification at load time. -->
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
<!--
<exclude>org/apache/hadoop/**</exclude>
-->
<!-- Classes supplied by the Spark runtime on the cluster. -->
<exclude>org/apache/mesos/**</exclude>
<exclude>org/apache/spark/**</exclude>
<!--
<exclude>org/springframework/**</exclude>
-->
<exclude>org/apache/zookeeper/**</exclude>
<!--
<exclude>akka/**</exclude>
-->
<exclude>data/**</exclude>
<exclude>demos/**</exclude>
<exclude>docs/**</exclude>
<exclude>models/**</exclude>
<exclude>oozie/**</exclude>
<exclude>parquet/**</exclude>
<exclude>py4j/**</exclude>
<exclude>pyspark/**</exclude>
<exclude>scala/**</exclude>
<exclude>sentiment/**</exclude>
<exclude>tachyon/**</exclude>
<exclude>UI/**</exclude>
<exclude>com/hazelcast/**</exclude>
<exclude>lib/static/Windows/**</exclude>
</excludes>
</filter>
</filters>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<!-- Merge akka-style reference.conf files instead of letting
     one jar's copy clobber the others. -->
<transformer
implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>reference.conf</resource>
</transformer>
<!-- Merge META-INF/services entries across jars. -->
<transformer
implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
<!-- NOTE(review): no Main-Class is configured here; spark-submit
     supplies the entry point via its class argument. Confirm
     this empty transformer is intentional. -->
<transformer
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment