Skip to content

Instantly share code, notes, and snippets.

View prasku5's full-sized avatar
😀
Hello there!!

prasanna kumar prasku5

😀
Hello there!!
View GitHub Profile
# Import a single table from MySQL into HDFS, storing it as Parquet files.
#   --connect        : JDBC URL of the source DB (Sqoop takes the database name
#                      from the URL; there is no separate --database option for
#                      sqoop import)
#   --as-parquetfile : write the target files in Parquet format
#   -m 4             : use 4 parallel map tasks for the import
# NOTE(review): prefer -P or --password-file over plaintext --password so the
# password does not leak via `ps` or shell history.
sqoop import \
  --connect jdbc:mysql://localhost/source_database_name \
  --username <username> \
  --password <password> \
  --table Source_table_name \
  --as-parquetfile \
  -m 4
# Import a single table from MySQL into HDFS with compression enabled.
#   --compress : compress the imported files; gzip (.gz) is the default codec
#   -m 4       : use 4 parallel map tasks for the import
# The database name is taken from the JDBC URL in --connect.
sqoop import \
  --connect jdbc:mysql://localhost/source_database_name \
  --username <username> \
  --password <password> \
  --table Source_table_name \
  --compress \
  -m 4
# Import a single table from MySQL into HDFS, compressed with BZip2 instead of
# the default gzip.
#   --compress            : enable compression of the imported files
#   --compression-codec   : fully-qualified Hadoop codec class to use
#   -m 4                  : use 4 parallel map tasks for the import
sqoop import \
  --connect jdbc:mysql://localhost/source_database_name \
  --username <username> \
  --password <password> \
  --table Source_table_name \
  --compress \
  --compression-codec org.apache.hadoop.io.compress.BZip2Codec \
  -m 4
# Import a single table with explicit output formatting:
#   --fields-terminated-by '\t'   : separate fields in HDFS with a tab
#   --lines-terminated-by '\n'    : terminate each record with a newline
#   --null-string "Not Available" : substitute this text for NULL string
#                                   columns in the imported data
#   -m 4                          : use 4 parallel map tasks
# (All options must appear before the command ends; the original snippet left
# the formatting options stranded after -m 4.)
sqoop import \
  --connect jdbc:mysql://localhost/source_database_name \
  --username <username> \
  --password <password> \
  --table Source_table_name \
  --fields-terminated-by '\t' \
  --lines-terminated-by '\n' \
  --null-string "Not Available" \
  -m 4
# Import a single table from MySQL and load it straight into Hive.
#   --hive-import : after landing the data in HDFS, create/load a Hive table
#                   for it automatically
#   -m 4          : use 4 parallel map tasks for the import
sqoop import \
  --connect jdbc:mysql://localhost/source_database_name \
  --username <username> \
  --password <password> \
  --table Source_table_name \
  --hive-import \
  -m 4
# Import a single table into Hive under a custom table name, replacing any
# existing data.
#   --hive-import    : load the imported data into Hive automatically
#   --hive-table     : name the Hive table yourself instead of reusing the
#                      source table name
#   --hive-overwrite : overwrite existing data in the Hive table
#   -m 4             : use 4 parallel map tasks for the import
sqoop import \
  --connect jdbc:mysql://localhost/source_database_name \
  --username <username> \
  --password <password> \
  --table Source_table_name \
  --hive-import \
  --hive-table <table_name> \
  --hive-overwrite \
  -m 4
# Save an import as a named, re-runnable Sqoop job.
# Syntax note: the tool and its arguments follow a bare "--" separator
# ("-- import ..."), not a "--import" flag. Run it later with:
#   sqoop job --exec <job_name>
sqoop job \
  --create <job_name> \
  -- import \
  --connect jdbc:mysql://localhost/source_database_name \
  --username <username> \
  --password <password> \
  --table Source_table_name \
  -m 4
sqoop create-hive-table \
--connect jdbc:mysql://localhost/source_database_name \ (This is the path to access the Source DB using JDBC Driver)
--username <username> \ (Source Database Username)
--password <password> \ (Source Database password)
--database source_database_name \ (The database name will become folder name in target HDFS )
--table Source_table_name \
--hive-table <table_name> (This is our desired table name in Hive)
-m 4
# Run an ad-hoc SQL statement against the source database and print the result
# to the console — useful for previewing data before a full import.
sqoop eval \
  --connect jdbc:mysql://localhost/source_database_name \
  --username <username> \
  --password <password> \
  --query "SELECT * FROM <table_name> LIMIT 10"
sqoop list-all-tables \
--connect jdbc:mysql://localhost/source_database_name \ (This is the path to access the Source DB using JDBC Driver)
--username <username> \ (Source Database Username)
--password <password> \ (Source Database password)
--database source_database_name \ (The database name will become folder name in target HDFS )