Sqoop Export:
-------------
The target table must already exist in the MySQL database before running the export.
mysql -pcloudera
CREATE DATABASE sara;   -- create a brand-new database
USE sara;               -- switch to it
CREATE TABLE employee (
id INT NOT NULL PRIMARY KEY,
name VARCHAR(20),
deg VARCHAR(20),
salary INT,
dept VARCHAR(10));
mysql> describe employee;
+--------+-------------+------+-----+---------+-------+
| Field | Type | Null | Key | Default | Extra |
+--------+-------------+------+-----+---------+-------+
| id | int(11) | NO | PRI | NULL | |
| name | varchar(20) | YES | | NULL | |
| deg | varchar(20) | YES | | NULL | |
| salary | int(11) | YES | | NULL | |
| dept | varchar(10) | YES | | NULL | |
+--------+-------------+------+-----+---------+-------+
5 rows in set (0.03 sec)
The employee table is now created in the sara database.
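Sqoop export reads plain-text files from the export directory and maps each record to a table row, matching fields to columns in order. It is worth confirming first that the data sitting in /user/root/emp lines up with the employee schema. A quick check (the part file name part-m-00000 is the usual MapReduce output name and may differ on your cluster):

$ hdfs dfs -cat /user/root/emp/part-m-00000
1201,Gopal,Manager,50000,TP
1202,Manisha,Proof Reader,50000,TP
...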
sqoop export --connect jdbc:mysql://localhost/sara --username=root --password=cloudera --table employee --export-dir /user/root/emp -m 1
The data is exported into the employee table in the sara database.
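By default, Sqoop export expects comma-separated fields and newline-terminated records in the export directory. If the HDFS files use a different delimiter, it has to be passed explicitly; a sketch assuming tab-delimited input files:

sqoop export --connect jdbc:mysql://localhost/sara --username=root --password=cloudera --table employee --export-dir /user/root/emp --input-fields-terminated-by '\t' -m 1

The -m 1 flag runs the export as a single map task; raising it parallelizes the inserts across more tasks.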
Now verify the export from the MySQL shell:
mysql -pcloudera
mysql> use sara;
mysql> select * from employee;
+------+----------+--------------+--------+------+
| id | name | deg | salary | dept |
+------+----------+--------------+--------+------+
| 1201 | Gopal | Manager | 50000 | TP |
| 1202 | Manisha | Proof Reader | 50000 | TP |
| 1203 | Khalil | PHP dev | 30000 | AC |
| 1204 | Prasanth | PHP dev | 30000 | AC |
| 1205 | Raja | Admin | 20000 | TP |
| 1206 | Satish | team leader | 20000 | GR |
+------+----------+--------------+--------+------+
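Re-running the same export would fail with duplicate-key errors, since id is the primary key and a plain export issues INSERT statements. Sqoop can update existing rows instead; a sketch using update mode keyed on id (allowinsert updates matching rows and inserts the rest, while the default updateonly skips records with no matching key):

sqoop export --connect jdbc:mysql://localhost/sara --username=root --password=cloudera --table employee --export-dir /user/root/emp --update-key id --update-mode allowinsert -m 1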