1. Add the dependencies (note: Sqoop 1.4.7 cannot be pulled straight from Maven; download the jar, place it in the project, and reference it locally)
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.8.4</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>2.8.4</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-core</artifactId>
    <version>2.8.4</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-common</artifactId>
    <version>2.8.4</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
    <version>2.8.4</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-common</artifactId>
    <version>2.3.7</version>
    <exclusions>
        <exclusion>
            <groupId>org.eclipse.jetty.aggregate</groupId>
            <artifactId>jetty-all</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro-mapred</artifactId>
    <version>1.8.1</version>
</dependency>
<dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro</artifactId>
    <version>1.8.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-cli</artifactId>
    <version>2.3.2</version>
    <exclusions>
        <exclusion>
            <groupId>org.apache.thrift</groupId>
            <artifactId>libthrift</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>com.lmax</groupId>
    <artifactId>disruptor</artifactId>
    <version>3.4.2</version>
</dependency>
<dependency>
    <groupId>org.apache.sqoop</groupId>
    <artifactId>sqoop</artifactId>
    <version>1.4.7</version>
    <scope>system</scope>
    <systemPath>${project.basedir}/libs/sqoop-1.4.7.jar</systemPath>
</dependency>
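
Note: the MySQL JDBC driver is not in the list above, but the code below passes --driver com.mysql.jdbc.Driver, so the connector must also be on the classpath. A 5.1.x connector (the version shown is only a suggestion) matches that driver class name:

<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>5.1.47</version>
</dependency>

Also, if you would rather avoid the system scope, the downloaded jar can be installed into the local repository with mvn install:install-file -Dfile=libs/sqoop-1.4.7.jar -DgroupId=org.apache.sqoop -DartifactId=sqoop -Dversion=1.4.7 -Dpackaging=jar and then declared as a normal dependency.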
2. Edit the JDK policy file (note: use JDK 1.8 or lower)
%JAVA_HOME%\jre\lib\security\java.policy

Add the following two permissions inside the grant { } block:

permission javax.management.MBeanTrustPermission "register";
permission org.apache.derby.security.SystemPermission "engine", "usederbyinternals";
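
If you would rather not touch the JDK-wide file, the same grant block can go into a project-local policy file and be loaded in addition to the defaults with the standard JVM option -Djava.security.policy=<path-to-policy-file>.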
3. Put hive-site.xml into the resources directory (it must end up on the classpath)
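
The exact contents depend on your cluster; at minimum the Hive import needs to be able to find the metastore. A minimal sketch, assuming a remote metastore on the (hypothetical) host hadoop01 at the default port 9083:

<configuration>
    <property>
        <name>hive.metastore.uris</name>
        <value>thrift://hadoop01:9083</value>
    </property>
</configuration>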
4. Java code
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.Sqoop;
import org.apache.sqoop.hive.HiveConfig;
import org.apache.sqoop.tool.SqoopTool;
import org.apache.sqoop.util.OptionsFileUtil;

public class SqoopTest {
    static {
        try {
            // Point HADOOP_HOME at the directory containing bin/winutils.exe
            System.setProperty("hadoop.home.dir", "D:/hadoop-winutils-hadoop2.8.3/");
            // Load the native library (hadoop.dll)
            // System.load("D:/hadoop-winutils-hadoop2.8.3/bin/hadoop.dll");
            System.load("D:/hadoop/hadoop-winutils-hadoop2.7.3/bin/hadoop.dll");
        } catch (UnsatisfiedLinkError e) {
            System.err.println("Native code library failed to load. " + e);
            System.exit(1);
        }
    }

    public static void main(String[] args) throws Exception {
        // hiveToMysql();
        mysqlToHive();
    }

    public static void hiveToMysql() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://hadoop01:8020/");
        String[] arg = new String[]{
                "--connect", "jdbc:mysql://192.168.101:3306/test",
                "--driver", "com.mysql.jdbc.Driver",
                "--username", "test",
                "--password", "test",
                "--table", "user",
                "-m", "1",
                "--export-dir", "/user/hive/warehouse/test.db/user",
                // Interpret \N in the exported files as SQL NULL
                "--input-null-string", "\\\\N",
                "--input-null-non-string", "\\\\N",
                "--input-fields-terminated-by", ","
        };
        String[] expandArguments = OptionsFileUtil.expandArguments(arg);
        SqoopTool tool = SqoopTool.getTool("export");
        Configuration loadPlugins = SqoopTool.loadPlugins(conf);
        Sqoop sqoop = new Sqoop((com.cloudera.sqoop.tool.SqoopTool) tool, loadPlugins);
        int res = Sqoop.runSqoop(sqoop, expandArguments);
        if (res == 0) {
            System.out.println("success");
        } else {
            System.out.println("failure");
        }
    }

    public static void mysqlToHive() throws IOException {
        String[] mysql = new String[]{
                "--connect", "jdbc:mysql://192.168.101:3306/test",
                "--driver", "com.mysql.jdbc.Driver",
                "--username", "test",
                "--password", "test",
                "--table", "user",
                "--target-dir", "/user/hive/warehouse/test.db/user",
                "--delete-target-dir",
                "--hive-import",
                "--hive-database", "test",
                "--hive-table", "user",
                "-m", "2",
        };
        SqoopTool importTool = SqoopTool.getTool("import");
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://hadoop01:8020/");
        Configuration loadPlugins = SqoopTool.loadPlugins(conf);

        // Load the Hive configuration (hive-site.xml must be on the classpath);
        // the returned Configuration is not used further here.
        Configuration hiveConf = new Configuration();
        HiveConfig.getHiveConf(hiveConf);
        Sqoop sqoop = new Sqoop((com.cloudera.sqoop.tool.SqoopTool) importTool, loadPlugins);
        int res = Sqoop.runSqoop(sqoop, mysql);
        if (res == 0) {
            System.out.println("success");
        } else {
            System.out.println("failure");
        }
    }
}
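
The same embedded pattern works with Sqoop's other argument forms. As a final illustration, here is a minimal, untested sketch of a free-form query import, reusing the hosts and credentials from the examples above as placeholders (the id and name columns are hypothetical). Note that --query requires the literal $CONDITIONS token, and --split-by is required once more than one mapper is used:

import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.Sqoop;
import org.apache.sqoop.tool.SqoopTool;

public class SqoopQueryImport {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://hadoop01:8020/");
        Configuration loadPlugins = SqoopTool.loadPlugins(conf);
        SqoopTool importTool = SqoopTool.getTool("import");
        String[] sqoopArgs = new String[]{
                "--connect", "jdbc:mysql://192.168.101:3306/test",
                "--driver", "com.mysql.jdbc.Driver",
                "--username", "test",
                "--password", "test",
                // Sqoop replaces $CONDITIONS with a per-mapper WHERE clause
                "--query", "SELECT id, name FROM user WHERE $CONDITIONS",
                "--split-by", "id",
                "--target-dir", "/user/hive/warehouse/test.db/user_query",
                "--delete-target-dir",
                "-m", "2",
        };
        Sqoop sqoop = new Sqoop((com.cloudera.sqoop.tool.SqoopTool) importTool, loadPlugins);
        int res = Sqoop.runSqoop(sqoop, sqoopArgs);
        System.out.println(res == 0 ? "success" : "failure");
    }
}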