[关闭]
@zzy0471 2016-08-28T14:43:31.000000Z 字数 1396 阅读 1237

大数据作业代码

大数据作业


通过Hive JDBC接口执行HiveQL(由Hive在底层转换为MapReduce程序)

package demo.com.joey;

import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;

public class Demo {

    // HiveServer1 JDBC driver class. NOTE(review): for HiveServer2 this would be
    // "org.apache.hive.jdbc.HiveDriver" with a "jdbc:hive2://..." URL — kept as-is
    // since the target server version is not visible here.
    private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

    /**
     * Connects to a local Hive server, creates a demo database and table,
     * lists and describes the table, then loads a local text file into it.
     *
     * @param args unused
     * @throws SQLException if connecting or any Hive statement fails
     */
    public static void main(String[] args) throws SQLException {

        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }

        String databaseName = "jdbc_demo";
        String tableName = "sample_data";
        String sourceFile = "/home/joey/sample.txt";

        // try-with-resources closes the Statement and Connection even if a
        // statement fails mid-way (the original leaked both).
        try (Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000", "", "");
             Statement stmt = con.createStatement()) {

            // create database — use the databaseName variable (was hard-coded),
            // and IF NOT EXISTS so rerunning the demo does not fail.
            // execute() is used for all DDL/DML: executeQuery() is only for
            // statements that return a ResultSet.
            stmt.execute("create database if not exists " + databaseName);
            stmt.execute("use " + databaseName);

            // create table — IF EXISTS avoids an error on a fresh database
            stmt.execute("drop table if exists " + tableName);
            stmt.execute("create table " + tableName + " (name string) stored as textfile");

            // show tables
            String sql = "show tables '" + tableName + "'";
            System.out.println("Running: " + sql);
            ResultSet res = stmt.executeQuery(sql);
            if (res.next()) {
                System.out.println(res.getString(1));
            }

            // describe table
            sql = "describe " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }

            // load data into table
            // (bug fix: sql must be assigned BEFORE "Running: ..." is printed —
            // the original printed the stale describe statement here)
            sql = "load data local inpath '" + sourceFile + "' into table " + tableName;
            System.out.println("Running: " + sql);
            stmt.execute(sql);
            System.out.println("load over");
        }
    }
}
添加新批注
在作者公开此批注前,只有你和作者可见。
回复批注