Writing Codes in HBase

DWIVEDISHASHWAT@GMAIL.COM
lib  
commons-configuration-1.8.jar  
commons-lang-2.6.jar  
commons-logging-1.1.1.jar  
hadoop-core-1.0.0.jar  
hbase-0.92.1.jar  
log4j-1.2.16.jar  
slf4j-api-1.5.8.jar  
slf4j-log4j12-1.5.8.jar  
zookeeper-3.4.3.jar
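
Assuming these jars sit in a lib/ directory next to the HBaseTest client shown on the following slides (the directory name is taken from the slide; the commands themselves are only a sketch), the code can be compiled and run with them on the classpath:

javac -cp "lib/*" HBaseTest.java
java -cp ".:lib/*" HBaseTest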
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
// Point the client configuration at the ZooKeeper quorum before opening a
// table. The Constants.* keys are application-defined names for the standard
// HBase properties; the quorum host list and client port come from your
// cluster configuration.
Configuration hConf = HBaseConfiguration.create(conf);
hConf.set(Constants.HBASE_CONFIGURATION_ZOOKEEPER_QUORUM,
        hbaseZookeeperQuorum);
hConf.set(Constants.HBASE_CONFIGURATION_ZOOKEEPER_CLIENTPORT,
        hbaseZookeeperClientPort);
HTable hTable = new HTable(hConf, tableName);
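
If a Constants helper like the one above is not available, the same connection can be written with the literal HBase property keys. This is a minimal sketch: the ZooKeeper host names and the table name are placeholders, and the client port shown is HBase's default.

Configuration hConf = HBaseConfiguration.create();
// Standard property keys; replace the hosts with your ZooKeeper quorum.
hConf.set("hbase.zookeeper.quorum", "zk1.example.com,zk2.example.com");
hConf.set("hbase.zookeeper.property.clientPort", "2181"); // default port
HTable hTable = new HTable(hConf, "scores");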
public class HBaseTest {

    private static Configuration conf = null;

    /**
     * Initialization
     */
    static {
        conf = HBaseConfiguration.create();
    }

    // The methods below complete the class; its closing brace follows main().
    /**
     * Create a table
     */
    public static void creatTable(String tableName, String[] familys)
            throws Exception {
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (admin.tableExists(tableName)) {
            System.out.println("table already exists!");
        } else {
            HTableDescriptor tableDesc = new HTableDescriptor(tableName);
            for (int i = 0; i < familys.length; i++) {
                tableDesc.addFamily(new HColumnDescriptor(familys[i]));
            }
            admin.createTable(tableDesc);
            System.out.println("create table " + tableName + " ok.");
        }
    }
    /**
     * Delete a table
     */
    public static void deleteTable(String tableName) throws Exception {
        try {
            HBaseAdmin admin = new HBaseAdmin(conf);
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
            System.out.println("delete table " + tableName + " ok.");
        } catch (MasterNotRunningException e) {
            e.printStackTrace();
        } catch (ZooKeeperConnectionException e) {
            e.printStackTrace();
        }
    }
    /**
     * Put (or insert) a row
     */
    public static void addRecord(String tableName, String rowKey,
            String family, String qualifier, String value) throws Exception {
        try {
            HTable table = new HTable(conf, tableName);
            Put put = new Put(Bytes.toBytes(rowKey));
            put.add(Bytes.toBytes(family), Bytes.toBytes(qualifier),
                    Bytes.toBytes(value));
            table.put(put);
            System.out.println("insert record " + rowKey + " to table "
                    + tableName + " ok.");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    /**
     * Delete a row
     */
    public static void delRecord(String tableName, String rowKey)
            throws IOException {
        HTable table = new HTable(conf, tableName);
        List<Delete> list = new ArrayList<Delete>();
        Delete del = new Delete(rowKey.getBytes());
        list.add(del);
        table.delete(list);
        System.out.println("del record " + rowKey + " ok.");
    }
    /**
     * Get a row
     */
    public static void getOneRecord(String tableName, String rowKey)
            throws IOException {
        HTable table = new HTable(conf, tableName);
        Get get = new Get(rowKey.getBytes());
        Result rs = table.get(get);
        for (KeyValue kv : rs.raw()) {
            System.out.print(new String(kv.getRow()) + " ");
            System.out.print(new String(kv.getFamily()) + ":");
            System.out.print(new String(kv.getQualifier()) + " ");
            System.out.print(kv.getTimestamp() + " ");
            System.out.println(new String(kv.getValue()));
        }
    }
    /**
     * Scan (or list) a table
     */
    public static void getAllRecord(String tableName) {
        try {
            HTable table = new HTable(conf, tableName);
            Scan s = new Scan();
            ResultScanner ss = table.getScanner(s);
            for (Result r : ss) {
                for (KeyValue kv : r.raw()) {
                    System.out.print(new String(kv.getRow()) + " ");
                    System.out.print(new String(kv.getFamily()) + ":");
                    System.out.print(new String(kv.getQualifier()) + " ");
                    System.out.print(kv.getTimestamp() + " ");
                    System.out.println(new String(kv.getValue()));
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    public static void main(String[] args) {
        try {
            String tablename = "scores";
            String[] familys = { "grade", "course" };
            HBaseTest.creatTable(tablename, familys);

            // add record zkb
            HBaseTest.addRecord(tablename, "zkb", "grade", "", "5");
            HBaseTest.addRecord(tablename, "zkb", "course", "", "90");
            HBaseTest.addRecord(tablename, "zkb", "course", "math", "97");
            HBaseTest.addRecord(tablename, "zkb", "course", "art", "87");
            // add record baoniu
            HBaseTest.addRecord(tablename, "baoniu", "grade", "", "4");
            HBaseTest.addRecord(tablename, "baoniu", "course", "math", "89");

            System.out.println("===========get one record========");
            HBaseTest.getOneRecord(tablename, "zkb");

            System.out.println("===========show all record========");
            HBaseTest.getAllRecord(tablename);

            System.out.println("===========del one record========");
            HBaseTest.delRecord(tablename, "baoniu");
            HBaseTest.getAllRecord(tablename);

            System.out.println("===========show all record========");
            HBaseTest.getAllRecord(tablename);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
Sqoop (“SQL-to-Hadoop”) is a straightforward command-line tool with the
following capabilities:
 - Imports individual tables or entire databases to files in HDFS
 - Generates Java classes to allow you to interact with your imported data
 - Provides the ability to import from SQL databases straight into your Hive
   data warehouse
sqoop --connect jdbc:mysql://db.example.com/website --table USERS \
      --local --hive-import


This would connect to the MySQL database on this server and import the USERS
table into HDFS. The --local option instructs Sqoop to take advantage of a
local MySQL connection, which performs very well. The --hive-import option
means that after reading the data into HDFS, Sqoop will connect to the Hive
metastore, create a table named USERS with the same columns and types
(translated into their closest analogues in Hive), and load the data into the
Hive warehouse directory on HDFS.
Suppose you wanted to work with this data in MapReduce and
  weren’t concerned with Hive. When storing this table in HDFS,
  you might want to take advantage of compression, so you’d like
  to be able to store the data in Sequence Files.

sqoop --connect jdbc:mysql://db.example.com/website --table USERS \
      --as-sequencefile

Sqoop includes some other commands which allow you to inspect the database
you are working with. For example, you can list the available database
schemas (with the sqoop-list-databases tool) and tables within a schema (with
the sqoop-list-tables tool). Sqoop also includes a primitive SQL execution
shell (the sqoop-eval tool).
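
For example (a sketch only: the connect string is reused from the earlier import command, and someuser is a placeholder account):

sqoop list-databases --connect jdbc:mysql://db.example.com/ --username someuser -P
sqoop list-tables    --connect jdbc:mysql://db.example.com/website --username someuser -P
sqoop eval           --connect jdbc:mysql://db.example.com/website --username someuser -P \
                     --query "SELECT COUNT(*) FROM USERS"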
sqoop help
usage: sqoop COMMAND [ARGS]


Available commands:
 codegen            Generate code to interact with database records
 create-hive-table  Import a table definition into Hive
 eval               Evaluate a SQL statement and display the results
 export             Export an HDFS directory to a database table
 help               List available commands
 import             Import a table from a database to HDFS
 import-all-tables  Import tables from a database to HDFS
 list-databases     List available databases on a server
 list-tables        List available tables in a database
 version            Display version information


See 'sqoop help COMMAND' for information on a specific command.
sqoop help import
usage: sqoop import [GENERIC-ARGS] [TOOL-ARGS]


Common arguments:
 --connect <jdbc-uri>           Specify JDBC connect string
 --connect-manager <jdbc-uri>   Specify connection manager class to use
 --driver <class-name>          Manually specify JDBC driver class to use
 --hadoop-home <dir>            Override $HADOOP_HOME
 --help                         Print usage instructions
 -P                             Read password from console
 --password <password>          Set authentication password
 --username <username>          Set authentication username
 --verbose                      Print more information while working
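
Putting a few of these arguments together (again only a sketch: someuser is a placeholder, and the connect string is the one used in the earlier examples):

sqoop import --connect jdbc:mysql://db.example.com/website --table USERS \
      --username someuser -P --verbose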
