diff --git a/tutorial/CreateTable.java b/tutorial/CreateTable.java index 8114c34..9808c92 100644 --- a/tutorial/CreateTable.java +++ b/tutorial/CreateTable.java @@ -3,7 +3,9 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.conf.Configuration; @@ -15,19 +17,20 @@ public static void main(String[] args) throws IOException { // Instantiating configuration class Configuration con = HBaseConfiguration.create(); - // Instantiating HbaseAdmin class - HBaseAdmin admin = new HBaseAdmin(con); + try (Connection connection = ConnectionFactory.createConnection(con); + Admin admin = connection.getAdmin()) { - // Instantiating table descriptor class - HTableDescriptor tableDescriptor = new - HTableDescriptor(TableName.valueOf("emp")); + // Instantiating table descriptor class + HTableDescriptor tableDescriptor = new + HTableDescriptor(TableName.valueOf("emp")); - // Adding column families to table descriptor - tableDescriptor.addFamily(new HColumnDescriptor("personal")); - tableDescriptor.addFamily(new HColumnDescriptor("professional")); + // Adding column families to table descriptor + tableDescriptor.addFamily(new HColumnDescriptor("personal")); + tableDescriptor.addFamily(new HColumnDescriptor("professional")); - // Execute the table through admin - admin.createTable(tableDescriptor); - System.out.println(" Table created "); + // Execute the table through admin + admin.createTable(tableDescriptor); + System.out.println(" Table created "); + } } } diff --git a/tutorial/InsertData.java b/tutorial/InsertData.java index 012c432..a31cc6a 100644 --- a/tutorial/InsertData.java +++ b/tutorial/InsertData.java @@ -3,8 
+3,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.util.Bytes; public class InsertData{ @@ -13,33 +16,30 @@ public static void main(String[] args) throws IOException { // Instantiating Configuration class Configuration config = HBaseConfiguration.create(); - - // Instantiating HTable class - HTable hTable = new HTable(config, "emp"); - - // Instantiating Put class - // accepts a row name. - Put p = new Put(Bytes.toBytes("row1")); - - // adding values using add() method - // accepts column family name, qualifier/row name ,value - p.add(Bytes.toBytes("personal"), - Bytes.toBytes("name"),Bytes.toBytes("raju")); - - p.add(Bytes.toBytes("personal"), - Bytes.toBytes("city"),Bytes.toBytes("hyderabad")); - - p.add(Bytes.toBytes("professional"),Bytes.toBytes("designation"), - Bytes.toBytes("manager")); - - p.add(Bytes.toBytes("professional"),Bytes.toBytes("salary"), - Bytes.toBytes("50000")); - - // Saving the put Instance to the HTable. - hTable.put(p); - System.out.println("data inserted"); - - // closing HTable - hTable.close(); + try (Connection conn = ConnectionFactory.createConnection(config); + Table hTable = conn.getTable(TableName.valueOf("emp"))) { + // + // Instantiating Put class + // accepts a row name. 
+ Put p = new Put(Bytes.toBytes("row1")); + + // adding values using add() method + // accepts column family name, qualifier/row name ,value + p.addColumn(Bytes.toBytes("personal"), + Bytes.toBytes("name"),Bytes.toBytes("raju")); + + p.addColumn(Bytes.toBytes("personal"), + Bytes.toBytes("city"),Bytes.toBytes("hyderabad")); + + p.addColumn(Bytes.toBytes("professional"),Bytes.toBytes("designation"), + Bytes.toBytes("manager")); + + p.addColumn(Bytes.toBytes("professional"),Bytes.toBytes("salary"), + Bytes.toBytes("50000")); + + // Saving the put Instance to the HTable. + hTable.put(p); + System.out.println("data inserted"); + } } } diff --git a/tutorial/ListTables.java b/tutorial/ListTables.java index 203c233..41181e8 100644 --- a/tutorial/ListTables.java +++ b/tutorial/ListTables.java @@ -5,25 +5,27 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MasterNotRunningException; -import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; public class ListTables { public static void main(String args[])throws MasterNotRunningException, IOException { // Instantiating a configuration class - Configuration conf = HBaseConfiguration.create(); - - // Instantiating HBaseAdmin class - HBaseAdmin admin = new HBaseAdmin(conf); - - // Getting all the list of tables using HBaseAdmin object - HTableDescriptor[] tableDescriptor = admin.listTables(); - - // printing all the table names. 
- for(int i=0; i < tableDescriptor.length;i++){ - System.out.println(tableDescriptor[i].getNameAsString()); + Configuration config = HBaseConfiguration.create(); + try (Connection connection = ConnectionFactory.createConnection(config); + Admin admin = connection.getAdmin()) { + // Getting the list of tables using the Admin object + HTableDescriptor[] tableDescriptor = admin.listTables(); + + // printing all the table names. + for(int i=0; i < tableDescriptor.length;i++){ + System.out.println(tableDescriptor[i].getNameAsString()); + } } + } } diff --git a/tutorial/RetrieveData.java b/tutorial/RetrieveData.java index abf8d61..597be53 100644 --- a/tutorial/RetrieveData.java +++ b/tutorial/RetrieveData.java @@ -6,7 +6,10 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; @@ -16,25 +19,25 @@ public static void main(String[] args) throws IOException, Exception{ // Instantiating Configuration class Configuration config = HBaseConfiguration.create(); + try (Connection conn = ConnectionFactory.createConnection(config); + Table table = conn.getTable(TableName.valueOf("emp"))) { - // Instantiating HTable class - HTable table = new HTable(config, "emp"); + // Instantiating Get class + Get g = new Get(Bytes.toBytes("row1")); - // Instantiating Get class - Get g = new Get(Bytes.toBytes("row1")); + // Reading the data + Result result = table.get(g); - // Reading the data - Result result = table.get(g); + // Reading values from Result class object + byte [] value = result.getValue(Bytes.toBytes("personal"),Bytes.toBytes("name")); - // Reading values from Result class object - byte [] value =
result.getValue(Bytes.toBytes("personal"),Bytes.toBytes("name")); + byte [] value1 = result.getValue(Bytes.toBytes("personal"),Bytes.toBytes("city")); - byte [] value1 = result.getValue(Bytes.toBytes("personal"),Bytes.toBytes("city")); + // Printing the values + String name = Bytes.toString(value); + String city = Bytes.toString(value1); - // Printing the values - String name = Bytes.toString(value); - String city = Bytes.toString(value1); - - System.out.println("name: " + name + " city: " + city); + System.out.println("name: " + name + " city: " + city); + } } } diff --git a/tutorial/ScanTable.java b/tutorial/ScanTable.java index 1fd1beb..482c741 100644 --- a/tutorial/ScanTable.java +++ b/tutorial/ScanTable.java @@ -5,7 +5,10 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -17,25 +20,24 @@ public static void main(String args[]) throws IOException{ // Instantiating Configuration class Configuration config = HBaseConfiguration.create(); + try (Connection conn = ConnectionFactory.createConnection(config); + Table table = conn.getTable(TableName.valueOf("emp"))) { - // Instantiating HTable class - HTable table = new HTable(config, "emp"); + // Instantiating the Scan class + Scan scan = new Scan(); - // Instantiating the Scan class - Scan scan = new Scan(); + // Scanning the required columns + scan.addColumn(Bytes.toBytes("personal"), Bytes.toBytes("name")); + scan.addColumn(Bytes.toBytes("personal"), Bytes.toBytes("city")); - // Scanning the required columns - scan.addColumn(Bytes.toBytes("personal"), Bytes.toBytes("name")); - 
scan.addColumn(Bytes.toBytes("personal"), Bytes.toBytes("city")); + // Getting the scan result + try (ResultScanner scanner = table.getScanner(scan)) { - // Getting the scan result - ResultScanner scanner = table.getScanner(scan); + // Reading values from scan result + for (Result result = scanner.next(); result != null; result = scanner.next()) - // Reading values from scan result - for (Result result = scanner.next(); result != null; result = scanner.next()) - - System.out.println("Found row : " + result); - //closing the scanner - scanner.close(); + System.out.println("Found row : " + result); + } + } } }