
7. HBase Operations and Programming

Complete the following using HBase Shell commands or the HBase Java API:

1. Tasks (an HBase Shell sketch of these follows the list):

  • List information about every table in HBase, such as the table name;

  • Print all records of a specified table to the terminal;

  • Add and delete a specified column family or column in an existing table;

  • Delete all records of a specified table;

  • Count the number of rows in a table.
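
For reference, these tasks map onto HBase Shell commands roughly as follows. This is a minimal sketch: the table name 'Student', the row key '1', and the family names 'S_Name' and 'S_Address' are placeholders for illustration, not part of the assignment.

list                                                      # list all tables
describe 'Student'                                        # show information about one table
scan 'Student'                                            # print all records of the table
alter 'Student', NAME => 'S_Address'                      # add a column family
alter 'Student', NAME => 'S_Address', METHOD => 'delete'  # delete a column family
delete 'Student', '1', 'S_Name'                           # delete one cell of one row
truncate 'Student'                                        # delete all records of the table
count 'Student'                                           # count the rows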

2. Given the relational-database tables and data (textbook p. 92, top), convert them into tables suitable for HBase storage and insert the data.
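
The textbook tables themselves are not reproduced here. As a sketch of the mapping, suppose one of the relational tables is Student(S_No, S_Name, S_Sex, S_Age); only Student, S_Name, and S_Age actually appear in the code below, so S_No, S_Sex, and the sample values are assumptions for illustration. The primary key S_No becomes the HBase row key, and each remaining attribute becomes a column family, matching how createTable in part 3 lays tables out:

create 'Student', 'S_Name', 'S_Sex', 'S_Age'
put 'Student', '1', 'S_Name', 'Tom'    # row key '1', family 'S_Name', empty qualifier
put 'Student', '1', 'S_Sex', 'male'
put 'Student', '1', 'S_Age', '20'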

3. Write programs that implement the following functions (textbook p. 92, bottom):

(1) createTable(String tableName, String[] fields): create a table named tableName with one column family per element of fields.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateTable {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    public static void init() { // open the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void close() { // close the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void createTable(String tableName, String[] fields) throws IOException {
        init();
        TableName tablename = TableName.valueOf(tableName); // table name
        if (admin.tableExists(tablename)) { // drop the table if it already exists
            System.out.println("table exists!");
            admin.disableTable(tablename);
            admin.deleteTable(tablename);
        }
        TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tablename);
        for (int i = 0; i < fields.length; i++) { // one column family per field
            ColumnFamilyDescriptor family =
                    ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(fields[i])).build();
            tableDescriptor.setColumnFamily(family);
        }
        admin.createTable(tableDescriptor.build());
        close();
    }

    public static void main(String[] args) {
        String[] fields = {"id", "score"};
        try {
            createTable("test", fields);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
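
If the table already exists, createTable disables and drops it before recreating it, so running it against a table that holds data will wipe that data. After main runs, list and describe 'test' in the HBase Shell should show the new table with the column families id and score.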

(2) addRecord(String tableName, String row, String[] fields, String[] values): add a record. Each fields[i] names a column as "family:qualifier" (or a bare family name), and values[i] is written to that cell of row `row`.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;

public class AddRecord {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    public static void init() { // open the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void close() { // close the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void addRecord(String tableName, String row, String[] fields, String[] values) throws IOException {
        init(); // connect to HBase
        Table table = connection.getTable(TableName.valueOf(tableName)); // open the table
        Put put = new Put(row.getBytes()); // one Put object for the whole row
        for (int i = 0; i < fields.length; i++) {
            String[] cols = fields[i].split(":");
            if (cols.length == 1) { // only a column family was given: use an empty qualifier
                put.addColumn(fields[i].getBytes(), "".getBytes(), values[i].getBytes());
            } else { // "family:qualifier"
                put.addColumn(cols[0].getBytes(), cols[1].getBytes(), values[i].getBytes());
            }
        }
        table.put(put); // write all cells in a single call
        table.close();
        close(); // close the connection
    }

    public static void main(String[] args) {
        String[] fields = {"Score:Math", "Score:Computer Science", "Score:English"};
        String[] values = {"90", "90", "90"};
        try {
            addRecord("grade", "S_Name", fields, values);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
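
The call in main writes three cells of the Score family to a single row, roughly equivalent to the shell command put 'grade', 'S_Name', 'Score:Math', '90' repeated for each subject. Note that the row key passed in main is the literal string "S_Name".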

(3) scanColumn(String tableName, String column): print the data of the specified column, where column is either a bare column family name or a "family:qualifier" pair.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanColumn {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    public static void init() { // open the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void close() { // close the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void showResult(Result result) { // print every cell of one row
        Cell[] cells = result.rawCells();
        for (int i = 0; i < cells.length; i++) {
            System.out.println("RowName:" + new String(CellUtil.cloneRow(cells[i])));          // row key
            System.out.println("ColumnName:" + new String(CellUtil.cloneQualifier(cells[i]))); // column qualifier
            System.out.println("Value:" + new String(CellUtil.cloneValue(cells[i])));          // cell value
            System.out.println("Column Family:" + new String(CellUtil.cloneFamily(cells[i]))); // column family
            System.out.println();
        }
    }

    public static void scanColumn(String tableName, String column) {
        init();
        try {
            Table table = connection.getTable(TableName.valueOf(tableName));
            Scan scan = new Scan();
            String[] cols = column.split(":");
            if (cols.length == 1) { // a bare family name: scan the whole column family
                scan.addFamily(Bytes.toBytes(column));
            } else { // "family:qualifier": scan a single column
                scan.addColumn(Bytes.toBytes(cols[0]), Bytes.toBytes(cols[1]));
            }
            ResultScanner scanner = table.getScanner(scan);
            for (Result result = scanner.next(); result != null; result = scanner.next()) {
                showResult(result);
            }
            scanner.close();
            table.close();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            close();
        }
    }

    public static void main(String[] args) {
        scanColumn("Student", "S_Age");
    }
}
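
With the bare family name used in main, every cell of the S_Age family of Student is printed; the shell equivalent is scan 'Student', {COLUMNS => 'S_Age'}.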

(4) modifyData(String tableName, String row, String column, String value): update the specified cell of table tableName.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;

public class ModifyData {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    public static void init() { // open the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void close() { // close the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void modifyData(String tableName, String row, String column, String value) throws IOException {
        init();
        Table table = connection.getTable(TableName.valueOf(tableName));
        Put put = new Put(row.getBytes()); // "modifying" a cell is just writing a newer version
        String[] cols = column.split(":");
        if (cols.length == 1) { // only a column family was given: use an empty qualifier
            put.addColumn(column.getBytes(), "".getBytes(), value.getBytes());
        } else { // "family:qualifier"
            put.addColumn(cols[0].getBytes(), cols[1].getBytes(), value.getBytes());
        }
        table.put(put);
        table.close();
        close();
    }

    public static void main(String[] args) {
        try {
            modifyData("Student", "1", "S_Name", "Tom");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
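
HBase has no in-place update: modifyData simply writes a new version of the cell with a fresher timestamp, and subsequent reads return the latest version by default. The shell equivalent of the call in main is put 'Student', '1', 'S_Name', 'Tom'.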

(5) deleteRow(String tableName, String row): delete the specified row of table tableName.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;

public class DeleteRow {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    public static void init() { // open the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void close() { // close the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void deleteRow(String tableName, String row) throws IOException {
        init();
        Table table = connection.getTable(TableName.valueOf(tableName));
        Delete delete = new Delete(row.getBytes()); // row key only: delete the whole row
        table.delete(delete);
        table.close();
        close();
    }

    public static void main(String[] args) {
        try {
            deleteRow("Student", "3");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
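
A Delete built from only the row key removes every cell of that row; the shell equivalent is deleteall 'Student', '3'. To remove just one column or one family instead, call Delete.addColumn or Delete.addFamily on the Delete before passing it to table.delete.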