7. HBase Operations and Programming
Complete the following using HBase Shell commands or the HBase Java API:

1. Tasks (a minimal Java sketch of these five operations follows the list):
- list information about all tables in HBase, e.g. their names;
- print all records of a specified table to the terminal;
- add and delete a specified column family or column in an existing table;
- clear all records of a specified table;
- count the number of rows in a table.
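The five tasks map directly onto the `Admin` and `Table` APIs. The sketch below is a minimal illustration, assuming an HBase 2.x client and an already-opened `Connection`/`Admin` like the ones created by the `init()` methods in task 3; the class and method names here are illustrative, not from the textbook.

```java
import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

public class TaskSketch {
    // (a) list the names of all tables
    static void listTables(Admin admin) throws IOException {
        for (TableName tn : admin.listTableNames()) {
            System.out.println(tn.getNameAsString());
        }
    }

    // (b) print every record of a table via a full scan
    static void printAll(Connection conn, String tableName) throws IOException {
        Table table = conn.getTable(TableName.valueOf(tableName));
        try (ResultScanner scanner = table.getScanner(new Scan())) {
            for (Result r : scanner) {
                System.out.println(r);
            }
        }
    }

    // (c) add / delete a column family on an existing table
    static void addFamily(Admin admin, String tableName, String family) throws IOException {
        admin.addColumnFamily(TableName.valueOf(tableName),
                ColumnFamilyDescriptorBuilder.of(family));
    }

    static void deleteFamily(Admin admin, String tableName, String family) throws IOException {
        admin.deleteColumnFamily(TableName.valueOf(tableName), Bytes.toBytes(family));
    }

    // (d) clear all records (the table must be disabled before truncating)
    static void truncate(Admin admin, String tableName) throws IOException {
        TableName tn = TableName.valueOf(tableName);
        admin.disableTable(tn);
        admin.truncateTable(tn, false);  // false = do not preserve region splits
    }

    // (e) count the rows by scanning (fine for coursework-sized tables)
    static long countRows(Connection conn, String tableName) throws IOException {
        long count = 0;
        Table table = conn.getTable(TableName.valueOf(tableName));
        try (ResultScanner scanner = table.getScanner(new Scan())) {
            for (Result r : scanner) {
                count++;
            }
        }
        return count;
    }
}
```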
2. Take the relational tables and data from the textbook (top of p. 92), convert them into tables suitable for HBase storage, and insert the data; a hypothetical sketch of the mapping follows.
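The textbook data is not reproduced here; the usual mapping is that the relational primary key becomes the HBase row key and related columns are grouped under column families. The sketch below is one assumed illustration of that pattern, reusing the `createTable()` and `addRecord()` methods implemented in task 3; the table layout, row key "2020001", and all values are placeholders, not the textbook's data.

```java
// Hypothetical relational-to-HBase mapping: one HBase table "Student",
// one column family per single-valued attribute, plus a "Score" family
// with one column qualifier per course. All names and values are placeholders.
String[] families = {"S_Name", "S_Sex", "S_Age", "Score"};
CreateTable.createTable("Student", families);

// Basic attributes: bare family names get an empty qualifier in addRecord()
addRecord.addRecord("Student", "2020001",
        new String[]{"S_Name", "S_Sex", "S_Age"},
        new String[]{"Tom", "male", "20"});

// Scores: one qualifier per course under the "Score" family
addRecord.addRecord("Student", "2020001",
        new String[]{"Score:Math", "Score:English"},
        new String[]{"90", "85"});
```

An equally valid design would put S_Name/S_Sex/S_Age as qualifiers under a single "Info" family; HBase leaves that grouping to the schema designer.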
3. Implement the following functions in code (textbook, bottom of p. 92):
(1) createTable(String tableName, String[] fields): create a table.
```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;

public class CreateTable {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    public static void init() {
        // Open the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void close() {
        // Close the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void createTable(String tableName, String[] fields) throws IOException {
        init();
        TableName tablename = TableName.valueOf(tableName);  // the table name
        if (admin.tableExists(tablename)) {
            // Drop the existing table first, then recreate it
            System.out.println("table already exists!");
            admin.disableTable(tablename);
            admin.deleteTable(tablename);
        }
        TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tablename);
        for (int i = 0; i < fields.length; i++) {
            // Each element of fields becomes one column family
            ColumnFamilyDescriptor family =
                    ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(fields[i])).build();
            tableDescriptor.setColumnFamily(family);
        }
        admin.createTable(tableDescriptor.build());
        close();
    }

    public static void main(String[] args) {
        String[] fields = {"id", "score"};
        try {
            createTable("test", fields);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
```
(2) addRecord(String tableName, String row, String[] fields, String[] values): add a record to a row.
```java
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;

public class addRecord {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    public static void init() {
        // Open the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void close() {
        // Close the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void addRecord(String tableName, String row, String[] fields, String[] values)
            throws IOException {
        init();                                                           // connect to HBase
        Table table = connection.getTable(TableName.valueOf(tableName));  // open the table
        Put put = new Put(row.getBytes());                                // one Put for the whole row
        for (int i = 0; i < fields.length; i++) {
            // "family:qualifier" is split in two; a bare family name gets an empty qualifier
            String[] cols = fields[i].split(":");
            if (cols.length == 1) {
                put.addColumn(fields[i].getBytes(), "".getBytes(), values[i].getBytes());
            } else {
                put.addColumn(cols[0].getBytes(), cols[1].getBytes(), values[i].getBytes());
            }
        }
        table.put(put);  // write all cells of the row in one call
        close();         // close the connection
    }

    public static void main(String[] args) {
        String[] fields = {"Score:Math", "Score:Computer Science", "Score:English"};
        String[] values = {"90", "90", "90"};
        try {
            addRecord("grade", "S_Name", fields, values);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
```
(3) scanColumn(String tableName, String column): browse the data of a given column.
```java
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class scanColumn {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    public static void init() {
        // Open the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void close() {
        // Close the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void showResult(Result result) {
        Cell[] cells = result.rawCells();
        for (int i = 0; i < cells.length; i++) {
            System.out.println("RowName:" + new String(CellUtil.cloneRow(cells[i])));           // row key
            System.out.println("ColumnName:" + new String(CellUtil.cloneQualifier(cells[i]))); // column name
            System.out.println("Value:" + new String(CellUtil.cloneValue(cells[i])));          // value
            System.out.println("Column Family:" + new String(CellUtil.cloneFamily(cells[i]))); // column family
            System.out.println();
        }
    }

    public static void scanColumn(String tableName, String column) {
        init();
        try {
            Table table = connection.getTable(TableName.valueOf(tableName));
            Scan scan = new Scan();
            // A bare family name scans the whole family; "family:qualifier" scans one column
            String[] cols = column.split(":");
            if (cols.length == 1) {
                scan.addFamily(Bytes.toBytes(column));
            } else {
                scan.addColumn(Bytes.toBytes(cols[0]), Bytes.toBytes(cols[1]));
            }
            ResultScanner scanner = table.getScanner(scan);
            for (Result result = scanner.next(); result != null; result = scanner.next()) {
                showResult(result);
            }
            scanner.close();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            close();
        }
    }

    public static void main(String[] args) {
        scanColumn("Student", "S_Age");
    }
}
```
(4) modifyData(String tableName, String row, String column, String value): modify the data of a cell.
```java
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;

public class modifyData {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    public static void init() {
        // Open the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void close() {
        // Close the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void modifyData(String tableName, String row, String column, String value)
            throws IOException {
        init();
        Table table = connection.getTable(TableName.valueOf(tableName));
        // In HBase an update is just a new Put: the newest version shadows the old value
        Put put = new Put(row.getBytes());
        String[] cols = column.split(":");
        if (cols.length == 1) {
            put.addColumn(column.getBytes(), "".getBytes(), value.getBytes());
        } else {
            put.addColumn(cols[0].getBytes(), cols[1].getBytes(), value.getBytes());
        }
        table.put(put);
        close();
    }

    public static void main(String[] args) {
        try {
            modifyData("Student", "1", "S_Name", "Tom");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
```
(5) deleteRow(String tableName, String row): delete a row.
```java
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;

public class deleteRow {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    public static void init() {
        // Open the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void close() {
        // Close the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void deleteRow(String tableName, String row) throws IOException {
        init();
        Table table = connection.getTable(TableName.valueOf(tableName));
        // Deleting by row key removes every cell in the row
        Delete delete = new Delete(row.getBytes());
        table.delete(delete);
        close();
    }

    public static void main(String[] args) {
        try {
            deleteRow("Student", "3");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
```