Hbase操作与编程使用
时间:2020-11-21
本文章向大家介绍Hbase操作与编程使用,主要包括Hbase操作与编程使用使用实例、应用技巧、基本知识点总结和需要注意事项,具有一定的参考价值,需要的朋友可以参考一下。
(1)createTable(String tableName, String[] fields)创建表。
import
org.apache.hadoop.conf.Configuration;
import
org.apache.hadoop.hbase.HBaseConfiguration;
import
org.apache.hadoop.hbase.TableName;
import
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import
org.apache.hadoop.hbase.client.Connection;
import
org.apache.hadoop.hbase.client.Admin;
import
org.apache.hadoop.hbase.client.ConnectionFactory;
import
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import
org.apache.hadoop.hbase.util.Bytes;
import
java.io.IOException;
public class CreateTable {
    public static Configuration configuration;
    public static Connection connection;
    public static Admin admin;

    /** Opens the HBase connection and Admin handle against the configured rootdir. */
    public static void init() {
        // 建立连接 -> establish the connection
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Closes the Admin handle and the connection; close failures are only logged. */
    public static void close() {
        // 关闭连接 -> tear down the connection
        try {
            if (admin != null) {
                admin.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates table {@code tableName} with one column family per entry in
     * {@code fields}. If a table of that name already exists it is disabled
     * and deleted first, then recreated from scratch.
     *
     * @param tableName name of the table to (re)create
     * @param fields    column family names, one family per array element
     * @throws IOException if any HBase admin operation fails
     */
    public static void createTable(String tableName, String[] fields) throws IOException {
        init();
        try {
            TableName tablename = TableName.valueOf(tableName); // 定义表名
            if (admin.tableExists(tablename)) {
                System.out.println("table is exists!");
                // An enabled table cannot be deleted; disable it first.
                admin.disableTable(tablename);
                admin.deleteTable(tablename);
            }
            TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tablename);
            for (int i = 0; i < fields.length; i++) {
                ColumnFamilyDescriptor family =
                        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(fields[i])).build();
                tableDescriptor.setColumnFamily(family);
            }
            admin.createTable(tableDescriptor.build());
        } finally {
            // BUG FIX: previously close() was skipped whenever an HBase call
            // above threw IOException, leaking the connection and Admin.
            close();
        }
    }

    public static void main(String[] args) {
        String[] fields = {"id", "score"};
        try {
            createTable("test", fields);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
(2)addRecord(String tableName, String row, String[] fields, String[] values)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
|
import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Table; public class addRecord { public static Configuration configuration; public static Connection connection; public static Admin admin; public static void init(){ //建立连接 configuration = HBaseConfiguration.create(); configuration.set( "hbase.rootdir" , "hdfs://localhost:9000/hbase" ); try { connection = ConnectionFactory.createConnection(configuration); admin = connection.getAdmin(); } catch (IOException e){ e.printStackTrace(); } } public static void close(){ //关闭连接 try { if (admin != null ){ admin.close(); } if (connection != null ){ connection.close(); } } catch (IOException e){ e.printStackTrace(); } } public static void addRecord(String tableName,String row,String[] fields,String[] values) throws IOException{ init(); //连接Hbase Table table = connection.getTable(TableName.valueOf(tableName)); //表连接 Put put = new Put(row.getBytes()); //创建put对象 for ( int i= 0 ;i<fields.length;i++){ String[] cols = fields[i].split( ":" ); if (cols.length == 1 ){ put.addColumn(fields[i].getBytes(), "" .getBytes(),values[i].getBytes()); } else { put.addColumn(cols[ 0 ].getBytes(),cols[ 1 ].getBytes(),values[i].getBytes()); } table.put(put); //向表中添加数据 } close(); //关闭连接 } public static void main(String[] args){ String[] fields = { "Score:Math" , "Score:Computer Science" , "Score:English" }; String[] values = { "90" , "90" , "90" }; try { addRecord( "grade" , "S_Name" ,fields,values); } catch (IOException e){ e.printStackTrace(); } } } |
(3)scanColumn(String tableName, String column)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
|
import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes; public class scanColumn { public static Configuration configuration; public static Connection connection; public static Admin admin; public static void init(){ //建立连接 configuration = HBaseConfiguration.create(); configuration.set( "hbase.rootdir" , "hdfs://localhost:9000/hbase" ); try { connection = ConnectionFactory.createConnection(configuration); admin = connection.getAdmin(); } catch (IOException e){ e.printStackTrace(); } } public static void close(){ //关闭连接 try { if (admin != null ){ admin.close(); } if (connection != null ){ connection.close(); } } catch (IOException e){ e.printStackTrace(); } } public static void showResult(Result result){ Cell[] cells = result.rawCells(); for ( int i= 0 ;i<cells.length;i++){ System.out.println( "RowName:" + new String(CellUtil.cloneRow(cells[i]))); //打印行键 System.out.println( "ColumnName:" + new String(CellUtil.cloneQualifier(cells[i]))); //打印列名 System.out.println( "Value:" + new String(CellUtil.cloneValue(cells[i]))); //打印值 System.out.println( "Column Family:" + new String(CellUtil.cloneFamily(cells[i]))); //打印列簇 System.out.println(); } } public static void scanColumn(String tableName,String column){ init(); try { Table table = connection.getTable(TableName.valueOf(tableName)); Scan scan = new Scan(); scan.addFamily(Bytes.toBytes(column)); ResultScanner scanner = 
table.getScanner(scan); for (Result result = scanner.next();result != null ;result = scanner.next()){ showResult(result); } } catch (IOException e) { e.printStackTrace(); } finally { close(); } } public static void main(String[] args){ scanColumn( "Student" , "S_Age" ); } } |
(4)modifyData(String tableName, String row, String column)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
|
import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Table; public class modifyData { public static Configuration configuration; public static Connection connection; public static Admin admin; public static void init(){ //建立连接 configuration = HBaseConfiguration.create(); configuration.set( "hbase.rootdir" , "hdfs://localhost:9000/hbase" ); try { connection = ConnectionFactory.createConnection(configuration); admin = connection.getAdmin(); } catch (IOException e){ e.printStackTrace(); } } public static void close(){ //关闭连接 try { if (admin != null ){ admin.close(); } if (connection != null ){ connection.close(); } } catch (IOException e){ e.printStackTrace(); } } public static void modifyData(String tableName,String row,String column,String value) throws IOException{ init(); Table table = connection.getTable(TableName.valueOf(tableName)); Put put = new Put(row.getBytes()); String[] cols = column.split( ":" ); if (cols.length == 1 ){ put.addColumn(column.getBytes(), "" .getBytes(), value.getBytes()); } else { put.addColumn(cols[ 0 ].getBytes(), cols[ 1 ].getBytes(), value.getBytes()); } table.put(put); close(); } public static void main(String[] args){ try { modifyData( "Student" , "1" , "S_Name" , "Tom" ); } catch (Exception e){ e.printStackTrace(); } } } |
(5)deleteRow(String tableName, String row)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
|
import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Table; public class deleteRow { public static Configuration configuration; public static Connection connection; public static Admin admin; public static void init(){ //建立连接 configuration = HBaseConfiguration.create(); configuration.set( "hbase.rootdir" , "hdfs://localhost:9000/hbase" ); try { connection = ConnectionFactory.createConnection(configuration); admin = connection.getAdmin(); } catch (IOException e){ e.printStackTrace(); } } public static void close(){ //关闭连接 try { if (admin != null ){ admin.close(); } if (connection != null ){ connection.close(); } } catch (IOException e){ e.printStackTrace(); } } public static void deleteRow(String tableName,String row) throws IOException{ init(); Table table = connection.getTable(TableName.valueOf(tableName)); Delete delete = new Delete(row.getBytes()); table.delete(delete); close(); } public static void main(String[] args){ try { deleteRow( "Student" , "3" ); } catch (Exception e){ e.printStackTrace(); } } } |
原文地址:https://www.cnblogs.com/wwj1376195268/p/14017634.html
- JavaScript 教程
- JavaScript 编辑工具
- JavaScript 与HTML
- JavaScript 与Java
- JavaScript 数据结构
- JavaScript 基本数据类型
- JavaScript 特殊数据类型
- JavaScript 运算符
- JavaScript typeof 运算符
- JavaScript 表达式
- JavaScript 类型转换
- JavaScript 基本语法
- JavaScript 注释
- Javascript 基本处理流程
- Javascript 选择结构
- Javascript if 语句
- Javascript if 语句的嵌套
- Javascript switch 语句
- Javascript 循环结构
- Javascript 循环结构实例
- Javascript 跳转语句
- Javascript 控制语句总结
- Javascript 函数介绍
- Javascript 函数的定义
- Javascript 函数调用
- Javascript 几种特殊的函数
- JavaScript 内置函数简介
- Javascript eval() 函数
- Javascript isFinite() 函数
- Javascript isNaN() 函数
- parseInt() 与 parseFloat()
- escape() 与 unescape()
- Javascript 字符串介绍
- Javascript length属性
- javascript 字符串函数
- Javascript 日期对象简介
- Javascript 日期对象用途
- Date 对象属性和方法
- Javascript 数组是什么
- Javascript 创建数组
- Javascript 数组赋值与取值
- Javascript 数组属性和方法
- 2.通过QOpenGLWidget绘制三角形
- 树莓派基础实验16:霍尔传感器实验
- 1.opengl绘制三角形
- dubbo本地直连调试注意点
- Tomcat的使用及服务器的一些基础知识
- fastJson 之JSONObject.toJavaObject()方法不能解析嵌套自定义list对象
- 文件包含漏洞学习总结(结尾有实例)
- 树莓派基础实验17:温度传感器实验
- Java Servlet详解(体系结构+注解配置+生命周期)
- RabbitMq如何确保消息不丢失
- 《sql必知必会》——读书笔记(4)
- AkShare-债券数据-国债期货可交割券相关指标
- Linux From Scratch
- 介绍一款 API 敏捷开发工具
- java线程池(五):ForkJoinPool源码分析之一(外部提交及worker执行过程)