HBase Learning (1): Standalone HBase Deployment and Connecting with the Java Client

Last updated: 2022-06-12 03:42:45

I have been reading up on HBase recently and decided to write something about it. Little did I know the process would be one pitfall after another; this article is the hard-won result.

HBase and Hive are often mentioned together. Put simply, HBase is the database, while Hive is a SQL-like layer that compiles queries into MapReduce jobs.

Let's start with standalone HBase deployment. I will keep it as brief as possible.

1. Give the server a hostname, turn off iptables, and add a host entry on your local machine.

Set the hostname:

vi /etc/hosts

127.0.0.1 localhost
211.155.225.210 love-kaige

vi /etc/sysconfig/network

NETWORKING=yes
HOSTNAME=love-kaige

Turn off iptables:

service iptables stop

chkconfig iptables off

Add the host entry on your local (Windows) machine:
C:\Windows\System32\drivers\etc\hosts
211.155.225.210 love-kaige

Then reboot the machine. If hostname prints love-kaige and service iptables status reports "iptables: Firewall is not running.", everything is set up correctly.
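As a quick client-side sanity check, a short Java sketch can confirm that the local hosts entry resolves; the class name HostCheck is just for illustration, and love-kaige and the IP are the values configured above.

import java.net.InetAddress;

public class HostCheck {
    public static void main(String[] args) throws Exception {
        // Resolve the HBase server's hostname through the local hosts file / DNS.
        InetAddress addr = InetAddress.getByName("love-kaige");
        // Should print the IP configured in the hosts file, e.g. 211.155.225.210.
        System.out.println(addr.getHostAddress());
    }
}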

2. Download and install the JDK and HBase.

The JDK version should not matter much. I installed jdk-7u51-linux-x64.rpm; just configure the environment variables. I'll skip the details here.

I downloaded the stable HBase release from http://mirrors.aliyun.com/apache/hbase/stable/hbase-0.94.18.tar.gz. Aliyun mirrors the Apache projects as well as various Linux distributions, which is a great convenience for developers.

Unpack HBase, then edit hbase-site.xml as follows:

<configuration>
  <property>
    <name>hbase.rootdir</name>
    <value>file:/root/hbase</value>
  </property>
</configuration>

Then go to HBase's bin directory and start it with ./start-hbase.sh.
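Once the client dependencies from step 3 below are on the classpath, it is worth doing a quick availability check against the standalone instance before writing any real client code. A minimal sketch, assuming the quorum host love-kaige configured in step 1:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class AvailabilityCheck {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Standalone HBase runs its own ZooKeeper on the same host.
        conf.set("hbase.zookeeper.quorum", "love-kaige");
        // Throws MasterNotRunningException / ZooKeeperConnectionException if HBase is unreachable.
        HBaseAdmin.checkHBaseAvailable(conf);
        System.out.println("HBase is up");
    }
}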

3. Write the Java client code.

Add the Maven dependencies:

<dependency>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-core</artifactId>
  <version>1.0.4</version>
</dependency>
<dependency>
  <groupId>org.apache.hbase</groupId>
  <artifactId>hbase</artifactId>
  <version>0.94.18</version>
</dependency>

The server and client versions must match (both are 0.94.18 here) to avoid baffling problems. The Hadoop version and the HBase version also have to correspond; here is the HBase/Hadoop compatibility matrix from the official documentation:

Table 2.1. Hadoop version support matrix

                   HBase-0.92.x   HBase-0.94.x   HBase-0.96
Hadoop-0.20.205    S              X              X
Hadoop-0.22.x      S              X              X
Hadoop-1.0.x       S              S              S
Hadoop-1.1.x       NT             S              S
Hadoop-0.23.x      X              S              NT
Hadoop-2.x         X              S              S

S = supported and tested
X = not supported
NT = not tested enough (may run, but not sufficiently tested)

Because HBase depends on Hadoop, it bundles a Hadoop jar under its lib directory. That bundled jar is only meant for standalone mode. In distributed mode, the Hadoop version on the cluster must match the one under HBase's lib directory: replace the bundled Hadoop jar with the jar from the Hadoop version you actually run, and make sure you replace it on every node of the cluster. Hadoop version mismatches show up in various ways, but they all tend to look like HBase has simply hung or died.
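To confirm which versions actually ended up on the client's classpath, a tiny check like the following can help. This is just a sketch using the VersionInfo utilities that Hadoop and HBase each ship with; the class name ClasspathVersions is for illustration only.

import org.apache.hadoop.util.VersionInfo;

public class ClasspathVersions {
    public static void main(String[] args) {
        // Hadoop version pulled in by the hadoop-core dependency.
        System.out.println("Hadoop: " + VersionInfo.getVersion());
        // HBase version pulled in by the hbase dependency.
        System.out.println("HBase:  " + org.apache.hadoop.hbase.util.VersionInfo.getVersion());
    }
}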

Here is the code:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * @author yankai913@gmail.com
 * @date 2014-4-28
 */
public class SimpleClient {

    static final String rowKey = "row1";

    static HBaseAdmin hBaseAdmin;
    static Configuration conf;

    static {
        conf = HBaseConfiguration.create();
        // Point the client at the standalone instance's ZooKeeper.
        conf.set("hbase.zookeeper.quorum", "love-kaige");
        try {
            hBaseAdmin = new HBaseAdmin(conf);
        }
        catch (MasterNotRunningException e) {
            e.printStackTrace();
        }
        catch (ZooKeeperConnectionException e) {
            e.printStackTrace();
        }
    }

    // Drop the table if it exists, then create it with the given column families.
    public static void createTable(String tableName, String[] columns) throws Exception {
        dropTable(tableName);
        HTableDescriptor hTableDescriptor = new HTableDescriptor(tableName);
        for (String columnName : columns) {
            HColumnDescriptor column = new HColumnDescriptor(columnName);
            hTableDescriptor.addFamily(column);
        }
        hBaseAdmin.createTable(hTableDescriptor);
        System.out.println("create table succeeded");
    }

    // Disable and delete the table if it exists.
    public static void dropTable(String tableName) throws Exception {
        if (hBaseAdmin.tableExists(tableName)) {
            hBaseAdmin.disableTable(tableName);
            hBaseAdmin.deleteTable(tableName);
        }
        System.out.println("drop table succeeded");
    }

    public static HTable getHTable(String tableName) throws Exception {
        return new HTable(conf, tableName);
    }

    // Put one row; the map keys are "family" or "family:qualifier".
    public static void insert(String tableName, Map<String, String> map) throws Exception {
        HTable hTable = getHTable(tableName);
        byte[] row1 = Bytes.toBytes(rowKey);
        Put p1 = new Put(row1);
        for (String columnName : map.keySet()) {
            byte[] value = Bytes.toBytes(map.get(columnName));
            String[] str = columnName.split(":");
            byte[] family = Bytes.toBytes(str[0]);
            byte[] qualifier = null;
            if (str.length > 1) {
                qualifier = Bytes.toBytes(str[1]);
            }
            p1.add(family, qualifier, value);
        }
        hTable.put(p1);
        Get g1 = new Get(row1);
        Result result = hTable.get(g1);
        System.out.println("Get: " + result);
        System.out.println("insert succeeded");
    }

    // Delete the given row, then read it back to show it is gone.
    public static void delete(String tableName, String rowKey) throws Exception {
        HTable hTable = getHTable(tableName);
        List<Delete> list = new ArrayList<Delete>();
        Delete d1 = new Delete(Bytes.toBytes(rowKey));
        list.add(d1);
        hTable.delete(list);
        Get g1 = new Get(Bytes.toBytes(rowKey));
        Result result = hTable.get(g1);
        System.out.println("Get: " + result);
        System.out.println("delete succeeded");
    }

    public static void selectOne(String tableName, String rowKey) throws Exception {
        HTable hTable = getHTable(tableName);
        Get g1 = new Get(Bytes.toBytes(rowKey));
        Result result = hTable.get(g1);
        foreach(result);
        System.out.println("selectOne end");
    }

    // Print every cell of a Result: row, family, qualifier, timestamp, value.
    private static void foreach(Result result) throws Exception {
        for (KeyValue keyValue : result.raw()) {
            StringBuilder sb = new StringBuilder();
            sb.append(Bytes.toString(keyValue.getRow())).append("\t");
            sb.append(Bytes.toString(keyValue.getFamily())).append("\t");
            sb.append(Bytes.toString(keyValue.getQualifier())).append("\t");
            sb.append(keyValue.getTimestamp()).append("\t");
            sb.append(Bytes.toString(keyValue.getValue())).append("\t");
            System.out.println(sb.toString());
        }
    }

    // Scan the whole table and print every row.
    public static void selectAll(String tableName) throws Exception {
        HTable hTable = getHTable(tableName);
        Scan scan = new Scan();
        ResultScanner resultScanner = null;
        try {
            resultScanner = hTable.getScanner(scan);
            for (Result result : resultScanner) {
                foreach(result);
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        finally {
            if (resultScanner != null) {
                resultScanner.close();
            }
        }
        System.out.println("selectAll end");
    }

    public static void main(String[] args) throws Exception {
        String tableName = "tableTest";
        // Two column families: column_A and column_B.
        String[] columns = new String[] { "column_A", "column_B" };
        createTable(tableName, columns);
        Map<String, String> map = new HashMap<String, String>();
        map.put("column_A", "AAA");
        map.put("column_B:1", "b1");
        map.put("column_B:2", "b2");
        insert(tableName, map);
        selectOne(tableName, rowKey);
        selectAll(tableName);
        delete(tableName, rowKey);
        dropTable(tableName);
    }
}
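One thing the example above glosses over: HTable and HBaseAdmin hold connections to ZooKeeper and the region servers, so for anything beyond a throwaway test they should be closed when you are done. A minimal sketch of the idea (the class name CloseExample is for illustration only and it assumes the tableTest table still exists):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;

public class CloseExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "love-kaige");
        HTable hTable = new HTable(conf, "tableTest");
        try {
            // ... puts, gets and scans go here ...
        }
        finally {
            // Release the underlying connection resources.
            hTable.close();
        }
    }
}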

The practice code can be found here.