Level 1: Batch Get Data
package step1;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {

    public void batchGet() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(config);
        // Row keys to look up in one batch.
        List<String> rows = new ArrayList<>();
        rows.add("2018");
        rows.add("2020");
        TableName tableName = TableName.valueOf("step1_student");
        Table table = conn.getTable(tableName);
        getData(table, rows);
        /********* End *********/
    }
    public List<String> getData(Table table, List<String> rows) throws Exception {
        // Wrap each row key in a Get so all lookups can be sent as one batch.
        List<Get> gets = new ArrayList<>();
        for (String str : rows) {
            Get get = new Get(Bytes.toBytes(str));
            gets.add(get);
        }
        List<String> values = new ArrayList<>();
        // A single call fetches all requested rows at once.
        Result[] results = table.get(gets);
        for (Result result : results) {
            System.out.println("Row:" + Bytes.toString(result.getRow()));
            for (Cell kv : result.rawCells()) {
                String family = Bytes.toString(CellUtil.cloneFamily(kv));
                String qualifier = Bytes.toString(CellUtil.cloneQualifier(kv));
                String value = Bytes.toString(CellUtil.cloneValue(kv));
                values.add(value);
                System.out.println(family + ":" + qualifier + "\t" + value);
            }
        }
        return values;
    }
}
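On the platform the grader calls batchGet() directly. To try it outside the grader, a minimal driver along these lines should work; it is a sketch, assuming HBase client configuration (hbase-site.xml or a reachable ZooKeeper quorum) is on the classpath and that step1_student already holds rows "2018" and "2020":

package step1;

// Hypothetical driver, not part of the graded solution: assumes HBase
// connection settings are on the classpath and step1_student is populated.
public class Main {
    public static void main(String[] args) throws Exception {
        new Task().batchGet();
    }
}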
Level 2: Batch Delete Data
package step2;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {

    public void batchDelete() throws Exception {
        /********* Begin *********/
        Configuration config = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(config);
        TableName tableName = TableName.valueOf("step2_table");
        Table table = conn.getTable(tableName); // get the table
        // Collect Deletes for row1-row5 and row7-row10; row6 is deliberately kept.
        List<Delete> deletes = new ArrayList<>();
        for (int i = 1; i < 6; i++) {
            byte[] row = Bytes.toBytes("row" + i);
            deletes.add(new Delete(row));
        }
        for (int i = 7; i < 11; i++) {
            byte[] row = Bytes.toBytes("row" + i);
            deletes.add(new Delete(row));
        }
        // One call removes all collected rows as a batch.
        table.delete(deletes);
        /********* End *********/
    }
}
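One caveat: Table.delete(List&lt;Delete&gt;) may modify the passed-in list (on return it holds only the deletes that failed), so the list should not be reused afterwards. A quick way to confirm the result is a full scan; the sketch below is a hypothetical check, not required by the exercise, and assumes the table originally held row1 through row10, so only row6 should be printed:

package step2;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical verification, not part of the graded solution: scan
// step2_table after batchDelete() and print the surviving row keys.
public class VerifyDelete {
    public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = conn.getTable(TableName.valueOf("step2_table"));
             ResultScanner scanner = table.getScanner(new Scan())) {
            for (Result result : scanner) {
                System.out.println(Bytes.toString(result.getRow()));
            }
        }
    }
}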
Level 3: Batch Import Data into HBase
package step3;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
public class Task {

    public void batchPut() throws Exception {
        /********* Begin *********/
        // Use HBaseConfiguration.create() (not a bare Configuration) so that
        // hbase-default.xml / hbase-site.xml are actually loaded.
        Configuration config = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(config);
        Admin admin = conn.getAdmin();
        // Create table "stu" with two column families.
        TableName tableName = TableName.valueOf("stu");
        TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("basic_info")).build();
        ColumnFamilyDescriptor family2 = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("school_info")).build();
        builder.setColumnFamily(family);
        builder.setColumnFamily(family2);
        admin.createTable(builder.build());
        List<Put> puts = new ArrayList<>();
        String[] rows = {"20181122", "20181123"};
        String[][] basic_infos = {{"阿克蒙德", "male", "1987-05-23", "tel:139********", "HUNan-ChangSha"}, {"萨格拉斯", "male", "1986-05-23", "tel:187********", "HUNan-ChangSha"}};
        String[] basic_columns = {"name", "gender", "birthday", "connect", "address"};
        String[][] school_infos = {{"ChengXing", "class 1 grade 2", "Software"}, {"ChengXing", "class 2 grade 2", "Software"}};
        String[] school_columns = {"college", "class", "object"};
        for (int x = 0; x < rows.length; x++) {
            // Build one Put per row, covering every column in both families.
            Put put = new Put(Bytes.toBytes(rows[x]));
            // Iterate over the column names; looping to basic_infos.length (the
            // row count) as before would silently drop columns.
            for (int i = 0; i < basic_columns.length; i++) {
                put.addColumn(Bytes.toBytes("basic_info"), Bytes.toBytes(basic_columns[i]), Bytes.toBytes(basic_infos[x][i]));
            }
            for (int i = 0; i < school_columns.length; i++) {
                put.addColumn(Bytes.toBytes("school_info"), Bytes.toBytes(school_columns[i]), Bytes.toBytes(school_infos[x][i]));
            }
            puts.add(put);
        }
        Table table = conn.getTable(tableName);
        // One call writes all Puts as a batch.
        table.put(puts);
        /********* End *********/
    }
}
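To confirm that both rows really carry all eight columns (five in basic_info plus three in school_info), a hypothetical read-back like the following can be used; it is not part of the graded solution and assumes the same classpath configuration as above:

package step3;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical read-back check, not part of the graded solution: fetch one
// of the rows written by batchPut() and print every cell.
public class VerifyPut {
    public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = conn.getTable(TableName.valueOf("stu"))) {
            Result result = table.get(new Get(Bytes.toBytes("20181122")));
            for (Cell cell : result.rawCells()) {
                System.out.println(Bytes.toString(CellUtil.cloneFamily(cell)) + ":"
                        + Bytes.toString(CellUtil.cloneQualifier(cell)) + " = "
                        + Bytes.toString(CellUtil.cloneValue(cell)));
            }
        }
    }
}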