HBase Java API Samples (Version 1.3.1, New API)

The usage of the following Java APIs has been verified:

1. Create a table

2. Create a table (pre-split regions)

3. Single put

4. Batch put

5. Batch put (write buffer)

6. Single get

7. Batch get

8. Simple scan

For the complete code, see GitHub:

https://github.com/quchunhui/hbase_sample

pom.xml file:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <artifactId>hbase_sample</artifactId>
    <groupId>hbase_sample</groupId>
    <version>1.0</version>
    <modelVersion>4.0.0</modelVersion>
    <dependencies>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>1.3.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-server</artifactId>
            <version>1.3.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-common</artifactId>
            <version>1.3.1</version>
        </dependency>
    </dependencies>
    <build>
        <sourceDirectory>src/main/java</sourceDirectory>
        <outputDirectory>target/classes</outputDirectory>
        <plugins>
            <plugin>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.1</version>
                <configuration>
                    <source>1.7</source>
                    <target>1.7</target>
                </configuration>
            </plugin>
            <plugin>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>2.4</version>
                <configuration>
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
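
The samples below all reference a small constants helper class from the GitHub repository that holds the table and column-family names. A minimal sketch of it (the actual values used in the repository may differ):

public class constants {
    public static final String TABLE_NAME = "TEST1";
    public static final String COLUMN_FAMILY_DF = "df";
    public static final String COLUMN_FAMILY_EX = "ex";
}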

===Create a Table===

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class create_table_sample1 {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.80,192.168.1.81,192.168.1.82");
        Connection connection = ConnectionFactory.createConnection(conf);
        Admin admin = connection.getAdmin();
        HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("TEST1"));

        HColumnDescriptor family1 = new HColumnDescriptor(constants.COLUMN_FAMILY_DF.getBytes());
        family1.setTimeToLive(2 * 60 * 60 * 24);     // TTL in seconds (2 days)
        family1.setMaxVersions(2);                   // max versions kept per cell
        desc.addFamily(family1);

        HColumnDescriptor family2 = new HColumnDescriptor(constants.COLUMN_FAMILY_EX.getBytes());
        family2.setTimeToLive(3 * 60 * 60 * 24);     // TTL in seconds (3 days)
        family2.setMaxVersions(3);                   // max versions kept per cell
        desc.addFamily(family2);

        admin.createTable(desc);
        admin.close();
        connection.close();
    }
}

===Create a Table (Pre-split Regions)===

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;

public class create_table_sample2 {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.80,192.168.1.81,192.168.1.82");
        Connection connection = ConnectionFactory.createConnection(conf);
        Admin admin = connection.getAdmin();

        TableName table_name = TableName.valueOf("TEST1");
        if (admin.tableExists(table_name)) {
            admin.disableTable(table_name);
            admin.deleteTable(table_name);
        }

        HTableDescriptor desc = new HTableDescriptor(table_name);
        HColumnDescriptor family1 = new HColumnDescriptor(constants.COLUMN_FAMILY_DF.getBytes());
        family1.setTimeToLive(3 * 60 * 60 * 24);     // TTL in seconds (3 days)
        family1.setBloomFilterType(BloomType.ROW);   // row-level bloom filter
        family1.setMaxVersions(3);                   // max versions kept per cell
        desc.addFamily(family1);
        HColumnDescriptor family2 = new HColumnDescriptor(constants.COLUMN_FAMILY_EX.getBytes());
        family2.setTimeToLive(2 * 60 * 60 * 24);     // TTL in seconds (2 days)
        family2.setBloomFilterType(BloomType.ROW);   // row-level bloom filter
        family2.setMaxVersions(2);                   // max versions kept per cell
        desc.addFamily(family2);

        byte[][] splitKeys = {
                Bytes.toBytes("row01"),
                Bytes.toBytes("row02"),
                Bytes.toBytes("row04"),
                Bytes.toBytes("row06"),
                Bytes.toBytes("row08"),
        };

        admin.createTable(desc, splitKeys);
        admin.close();
        connection.close();
    }
}
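
Five split keys produce six regions. To confirm the layout after creation, the regions can be listed through the same Admin handle; a minimal sketch using the 1.x API, assuming the admin and table_name variables from the sample above:

for (org.apache.hadoop.hbase.HRegionInfo region : admin.getTableRegions(table_name)) {
    // Print each region's boundaries; empty start/end keys mark the first/last region.
    System.out.println("region=" + region.getRegionNameAsString()
            + " start=" + Bytes.toString(region.getStartKey())
            + " end=" + Bytes.toString(region.getEndKey()));
}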

===Single Put===

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.Random;

public class table_put_sample1 {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.80,192.168.1.81,192.168.1.82");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf(constants.TABLE_NAME));

        Random random = new Random();
        String[] rows = new String[] {"01", "02", "03"};
        String[] names = new String[] {"zhang san", "li si", "wang wu", "wei liu"};
        String[] sexs = new String[] {"men", "women"};
        String[] heights = new String[] {"165cm", "170cm", "175cm", "180cm"};
        String[] weights = new String[] {"50kg", "55kg", "60kg", "65kg", "70kg", "75kg", "80kg"};

        Put put = new Put(Bytes.toBytes("row" + rows[random.nextInt(rows.length)]));
        String name = names[random.nextInt(names.length)];
        put.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes(), name.getBytes());
        String sex = sexs[random.nextInt(sexs.length)];
        put.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "sex".getBytes(), sex.getBytes());
        String height = heights[random.nextInt(heights.length)];
        put.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "height".getBytes(), height.getBytes());
        String weight = weights[random.nextInt(weights.length)];
        put.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "weight".getBytes(), weight.getBytes());

        table.put(put);
        table.close();
        connection.close();
    }
}
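
By default the server assigns each cell's timestamp at write time. Put also accepts an explicit timestamp, which matters when you rely on the per-family versioning configured at table creation. A minimal sketch, assuming the open table from the sample above (the row key and value are placeholders):

long ts = System.currentTimeMillis();
Put stamped = new Put(Bytes.toBytes("row01"));
// Write df:name with an explicit cell timestamp instead of the server-assigned one.
stamped.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes(), ts, "zhao qi".getBytes());
table.put(stamped);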

===Batch Put===

Approach 1:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class table_put_sample2 {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.80,192.168.1.81,192.168.1.82");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf(constants.TABLE_NAME));

        Random random = new Random();
        String[] rows = new String[] {"01", "02", "03"};
        String[] names = new String[] {"zhang san", "li si", "wang wu", "wei liu"};
        String[] sexs = new String[] {"men", "women"};
        String[] heights = new String[] {"165cm", "170cm", "175cm", "180cm"};
        String[] weights = new String[] {"50kg", "55kg", "60kg", "65kg", "70kg", "75kg", "80kg"};

        List<Put> puts = new ArrayList<>();
        for(String row : rows) {
            Put put = new Put(Bytes.toBytes("row" + row));
            String name = names[random.nextInt(names.length)];
            put.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes(), name.getBytes());
            String sex = sexs[random.nextInt(sexs.length)];
            put.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "sex".getBytes(), sex.getBytes());
            String height = heights[random.nextInt(heights.length)];
            put.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "height".getBytes(), height.getBytes());
            String weight = weights[random.nextInt(weights.length)];
            put.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "weight".getBytes(), weight.getBytes());
            puts.add(put);
        }

        table.put(puts);
        table.close();
        connection.close();
    }
}

Approach 2:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class table_put_sample3 {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.80,192.168.1.81,192.168.1.82");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf(constants.TABLE_NAME));

        Random random = new Random();
        String[] rows = new String[] {"01", "02", "03"};
        String[] names = new String[] {"zhang san", "li si", "wang wu", "wei liu"};
        String[] sexs = new String[] {"men", "women"};
        String[] heights = new String[] {"165cm", "170cm", "175cm", "180cm"};
        String[] weights = new String[] {"50kg", "55kg", "60kg", "65kg", "70kg", "75kg", "80kg"};

        List<Row> batch = new ArrayList<>();
        for(String row : rows) {
            Put put = new Put(Bytes.toBytes("row" + row));
            String name = names[random.nextInt(names.length)];
            put.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes(), name.getBytes());
            String sex = sexs[random.nextInt(sexs.length)];
            put.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "sex".getBytes(), sex.getBytes());
            String height = heights[random.nextInt(heights.length)];
            put.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "height".getBytes(), height.getBytes());
            String weight = weights[random.nextInt(weights.length)];
            put.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "weight".getBytes(), weight.getBytes());
            batch.add(put);
        }

        Object[] results = new Object[batch.size()];
        try {
            table.batch(batch, results);
        } catch (InterruptedException e) {
            e.printStackTrace();
            return;
        }

        batch.clear();
        table.close();
        connection.close();
    }
}
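
Unlike table.put(List<Put>), batch() accepts any mix of Row operations (Put, Delete, Get, Increment) and fills the results array with one outcome per operation. A minimal sketch mixing a Put and a Delete, assuming an open table as above:

List<Row> mixed = new ArrayList<>();
Put put = new Put(Bytes.toBytes("row01"));
put.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes(), "zhao qi".getBytes());
mixed.add(put);
mixed.add(new Delete(Bytes.toBytes("row03")));  // Delete is also a Row operation
Object[] outcome = new Object[mixed.size()];
table.batch(mixed, outcome);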

===Batch Put (Write Buffer)===

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class table_put_sample4 {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.80,192.168.1.81,192.168.1.82");
        conf.set("hbase.client.write.buffer", "1048576");//1M
        Connection connection = ConnectionFactory.createConnection(conf);
        BufferedMutator table = connection.getBufferedMutator(TableName.valueOf(constants.TABLE_NAME));
        System.out.print("[--------]write buffer size = " + table.getWriteBufferSize());

        Random random = new Random();
        String[] rows = new String[] {"01", "02", "03", "04", "05"};
        String[] names = new String[] {"zhang san", "li si", "wang wu", "wei liu"};
        String[] sexs = new String[] {"men", "women"};
        String[] heights = new String[] {"165cm", "170cm", "175cm", "180cm"};
        String[] weights = new String[] {"50kg", "55kg", "60kg", "65kg", "70kg", "75kg", "80kg"};

        List<Mutation> batch = new ArrayList<>();
        for(String row : rows) {
            Put put = new Put(Bytes.toBytes("row" + row));
            String name = names[random.nextInt(names.length)];
            put.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes(), name.getBytes());
            String sex = sexs[random.nextInt(sexs.length)];
            put.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "sex".getBytes(), sex.getBytes());
            String height = heights[random.nextInt(heights.length)];
            put.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "height".getBytes(), height.getBytes());
            String weight = weights[random.nextInt(weights.length)];
            put.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "weight".getBytes(), weight.getBytes());
            batch.add(put);
        }

        table.mutate(batch);
        table.flush();
        table.close();
        connection.close();
    }
}
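
Setting hbase.client.write.buffer on the Configuration changes the buffer size for every mutator created from that connection. The size can also be set per mutator through BufferedMutatorParams; a minimal sketch:

// Per-mutator buffer size instead of the connection-wide setting.
BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf(constants.TABLE_NAME))
        .writeBufferSize(1048576);  // 1 MB
BufferedMutator mutator = connection.getBufferedMutator(params);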

===Single Get===

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

public class table_get_sample1 {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.80,192.168.1.81,192.168.1.82");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf(constants.TABLE_NAME));

        Get get = new Get(("row01").getBytes());

        Result result = table.get(get);
        byte[] name = result.getValue(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes());
        byte[] weight = result.getValue(constants.COLUMN_FAMILY_EX.getBytes(), "weight".getBytes());
        System.out.print("[------]name=" + new String(name) + "\n");
        System.out.print("[------]weight=" + new String(weight) + "\n");
        table.close();
        connection.close();
    }
}
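
Because the column families were created with setMaxVersions greater than 1, a Get can also return older versions of a cell. A minimal sketch, assuming the open table from the sample above:

Get versioned = new Get(("row01").getBytes());
versioned.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes());
versioned.setMaxVersions(2);  // up to 2 versions instead of only the latest
Result r = table.get(versioned);
for (org.apache.hadoop.hbase.Cell cell : r.getColumnCells(
        constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes())) {
    System.out.println("ts=" + cell.getTimestamp()
            + " name=" + new String(org.apache.hadoop.hbase.CellUtil.cloneValue(cell)));
}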

===Batch Get===

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

import java.util.ArrayList;
import java.util.List;

public class table_get_sample4 {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.80,192.168.1.81,192.168.1.82");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf(constants.TABLE_NAME));

        List<Get> gets = new ArrayList<>();
        Get get1 = new Get(("row01").getBytes());
        get1.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes());
        get1.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "sex".getBytes());
        get1.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "height".getBytes());
        get1.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "weight".getBytes());
        gets.add(get1);
        Get get2 = new Get(("row02").getBytes());
        get2.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes());
        get2.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "sex".getBytes());
        get2.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "height".getBytes());
        get2.addColumn(constants.COLUMN_FAMILY_EX.getBytes(), "weight".getBytes());
        gets.add(get2);

        Result[] results = table.get(gets);
        for ( Result result : results) {
            byte[] name = result.getValue(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes());
            byte[] sex = result.getValue(constants.COLUMN_FAMILY_DF.getBytes(), "sex".getBytes());
            byte[] height = result.getValue(constants.COLUMN_FAMILY_EX.getBytes(), "height".getBytes());
            byte[] weight = result.getValue(constants.COLUMN_FAMILY_EX.getBytes(), "weight".getBytes());
            System.out.print("[------]name=" + new String(name) + "\n");
            System.out.print("[------]sex=" + new String(sex) + "\n");
            System.out.print("[------]height=" + new String(height) + "\n");
            System.out.print("[------]weight=" + new String(weight) + "\n");
        }
        table.close();
        connection.close();
    }
}

===Simple Scan===

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

public class table_scan_sample3 {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.80,192.168.1.81,192.168.1.82");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf(constants.TABLE_NAME));

        Scan scan = new Scan();
        scan.addColumn(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes());
        scan.addFamily(constants.COLUMN_FAMILY_EX.getBytes());

        ResultScanner rs = table.getScanner(scan);
        for (Result r = rs.next(); r != null; r = rs.next()) {
            byte[] row_key = r.getRow();
            System.out.print("[------]row_key=" + new String(row_key) + "\n");
            byte[] name = r.getValue(constants.COLUMN_FAMILY_DF.getBytes(), "name".getBytes());
            System.out.print("[------]name=" + new String(name) + "\n");
            byte[] weight = r.getValue(constants.COLUMN_FAMILY_EX.getBytes(), "weight".getBytes());
            System.out.print("[------]weight=" + new String(weight) + "\n");
        }
        rs.close();
        table.close();
        connection.close();
    }
}
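
A full-table scan touches every region. In the 1.x API the row range can be bounded with setStartRow/setStopRow (start inclusive, stop exclusive), and setCaching controls how many rows each RPC fetches. A minimal sketch, assuming the open table from the sample above:

Scan bounded = new Scan();
bounded.setStartRow(("row01").getBytes());  // inclusive
bounded.setStopRow(("row05").getBytes());   // exclusive
bounded.setCaching(100);                    // rows fetched per RPC
ResultScanner scanner = table.getScanner(bounded);
for (Result result : scanner) {
    System.out.println("row_key=" + new String(result.getRow()));
}
scanner.close();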

--END--
