ElasticSearch Java API Testing

Add the Maven dependencies

<dependency>
    <groupId>org.elasticsearch</groupId>
    <artifactId>elasticsearch</artifactId>
    <version>5.0.0</version>
</dependency>
<dependency>
    <groupId>org.elasticsearch.client</groupId>
    <artifactId>transport</artifactId>
    <version>5.0.0</version>
</dependency>
<dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-api</artifactId>
    <version>2.7</version>
</dependency>
<dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-core</artifactId>
    <version>2.7</version>
</dependency>
<dependency>
    <groupId>com.fasterxml.jackson.core</groupId>
    <artifactId>jackson-core</artifactId>
    <version>2.8.0</version>
</dependency>
<dependency>
    <groupId>com.fasterxml.jackson.core</groupId>
    <artifactId>jackson-databind</artifactId>
    <version>2.8.0</version>
</dependency>
<dependency>
    <groupId>com.fasterxml.jackson.core</groupId>
    <artifactId>jackson-annotations</artifactId>
    <version>2.8.0</version>
</dependency>

Add log4j2.properties under src/main/resources

appender.console.type = Console
appender.console.name = console
appender.console.layout.type = PatternLayout
rootLogger.level = info
rootLogger.appenderRef.console.ref = console

package com.zns.test;

import java.net.InetAddress;
import java.util.Map;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.avg.Avg;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

public class MyTest {
    public static TransportClient client=null;

    //Create an index
    public static void createIndex(String index){
        CreateIndexResponse createIndexResponse = client.admin().indices().prepareCreate(index).get();
        System.out.println(createIndexResponse.isAcknowledged()); // true means the index was created
    }
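
    // A variant sketch (not in the original post): prepareCreate also accepts index settings
    // such as shard/replica counts. The values below are illustrative assumptions.
    public static void createIndexWithSettings(String index){
        CreateIndexResponse createIndexResponse = client.admin().indices()
                .prepareCreate(index)
                .setSettings(Settings.builder()
                        .put("index.number_of_shards", 3)
                        .put("index.number_of_replicas", 1))
                .get();
        System.out.println(createIndexResponse.isAcknowledged());
    }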

    //Add a type mapping to the index
    public static void addMapping(String index,String type) throws Exception{
        // Build the mapping with XContentBuilder
        XContentBuilder builder =
            XContentFactory.jsonBuilder()
                            .startObject()
                                .field("properties")
                                    .startObject()
                                        .field("user_id")
                                            .startObject()
                                                .field("type", "integer")
                                            .endObject()
                                        .field("name")
                                            .startObject()
                                                .field("analyzer", "standard")
                                                .field("type", "text")
                                            .endObject()
                                        .field("age")
                                            .startObject()
                                                .field("type", "integer")
                                            .endObject()
                                    .endObject()
                            .endObject();
        System.out.println(builder.string());
        PutMappingRequest mappingRequest = Requests.putMappingRequest(index).source(builder).type(type);
        client.admin().indices().putMapping(mappingRequest).actionGet();
    }

    //Delete an index
    public static void deleteIndex(String index){
        DeleteIndexResponse deleteIndexResponse = client.admin().indices().prepareDelete(index).get();
        System.out.println(deleteIndexResponse.isAcknowledged()); // true means the index was deleted
    }

    //Create a document
    public static void createDoc(String index, String type) throws Exception {
        // Build the document source with XContentBuilder
        XContentBuilder builder = XContentFactory.jsonBuilder()
                .startObject()
                .field("user_id", "1")
                .field("name", "name1")
                .field("age", "1")
                .endObject();
        // setId(id): if no id is set, ES generates one automatically; setSource accepts an XContentBuilder, Map, JSON string, Java bean, etc. (a Map-based sketch follows this method)
        IndexResponse indexResponse = client.prepareIndex().setIndex(index).setType(type).setId("1").setSource(builder.string()).get();
    }
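
    // Sketch of the Map-based alternative mentioned above (not in the original post):
    // setSource can also take a Map<String, Object> instead of an XContentBuilder.
    // The field values and the id "2" are illustrative assumptions.
    public static void createDocFromMap(String index, String type) {
        Map<String, Object> source = new java.util.HashMap<String, Object>();
        source.put("user_id", 2);
        source.put("name", "name2");
        source.put("age", 2);
        IndexResponse indexResponse = client.prepareIndex().setIndex(index).setType(type).setId("2").setSource(source).get();
        System.out.println(indexResponse.status());
    }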

    //Get a document by ID
    public static String getById(String index, String type, String id) {
        GetResponse getResponse = client.prepareGet().setIndex(index).setType(type).setId(id).get();
        return getResponse.getSourceAsString();
    }

    //Search documents
    public static void query(String index, String type) {
        SearchResponse response = client.prepareSearch(index).setTypes(type)
                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                //.setQuery(QueryBuilders.termQuery("user_id", "1"))
                //.setQuery(QueryBuilders.rangeQuery("user_id").lte("100"))
                //.setQuery(QueryBuilders.rangeQuery("age").from(1).to(18))
                .setFrom(0).setSize(10).setExplain(true).get();

        SearchHits hits = response.getHits();
        System.out.println(hits.getHits().length);
        SearchHit[] hits1 = hits.getHits();
        for(SearchHit hit :hits1){
            // the document ID is hit.getId()
            Map<String, Object> source = hit.getSource();
            for(Map.Entry<String,Object> field :source.entrySet()){
                String key = field.getKey();
                System.out.println("key===="+key+"    value====="+field.getValue().toString());
            }
        }
    }

    //Update a document by ID
    public static void updateById(String index, String type, String id) throws Exception {
        XContentBuilder builder = XContentFactory.jsonBuilder()
                .startObject()
                .field("user_id", "1")
                .field("name", "name222")
                .field("age", "1")
                .endObject();
        UpdateResponse updateResponse = client.prepareUpdate(index, type, id).setDoc(builder).get();
    }

    //Delete a document by ID
    public static void deleteById(String index, String type, String id) {
        DeleteResponse deleteResponse =client
                .prepareDelete()
                .setIndex(index)
                .setType(type)
                .setId(id)
                .get();
        System.out.println(deleteResponse.status());
    }    

    //Aggregation: compute the average age
    public static void juheTest(String index,String type){
        SearchResponse response = client
                .prepareSearch(index)
                .setTypes(type)
                .addAggregation(AggregationBuilders.avg("xxx").field("age"))
                .get();
        Avg avg = response.getAggregations().get("xxx");
        System.out.println(avg.getValue());
    }
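
    // A further sketch (not in the original post): a terms aggregation groups documents
    // by a field value, similar to SQL GROUP BY. The aggregation name "by_age" is an
    // illustrative assumption.
    public static void termsAggTest(String index, String type){
        SearchResponse response = client
                .prepareSearch(index)
                .setTypes(type)
                .addAggregation(AggregationBuilders.terms("by_age").field("age"))
                .get();
        org.elasticsearch.search.aggregations.bucket.terms.Terms terms = response.getAggregations().get("by_age");
        for (org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket bucket : terms.getBuckets()) {
            System.out.println("age=" + bucket.getKey() + " count=" + bucket.getDocCount());
        }
    }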

    //Bulk insert
    public static void bulkAddTest(String index,String type) throws Exception{
        BulkRequestBuilder bulkRequest = client.prepareBulk();  

        XContentBuilder builder1 = XContentFactory.jsonBuilder()
                .startObject()
                .field("user_id", "11")
                .field("name", "name11")
                .field("age", "11")
                .endObject();
        bulkRequest.add(client.prepareIndex().setIndex(index).setType(type).setId("11").setSource(builder1.string()));

        XContentBuilder builder2 = XContentFactory.jsonBuilder()
                .startObject()
                .field("user_id", "22")
                .field("name", "name22")
                .field("age", "22")
                .endObject();
        bulkRequest.add(client.prepareIndex().setIndex(index).setType(type).setId("22").setSource(builder2.string()));

        BulkResponse bulkResponse = bulkRequest.get();
        if (bulkResponse.hasFailures()) {
            // print the aggregated failure message; individual items can be inspected via bulkResponse.getItems()
            System.out.println(bulkResponse.buildFailureMessage());
        }
    }

    //Highlighting
    public static void highlightTest(String index,String type,String keyword,Integer from,Integer size){
        HighlightBuilder highlightBuilder = new HighlightBuilder();
        highlightBuilder.preTags("<span style=\"color:red\">");
        highlightBuilder.postTags("</span>");
        highlightBuilder.field("*");
        highlightBuilder.requireFieldMatch(false);

        SearchRequestBuilder builder = client.prepareSearch(index);
        builder.setTypes(type);
        builder.setFrom(from);
        builder.setSize(size);
        builder.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
        builder.setExplain(true);
        builder.setQuery(QueryBuilders.multiMatchQuery(keyword,"name"));
        builder.highlighter(highlightBuilder);

        SearchResponse searchResponse = builder.get();
        //Get the result set
        SearchHits searchHits = searchResponse.getHits();
        System.out.println("Total hits: " + searchHits.getTotalHits());
        SearchHit[] hits2 = searchHits.getHits();
        //Iterate over the hits
        for(SearchHit hit:hits2){
            System.out.println("Document source as a String:");
            System.out.println(hit.getSourceAsString());
            System.out.println("Highlight fields as a Map:");
            System.out.println(hit.getHighlightFields());
            System.out.println("Iterating the highlight fragments:");
            Text[] text = hit.getHighlightFields().get("name").getFragments();
            for (Text str : text) {
                System.out.println(str.string());
            }
        }
    }

    public static void main(String[] args) throws Exception {
        // Set the cluster name
        Settings settings = Settings.builder().put("cluster.name", "my-es").build();
        // Create the transport client
        client = new PreBuiltTransportClient(settings)
                .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("127.0.0.1"), 9300));

        String index="znsindex";
        String type="znstype";

        //createIndex(index);
        //addMapping(index,type);
        //deleteIndex(index);
        //createDoc(index, type);
        //String result=getById(index, type, "1");
        //System.out.println(result);
        //deleteById(index, type, "1");
        //updateById(index, type, "1");
        //juheTest(index, type);
        //bulkAddTest(index, type);
        //highlightTest(index, type,"name1",0,10);
        //query(index, type);

        // Close the client to release its resources when finished
        client.close();
    }
}

Original article: https://www.cnblogs.com/zengnansheng/p/10389722.html
