java api操作elasticsearch


elasticsearch的maven依赖
<dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
            <version>2.1.1</version>
            <!-- 注意：不要加 <scope>test</scope>，否则 src/main 下的代码无法使用该依赖 -->
</dependency>
package com.cheguo.merchant.app.toolkit;

import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

/**
 * Created with IntelliJ IDEA.
 *
 * @author: yangxianyu
 * Date: 2017/12/14
 * Time: 下午3:13
 * Description:
 */
/**
 * Minimal Elasticsearch 2.x TransportClient demo: search ("get"), partial
 * document update ("put") and mapping creation ("setMapping") against the
 * index "carsrc", type "carsource".
 */
public class TestElk {

    /** ES node address; kept in one place instead of inline in main. */
    private static final String ES_HOST = "10.10.13.7";
    private static final int ES_PORT = 9300;

    /**
     * Optional-sign decimal number, e.g. "7", "-3.5", "12.0".
     * Compiled once ({@link Pattern} is thread-safe). The original pattern
     * "-?[0-9]+.?[0-9]+" had an unescaped '.' (matched ANY character, so
     * "1a2" passed) and required at least two digits, so "5" was rejected.
     */
    private static final Pattern NUMERIC_PATTERN = Pattern.compile("-?\\d+(\\.\\d+)?");

    public static void main(String[] args) {
        Client client = null;
        try {
            client = TransportClient.builder().build()
                    .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(ES_HOST), ES_PORT));
            get(client);
            //put(client);
            //setMapping(client);
        } catch (UnknownHostException e) {
            e.printStackTrace();
        } finally {
            // Close in finally so the transport client is released even when a query throws.
            if (client != null) {
                client.close();
            }
        }
    }

    /**
     * Runs a multi-field match query over car_name / cars (plus sale_price
     * when the query string is numeric) and prints the first page of hits.
     *
     * @param client connected Elasticsearch client
     */
    public static void get(Client client) {
        String query = "奥迪";
        // Build a field array of the exact size needed. The original used a
        // fixed String[3] and left str[2] == null for non-numeric queries,
        // passing a null field name to multiMatchQuery.
        String[] fields = isNumeric(query)
                ? new String[]{"car_name", "cars", "sale_price"}
                : new String[]{"car_name", "cars"};
        QueryBuilder queryBuilder = QueryBuilders.multiMatchQuery(query, fields)
                // AND so the analyzer does not match docs containing only "奥" or "迪"
                .operator(MatchQueryBuilder.Operator.AND);
        SearchResponse response = client.prepareSearch("carsrc").setTypes("carsource")
                .setSize(3)   // page size
                .setFrom(0)   // offset of the first hit (page 0 of size 3)
                .setQuery(queryBuilder).execute().actionGet();
        SearchHits searchHits = response.getHits();
        if (searchHits.totalHits() > 0) {
            for (SearchHit searchHit : searchHits) {
                System.out.println("total>>>>" + searchHit.getType() + "source>>>" + searchHit.getSourceAsString());
            }
        } else {
            System.out.println("total 0");
        }
    }

    /**
     * Partially updates document id 243 in carsrc/carsource with a full set
     * of car-source fields (most left empty in this demo).
     *
     * @param client connected Elasticsearch client
     */
    public static void put(Client client) {
        UpdateRequest uRequest = new UpdateRequest();
        try {
            //uRequest = client.prepareUpdate("carsrc","carsource","243").setDoc(model2XContent()).get();
            uRequest.index("carsrc");
            uRequest.type("carsource");
            uRequest.id("243");
            // NOTE(review): java.util.Date is used here because that is what the
            // existing index appears to store for gmt_* fields — confirm before
            // migrating to java.time.
            uRequest.doc(jsonBuilder().startObject()
                    .field("car_source_id","232")
                    .field("car_source_code","")
                    .field("sales_region","")
                    .field("car_code","")
                    .field("car_id", "78732")
                    .field("car_name", "温州市")
                    .field("cars","")
                    .field("msrp","")
                    .field("sale_price","")
                    .field("over_msrp","")
                    .field("car_color","")
                    .field("car_trim_color","")
                    .field("remark","")
                    .field("source_type","")
                    .field("source_kind","")
                    .field("source_status","")
                    .field("product_date","")
                    .field("procedure","")
                    .field("car_vin","")
                    .field("area_code","")
                    .field("area_name","")
                    .field("status","")
                    .field("company_id","")
                    .field("company_name","")
                    .field("order_user_id","")
                    .field("pay_user_id","")
                    .field("transport","")
                    .field("plate_limit","")
                    .field("validate","")
                    .field("valid_status","")
                    .field("buy_deposit","")
                    .field("sale_deposit","")
                    .field("pay_order_id","")
                    .field("score","")
                    .field("company_type","")
                    .field("come_from","")
                    .field("brand_logo","")
                    .field("pic_flag","")
                    .field("cost_price","")
                    .field("mini_price","")
                    .field("reason","")
                    .field("real_price","")
                    .field("number","")
                    .field("bill","")
                    .field("source_flag","2")
                    .field("gmt_create",new Date())
                    .field("gmt_modify",new Date())
                    .field("price_type","")
                    .field("price_value","")
                    .endObject());
            // Blocks until the update round-trips; throws on failure.
            client.update(uRequest).get();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Pushes a mapping for type "carsource": whitespace-analyzed string
     * fields car_name and cars.
     *
     * <p>The original builder nested "cars" INSIDE "car_name" and omitted the
     * "properties" wrapper, producing a mapping document Elasticsearch does
     * not accept; it also dropped the response without checking it.
     *
     * @param client connected Elasticsearch client
     */
    public static void setMapping(Client client) {
        try {
            PutMappingResponse response = client.admin().indices()
                    .preparePutMapping("carsrc")
                    .setType("carsource")
                    .setSource(XContentFactory.jsonBuilder()
                            .startObject()
                                .startObject("properties")
                                    .startObject("car_name")
                                        .field("type", "string")
                                        .field("analyzer", "whitespace")
                                    .endObject()
                                    .startObject("cars")
                                        .field("type", "string")
                                        .field("analyzer", "whitespace")
                                    .endObject()
                                .endObject()
                            .endObject())
                    .execute().actionGet();
            System.out.println("mapping acknowledged: " + response.isAcknowledged());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Serializes a small demo document to a JSON string.
     *
     * @return the JSON payload, or "" if serialization failed
     */
    public static String model2json() {
        String jsondata = "";
        try {
            XContentBuilder xContentBuilder = jsonBuilder();
            xContentBuilder.startObject()
                    .field("car_id", "78732")
                    .field("car_source_id", "243")
                    .field("sales_region", "温州市")
                    .endObject();
            // Original bug: the builder result was discarded and "" was
            // always returned; string() extracts the built JSON (ES 2.x API).
            jsondata = xContentBuilder.string();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return jsondata;
    }

    /**
     * Placeholder document builder (field names intentionally left blank in
     * the original demo); fill in real field names before use.
     *
     * @return a builder holding the demo object, or null if building failed
     */
    public static XContentBuilder model2XContent() {
        XContentBuilder xContentBuilder = null;
        try {
            xContentBuilder = jsonBuilder();
            xContentBuilder.startObject().field("","")
                    .field("","")
                    .endObject();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return xContentBuilder;
    }

    /**
     * Returns true when {@code str} is an optional-sign decimal number
     * ("5", "-3.14", "12.0"); false otherwise (including null-length input).
     *
     * @param str candidate string, must be non-null
     * @return whether the whole string is numeric
     */
    public static boolean isNumeric(String str) {
        return NUMERIC_PATTERN.matcher(str).matches();
    }
}
时间: 2024-11-14 11:21:36

java api操作elasticsearch的相关文章

大数据技术之_20_Elasticsearch学习_01_概述 + 快速入门 + Java API 操作 + 创建、删除索引 + 新建、搜索、更新删除文档 + 条件查询 + 映射操作

一 概述1.1 什么是搜索?1.2 如果用数据库做搜索会怎么样?1.3 什么是全文检索和 Lucene?1.4 什么是 Elasticsearch?1.5 Elasticsearch 的适用场景1.6 Elasticsearch 的特点1.7 Elasticsearch 的核心概念1.7.1 近实时1.7.2 Cluster(集群)1.7.3 Node(节点)1.7.4 Index(索引 --> 数据库)1.7.5 Type(类型 --> 表)1.7.6 Document(文档 -->

Java API操作HDFS

HDFS是存储数据的分布式文件系统,对HDFS的操作,就是对文件系统的操作,除了用HDFS的shell命令对文件系统进行操作,我们也可以利用Java API对文件系统进行操作,比如文件的创建.删除.修改权限等等,还有文件夹的创建.删除.重命名等等. 使用Java API对文件系统进行操作主要涉及以下几个类: 1.Configuration类:该类的对象封装了客户端或者服务端的配置. 2.FileSystem类:该类的对象是一个文件系统对象,可以利用该对象的一些方法来对文件进行操作,FileSys

Hadoop读书笔记(三)Java API操作HDFS

Hadoop读书笔记(一)Hadoop介绍:http://blog.csdn.net/caicongyang/article/details/39898629 Hadoop读书笔记(二)HDFS的shell操作:http://blog.csdn.net/caicongyang/article/details/41253927 JAVA URL 操作HDFS OperateByURL.java package hdfs; import java.io.InputStream; import jav

HDFS基础和java api操作

1. 概括 适合一次写入多次查询情况,不支持并发写情况 通过hadoop shell 上传的文件存放在DataNode的block中,通过linux shell只能看见block,看不见文件(HDFS将客户端的大文件存放在很多节点的数据块中,Block本质上是一个逻辑概念,它是hdfs读写数据的基本单位) HDFS中,如果一个文件小于一个数据块的大小,并不占用整个数据块存储空间 2. fs 可以使用hdfs shell操作hdfs,常用 fs命令如下: eg: hadoop fs -cat fi

通过HTTP RESTful API 操作elasticsearch搜索数据

通过HTTP RESTful API 操作elasticsearch搜索数据

使用java api操作Hadoop文件 Robbin

1 package cn.hadoop.fs; 2 3 import java.io.IOException; 4 import java.io.InputStream; 5 import java.net.URI; 6 import java.net.URISyntaxException; 7 8 import org.apache.hadoop.conf.Configuration; 9 import org.apache.hadoop.fs.FSDataOutputStream; 10 i

hive-通过Java API操作

通过Java API操作hive,算是测试hive第三种对外接口 测试hive 服务启动 1 package org.admln.hive; 2 3 import java.sql.SQLException; 4 import java.sql.Connection; 5 import java.sql.ResultSet; 6 import java.sql.Statement; 7 import java.sql.DriverManager; 8 9 public class testHiv

HBase 6、用Phoenix Java api操作HBase

开发环境准备:eclipse3.5.jdk1.7.window8.hadoop2.2.0.hbase0.98.0.2.phoenix4.3.0 1.从集群拷贝以下文件:core-site.xml.hbase-site.xml.hdfs-site.xml文件放到工程src下 2.把phoenix的phoenix-4.3.0-client.jar和phoenix-core-4.3.0.jar添加到工程classpath 3.配置集群中各节点的hosts文件,把客户端的hostname:IP添加进去

Java API操作ZooKeeper

创建会话 1 package org.zln.zk; 2 3 import org.apache.zookeeper.WatchedEvent; 4 import org.apache.zookeeper.Watcher; 5 import org.apache.zookeeper.ZooKeeper; 6 7 import java.io.IOException; 8 9 /** 10 * Created by sherry on 16/8/27. 11 */ 12 public class