From 8f84339deae9abfc5149f46a6c9eecccf3db7cf8 Mon Sep 17 00:00:00 2001
From: iamazy <1448588084@qq.com>
Date: Thu, 9 May 2019 21:55:34 +0800
Subject: [PATCH] Remove the export feature (ExportAction) and revert Elasticsearch to 7.0.0
---
pom.xml | 3 +-
.../dsl/plugin/RestSqlAction.java | 12 +--
.../dsl/plugin/export/ExportAction.java | 78 -------------------
src/main/resources/plugin-security.policy | 2 +-
4 files changed, 9 insertions(+), 86 deletions(-)
delete mode 100644 src/main/java/io/github/iamazy/elasticsearch/dsl/plugin/export/ExportAction.java
diff --git a/pom.xml b/pom.xml
index c8e15cc..77425fa 100755
--- a/pom.xml
+++ b/pom.xml
@@ -10,8 +10,9 @@
https://github.com/iamazy/elasticsearch-sql
- 7.0.1
+ 7.0.0
1.8
+ ${java.version}
isql
io.github.iamazy.elasticsearch.dsl.plugin.SqlPlugin
true
diff --git a/src/main/java/io/github/iamazy/elasticsearch/dsl/plugin/RestSqlAction.java b/src/main/java/io/github/iamazy/elasticsearch/dsl/plugin/RestSqlAction.java
index 880308e..52a9258 100644
--- a/src/main/java/io/github/iamazy/elasticsearch/dsl/plugin/RestSqlAction.java
+++ b/src/main/java/io/github/iamazy/elasticsearch/dsl/plugin/RestSqlAction.java
@@ -1,6 +1,7 @@
package io.github.iamazy.elasticsearch.dsl.plugin;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.github.iamazy.elasticsearch.dsl.sql.exception.ElasticSql2DslException;
import io.github.iamazy.elasticsearch.dsl.sql.model.ElasticSqlParseResult;
import io.github.iamazy.elasticsearch.dsl.sql.parser.ElasticSql2DslParser;
@@ -13,6 +14,7 @@
import org.elasticsearch.rest.*;
import java.io.IOException;
+import java.util.concurrent.*;
/**
@@ -30,14 +32,13 @@ public class RestSqlAction extends BaseRestHandler {
restController.registerHandler(RestRequest.Method.GET, "/_isql", this);
}
-
@Override
public String getName() {
return "isql";
}
@Override
- protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient nodeClient) throws IOException {
+ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient nodeClient) {
try (XContentParser parser = restRequest.contentOrSourceParamParser()) {
parser.mapStrings().forEach((k, v) -> restRequest.params().putIfAbsent(k, v));
} catch (IOException e) {
@@ -54,9 +55,6 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
if (restRequest.path().endsWith("/_explain")) {
return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder.value(parseResult.toRequest().source())));
}
- else if(restRequest.path().endsWith("/_export")){
- return null;
- }
else {
if (parseResult.toFieldMapping() != null) {
return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder.value(nodeClient.admin().indices().getFieldMappings(parseResult.toFieldMapping()).actionGet())));
@@ -70,8 +68,10 @@ else if(restRequest.path().endsWith("/_export")){
return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder.value(nodeClient.search(parseResult.toRequest()).actionGet())));
}
}
- } catch (ElasticSql2DslException e) {
+ } catch (Exception e) {
return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, XContentType.JSON.mediaType(), "{\"error\":\"" + e.getMessage() + "\"}"));
}
}
+
+
}
diff --git a/src/main/java/io/github/iamazy/elasticsearch/dsl/plugin/export/ExportAction.java b/src/main/java/io/github/iamazy/elasticsearch/dsl/plugin/export/ExportAction.java
deleted file mode 100644
index 42cb316..0000000
--- a/src/main/java/io/github/iamazy/elasticsearch/dsl/plugin/export/ExportAction.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package io.github.iamazy.elasticsearch.dsl.plugin.export;
-
-import io.github.iamazy.elasticsearch.dsl.cons.CoreConstants;
-import lombok.extern.slf4j.Slf4j;
-import org.elasticsearch.action.search.*;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.common.collect.CopyOnWriteHashMap;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.search.Scroll;
-import org.elasticsearch.search.SearchHit;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.nio.file.StandardOpenOption;
-import java.util.HashMap;
-import java.util.Map;
-
-
-/**
- * @author iamazy
- * @date 2019/5/9
- * @descrition
- **/
-@Slf4j
-public class ExportAction {
-
- public static Map EXPORT_TASK_MAP=new CopyOnWriteHashMap<>();
-
- public void export(Client client, SearchRequest searchRequest,long size) throws IOException {
- final Scroll scroll = new Scroll(TimeValue.timeValueMinutes(1L));
- searchRequest.scroll(scroll);
- SearchResponse searchResponse = client.search(searchRequest).actionGet();
- String scrollId = searchResponse.getScrollId();
- SearchHit[] searchHits = searchResponse.getHits().getHits();
- String taskId=CoreConstants.OBJECT_ID_GENERATOR.generate().toString();
- File file=new File(taskId+".json");
- if(file.exists()){
- throw new IOException("任务Id已存在!!!");
- }else{
- if(!file.createNewFile()){
- throw new IOException("创建文件失败!!!");
- }
- }
- long total=searchResponse.getHits().getTotalHits().value;
- long currentSize=0L;
-
- Map currentTaskInfo=new HashMap<>(0);
- currentTaskInfo.put("taskId",taskId);
- currentTaskInfo.put("total",total);
- currentTaskInfo.put("indices",searchRequest.indices());
- while (searchHits != null && searchHits.length > 0) {
- SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
- scrollRequest.scroll(scroll);
- searchResponse = client.searchScroll(scrollRequest).actionGet();
- scrollId = searchResponse.getScrollId();
- searchHits = searchResponse.getHits().getHits();
- for(SearchHit hit:searchHits){
- Files.write(Paths.get(file.toURI()),hit.getSourceAsString().getBytes(), StandardOpenOption.APPEND);
- }
- double percent=((double) currentSize)/(double) total;
- currentTaskInfo.put("percent",percent);
- currentSize+=searchHits.length;
- EXPORT_TASK_MAP.put(taskId,currentTaskInfo);
- if(size