Skip to content

Commit

Permalink
Use _doc doc type instead of doc
Browse files Browse the repository at this point in the history
Closes dadoonet#552.
  • Loading branch information
dadoonet committed Jul 13, 2018
1 parent 8772671 commit d8616f6
Show file tree
Hide file tree
Showing 20 changed files with 35 additions and 33 deletions.
4 changes: 2 additions & 2 deletions cli/src/test/resources/legacy/2_0/david.json
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@
"port" : 9200
} ],
"index" : "david",
"type" : "doc",
"type" : "_doc",
"bulk_size" : 100,
"flush_interval" : "5s"
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ public class FsCrawlerImpl {
@Deprecated
public static final String INDEX_TYPE_FOLDER = "folder";
@Deprecated
public static final String INDEX_TYPE_DOC = "doc";
public static final String INDEX_TYPE_DOC = "_doc";

private static final Logger logger = LogManager.getLogger(FsCrawlerImpl.class);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -559,7 +559,7 @@ void esIndex(BulkProcessor bulkProcessor, String index, String id, String json,
logger.trace("JSon indexed : {}", json);

if (!closed) {
bulkProcessor.add(new IndexRequest(index, "doc", id).source(json, XContentType.JSON).setPipeline(pipeline));
bulkProcessor.add(new IndexRequest(index, "_doc", id).source(json, XContentType.JSON).setPipeline(pipeline));
} else {
logger.warn("trying to add new file while closing crawler. Document [{}]/[doc]/[{}] has been ignored", index, id);
}
Expand All @@ -571,7 +571,7 @@ void esIndex(BulkProcessor bulkProcessor, String index, String id, String json,
void esDelete(String index, String id) {
logger.debug("Deleting {}/doc/{}", index, id);
if (!closed) {
esClientManager.bulkProcessorDoc().add(new DeleteRequest(index, "doc", id));
esClientManager.bulkProcessorDoc().add(new DeleteRequest(index, "_doc", id));
} else {
logger.warn("trying to remove a file while closing crawler. Document [{}]/[doc]/[{}] has been ignored", index, id);
}
Expand Down
2 changes: 1 addition & 1 deletion docs/source/admin/fs/elasticsearch.rst
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ The following example uses a ``french`` analyzer to index the
}
},
"mappings": {
"doc": {
"_doc": {
"properties" : {
"attachment" : {
"type" : "binary",
Expand Down
2 changes: 1 addition & 1 deletion docs/source/admin/fs/local-fs.rst
Original file line number Diff line number Diff line change
Expand Up @@ -642,7 +642,7 @@ JSon document. This field is not indexed. Default mapping for
.. code:: json
{
"doc" : {
"_doc" : {
"properties" : {
"attachment" : {
"type" : "binary",
Expand Down
2 changes: 1 addition & 1 deletion docs/source/admin/fs/rest.rst
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ You will get back your document as it has been stored by elasticsearch:
{
"_index" : "fscrawler-rest-tests_doc",
"_type" : "doc",
"_type" : "_doc",
"_id" : "dd18bf3a8ea2a3e53e2661c7fb53534",
"_version" : 1,
"found" : true,
Expand Down
6 changes: 4 additions & 2 deletions docs/source/installation.rst
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ to upgrade elasticsearch.
This procedure only applies if you did not previously set the
``elasticsearch.type`` setting (default value was ``doc``). If you did,
then you also need to reindex the existing documents to the default
``doc`` type as per elasticsearch 6.0:
``_doc`` type as per elasticsearch 6.0:

::

Expand All @@ -161,7 +161,7 @@ then you also need to reindex the existing documents to the default
},
"dest": {
"index": "job_name",
"type": "doc"
"type": "_doc"
}
}
# Remove old type data from job_name index
Expand Down Expand Up @@ -252,3 +252,5 @@ Then restore old data:
The default mapping changed for FSCrawler for ``meta.raw.*`` fields.
It might be better to reindex your data.

- For new indices, FSCrawler now uses ``_doc`` as the default type name.

Original file line number Diff line number Diff line change
Expand Up @@ -116,8 +116,8 @@ public void testSearch() throws IOException {
elasticsearchClient.createIndex(getCrawlerName(), false, settings);
elasticsearchClient.waitForHealthyIndex(getCrawlerName());

elasticsearchClient.index(new IndexRequest(getCrawlerName(), "doc", "1").source("{ \"foo\": { \"bar\": \"bar\" } }", XContentType.JSON));
elasticsearchClient.index(new IndexRequest(getCrawlerName(), "doc", "2").source("{ \"foo\": { \"bar\": \"baz\" } }", XContentType.JSON));
elasticsearchClient.index(new IndexRequest(getCrawlerName(), "_doc", "1").source("{ \"foo\": { \"bar\": \"bar\" } }", XContentType.JSON));
elasticsearchClient.index(new IndexRequest(getCrawlerName(), "_doc", "2").source("{ \"foo\": { \"bar\": \"baz\" } }", XContentType.JSON));

elasticsearchClient.refresh(getCrawlerName());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ public void test_filename_as_id() throws Exception {

assertThat("Document should exists with [roottxtfile.txt] id...", awaitBusy(() -> {
try {
return elasticsearchClient.exists(new GetRequest(getCrawlerName(), "doc", "roottxtfile.txt"));
return elasticsearchClient.exists(new GetRequest(getCrawlerName(), "_doc", "roottxtfile.txt"));
} catch (IOException e) {
return false;
}
Expand All @@ -70,14 +70,14 @@ public void test_remove_deleted_with_filename_as_id() throws Exception {

assertThat("Document should exists with [id1.txt] id...", awaitBusy(() -> {
try {
return elasticsearchClient.exists(new GetRequest(getCrawlerName(), "doc", "id1.txt"));
return elasticsearchClient.exists(new GetRequest(getCrawlerName(), "_doc", "id1.txt"));
} catch (IOException e) {
return false;
}
}), equalTo(true));
assertThat("Document should exists with [id2.txt] id...", awaitBusy(() -> {
try {
return elasticsearchClient.exists(new GetRequest(getCrawlerName(), "doc", "id2.txt"));
return elasticsearchClient.exists(new GetRequest(getCrawlerName(), "_doc", "id2.txt"));
} catch (IOException e) {
return false;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,10 +69,10 @@ public void test_mapping() throws Exception {

// This will cause an Elasticsearch Exception as the String is not a Date
// If the mapping is incorrect
elasticsearchClient.index(new IndexRequest(getCrawlerName(), "doc", "1")
elasticsearchClient.index(new IndexRequest(getCrawlerName(), "_doc", "1")
.source(json1, XContentType.JSON)
);
elasticsearchClient.index(new IndexRequest(getCrawlerName(), "doc", "2")
elasticsearchClient.index(new IndexRequest(getCrawlerName(), "_doc", "2")
.source(json2, XContentType.JSON)
);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,7 @@ private void checkDocVersions(SearchResponse response, long maxVersion) {
for (SearchHit hit : response.getHits().getHits()) {
// Read the document. This is needed since 5.0 as search does not return the _version field
try {
GetResponse getHit = elasticsearchClient.get(new GetRequest(hit.getIndex(), "doc", hit.getId()));
GetResponse getHit = elasticsearchClient.get(new GetRequest(hit.getIndex(), "_doc", hit.getId()));
assertThat(getHit.getVersion(), lessThanOrEqualTo(maxVersion));
} catch (IOException e) {
fail("We got an IOException: " + e.getMessage());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ public void test_upgrade_version() throws Exception {

// Create fake data
for (int i = 0; i < nbDocs; i++) {
bulkProcessor.add(new IndexRequest(getCrawlerName(), "doc", "id" + i).source("{\"foo\":\"bar\"}", XContentType.JSON));
bulkProcessor.add(new IndexRequest(getCrawlerName(), "_doc", "id" + i).source("{\"foo\":\"bar\"}", XContentType.JSON));
}
for (int i = 0; i < nbFolders; i++) {
bulkProcessor.add(new IndexRequest(getCrawlerName(), "folder", "id" + i).source("{\"foo\":\"bar\"}", XContentType.JSON));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -109,15 +109,15 @@ public UploadResponse post(
logger.debug("Simulate mode is on, so we skip sending document [{}] to elasticsearch.", filename);
} else {
logger.debug("Sending document [{}] to elasticsearch.", filename);
bulkProcessor.add(new org.elasticsearch.action.index.IndexRequest(settings.getElasticsearch().getIndex(), "doc", id)
bulkProcessor.add(new org.elasticsearch.action.index.IndexRequest(settings.getElasticsearch().getIndex(), "_doc", id)
.setPipeline(settings.getElasticsearch().getPipeline())
.source(DocParser.toJson(doc), XContentType.JSON));
// Elasticsearch entity coordinates (we use the first node address)
Elasticsearch.Node node = settings.getElasticsearch().getNodes().get(0);
url = buildUrl(
node.getScheme().toLowerCase(), node.getHost(), node.getPort()) + "/" +
settings.getElasticsearch().getIndex() + "/" +
"doc" + "/" +
"_doc" + "/" +
id;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
}
},
"mappings": {
"doc": {
"_doc": {
"properties": {
"content": {
"type": "string"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
}
},
"mappings": {
"doc": {
"_doc": {
"properties": {
"real": {
"type": "string",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
}
},
"mappings": {
"doc": {
"_doc": {
"dynamic_templates": [
{
"raw_as_text": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
}
},
"mappings": {
"doc": {
"_doc": {
"properties" : {
"real" : {
"type" : "keyword",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
}
},
"mappings": {
"doc": {
"_doc": {
"dynamic_templates": [
{
"raw_as_text": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
}
},
"mappings": {
"doc": {
"_doc": {
"properties" : {
"real" : {
"type" : "keyword",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ public void fsSettingsForDocVersion2() throws Exception {
" }\n" +
" },\n" +
" \"mappings\": {\n" +
" \"doc\": {\n" +
" \"_doc\": {\n" +
" \"properties\": {\n" +
" \"content\": {\n" +
" \"type\": \"string\"\n" +
Expand Down Expand Up @@ -277,7 +277,7 @@ public void fsSettingsForFolderVersion2() throws Exception {
" }\n" +
" },\n" +
" \"mappings\": {\n" +
" \"doc\": {\n" +
" \"_doc\": {\n" +
" \"properties\": {\n" +
" \"real\": {\n" +
" \"type\": \"string\",\n" +
Expand Down Expand Up @@ -337,7 +337,7 @@ public void fsSettingsForDocVersion5() throws Exception {
" }\n" +
" },\n" +
" \"mappings\": {\n" +
" \"doc\": {\n" +
" \"_doc\": {\n" +
" \"dynamic_templates\": [\n" +
" {\n" +
" \"raw_as_text\": {\n" +
Expand Down Expand Up @@ -543,7 +543,7 @@ public void fsSettingsForFolderVersion5() throws Exception {
" }\n" +
" },\n" +
" \"mappings\": {\n" +
" \"doc\": {\n" +
" \"_doc\": {\n" +
" \"properties\" : {\n" +
" \"real\" : {\n" +
" \"type\" : \"keyword\",\n" +
Expand Down Expand Up @@ -600,7 +600,7 @@ public void fsSettingsForDocVersion6() throws Exception {
" }\n" +
" },\n" +
" \"mappings\": {\n" +
" \"doc\": {\n" +
" \"_doc\": {\n" +
" \"dynamic_templates\": [\n" +
" {\n" +
" \"raw_as_text\": {\n" +
Expand Down Expand Up @@ -806,7 +806,7 @@ public void fsSettingsForFolderVersion6() throws Exception {
" }\n" +
" },\n" +
" \"mappings\": {\n" +
" \"doc\": {\n" +
" \"_doc\": {\n" +
" \"properties\" : {\n" +
" \"real\" : {\n" +
" \"type\" : \"keyword\",\n" +
Expand Down

0 comments on commit d8616f6

Please sign in to comment.