# ======================== Elasticsearch Configuration =========================
#
# NOTE: Elasticsearch comes with reasonable defaults for most settings.
#       This file overrides those reasonable settings for testing purposes
#       and is unlikely to be suited for any other goal: do not take this as
#       an example to run a production system.
#
# Please see the documentation for further information on configuration options:
#
#
# ---------------------------------- Cluster -----------------------------------
#
# Use a descriptive name for your cluster:
#
cluster.name: hsearch-testing
#
# ------------------------------------ Node ------------------------------------
#
# Use a descriptive name for the node:
#
node.name: node-1
#
# Lock the memory on startup; requires system permissions to be effective,
# but shouldn't be harmful when the permissions are not granted.
#
bootstrap.mlockall: true
#
# ---------------------------------- Network -----------------------------------
#
# Set the bind address to a specific IP (IPv4 or IPv6):
#
network.host: 127.0.0.1
http.port: 9200
#
# --------------------------------- Discovery ----------------------------------
#
# Disable discovery to speed up forming a single-node cluster:
#
discovery.zen.ping.multicast.enabled: false
discovery.zen.minimum_master_nodes: 1
#
# ---------------------------------- Gateway -----------------------------------
#
# For more information, see the documentation at:
#
# ---------------------------------- Various -----------------------------------
#
# Disable starting multiple nodes on a single system:
#
# node.max_local_storage_nodes: 1
#
# Additional customizations to run a singleton node:
node.local: true
index.number_of_shards: 1
index.number_of_replicas: 0

# Enable Groovy scripting support for search: useful for spatial queries
script.engine.groovy.inline.search: on

# Custom analyzers used in tests
index.analysis:
  analyzer:
    custom-analyzer:
      type: custom
      tokenizer: standard
      filter: [custom-filter, lowercase]
    stemmer:
      type: custom
      tokenizer: standard
      filter: [lowercase, stopWordsDefaultEnglish, snowballEnglish]
    ngram:
      type: custom
      tokenizer: standard
      filter: [lowercase, stopWordsDefaultEnglish, ngram3]
    htmlStrip:
      type: custom
      tokenizer: standard
      filter: [lowercase]
      char_filter: htmlStrip
  filter:
    custom-filter:
      type: stop
      stopwords: [test1, close]
    stopWordsDefaultEnglish:
      type: stop
      # Default list of English stop words.
      # See org.apache.lucene.analysis.core.StopAnalyzer#ENGLISH_STOP_WORDS_SET
      stopwords: [a, an, and, are, as, at, be, but, by, for, if, in, into, is, it, no, not, of, on, or, such, that, the, their, then, there, these, they, this, to, was, will, with]
    ngram3:
      type: nGram
      min_gram: 3
      max_gram: 3
    snowballEnglish:
      type: snowball
      language: 'English'
  char_filter:
    htmlStrip:
      type: html_strip
      escaped_tags: [escaped]
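#
# As a quick sanity check (kept as comments so this file stays valid YAML), the
# analyzers defined above can be exercised through the _analyze API once the node
# is running. This is only a sketch: it assumes an index created on this node
# (called "my_index" here purely for illustration) picks up the default analysis
# settings above, and it uses the query-parameter form of _analyze supported by
# the Elasticsearch 1.x/2.x versions this test configuration targets:
#
#   curl 'http://127.0.0.1:9200/my_index/_analyze?analyzer=custom-analyzer&text=test1+close+things'
#
# With lowercase input like this, the tokens "test1" and "close" should be dropped
# by the custom-filter stop filter, leaving only "things" in the response.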