
Remove most of the need for `// NOTCONSOLE`

and be much more stingy about what we consider a console candidate.

* Add `// CONSOLE` to check-running
* Fix version in some snippets
* Mark groovy snippets as groovy
* Fix versions in plugins
* Fix language marker errors
* Fix language parsing in snippets

  This adds support for snippets whose language is written like
  `[source, txt]` and `["source","js",subs="attributes,callouts"]`.

  This also makes the language required for snippets, which is nice because
  then we can be sure we can grep for snippets in a particular language.
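
  As an illustration of the widened header parsing (the sample lines below are
  invented, not taken from the docs), the new pattern — copied from the
  SnippetsTask.groovy change further down — accepts both the plain and the
  quoted/attribute forms and captures the language:

[source,groovy]
--------------------------------------------------
// The new snippet-header regex allows optional quotes around "source" and the
// language, plus trailing attributes such as subs="attributes,callouts".
def pattern = /\["?source"?,\s*"?(\w+)"?(,.*)?].*/
['[source,js]',
 '[source, txt]',
 '["source","js",subs="attributes,callouts"]'].each { line ->
    def matcher = line =~ pattern
    assert matcher.matches()
    println "${line} -> language: ${matcher.group(1)}"
}
--------------------------------------------------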
Nik Everett 9 years ago
parent
commit
5cff2a046d
44 changed files with 168 additions and 160 deletions
  1. +21 -11  buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy
  2. +3 -0    buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
  3. +42 -13  buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy
  4. +5 -5    docs/groovy-api/anatomy.asciidoc
  5. +2 -2    docs/groovy-api/client.asciidoc
  6. +1 -1    docs/groovy-api/delete.asciidoc
  7. +1 -1    docs/groovy-api/get.asciidoc
  8. +1 -1    docs/groovy-api/index_.asciidoc
  9. +8 -8    docs/groovy-api/search.asciidoc
  10. +0 -2   docs/plugins/analysis-icu.asciidoc
  11. +0 -2   docs/plugins/analysis-kuromoji.asciidoc
  12. +0 -2   docs/plugins/analysis-phonetic.asciidoc
  13. +0 -2   docs/plugins/analysis-smartcn.asciidoc
  14. +0 -2   docs/plugins/analysis-stempel.asciidoc
  15. +21 -16 docs/plugins/discovery-azure-classic.asciidoc
  16. +1 -2   docs/plugins/discovery-ec2.asciidoc
  17. +0 -2   docs/plugins/discovery-gce.asciidoc
  18. +4 -4   docs/plugins/index.asciidoc
  19. +0 -2   docs/plugins/ingest-attachment.asciidoc
  20. +0 -2   docs/plugins/ingest-geoip.asciidoc
  21. +0 -2   docs/plugins/ingest-user-agent.asciidoc
  22. +1 -4   docs/plugins/lang-javascript.asciidoc
  23. +0 -2   docs/plugins/lang-python.asciidoc
  24. +0 -2   docs/plugins/mapper-attachments.asciidoc
  25. +0 -2   docs/plugins/mapper-murmur3.asciidoc
  26. +0 -2   docs/plugins/mapper-size.asciidoc
  27. +0 -12  docs/plugins/plugin-script.asciidoc
  28. +0 -2   docs/plugins/repository-azure.asciidoc
  29. +1 -2   docs/plugins/repository-gcs.asciidoc
  30. +0 -2   docs/plugins/repository-hdfs.asciidoc
  31. +3 -2   docs/plugins/repository-s3.asciidoc
  32. +0 -2   docs/plugins/store-smb.asciidoc
  33. +1 -1   docs/python/index.asciidoc
  34. +7 -5   docs/reference/aggregations/bucket/histogram-aggregation.asciidoc
  35. +8 -6   docs/reference/aggregations/bucket/terms-aggregation.asciidoc
  36. +3 -1   docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc
  37. +7 -5   docs/reference/aggregations/pipeline.asciidoc
  38. +2 -0   docs/reference/cluster/health.asciidoc
  39. +10 -13 docs/reference/index.asciidoc
  40. +1 -0   docs/reference/search/validate.asciidoc
  41. +13 -7  docs/reference/setup/install/check-running.asciidoc
  42. +0 -1   docs/reference/setup/sysconfig/heap_size.asciidoc
  43. +0 -2   docs/reference/setup/sysconfig/swap.asciidoc
  44. +1 -3   docs/reference/testing/testing-framework.asciidoc

+ 21 - 11
buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy

@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.gradle.doc
 
+import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.test.RestTestPlugin
 import org.gradle.api.Project
 import org.gradle.api.Task
@@ -30,9 +31,19 @@ public class DocsTestPlugin extends RestTestPlugin {
     @Override
     public void apply(Project project) {
         super.apply(project)
+        Map<String, String> defaultSubstitutions = [
+            /* These match up with the asciidoc syntax for substitutions but
+             * the values may differ. In particular {version} needs to resolve
+             * to the version being built for testing but needs to resolve to
+             * the last released version for docs. */
+            '\\{version\\}':
+                VersionProperties.elasticsearch.replace('-SNAPSHOT', ''),
+            '\\{lucene_version\\}' : VersionProperties.lucene,
+        ]
         Task listSnippets = project.tasks.create('listSnippets', SnippetsTask)
         listSnippets.group 'Docs'
         listSnippets.description 'List each snippet'
+        listSnippets.defaultSubstitutions = defaultSubstitutions
         listSnippets.perSnippet { println(it.toString()) }
 
         Task listConsoleCandidates = project.tasks.create(
@@ -40,6 +51,7 @@ public class DocsTestPlugin extends RestTestPlugin {
         listConsoleCandidates.group 'Docs'
         listConsoleCandidates.description
                 'List snippets that probably should be marked // CONSOLE'
+        listConsoleCandidates.defaultSubstitutions = defaultSubstitutions
         listConsoleCandidates.perSnippet {
             if (
                        it.console != null // Already marked, nothing to do
@@ -47,19 +59,17 @@ public class DocsTestPlugin extends RestTestPlugin {
                 ) {
                 return
             }
-            List<String> languages = [
-                // This language should almost always be marked console
-                'js',
-                // These are often curl commands that should be converted but
-                // are probably false positives
-                'sh', 'shell',
-            ]
-            if (false == languages.contains(it.language)) {
-                return
+            if (    // js almost always should be `// CONSOLE`
+                    it.language == 'js' ||
+                    // snippets containing `curl` *probably* should
+                    // be `// CONSOLE`
+                    it.curl) {
+                println(it.toString())
             }
-            println(it.toString())
         }
 
-        project.tasks.create('buildRestTests', RestTestsFromSnippetsTask)
+        Task buildRestTests = project.tasks.create(
+                'buildRestTests', RestTestsFromSnippetsTask)
+        buildRestTests.defaultSubstitutions = defaultSubstitutions
     }
 }
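
A rough sketch of the tightened console-candidate rule introduced above, using a
plain map as a stand-in for the real Snippet objects (names and values here are
illustrative, not from the build):

[source,groovy]
--------------------------------------------------
// A snippet is a candidate when it is not already marked // CONSOLE or
// // NOTCONSOLE and is either js or contains a curl command.
def isConsoleCandidate = { snippet ->
    snippet.console == null && (snippet.language == 'js' || snippet.curl)
}
assert isConsoleCandidate([language: 'js', console: null, curl: false])
assert isConsoleCandidate([language: 'sh', console: null, curl: true])
assert !isConsoleCandidate([language: 'sh', console: null, curl: false])
assert !isConsoleCandidate([language: 'js', console: true, curl: false])
--------------------------------------------------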

+ 3 - 0
buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy

@@ -146,6 +146,9 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
         void emitDo(String method, String pathAndQuery, String body,
                 String catchPart, List warnings, boolean inSetup) {
             def (String path, String query) = pathAndQuery.tokenize('?')
+            if (path == null) {
+                path = '' // Catch requests to the root...
+            }
             current.println("  - do:")
             if (catchPart != null) {
                 current.println("      catch: $catchPart")
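
Why the new null check matters: in Groovy, tokenizing an empty path-and-query
string yields an empty list, so the destructuring assignment leaves `path`
null for requests to the root. A tiny illustration (standard behavior of
Groovy's `String.tokenize`, not code from this change):

[source,groovy]
--------------------------------------------------
// ''.tokenize('?') returns an empty list, so both variables end up null;
// the added guard normalizes path back to the empty string.
def (String path, String query) = ''.tokenize('?')
assert path == null && query == null
if (path == null) {
    path = ''
}
assert path == ''
--------------------------------------------------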

+ 42 - 13
buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy

@@ -22,6 +22,7 @@ package org.elasticsearch.gradle.doc
 import org.gradle.api.DefaultTask
 import org.gradle.api.InvalidUserDataException
 import org.gradle.api.file.ConfigurableFileTree
+import org.gradle.api.tasks.Input
 import org.gradle.api.tasks.InputFiles
 import org.gradle.api.tasks.TaskAction
 
@@ -60,6 +61,12 @@ public class SnippetsTask extends DefaultTask {
         exclude 'build'
     }
 
+    /**
+     * Substitutions done on every snippet's contents.
+     */
+    @Input
+    Map<String, String> defaultSubstitutions = [:]
+
     @TaskAction
     public void executeTask() {
         /*
@@ -75,21 +82,39 @@ public class SnippetsTask extends DefaultTask {
             Closure emit = {
                 snippet.contents = contents.toString()
                 contents = null
+                Closure doSubstitution = { String pattern, String subst ->
+                    /*
+                     * $body is really common but it looks like a
+                     * backreference so we just escape it here to make the
+                     * tests cleaner.
+                     */
+                    subst = subst.replace('$body', '\\$body')
+                    // \n is a new line....
+                    subst = subst.replace('\\n', '\n')
+                    snippet.contents = snippet.contents.replaceAll(
+                        pattern, subst)
+                }
+                defaultSubstitutions.each doSubstitution
                 if (substitutions != null) {
-                    substitutions.each { String pattern, String subst ->
-                        /*
-                         * $body is really common but it looks like a
-                         * backreference so we just escape it here to make the
-                         * tests cleaner.
-                         */
-                        subst = subst.replace('$body', '\\$body')
-                        // \n is a new line....
-                        subst = subst.replace('\\n', '\n')
-                        snippet.contents = snippet.contents.replaceAll(
-                            pattern, subst)
-                    }
+                    substitutions.each doSubstitution
                     substitutions = null
                 }
+                if (snippet.language == null) {
+                    throw new InvalidUserDataException("$snippet: "
+                        + "Snippet missing a language. This is required by "
+                        + "Elasticsearch's doc testing infrastructure so we "
+                        + "be sure we don't accidentally forget to test a "
+                        + "snippet.")
+                }
+                // Try to detect snippets that contain `curl`
+                if (snippet.language == 'sh' || snippet.language == 'shell') {
+                    snippet.curl = snippet.contents.contains('curl')
+                    if (snippet.console == false && snippet.curl == false) {
+                        throw new InvalidUserDataException("$snippet: "
+                            + "No need for NOTCONSOLE if snippet doesn't "
+                            + "contain `curl`.")
+                    }
+                }
                 perSnippet(snippet)
                 snippet = null
             }
@@ -107,7 +132,7 @@ public class SnippetsTask extends DefaultTask {
                     }
                     return
                 }
-                matcher = line =~ /\[source,(\w+)]\s*/
+                matcher = line =~ /\["?source"?,\s*"?(\w+)"?(,.*)?].*/
                 if (matcher.matches()) {
                     lastLanguage = matcher.group(1)
                     lastLanguageLine = lineNumber
@@ -250,6 +275,7 @@ public class SnippetsTask extends DefaultTask {
         String language = null
         String catchPart = null
         String setup = null
+        boolean curl
         List warnings = new ArrayList()
 
         @Override
@@ -285,6 +311,9 @@ public class SnippetsTask extends DefaultTask {
             if (testSetup) {
                 result += '// TESTSETUP'
             }
+            if (curl) {
+                result += '(curl)'
+            }
             return result
         }
     }
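
To make the new `defaultSubstitutions` concrete, here is a small sketch of what
the shared substitution closure does to a snippet's contents. The version
numbers are the attribute values set in the docs by this change, not values
read from a live build:

[source,groovy]
--------------------------------------------------
// Regex keys from defaultSubstitutions are applied to every snippet, so the
// docs can say {version} and {lucene_version} while the generated REST tests
// see concrete numbers.
Map<String, String> defaultSubstitutions = [
    '\\{version\\}'        : '5.0.0-alpha5',
    '\\{lucene_version\\}' : '6.2.0',
]
String contents = 'curl -s https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-{version}.deb'
defaultSubstitutions.each { pattern, subst ->
    contents = contents.replaceAll(pattern, subst)
}
assert contents.endsWith('elasticsearch-5.0.0-alpha5.deb')
--------------------------------------------------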

+ 5 - 5
docs/groovy-api/anatomy.asciidoc

@@ -17,7 +17,7 @@ elasticsearch Java `ActionFuture` (in turn a nicer extension to Java own
 `Future`) which allows to register listeners (closures) on it for
 success and failures, as well as blocking for the response. For example:
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def indexR = client.index {
     index "test"
@@ -38,7 +38,7 @@ println "Indexed $indexR.response.id into $indexR.response.index/$indexR.respons
 In the above example, calling `indexR.response` will simply block for
 the response. We can also block for the response for a specific timeout:
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 IndexResponse response = indexR.response "5s" // block for 5 seconds, same as:
 response = indexR.response 5, TimeValue.SECONDS //
@@ -47,7 +47,7 @@ response = indexR.response 5, TimeValue.SECONDS //
 We can also register closures that will be called on success and on
 failure:
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 indexR.success = {IndexResponse response ->
     println "Indexed $response.id into $response.index/$response.type"
@@ -65,7 +65,7 @@ This option allows to pass the actual instance of the request (instead
 of a closure) as a parameter. The rest is similar to the closure as a
 parameter option (the `GActionFuture` handling). For example:
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def indexR = client.index (new IndexRequest(
         index: "test",
@@ -90,7 +90,7 @@ The last option is to provide an actual instance of the API request, and
 an `ActionListener` for the callback. This is exactly like the Java API
 with the added `gexecute` which returns the `GActionFuture`:
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def indexR = node.client.prepareIndex("test", "type1", "1").setSource({
     test = "value"

+ 2 - 2
docs/groovy-api/client.asciidoc

@@ -13,7 +13,7 @@ within the cluster.
 A Node based client is the simplest form to get a `GClient` to start
 executing operations against elasticsearch.
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 import org.elasticsearch.groovy.client.GClient
 import org.elasticsearch.groovy.node.GNode
@@ -33,7 +33,7 @@ Since elasticsearch allows to configure it using JSON based settings,
 the configuration itself can be done using a closure that represent the
 JSON:
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 import org.elasticsearch.groovy.node.GNode
 import org.elasticsearch.groovy.node.GNodeBuilder

+ 1 - 1
docs/groovy-api/delete.asciidoc

@@ -6,7 +6,7 @@ The delete API is very similar to the
 Java delete API, here is an
 example:
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def deleteF = node.client.delete {
     index "test"

+ 1 - 1
docs/groovy-api/get.asciidoc

@@ -7,7 +7,7 @@ Java get API. The main benefit
 of using groovy is handling the source content. It can be automatically
 converted to a `Map` which means using Groovy to navigate it is simple:
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def getF = node.client.get {
     index "test"

+ 1 - 1
docs/groovy-api/index_.asciidoc

@@ -7,7 +7,7 @@ Java index API. The Groovy
 extension to it is the ability to provide the indexed source using a
 closure. For example:
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def indexR = client.index {
     index "test"

+ 8 - 8
docs/groovy-api/search.asciidoc

@@ -7,7 +7,7 @@ Java search API. The Groovy
 extension allows to provide the search source to execute as a `Closure`
 including the query itself (similar to GORM criteria builder):
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def search = node.client.search {
     indices "test"
@@ -19,7 +19,7 @@ def search = node.client.search {
     }
 }
 
-search.response.hits.each {SearchHit hit -> 
+search.response.hits.each {SearchHit hit ->
     println "Got hit $hit.id from $hit.index/$hit.type"
 }
 --------------------------------------------------
@@ -27,13 +27,13 @@ search.response.hits.each {SearchHit hit ->
 It can also be executed using the "Java API" while still using a closure
 for the query:
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def search = node.client.prepareSearch("test").setQuery({
         term(test: "value")
 }).gexecute();
 
-search.response.hits.each {SearchHit hit -> 
+search.response.hits.each {SearchHit hit ->
     println "Got hit $hit.id from $hit.index/$hit.type"
 }
 --------------------------------------------------
@@ -48,7 +48,7 @@ The format of the search `Closure` follows the same JSON syntax as the
 Term query where multiple values are provided (see
 {ref}/query-dsl-terms-query.html[terms]):
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def search = node.client.search {
     indices "test"
@@ -64,7 +64,7 @@ def search = node.client.search {
 Query string (see
 {ref}/query-dsl-query-string-query.html[query string]):
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def search = node.client.search {
     indices "test"
@@ -82,7 +82,7 @@ def search = node.client.search {
 Pagination (see
 {ref}/search-request-from-size.html[from/size]):
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def search = node.client.search {
     indices "test"
@@ -99,7 +99,7 @@ def search = node.client.search {
 
 Sorting (see {ref}/search-request-sort.html[sort]):
 
-[source,js]
+[source,groovy]
 --------------------------------------------------
 def search = node.client.search {
     indices "test"

+ 0 - 2
docs/plugins/analysis-icu.asciidoc

@@ -17,7 +17,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install analysis-icu
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -32,7 +31,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove analysis-icu
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 0 - 2
docs/plugins/analysis-kuromoji.asciidoc

@@ -14,7 +14,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install analysis-kuromoji
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -29,7 +28,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove analysis-kuromoji
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 0 - 2
docs/plugins/analysis-phonetic.asciidoc

@@ -15,7 +15,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install analysis-phonetic
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -30,7 +29,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove analysis-phonetic
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 0 - 2
docs/plugins/analysis-smartcn.asciidoc

@@ -20,7 +20,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install analysis-smartcn
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -35,7 +34,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove analysis-smartcn
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 0 - 2
docs/plugins/analysis-stempel.asciidoc

@@ -17,7 +17,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install analysis-stempel
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -32,7 +31,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove analysis-stempel
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 21 - 16
docs/plugins/discovery-azure-classic.asciidoc

@@ -17,7 +17,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install discovery-azure-classic
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -32,7 +31,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove discovery-azure-classic
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 
@@ -359,7 +357,7 @@ ssh azure-elasticsearch-cluster.cloudapp.net
 
 Once connected, install Elasticsearch:
 
-[source,sh]
+["source","sh",subs="attributes,callouts"]
 ----
 # Install Latest Java version
 # Read http://www.webupd8.org/2012/09/install-oracle-java-8-in-ubuntu-via-ppa.html for details
@@ -372,36 +370,43 @@ sudo apt-get install oracle-java8-installer
 # sudo apt-get install openjdk-8-jre-headless
 
 # Download Elasticsearch
-curl -s https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-2.0.0.deb -o elasticsearch-2.0.0.deb
+curl -s https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-{version}.deb -o elasticsearch-{version}.deb
 
 # Prepare Elasticsearch installation
-sudo dpkg -i elasticsearch-2.0.0.deb
+sudo dpkg -i elasticsearch-{version}.deb
 ----
 
 Check that elasticsearch is running:
 
-[source,sh]
+[source,js]
 ----
-curl http://localhost:9200/
+GET /
 ----
+// CONSOLE
 
 This command should give you a JSON result:
 
-[source,js]
-----
+["source","js",subs="attributes,callouts"]
+--------------------------------------------
 {
-  "status" : 200,
-  "name" : "Living Colossus",
+  "name" : "Cp8oag6",
+  "cluster_name" : "elasticsearch",
   "version" : {
-    "number" : "2.0.0",
-    "build_hash" : "a46900e9c72c0a623d71b54016357d5f94c8ea32",
-    "build_timestamp" : "2014-02-12T16:18:34Z",
+    "number" : "{version}",
+    "build_hash" : "f27399d",
+    "build_date" : "2016-03-30T09:51:41.449Z",
     "build_snapshot" : false,
-    "lucene_version" : "5.1"
+    "lucene_version" : "{lucene_version}"
   },
   "tagline" : "You Know, for Search"
 }
-----
+--------------------------------------------
+// TESTRESPONSE[s/"name" : "Cp8oag6",/"name" : "$body.name",/]
+// TESTRESPONSE[s/"cluster_name" : "elasticsearch",/"cluster_name" : "$body.cluster_name",/]
+// TESTRESPONSE[s/"build_hash" : "f27399d",/"build_hash" : "$body.version.build_hash",/]
+// TESTRESPONSE[s/"build_date" : "2016-03-30T09:51:41.449Z",/"build_date" : $body.version.build_date,/]
+// TESTRESPONSE[s/"build_snapshot" : false,/"build_snapshot" : $body.version.build_snapshot,/]
+// So much s/// but at least we test that the layout is close to matching....
 
 [[discovery-azure-classic-long-plugin]]
 ===== Install elasticsearch cloud azure plugin

+ 1 - 2
docs/plugins/discovery-ec2.asciidoc

@@ -15,7 +15,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install discovery-ec2
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -30,7 +29,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove discovery-ec2
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 
@@ -231,6 +229,7 @@ Management Console. It should look similar to this.
   "Version": "2012-10-17"
 }
 ----
+// NOTCONSOLE
 
 [[discovery-ec2-filtering]]
 ===== Filtering by Tags

+ 0 - 2
docs/plugins/discovery-gce.asciidoc

@@ -13,7 +13,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install discovery-gce
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -28,7 +27,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove discovery-gce
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 4 - 4
docs/plugins/index.asciidoc

@@ -1,7 +1,9 @@
 = Elasticsearch Plugins and Integrations
 
-:ref: https://www.elastic.co/guide/en/elasticsearch/reference/master
-:guide: https://www.elastic.co/guide
+:ref:            https://www.elastic.co/guide/en/elasticsearch/reference/master
+:guide:          https://www.elastic.co/guide
+:version:        5.0.0-alpha5
+:lucene_version: 6.2.0
 
 [[intro]]
 == Introduction to plugins
@@ -62,5 +64,3 @@ include::integrations.asciidoc[]
 include::authors.asciidoc[]
 
 include::redirects.asciidoc[]
-
-

+ 0 - 2
docs/plugins/ingest-attachment.asciidoc

@@ -21,7 +21,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install ingest-attachment
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -36,7 +35,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove ingest-attachment
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 0 - 2
docs/plugins/ingest-geoip.asciidoc

@@ -21,7 +21,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install ingest-geoip
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -36,7 +35,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove ingest-geoip
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 0 - 2
docs/plugins/ingest-user-agent.asciidoc

@@ -16,7 +16,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install ingest-user-agent
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -31,7 +30,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove ingest-user-agent
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 1 - 4
docs/plugins/lang-javascript.asciidoc

@@ -17,7 +17,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install lang-javascript
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -32,7 +31,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove lang-javascript
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 
@@ -147,11 +145,10 @@ JavaScript:
 First, save this file as `config/scripts/my_script.js` on every node
 in the cluster:
 
-[source,js]
+[source,painless]
 ----
 doc["num"].value * factor
 ----
-// NOTCONSOLE
 
 then use the script as follows:
 

+ 0 - 2
docs/plugins/lang-python.asciidoc

@@ -16,7 +16,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install lang-python
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -31,7 +30,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove lang-python
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 0 - 2
docs/plugins/mapper-attachments.asciidoc

@@ -19,7 +19,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install mapper-attachments
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -34,7 +33,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove mapper-attachments
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 0 - 2
docs/plugins/mapper-murmur3.asciidoc

@@ -15,7 +15,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install mapper-murmur3
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -30,7 +29,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove mapper-murmur3
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 0 - 2
docs/plugins/mapper-size.asciidoc

@@ -15,7 +15,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install mapper-size
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -30,7 +29,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove mapper-size
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 0 - 12
docs/plugins/plugin-script.asciidoc

@@ -15,7 +15,6 @@ Run the following command to get usage instructions:
 -----------------------------------
 sudo bin/elasticsearch-plugin -h
 -----------------------------------
-// NOTCONSOLE
 
 [IMPORTANT]
 .Running as root
@@ -42,7 +41,6 @@ Core Elasticsearch plugins can be installed as follows:
 -----------------------------------
 sudo bin/elasticsearch-plugin install [plugin_name]
 -----------------------------------
-// NOTCONSOLE
 
 For instance, to install the core <<analysis-icu,ICU plugin>>, just run the
 following command:
@@ -51,7 +49,6 @@ following command:
 -----------------------------------
 sudo bin/elasticsearch-plugin install analysis-icu
 -----------------------------------
-// NOTCONSOLE
 
 This command will install the version of the plugin that matches your
 Elasticsearch version and also show a progress bar while downloading.
@@ -65,7 +62,6 @@ A plugin can also be downloaded directly from a custom location by specifying th
 -----------------------------------
 sudo bin/elasticsearch-plugin install [url] <1>
 -----------------------------------
-// NOTCONSOLE
 <1> must be a valid URL, the plugin name is determined from its descriptor.
 
 For instance, to install a plugin from your local file system, you could run:
@@ -74,7 +70,6 @@ For instance, to install a plugin from your local file system, you could run:
 -----------------------------------
 sudo bin/elasticsearch-plugin install file:///path/to/plugin.zip
 -----------------------------------
-// NOTCONSOLE
 
 The plugin script will refuse to talk to an HTTPS URL with an untrusted
 certificate. To use a self-signed HTTPS cert, you will need to add the CA cert
@@ -84,7 +79,6 @@ to a local Java truststore and pass the location to the script as follows:
 -----------------------------------
 sudo ES_JAVA_OPTS="-Djavax.net.ssl.trustStore=/path/to/trustStore.jks" bin/elasticsearch-plugin install https://....
 -----------------------------------
-// NOTCONSOLE
 
 [[listing-removing]]
 === Listing and Removing Installed Plugins
@@ -98,7 +92,6 @@ A list of the currently loaded plugins can be retrieved with the `list` option:
 -----------------------------------
 sudo bin/elasticsearch-plugin list
 -----------------------------------
-// NOTCONSOLE
 
 Alternatively, use the {ref}/cluster-nodes-info.html[node-info API] to find
 out which plugins are installed on each node in the cluster
@@ -113,7 +106,6 @@ Plugins can be removed manually, by deleting the appropriate directory under
 -----------------------------------
 sudo bin/elasticsearch-plugin remove [pluginname]
 -----------------------------------
-// NOTCONSOLE
 
 After a Java plugin has been removed, you will need to restart the node to complete the removal process.
 
@@ -145,7 +137,6 @@ can do this as follows:
 ---------------------
 sudo bin/elasticsearch-plugin -Epath.conf=/path/to/custom/config/dir install <plugin name>
 ---------------------
-// NOTCONSOLE
 
 You can also set the `CONF_DIR` environment variable to the custom config
 directory path.
@@ -168,7 +159,6 @@ sudo bin/elasticsearch-plugin install analysis-icu --timeout 1m
 # Wait forever (default)
 sudo bin/elasticsearch-plugin install analysis-icu --timeout 0
 -----------------------------------
-// NOTCONSOLE
 
 [float]
 === Proxy settings
@@ -181,7 +171,6 @@ and `http.proxyPort` (or `https.proxyHost` and `https.proxyPort`):
 -----------------------------------
 sudo ES_JAVA_OPTS="-Dhttp.proxyHost=host_name -Dhttp.proxyPort=port_number -Dhttps.proxyHost=host_name -Dhttps.proxyPort=https_port_number" bin/elasticsearch-plugin install analysis-icu
 -----------------------------------
-// NOTCONSOLE
 
 Or on Windows:
 
@@ -190,7 +179,6 @@ Or on Windows:
 set ES_JAVA_OPTS="-DproxyHost=host_name -DproxyPort=port_number -Dhttps.proxyHost=host_name -Dhttps.proxyPort=https_port_number"
 bin/elasticsearch-plugin install analysis-icu
 ------------------------------------
-// NOTCONSOLE
 
 === Plugins directory
 

+ 0 - 2
docs/plugins/repository-azure.asciidoc

@@ -14,7 +14,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install repository-azure
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -29,7 +28,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove repository-azure
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 1 - 2
docs/plugins/repository-gcs.asciidoc

@@ -14,7 +14,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install repository-gcs
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 NOTE: The plugin requires new permission to be installed in order to work
 
@@ -31,7 +30,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove repository-gcs
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 
@@ -133,6 +131,7 @@ A service account file looks like this:
   "client_x509_cert_url": "..."
 }
 ----
+// NOTCONSOLE
 
 This file must be copied in the `config` directory of the elasticsearch installation and on
 every node of the cluster.

+ 0 - 2
docs/plugins/repository-hdfs.asciidoc

@@ -14,7 +14,6 @@ This plugin can be installed through the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install repository-hdfs
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on _every_ node in the cluster, and each node must
 be restarted after installation.
@@ -29,7 +28,6 @@ The plugin can be removed by specifying the _installed_ package:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove repository-hdfs
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 3 - 2
docs/plugins/repository-s3.asciidoc

@@ -16,7 +16,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install repository-s3
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -31,7 +30,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove repository-s3
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 
@@ -303,6 +301,7 @@ IAM in conjunction with pre-existing S3 buckets. Here is an example policy which
   "Version": "2012-10-17"
 }
 ----
+// NOTCONSOLE
 
 You may further restrict the permissions by specifying a prefix within the bucket, in this example, named "foo".
 
@@ -346,6 +345,7 @@ You may further restrict the permissions by specifying a prefix within the bucke
   "Version": "2012-10-17"
 }
 ----
+// NOTCONSOLE
 
 The bucket needs to exist to register a repository for snapshots. If you did not create the bucket then the repository
 registration will fail. If you want elasticsearch to create the bucket instead, you can add the permission to create a
@@ -363,6 +363,7 @@ specific bucket like this:
    ]
 }
 ----
+// NOTCONSOLE
 
 [[repository-s3-endpoint]]
 ===== Using other S3 endpoint

+ 0 - 2
docs/plugins/store-smb.asciidoc

@@ -13,7 +13,6 @@ This plugin can be installed using the plugin manager:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin install store-smb
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The plugin must be installed on every node in the cluster, and each node must
 be restarted after installation.
@@ -28,7 +27,6 @@ The plugin can be removed with the following command:
 ----------------------------------------------------------------
 sudo bin/elasticsearch-plugin remove store-smb
 ----------------------------------------------------------------
-// NOTCONSOLE
 
 The node must be stopped before removing the plugin.
 

+ 1 - 1
docs/python/index.asciidoc

@@ -35,7 +35,6 @@ It can be installed with pip:
 ------------------------------------
 pip install elasticsearch
 ------------------------------------
-// NOTCONSOLE
 
 === Versioning
 
@@ -49,6 +48,7 @@ later, 0.4 releases are meant to work with Elasticsearch 0.90.*.
 The recommended way to set your requirements in your `setup.py` or
 `requirements.txt` is:
 
+[source,txt]
 ------------------------------------
     # Elasticsearch 2.x
     elasticsearch>=2.0.0,<3.0.0

+ 7 - 5
docs/reference/aggregations/bucket/histogram-aggregation.asciidoc

@@ -227,12 +227,14 @@ a multi-value metrics aggregation, and in case of a single-value metrics aggrega
 
 The path must be defined in the following form:
 
+// https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form
+[source,ebnf]
 --------------------------------------------------
-AGG_SEPARATOR       :=  '>'
-METRIC_SEPARATOR    :=  '.'
-AGG_NAME            :=  <the name of the aggregation>
-METRIC              :=  <the name of the metric (in case of multi-value metrics aggregation)>
-PATH                :=  <AGG_NAME>[<AGG_SEPARATOR><AGG_NAME>]*[<METRIC_SEPARATOR><METRIC>]
+AGG_SEPARATOR       =  '>' ;
+METRIC_SEPARATOR    =  '.' ;
+AGG_NAME            =  <the name of the aggregation> ;
+METRIC              =  <the name of the metric (in case of multi-value metrics aggregation)> ;
+PATH                =  <AGG_NAME> [ <AGG_SEPARATOR>, <AGG_NAME> ]* [ <METRIC_SEPARATOR>, <METRIC> ] ;
 --------------------------------------------------
 
 [source,js]

+ 8 - 6
docs/reference/aggregations/bucket/terms-aggregation.asciidoc

@@ -344,12 +344,14 @@ a multi-value metrics aggregation, and in case of a single-value metrics aggrega
 
 The path must be defined in the following form:
 
---------------------------------------------------
-AGG_SEPARATOR       :=  '>'
-METRIC_SEPARATOR    :=  '.'
-AGG_NAME            :=  <the name of the aggregation>
-METRIC              :=  <the name of the metric (in case of multi-value metrics aggregation)>
-PATH                :=  <AGG_NAME>[<AGG_SEPARATOR><AGG_NAME>]*[<METRIC_SEPARATOR><METRIC>]
+// https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form
+[source,ebnf]
+--------------------------------------------------
+AGG_SEPARATOR       =  '>' ;
+METRIC_SEPARATOR    =  '.' ;
+AGG_NAME            =  <the name of the aggregation> ;
+METRIC              =  <the name of the metric (in case of multi-value metrics aggregation)> ;
+PATH                =  <AGG_NAME> [ <AGG_SEPARATOR>, <AGG_NAME> ]* [ <METRIC_SEPARATOR>, <METRIC> ] ;
 --------------------------------------------------
 
 [source,js]

+ 3 - 1
docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc

@@ -71,6 +71,7 @@ The following chart shows how the error varies before and after the threshold:
 
 ////
 To generate this chart use this gnuplot script:
+[source,gnuplot]
 -------
 #!/usr/bin/gnuplot
 reset
@@ -95,6 +96,7 @@ plot "test.dat" using 1:2 title "threshold=100", \
 
 and generate data in a 'test.dat' file using the below Java code:
 
+[source,java]
 -------
 private static double error(HyperLogLogPlusPlus h, long expected) {
     double actual = h.cardinality(0);
@@ -140,7 +142,7 @@ counting millions of items.
 On string fields that have a high cardinality, it might be faster to store the
 hash of your field values in your index and then run the cardinality aggregation
 on this field. This can either be done by providing hash values from client-side
-or by letting elasticsearch compute hash values for you by using the 
+or by letting elasticsearch compute hash values for you by using the
 {plugins}/mapper-murmur3.html[`mapper-murmur3`] plugin.
 
 NOTE: Pre-computing hashes is usually only useful on very large and/or

+ 7 - 5
docs/reference/aggregations/pipeline.asciidoc

@@ -34,12 +34,14 @@ will be included in the final output.
 Most pipeline aggregations require another aggregation as their input.  The input aggregation is defined via the `buckets_path`
 parameter, which follows a specific format:
 
+// https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form
+[source,ebnf]
 --------------------------------------------------
-AGG_SEPARATOR       :=  '>'
-METRIC_SEPARATOR    :=  '.'
-AGG_NAME            :=  <the name of the aggregation>
-METRIC              :=  <the name of the metric (in case of multi-value metrics aggregation)>
-PATH                :=  <AGG_NAME>[<AGG_SEPARATOR><AGG_NAME>]*[<METRIC_SEPARATOR><METRIC>]
+AGG_SEPARATOR       =  '>' ;
+METRIC_SEPARATOR    =  '.' ;
+AGG_NAME            =  <the name of the aggregation> ;
+METRIC              =  <the name of the metric (in case of multi-value metrics aggregation)> ;
+PATH                =  <AGG_NAME> [ <AGG_SEPARATOR>, <AGG_NAME> ]* [ <METRIC_SEPARATOR>, <METRIC> ] ;
 --------------------------------------------------
 
 For example, the path `"my_bucket>my_stats.avg"` will path to the `avg` value in the `"my_stats"` metric, which is

+ 2 - 0
docs/reference/cluster/health.asciidoc

@@ -13,6 +13,8 @@ GET _cluster/health
 // TEST[s/^/PUT test1\n/]
 
 Returns this:
+
+[source,js]
 --------------------------------------------------
 {
   "cluster_name" : "testcluster",

+ 10 - 13
docs/reference/index.asciidoc

@@ -1,15 +1,16 @@
 [[elasticsearch-reference]]
 = Elasticsearch Reference
 
-:version:       5.0.0-alpha5
-:major-version: 5.x
-:branch:        master
-:jdk:           1.8.0_73
-:defguide:      https://www.elastic.co/guide/en/elasticsearch/guide/master
-:plugins:       https://www.elastic.co/guide/en/elasticsearch/plugins/master
-:javaclient:    https://www.elastic.co/guide/en/elasticsearch/client/java-api/master/
-:issue:         https://github.com/elastic/elasticsearch/issues/
-:pull:          https://github.com/elastic/elasticsearch/pull/
+:version:        5.0.0-alpha5
+:major-version:  5.x
+:lucene_version: 6.2.0
+:branch:         master
+:jdk:            1.8.0_73
+:defguide:       https://www.elastic.co/guide/en/elasticsearch/guide/master
+:plugins:        https://www.elastic.co/guide/en/elasticsearch/plugins/master
+:javaclient:     https://www.elastic.co/guide/en/elasticsearch/client/java-api/master/
+:issue:          https://github.com/elastic/elasticsearch/issues/
+:pull:           https://github.com/elastic/elasticsearch/pull/
 
 include::getting-started.asciidoc[]
 
@@ -52,7 +53,3 @@ include::glossary.asciidoc[]
 include::release-notes.asciidoc[]
 
 include::redirects.asciidoc[]
-
-
-
-

+ 1 - 0
docs/reference/search/validate.asciidoc

@@ -111,6 +111,7 @@ GET twitter/tweet/_validate/query?q=post_date:foo&explain=true
 
 responds with:
 
+[source,js]
 --------------------------------------------------
 {
   "valid" : false,

+ 13 - 7
docs/reference/setup/install/check-running.asciidoc

@@ -3,26 +3,32 @@
 You can test that your Elasticsearch node is running by sending an HTTP
 request to port `9200` on `localhost`:
 
-[source,sh]
+[source,js]
 --------------------------------------------
-curl localhost:9200
+GET /
 --------------------------------------------
+// CONSOLE
 
 which should give you a response something like this:
 
-[source,js]
+["source","js",subs="attributes,callouts"]
 --------------------------------------------
 {
-  "name" : "Harry Leland",
+  "name" : "Cp8oag6",
   "cluster_name" : "elasticsearch",
   "version" : {
-    "number" : "5.0.0-alpha1",
+    "number" : "{version}",
     "build_hash" : "f27399d",
     "build_date" : "2016-03-30T09:51:41.449Z",
     "build_snapshot" : false,
-    "lucene_version" : "6.0.0"
+    "lucene_version" : "{lucene_version}"
   },
   "tagline" : "You Know, for Search"
 }
 --------------------------------------------
-
+// TESTRESPONSE[s/"name" : "Cp8oag6",/"name" : "$body.name",/]
+// TESTRESPONSE[s/"cluster_name" : "elasticsearch",/"cluster_name" : "$body.cluster_name",/]
+// TESTRESPONSE[s/"build_hash" : "f27399d",/"build_hash" : "$body.version.build_hash",/]
+// TESTRESPONSE[s/"build_date" : "2016-03-30T09:51:41.449Z",/"build_date" : $body.version.build_date,/]
+// TESTRESPONSE[s/"build_snapshot" : false,/"build_snapshot" : $body.version.build_snapshot,/]
+// So much s/// but at least we test that the layout is close to matching....

+ 0 - 1
docs/reference/setup/sysconfig/heap_size.asciidoc

@@ -63,7 +63,6 @@ in the jvm.options file and setting these values via `ES_JAVA_OPTS`:
 ES_JAVA_OPTS="-Xms2g -Xmx2g" ./bin/elasticsearch <1>
 ES_JAVA_OPTS="-Xms4000m -Xmx4000m" ./bin/elasticsearch <2>
 ------------------
-// NOTCONSOLE
 <1> Set the minimum and maximum heap size to 2 GB.
 <2> Set the minimum and maximum heap size to 4000 MB.
 

+ 0 - 2
docs/reference/setup/sysconfig/swap.asciidoc

@@ -70,7 +70,6 @@ specifying a new temp directory, by starting Elasticsearch with:
 --------------
 ./bin/elasticsearch -Djava.io.tmpdir=/path/to/temp/dir
 --------------
-// NOTCONSOLE
 
 or using the `ES_JAVA_OPTS` environment variable:
 
@@ -79,7 +78,6 @@ or using the `ES_JAVA_OPTS` environment variable:
 export ES_JAVA_OPTS="$ES_JAVA_OPTS -Djava.io.tmpdir=/path/to/temp/dir"
 ./bin/elasticsearch
 --------------
-// NOTCONSOLE
 
 [[disable-swap-files]]
 ==== Disable all swap files

+ 1 - 3
docs/reference/testing/testing-framework.asciidoc

@@ -20,7 +20,7 @@ All of the tests are run using a custom junit runner, the `RandomizedRunner` pro
 
 First, you need to include the testing dependency in your project, along with the elasticsearch dependency you have already added. If you use maven and its `pom.xml` file, it looks like this
 
-[[source,xml]]
+[source,xml]
 --------------------------------------------------
 <dependencies>
   <dependency>
@@ -258,5 +258,3 @@ assertHitCount(searchResponse, 4);
 assertFirstHit(searchResponse, hasId("4"));
 assertSearchHits(searchResponse, "1", "2", "3", "4");
 ----------------------------
-
-