@@ -19,6 +19,7 @@

package org.elasticsearch.gradle.doc

import groovy.transform.PackageScope
import org.elasticsearch.gradle.doc.SnippetsTask.Snippet
import org.gradle.api.InvalidUserDataException
import org.gradle.api.tasks.Input
@@ -99,6 +100,43 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
return snippet.language == 'js' || snippet.curl
}

/**
* Converts Kibana's block quoted strings into standard JSON. These
* {@code """} delimited strings can be embedded in CONSOLE and can
* contain newlines and {@code "} without the normal JSON escaping.
* This method has to add that escaping.
*/
@PackageScope
static String replaceBlockQuote(String body) {
int start = body.indexOf('"""');
if (start < 0) {
return body
}
/*
* 1.3 is a fairly wild guess of the extra space needed to hold
* the escaped string.
*/
StringBuilder result = new StringBuilder((int) (body.length() * 1.3));
int startOfNormal = 0;
while (start >= 0) {
int end = body.indexOf('"""', start + 3);
if (end < 0) {
throw new InvalidUserDataException(
"Invalid block quote starting at $start in:\n$body")
}
result.append(body.substring(startOfNormal, start));
result.append('"');
result.append(body.substring(start + 3, end)
.replace('"', '\\"')
.replace("\n", "\\n"));
result.append('"');
startOfNormal = end + 3;
start = body.indexOf('"""', startOfNormal);
}
result.append(body.substring(startOfNormal));
return result.toString();
}

private class TestBuilder {
private static final String SYNTAX = {
String method = /(?<method>GET|PUT|POST|HEAD|OPTIONS|DELETE)/
@@ -259,6 +297,8 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
if (body != null) {
// Throw out the leading newline we get from parsing the body
body = body.substring(1)
// Replace """ quoted strings with valid json ones
body = replaceBlockQuote(body)
current.println(" body: |")
body.eachLine { current.println(" $it") }
}
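For illustration, a minimal sketch of what the body-emit step just above produces for a block-quoted snippet. This is not part of the change; it assumes the script is compiled in the org.elasticsearch.gradle.doc package (replaceBlockQuote is @PackageScope) with the buildSrc classes on the classpath.

package org.elasticsearch.gradle.doc

import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote

// A parsed snippet body: the parser leaves a leading newline in place
String body = '\n{ "source": """\nreturn doc.goals.length\n""" }'
body = body.substring(1)       // throw out the leading newline
body = replaceBlockQuote(body) // """ sections become escaped JSON strings
println("          body: |")
body.eachLine { println("            $it") }
// Prints:
//           body: |
//             { "source": "\nreturn doc.goals.length\n" }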
@@ -0,0 +1,50 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.gradle.doc

import org.elasticsearch.gradle.doc.SnippetsTask.Snippet
import org.gradle.api.InvalidUserDataException

import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote

class RestTestFromSnippetsTaskTest extends GroovyTestCase {
void testInvalidBlockQuote() {
String input = "\"foo\": \"\"\"bar\"";
String message = shouldFail({ replaceBlockQuote(input) });
assertEquals("Invalid block quote starting at 7 in:\n$input", message);
}

void testSimpleBlockQuote() {
assertEquals("\"foo\": \"bort baz\"",
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\""));
}

void testMultipleBlockQuotes() {
assertEquals("\"foo\": \"bort baz\", \"bar\": \"other\"",
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\""));
}

void testEscapingInBlockQuote() {
assertEquals("\"foo\": \"bort\\\" baz\"",
replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\""));
assertEquals("\"foo\": \"bort\\n baz\"",
replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\""));
}
}
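One more case such a test could cover, sketched here for illustration only (not part of this change): a block quote that spans several lines and contains an embedded quote, like the Painless snippets updated below.

package org.elasticsearch.gradle.doc

import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote

// Hypothetical extra test, mirroring the class above
class RestTestMultilineBlockQuoteExampleTest extends GroovyTestCase {
    void testMultilineBlockQuote() {
        // Newlines and embedded quotes are both escaped in one pass
        String input = '"source": """\nctx.op = "noop";\nreturn;\n"""'
        String expected = '"source": "\\nctx.op = \\"noop\\";\\nreturn;\\n"'
        assertEquals(expected, replaceBlockQuote(input))
    }
}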
docs/painless/painless-getting-started.asciidoc (40 additions, 7 deletions)
@@ -53,7 +53,13 @@ GET hockey/_search
"script_score": {
"script": {
"lang": "painless",
"source": "int total = 0; for (int i = 0; i < doc['goals'].length; ++i) { total += doc['goals'][i]; } return total;"
"source": """
int total = 0;
for (int i = 0; i < doc['goals'].length; ++i) {
total += doc['goals'][i];
}
return total;
"""
}
}
}
@@ -75,7 +81,13 @@ GET hockey/_search
"total_goals": {
"script": {
"lang": "painless",
"source": "int total = 0; for (int i = 0; i < doc['goals'].length; ++i) { total += doc['goals'][i]; } return total;"
"source": """
int total = 0;
for (int i = 0; i < doc['goals'].length; ++i) {
total += doc['goals'][i];
}
return total;
"""
}
}
}
@@ -157,7 +169,10 @@ POST hockey/player/1/_update
{
"script": {
"lang": "painless",
"source": "ctx._source.last = params.last; ctx._source.nick = params.nick",
"source": """
ctx._source.last = params.last;
ctx._source.nick = params.nick
""",
"params": {
"last": "gaudreau",
"nick": "hockey"
@@ -228,7 +243,13 @@ POST hockey/player/_update_by_query
{
"script": {
"lang": "painless",
"source": "if (ctx._source.last =~ /b/) {ctx._source.last += \"matched\"} else {ctx.op = 'noop'}"
"source": """
if (ctx._source.last =~ /b/) {
ctx._source.last += "matched";
} else {
ctx.op = "noop";
}
"""
}
}
----------------------------------------------------------------
@@ -243,7 +264,13 @@ POST hockey/player/_update_by_query
{
"script": {
"lang": "painless",
"source": "if (ctx._source.last ==~ /[^aeiou].*[aeiou]/) {ctx._source.last += \"matched\"} else {ctx.op = 'noop'}"
"source": """
if (ctx._source.last ==~ /[^aeiou].*[aeiou]/) {
ctx._source.last += "matched";
} else {
ctx.op = "noop";
}
"""
}
}
----------------------------------------------------------------
@@ -296,7 +323,10 @@ POST hockey/player/_update_by_query
{
"script": {
"lang": "painless",
"source": "ctx._source.last = ctx._source.last.replaceAll(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))"
"source": """
ctx._source.last = ctx._source.last.replaceAll(/[aeiou]/, m ->
m.group().toUpperCase(Locale.ROOT))
"""
}
}
----------------------------------------------------------------
@@ -311,7 +341,10 @@ POST hockey/player/_update_by_query
{
"script": {
"lang": "painless",
"source": "ctx._source.last = ctx._source.last.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))"
"source": """
ctx._source.last = ctx._source.last.replaceFirst(/[aeiou]/, m ->
m.group().toUpperCase(Locale.ROOT))
"""
}
}
----------------------------------------------------------------
docs/reference/ingest/ingest-node.asciidoc (5 additions, 2 deletions)
@@ -563,7 +563,7 @@ to set the index that the document will be indexed into:
--------------------------------------------------
// NOTCONSOLE

Dynamic field names are also supported. This example sets the field named after the
Dynamic field names are also supported. This example sets the field named after the
value of `service` to the value of the field `code`:

[source,js]
@@ -1829,7 +1829,10 @@ PUT _ingest/pipeline/my_index
"processors": [
{
"script": {
"source": " ctx._index = 'my_index'; ctx._type = '_doc' "
"source": """
ctx._index = 'my_index';
ctx._type = '_doc';
"""
}
}
]