@markharwood
Last active August 29, 2015 14:05
Timeout benchmarks
These files are used for benchmarking query timeout logic in Elasticsearch.
* A Python script creates a CSV file with a mix of good and bad queries based on example data.
* A JMeter config file fires the queries at Elasticsearch and detects whether the server killed any problem queries within the allotted timeout setting.
The times taken for the different query types are recorded, along with an indication of whether Elasticsearch was effective in killing problem queries. A sketch of the request/response round trip each JMeter sample performs is shown below.
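For orientation, each JMeter sample posts a JSON body of this shape to the `/mots/mot/_search` path taken from the CSV, then checks the `timed_out` flag in the response. The following is a minimal Python sketch of that round trip; the helper name `run_one_query`, the `urllib2` call and the `localhost:9200` address (matching the JMeter HTTP Request Defaults further down) are illustrative assumptions, not part of the benchmark files.

import json
import urllib2  # illustrative only - mirrors the Python 2 style of the generator script below

def run_one_query(path, from_doc, size, aggs, query, timeout_ms=500):
    # Build the same request body the JMeter "Query template" sampler assembles
    # from one CSV row: timeout, from, size, query and aggs.
    body = {
        "timeout": timeout_ms,
        "from": from_doc,
        "size": size,
        "query": query,
        "aggs": aggs,
    }
    resp = json.load(urllib2.urlopen(
        urllib2.Request("http://localhost:9200" + path, json.dumps(body))))
    # Elasticsearch sets timed_out=true when it abandoned work at the timeout;
    # "took" is the server-side time in milliseconds.
    return resp["timed_out"], resp["took"]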
import gzip
import csv
import time
import datetime
import json
import random

# This script reads a data file (car test results) and uses it to create a CSV file of query clauses for benchmarking.
# It creates mostly well-behaved queries and randomly throws in "problem" queries of various forms.
# Each CSV row in the output is a set of clauses (query/agg/filter) and some metadata for reporting purposes.

f = gzip.open('/Users/Mark/Documents/work/irdata/MOT/test_result_2013.txt.gz')
start = time.time()
with open('/Users/Mark/Documents/work/irdata/MOT/queries.csv', 'w') as fp:
    reader = csv.reader(f, delimiter='|')
    #writer = csv.writer(fp, delimiter=',')
    writer = csv.writer(fp, delimiter='|', quoting=csv.QUOTE_NONE, escapechar='^', quotechar="")
    lc = 0
    i = 0
    requestMeta = {}
    for row in reader:
        i += 1
        # Take one in 50 rows to avoid clumping of tests around how content is actually held on disk
        if i % 50 == 0:
            lc += 1
            # ===============
            # Default scope of data matched
            dayRange = 60
            size = 10
            fromDoc = 0
            model = row[9]
            # ===========================================
            # ==== Set the query type
            # ===========================================
            textQuery = {
                "term": {"Model.raw": model}
            }
            requestMeta["q"] = "term"
            # From http://www.hathitrust.org/blogs/large-scale-search/slow-queries-and-common-words-part-1
            # the stat - "phrase queries are generally between 3-10% of all queries (Spink & Jansen, 2005)"
            if random.randrange(20) == 1:  # 1 in 20 queries run as a phrase query
                textQuery = {
                    "match": {
                        "Model": {
                            "query": model,
                            "type": "phrase",
                            "slop": 2
                        }
                    }
                }
                requestMeta["q"] = "phrase"
            if random.randrange(100) == 1:  # 1 in 100 queries run as a fuzzy query
                textQuery = {
                    "match": {
                        "Model.raw": {
                            "query": model,
                            "fuzziness": 2
                        }
                    }
                }
                requestMeta["q"] = "fuzzy"
            if random.randrange(150) == 1:  # 1 in 150 queries run as a regex query finding models ending in A
                textQuery = {
                    "regexp": {
                        "Model.raw": {
                            "value": ".*[^ ]A"
                        }
                    }
                }
                requestMeta["q"] = "regex"
            if random.randrange(200) == 1:  # 1 in 200 queries run as a prefix query
                textQuery = {
                    "prefix": {
                        "Model.raw": model[0:min(2, len(model))]
                    }
                }
                requestMeta["q"] = "prefix"
            #if lc % 998 == 0: # 1 in 1000 queries run as a wildcard
            if random.randrange(1000) == 1:  # 1 in 1000 queries run as a wildcard
                textQuery = {
                    "query_string": {
                        "query": (model[0:min(1, len(model))] + "*")
                    }
                }
                requestMeta["q"] = "wildcard"
            #print json.dumps(textQuery)
            if random.randrange(2000) == 1:  # 1 in 2000 queries run as a scripted score based on mileage
                # See https://elasticsearch.zendesk.com/agent/#/tickets/403
                textQuery = {
                    "function_score": {
                        "boost_mode": "replace",
                        "query": {"term": {"Model.raw": model}},
                        "script_score": {
                            "script": "_score * Integer.parseInt(doc['TestMileage'].value) "
                        }
                    }
                }
                dayRange = 365 * 100  # Nasty range of data
                requestMeta["q"] = "script score"
                requestMeta["f"] = "bigDate"
                #print json.dumps(textQuery)
            if random.randrange(3000) == 1:  # 1 in 3000 queries run as a v expensive terms query with 1000 terms
                milesArr = []
                # Not a lot of point creating queries with > 1024 clauses as this is the default
                # clause limit capped in Lucene's BooleanQuery
                for x in range(0, 1000):
                    milesArr.append(10000 + random.randrange(90000))
                textQuery = {
                    "terms": {
                        "TestMileage": milesArr
                    }
                }
                requestMeta["q"] = "1kTerms"
            if random.randrange(5000) == 1:  # 1 in 5000 queries run as a v expensive terms query with 10000 terms
                milesArr = []
                # See https://elasticsearch.zendesk.com/agent/#/tickets/1075
                for x in range(0, 10000):
                    milesArr.append(10000 + random.randrange(90000))
                textQuery = {
                    "filtered": {
                        "query": {
                            "match_all": {}
                        },
                        "filter": {
                            "and": {
                                "filters": [{
                                    "or": {
                                        "filters": [{
                                            "terms": {
                                                "TestMileage": milesArr
                                            }
                                        }]
                                    }
                                }]
                            }
                        }
                    }
                }
                requestMeta["q"] = "10kTerms"
            # ===========================================
            # ==== Set the paging depth
            # ===========================================
            if random.randrange(20) == 1:  # every 20th search we ask for the second page
                fromDoc = 10
            if random.randrange(1000) == 1:  # 1 in 1000 searches we ask for an insane depth on a broad date range
                fromDoc = 10000
                dayRange = 365 * 100
            # ===========================================
            # ==== Set the agg type
            # ===========================================
            aggs = json.dumps({
                "makes": {"terms": {"field": "Make.raw"}}
            })
            requestMeta["agg"] = "term"
            if random.randrange(25) == 1:  # every nth row we use scripts to add cost to the query
                aggs = json.dumps({
                    "makes": {"terms": {"script": "doc['Make.raw'].value"}}
                })
                requestMeta["agg"] = "script"
            if random.randrange(100) == 1:
                requestMeta["agg"] = "monthHisto"
                aggs = json.dumps({
                    "modelsOverTime": {
                        "date_histogram": {
                            "field": "FirstUseDate",
                            "interval": "month"
                        }
                    }
                })
                dayRange = 365 * 10
            # ===========================================
            # ==== Set a filter for the query
            # ===========================================
            startDateString = row[13]
            date = datetime.datetime.strptime(startDateString, '%Y-%m-%d').date()
            requestMeta["f"] = "smlDate"
            # One in 50 date ranges is for a full year
            if random.randrange(50) == 1:
                dayRange = 365
                size = 1000
                requestMeta["f"] = "bigDate"
            date += datetime.timedelta(days=dayRange)
            endDateString = date.strftime('%Y-%m-%d')
            if random.randrange(1000) == 1:  # One in 1000 results is for many docs
                size = 10000
            requestMeta["sz"] = size
            requestMeta["from"] = fromDoc
            query = {
                "bool": {
                    "must": [
                        textQuery,
                        {
                            "constant_score": {
                                "filter": {
                                    "range": {
                                        "FirstUseDate": {
                                            "gte": startDateString,
                                            "lte": endDateString
                                        }
                                    }
                                }
                            }
                        }
                    ]
                }
            }
            writer.writerow(["/mots/mot/_search", json.dumps(requestMeta), fromDoc, size, aggs, json.dumps(query)])
            if lc > 100000:
                break
f.close()
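For reference, a generated row can be read back with the same '|' delimiter and '^' escape character the script writes with; the column order matches the variableNames setting of the JMeter CSV Data Set below. A small sketch (the file path is the one hard-coded above; the example metadata values are just for illustration):

import csv
import json

# Peek at the first generated row and unpack it into the names JMeter uses:
# path, requestMeta, fromDoc, size, aggs, query
with open('/Users/Mark/Documents/work/irdata/MOT/queries.csv') as fp:
    reader = csv.reader(fp, delimiter='|', quoting=csv.QUOTE_NONE, escapechar='^')
    path, requestMeta, fromDoc, size, aggs, query = next(reader)

print path                      # /mots/mot/_search
print json.loads(requestMeta)   # e.g. {"q": "term", "agg": "term", "f": "smlDate", "sz": 10, "from": 0}
print json.loads(query).keys()  # ['bool'] - the text query clause plus the date range filter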
<?xml version="1.0" encoding="UTF-8"?>
<jmeterTestPlan version="1.2" properties="2.6" jmeter="2.11 r1554548">
<hashTree>
<TestPlan guiclass="TestPlanGui" testclass="TestPlan" testname="ElasticSearch" enabled="true">
<stringProp name="TestPlan.comments"></stringProp>
<boolProp name="TestPlan.functional_mode">false</boolProp>
<boolProp name="TestPlan.serialize_threadgroups">false</boolProp>
<elementProp name="TestPlan.user_defined_variables" elementType="Arguments" guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="TestPlan.user_define_classpath"></stringProp>
</TestPlan>
<hashTree>
<ThreadGroup guiclass="ThreadGroupGui" testclass="ThreadGroup" testname="Thread Group" enabled="true">
<stringProp name="ThreadGroup.on_sample_error">continue</stringProp>
<elementProp name="ThreadGroup.main_controller" elementType="LoopController" guiclass="LoopControlPanel" testclass="LoopController" testname="Loop Controller" enabled="true">
<boolProp name="LoopController.continue_forever">false</boolProp>
<stringProp name="LoopController.loops">5000</stringProp>
</elementProp>
<stringProp name="ThreadGroup.num_threads">5</stringProp>
<stringProp name="ThreadGroup.ramp_time">1</stringProp>
<longProp name="ThreadGroup.start_time">1407948749000</longProp>
<longProp name="ThreadGroup.end_time">1407948749000</longProp>
<boolProp name="ThreadGroup.scheduler">false</boolProp>
<stringProp name="ThreadGroup.duration"></stringProp>
<stringProp name="ThreadGroup.delay"></stringProp>
</ThreadGroup>
<hashTree>
<Arguments guiclass="ArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments">
<elementProp name="timeoutInMillis" elementType="Argument">
<stringProp name="Argument.name">timeoutInMillis</stringProp>
<stringProp name="Argument.value">500</stringProp>
<stringProp name="Argument.desc">The timeout setting for all requests</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</Arguments>
<hashTree/>
<CSVDataSet guiclass="TestBeanGUI" testclass="CSVDataSet" testname="CSV query source" enabled="true">
<stringProp name="delimiter">|</stringProp>
<stringProp name="fileEncoding"></stringProp>
<stringProp name="filename">/Users/Mark/Documents/work/irdata/MOT/queries.csv</stringProp>
<boolProp name="quotedData">false</boolProp>
<boolProp name="recycle">true</boolProp>
<stringProp name="shareMode">shareMode.all</stringProp>
<boolProp name="stopThread">false</boolProp>
<stringProp name="variableNames">path,requestMeta,fromDoc,size,aggs,query</stringProp>
</CSVDataSet>
<hashTree/>
<ConfigTestElement guiclass="HttpDefaultsGui" testclass="ConfigTestElement" testname="HTTP Request Defaults" enabled="true">
<elementProp name="HTTPsampler.Arguments" elementType="Arguments" guiclass="HTTPArgumentsPanel" testclass="Arguments" testname="User Defined Variables" enabled="true">
<collectionProp name="Arguments.arguments"/>
</elementProp>
<stringProp name="HTTPSampler.domain">localhost</stringProp>
<stringProp name="HTTPSampler.port">9200</stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path"></stringProp>
<stringProp name="HTTPSampler.concurrentPool">4</stringProp>
</ConfigTestElement>
<hashTree/>
<HTTPSamplerProxy guiclass="HttpTestSampleGui" testclass="HTTPSamplerProxy" testname="Query template" enabled="true">
<boolProp name="HTTPSampler.postBodyRaw">true</boolProp>
<elementProp name="HTTPsampler.Arguments" elementType="Arguments">
<collectionProp name="Arguments.arguments">
<elementProp name="" elementType="HTTPArgument">
<boolProp name="HTTPArgument.always_encode">false</boolProp>
<stringProp name="Argument.value">{&#xd;
&quot;timeout&quot;:${timeoutInMillis},&#xd;
&quot;from&quot;: ${fromDoc},&#xd;
&quot;size&quot;:${size},&#xd;
&quot;query&quot; : ${query},&#xd;
&quot;aggs&quot; : ${aggs} &#xd;
}</stringProp>
<stringProp name="Argument.metadata">=</stringProp>
</elementProp>
</collectionProp>
</elementProp>
<stringProp name="HTTPSampler.domain"></stringProp>
<stringProp name="HTTPSampler.port"></stringProp>
<stringProp name="HTTPSampler.connect_timeout"></stringProp>
<stringProp name="HTTPSampler.response_timeout"></stringProp>
<stringProp name="HTTPSampler.protocol"></stringProp>
<stringProp name="HTTPSampler.contentEncoding"></stringProp>
<stringProp name="HTTPSampler.path">${path}</stringProp>
<stringProp name="HTTPSampler.method">POST</stringProp>
<boolProp name="HTTPSampler.follow_redirects">true</boolProp>
<boolProp name="HTTPSampler.auto_redirects">false</boolProp>
<boolProp name="HTTPSampler.use_keepalive">true</boolProp>
<boolProp name="HTTPSampler.DO_MULTIPART_POST">false</boolProp>
<boolProp name="HTTPSampler.monitor">false</boolProp>
<stringProp name="HTTPSampler.embedded_url_re"></stringProp>
</HTTPSamplerProxy>
<hashTree/>
<RegexExtractor guiclass="RegexExtractorGui" testclass="RegexExtractor" testname="TimedOutRegex spotter" enabled="true">
<stringProp name="TestPlan.comments">Tells me if elasticsearch timed out processing the request</stringProp>
<stringProp name="RegexExtractor.useHeaders">false</stringProp>
<stringProp name="RegexExtractor.refname">TimedOut</stringProp>
<stringProp name="RegexExtractor.regex">timed_out&quot;:([^,]+)</stringProp>
<stringProp name="RegexExtractor.template">$1$</stringProp>
<stringProp name="RegexExtractor.default">NOTFOUND</stringProp>
<stringProp name="RegexExtractor.match_number">1</stringProp>
</RegexExtractor>
<hashTree/>
<BeanShellPostProcessor guiclass="TestBeanGUI" testclass="BeanShellPostProcessor" testname="BeanShell Labeller" enabled="true">
<stringProp name="filename"></stringProp>
<stringProp name="parameters"></stringProp>
<boolProp name="resetInterpreter">false</boolProp>
<stringProp name="script">//If the regex spots the &quot;timedout&quot; elasticsearch response
if(&quot;true&quot;.equals(vars.get(&quot;TimedOut&quot;))){
prev.setSuccessful(false); //For JMeter reporting performances report as error
prev.setResponseCode(&quot;503&quot;); //Temporary unavailable
prev.setSampleLabel(&quot;Detected overrun &quot;+vars.get(&quot;requestMeta&quot;));
}else{
// prev.setSampleLabel(vars.get(&quot;requestMeta&quot;));
if(prev.getTime()&gt;100+Integer.parseInt(vars.get(&quot;timeoutInMillis&quot;))){ //allow 100ms ovverun for timer granularity
prev.setSampleLabel(&quot;Undetected overrun &quot;+vars.get(&quot;requestMeta&quot;));
prev.setSuccessful(false); //For JMeter reporting performances report as error
prev.setResponseCode(&quot;503&quot;); //Temporary unavailable
}
}
</stringProp>
</BeanShellPostProcessor>
<hashTree/>
<ResultCollector guiclass="GraphVisualizer" testclass="ResultCollector" testname="Graph Results" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>false</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>false</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<ResultCollector guiclass="TableVisualizer" testclass="ResultCollector" testname="Errors Table" enabled="true">
<boolProp name="ResultCollector.error_logging">true</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>false</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>false</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<ResultCollector guiclass="SummaryReport" testclass="ResultCollector" testname="Summary Report" enabled="true">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>false</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>false</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<ResultCollector guiclass="ViewResultsFullVisualizer" testclass="ResultCollector" testname="View Results Tree" enabled="false">
<boolProp name="ResultCollector.error_logging">true</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>false</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>false</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<ResultCollector guiclass="SplineVisualizer" testclass="ResultCollector" testname="Spline Visualizer" enabled="false">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>false</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>false</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
<ResultCollector guiclass="DistributionGraphVisualizer" testclass="ResultCollector" testname="Distribution Graph (alpha)" enabled="false">
<boolProp name="ResultCollector.error_logging">false</boolProp>
<objProp>
<name>saveConfig</name>
<value class="SampleSaveConfiguration">
<time>true</time>
<latency>true</latency>
<timestamp>true</timestamp>
<success>true</success>
<label>true</label>
<code>true</code>
<message>true</message>
<threadName>true</threadName>
<dataType>true</dataType>
<encoding>false</encoding>
<assertions>true</assertions>
<subresults>true</subresults>
<responseData>false</responseData>
<samplerData>false</samplerData>
<xml>false</xml>
<fieldNames>false</fieldNames>
<responseHeaders>false</responseHeaders>
<requestHeaders>false</requestHeaders>
<responseDataOnError>false</responseDataOnError>
<saveAssertionResultsFailureMessage>false</saveAssertionResultsFailureMessage>
<assertionsResultsToSave>0</assertionsResultsToSave>
<bytes>true</bytes>
</value>
</objProp>
<stringProp name="filename"></stringProp>
</ResultCollector>
<hashTree/>
</hashTree>
</hashTree>
</hashTree>
</jmeterTestPlan>
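The pass/fail rules in the BeanShell post-processor above reduce to three outcomes per sample. Here is a minimal Python sketch of the same classification, assuming the 500 ms timeout and 100 ms granularity allowance used in the test plan (the function name is illustrative):

def classify_sample(timed_out, elapsed_ms, timeout_ms=500, slack_ms=100):
    # "Detected overrun": Elasticsearch hit the timeout and reported timed_out=true.
    if timed_out:
        return "detected overrun"
    # "Undetected overrun": the request ran well past the timeout but Elasticsearch
    # never reported a timeout (allow some slack for timer granularity).
    if elapsed_ms > timeout_ms + slack_ms:
        return "undetected overrun"
    # Otherwise the query finished within the allotted time.
    return "ok"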
Below is a partial stack trace captured while running the above benchmarks; it looks as though the scripts are recursing, and I was eventually getting stack overflows.
The error occurs with this query:
{
  "query": {
    "function_score": {
      "query": {
        "term": {
          "Model.raw": "JAZZ SE"
        }
      },
      "script_score": {
        "script": "_score * doc['TestMileage'].value "
      },
      "boost_mode": "replace"
    }
  },
  "aggs": {
    "makes": {
      "terms": {
        "script": "doc['Make.raw'].value"
      }
    }
  }
}
and results in this repeated sequence:
at Script2.run(Script2.groovy:1)
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.run(GroovyScriptEngineService.java:242)
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.runAsDouble(GroovyScriptEngineService.java:266)
at org.elasticsearch.script.ScriptService$TimeRestrictedSearchScript.runAsDouble(ScriptService.java:592)
at org.elasticsearch.common.lucene.search.function.ScriptScoreFunction.score(ScriptScoreFunction.java:99)
at org.elasticsearch.common.lucene.search.function.FunctionScoreQuery$CustomBoostFactorScorer.score(FunctionScoreQuery.java:176)
at org.apache.lucene.search.ConjunctionScorer.score(ConjunctionScorer.java:108)
at org.apache.lucene.search.ScoreCachingWrappingScorer.score(ScoreCachingWrappingScorer.java:49)
at org.elasticsearch.search.lookup.DocLookup.score(DocLookup.java:88)
at org.elasticsearch.script.ScoreAccessor.score(ScoreAccessor.java:42)
at org.elasticsearch.script.ScoreAccessor.longValue(ScoreAccessor.java:55)
at org.codehaus.groovy.runtime.typehandling.LongMath.multiplyImpl(LongMath.java:42)
at org.codehaus.groovy.runtime.dgmimpl.NumberNumberMultiply$NumberNumber.invoke(NumberNumberMultiply.java:383)
at org.codehaus.groovy.runtime.callsite.PojoMetaMethodSite.call(PojoMetaMethodSite.java:53)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:116)
at Script2.run(Script2.groovy:1)
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.run(GroovyScriptEngineService.java:242)
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.runAsDouble(GroovyScriptEngineService.java:266)
at org.elasticsearch.script.ScriptService$TimeRestrictedSearchScript.runAsDouble(ScriptService.java:592)
at org.elasticsearch.common.lucene.search.function.ScriptScoreFunction.score(ScriptScoreFunction.java:99)
at org.elasticsearch.common.lucene.search.function.FunctionScoreQuery$CustomBoostFactorScorer.score(FunctionScoreQuery.java:176)
at org.apache.lucene.search.ConjunctionScorer.score(ConjunctionScorer.java:108)
at org.apache.lucene.search.ScoreCachingWrappingScorer.score(ScoreCachingWrappingScorer.java:49)
at org.elasticsearch.search.lookup.DocLookup.score(DocLookup.java:88)
at org.elasticsearch.script.ScoreAccessor.score(ScoreAccessor.java:42)
at org.elasticsearch.script.ScoreAccessor.longValue(ScoreAccessor.java:55)
at org.codehaus.groovy.runtime.typehandling.LongMath.multiplyImpl(LongMath.java:42)
at org.codehaus.groovy.runtime.dgmimpl.NumberNumberMultiply$NumberNumber.invoke(NumberNumberMultiply.java:383)
at org.codehaus.groovy.runtime.callsite.PojoMetaMethodSite.call(PojoMetaMethodSite.java:53)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:116)
at Script2.run(Script2.groovy:1)
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.run(GroovyScriptEngineService.java:242)
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.runAsDouble(GroovyScriptEngineService.java:266)
at org.elasticsearch.script.ScriptService$TimeRestrictedSearchScript.runAsDouble(ScriptService.java:592)
at org.elasticsearch.common.lucene.search.function.ScriptScoreFunction.score(ScriptScoreFunction.java:99)
at org.elasticsearch.common.lucene.search.function.FunctionScoreQuery$CustomBoostFactorScorer.score(FunctionScoreQuery.java:176)
at org.apache.lucene.search.ConjunctionScorer.score(ConjunctionScorer.java:108)
at org.apache.lucene.search.ScoreCachingWrappingScorer.score(ScoreCachingWrappingScorer.java:49)
at org.elasticsearch.search.lookup.DocLookup.score(DocLookup.java:88)
at org.elasticsearch.script.ScoreAccessor.score(ScoreAccessor.java:42)
at org.elasticsearch.script.ScoreAccessor.longValue(ScoreAccessor.java:55)
at org.codehaus.groovy.runtime.typehandling.LongMath.multiplyImpl(LongMath.java:42)
at org.codehaus.groovy.runtime.dgmimpl.NumberNumberMultiply$NumberNumber.invoke(NumberNumberMultiply.java:383)
at org.codehaus.groovy.runtime.callsite.PojoMetaMethodSite.call(PojoMetaMethodSite.java:53)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:116)
at Script2.run(Script2.groovy:1)
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.run(GroovyScriptEngineService.java:242)
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.runAsDouble(GroovyScriptEngineService.java:266)
at org.elasticsearch.script.ScriptService$TimeRestrictedSearchScript.runAsDouble(ScriptService.java:592)
at org.elasticsearch.common.lucene.search.function.ScriptScoreFunction.score(ScriptScoreFunction.java:99)
at org.elasticsearch.common.lucene.search.function.FunctionScoreQuery$CustomBoostFactorScorer.score(FunctionScoreQuery.java:176)
at org.apache.lucene.search.ConjunctionScorer.score(ConjunctionScorer.java:108)
at org.apache.lucene.search.ScoreCachingWrappingScorer.score(ScoreCachingWrappingScorer.java:49)
at org.elasticsearch.search.lookup.DocLookup.score(DocLookup.java:88)
at org.elasticsearch.script.ScoreAccessor.score(ScoreAccessor.java:42)
at org.elasticsearch.script.ScoreAccessor.longValue(ScoreAccessor.java:55)
at org.codehaus.groovy.runtime.typehandling.LongMath.multiplyImpl(LongMath.java:42)
at org.codehaus.groovy.runtime.dgmimpl.NumberNumberMultiply$NumberNumber.invoke(NumberNumberMultiply.java:383)
at org.codehaus.groovy.runtime.callsite.PojoMetaMethodSite.call(PojoMetaMethodSite.java:53)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:116)
at Script2.run(Script2.groovy:1)
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.run(GroovyScriptEngineService.java:242)
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.runAsDouble(GroovyScriptEngineService.java:266)
at org.elasticsearch.script.ScriptService$TimeRestrictedSearchScript.runAsDouble(ScriptService.java:592)
at org.elasticsearch.common.lucene.search.function.ScriptScoreFunction.score(ScriptScoreFunction.java:99)