first nCache (new fieldCache) pass
yonik committed Feb 3, 2014
1 parent 1cde291 commit 96bbb77
Showing 37 changed files with 1,822 additions and 113 deletions.
3 changes: 2 additions & 1 deletion solr/core/src/java/org/apache/solr/core/HS.java
@@ -51,6 +51,7 @@ public long allocArray(long numElements, int elementSize, boolean zero) throws O
}

public void freeArray(long ptr) {
assert arraySizeBytes(ptr) >= 0;
numFree.incrementAndGet();
unsafe.putLong(ptr - SIZE_OFFSET, -123456789L); // put negative length to trip asserts
unsafe.freeMemory(ptr - HEADER_SIZE);
@@ -176,7 +177,7 @@ public static int getShort(long ptr, int index) {

public static void setShort(long ptr, int index, short val) {
assert (index>=0) && ((((long)index+1)<<1)) <= arraySizeBytes(ptr);
unsafe.putInt(ptr + (((long)index)<<1), val);
unsafe.putShort(ptr + (((long)index)<<1), val);
}

public static int getInt(long ptr, int index) {
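The setShort change above fixes a store-width bug: the old line wrote the value with putInt, a 4-byte store, into a slot sized for a 2-byte short, so the neighboring element could be clobbered. A standalone sketch of the difference, using sun.misc.Unsafe directly with the same index<<1 offset math (illustration only, not code from this commit):

import sun.misc.Unsafe;
import java.lang.reflect.Field;

public class ShortStoreSketch {
    public static void main(String[] args) throws Exception {
        // Grab the Unsafe singleton via reflection (illustration only).
        Field f = Unsafe.class.getDeclaredField("theUnsafe");
        f.setAccessible(true);
        Unsafe unsafe = (Unsafe) f.get(null);

        long ptr = unsafe.allocateMemory(4 * 2);               // room for 4 shorts
        unsafe.putShort(ptr + (2L << 1), (short) 9);           // element 2 = 9

        unsafe.putInt(ptr + (1L << 1), 7);                     // old behavior: 4-byte store at element 1
        System.out.println(unsafe.getShort(ptr + (2L << 1)));  // 0 on little-endian: element 2 clobbered

        unsafe.putShort(ptr + (2L << 1), (short) 9);           // reset element 2
        unsafe.putShort(ptr + (1L << 1), (short) 7);           // fixed behavior: 2-byte store
        System.out.println(unsafe.getShort(ptr + (2L << 1)));  // 9: element 2 intact

        unsafe.freeMemory(ptr);
    }
}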
103 changes: 103 additions & 0 deletions solr/core/src/java/org/apache/solr/core/RefCountBase.java
@@ -0,0 +1,103 @@
package org.apache.solr.core;

/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import org.apache.solr.core.RefCount;

import java.io.Closeable;
import java.util.concurrent.atomic.AtomicInteger;

public abstract class RefCountBase implements RefCount, Closeable {
private final AtomicInteger refcount = new AtomicInteger(1);

@Override
public int getRefCount() {
return refcount.get();
}

@Override
public int incref() {
// debug_incref();

int count;
while ((count = refcount.get()) > 0) {
if (refcount.compareAndSet(count, count+1)) {
return count+1;
}
}
throw new RuntimeException("Trying to incref freed native object " + this);
}

@Override
public int decref() {
// debug_decref();

int count;
while ((count = refcount.get()) > 0) {
int newCount = count - 1;
if (refcount.compareAndSet(count, newCount)) {
if (newCount == 0) {
free();
}
return newCount;
}
}

throw new RuntimeException("Too many decrefs detected for native object " + this);
}


@Override
public boolean tryIncref() {
// debug_incref();

int count;
while ((count = refcount.get()) > 0) {
if (refcount.compareAndSet(count, count+1)) {
return true;
}
}
return false;
}

@Override
public boolean tryDecref() {
// debug_decref();

int count;
while ((count = refcount.get()) > 0) {
int newCount = count - 1;
if (refcount.compareAndSet(count, newCount)) {
if (newCount == 0) {
free();
}
return true;
}
}

return false;
}


protected abstract void free();

@Override // for Closeable
public void close() {
decref();
}
}
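RefCountBase is the new shared base class for ref-counted (typically native/off-heap) resources: the count starts at 1 for the creator, incref/decref loop on compareAndSet so a freed object can never be resurrected, and close() delegates to decref() so instances work with try-with-resources. A hypothetical subclass sketching the intended lifecycle (NativeBuffer is not part of this commit):

import org.apache.solr.core.RefCountBase;

public class RefCountSketch {
    // Hypothetical subclass: free() runs exactly once, when the count reaches 0.
    static class NativeBuffer extends RefCountBase {
        @Override
        protected void free() {
            System.out.println("releasing native memory");
        }
    }

    public static void main(String[] args) {
        NativeBuffer buf = new NativeBuffer();   // refcount == 1, owned by the creator

        if (buf.tryIncref()) {                   // hand out a second reference
            // ... another component uses buf here ...
            buf.decref();                        // back to 1
        }

        try (NativeBuffer b = buf) {             // Closeable: close() -> decref()
            // last use of the buffer
        }                                        // count hits 0 -> free() runs

        System.out.println(buf.tryIncref());     // false: a freed object cannot be revived
    }
}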
23 changes: 17 additions & 6 deletions solr/core/src/java/org/apache/solr/core/SolrConfig.java
@@ -21,6 +21,7 @@
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.schema.IndexSchemaFactory;
import org.apache.solr.search.field.TopValues;
import org.apache.solr.util.DOMUtil;
import org.apache.solr.util.FileUtils;
import org.apache.solr.util.RegexFileFilter;
@@ -37,7 +38,6 @@
import org.apache.solr.update.UpdateLog;
import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.apache.solr.spelling.QueryConverter;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.util.Version;

@@ -176,11 +176,7 @@ public SolrConfig(SolrResourceLoader loader, String name, InputSource is)
if(get("query/HashDocSet", null) != null)
log.warn("solrconfig.xml: <HashDocSet> is deprecated and no longer recommended used.");

// TODO: Old code - in case somebody wants to re-enable. Also see SolrIndexSearcher#search()
// filtOptEnabled = getBool("query/boolTofilterOptimizer/@enabled", false);
// filtOptCacheSize = getInt("query/boolTofilterOptimizer/@cacheSize",32);
// filtOptThreshold = getFloat("query/boolTofilterOptimizer/@threshold",.05f);


useFilterForSortedQuery = getBool("query/useFilterForSortedQuery", false);
queryResultWindowSize = Math.max(1, getInt("query/queryResultWindowSize", 1));
queryResultMaxDocsCached = getInt("query/queryResultMaxDocsCached", Integer.MAX_VALUE);
@@ -197,6 +193,20 @@ public SolrConfig(SolrResourceLoader loader, String name, InputSource is)
}
filterCacheConfig = conf;

conf = CacheConfig.getConfig(this, "query/nCache");
if (conf == null) {
Map<String,String> args = new HashMap<String,String>();
args.put("name","nCache");
args.put("size","64");
args.put("autowarmCount","100%");
args.put("showItems","-1");
conf = new CacheConfig(FastLRUCache.class, args, new TopValues.Regenerator());
} else {
conf.clazz = FastLRUCache.class;
}
nCacheConfig = conf;


queryResultCacheConfig = CacheConfig.getConfig(this, "query/queryResultCache");
documentCacheConfig = CacheConfig.getConfig(this, "query/documentCache");
conf = CacheConfig.getConfig(this, "query/fieldValueCache");
@@ -344,6 +354,7 @@ public List<PluginInfo> readPluginInfos(String tag, boolean requireName, boolean
public final CacheConfig queryResultCacheConfig;
public final CacheConfig documentCacheConfig;
public final CacheConfig fieldValueCacheConfig;
public final CacheConfig nCacheConfig;
public final CacheConfig[] userCacheConfigs;
// SolrIndexSearcher - more...
public final boolean useFilterForSortedQuery;
solr/core/src/java/org/apache/solr/handler/component/FieldFacetStats.java
@@ -58,30 +58,32 @@ public class FieldFacetStats {
final AtomicReader topLevelReader;
AtomicReaderContext leave;
final ValueSource valueSource;
final QueryContext qcontext;
AtomicReaderContext context;
FuncValues values;

SortedDocValues topLevelSortedValues = null;

private final BytesRef tempBR = new BytesRef();

public FieldFacetStats(SolrIndexSearcher searcher, String name, SchemaField field_sf, SchemaField facet_sf, boolean calcDistinct) {
public FieldFacetStats(SolrIndexSearcher searcher, String name, SchemaField field_sf, SchemaField facet_sf, boolean calcDistinct) throws IOException {
this.name = name;
this.field_sf = field_sf;
this.facet_sf = facet_sf;
this.calcDistinct = calcDistinct;

topLevelReader = searcher.getAtomicReader();
valueSource = facet_sf.getType().getValueSource(facet_sf, null);

qcontext = QueryContext.newContext(searcher);
valueSource.createWeight(qcontext, searcher);
facetStatsValues = new HashMap<String, StatsValues>();
facetStatsTerms = new ArrayList<HashMap<String, Integer>>();
}

private StatsValues getStatsValues(String key) throws IOException {
StatsValues stats = facetStatsValues.get(key);
if (stats == null) {
stats = StatsValuesFactory.createStatsValues(field_sf, calcDistinct);
stats = StatsValuesFactory.createStatsValues(qcontext, field_sf, calcDistinct);
facetStatsValues.put(key, stats);
stats.setNextReader(context);
}
@@ -142,7 +144,7 @@ public boolean accumulateTermNum(int statsTermNum, BytesRef value) throws IOExce
String key = (String) pairs.getKey();
StatsValues facetStats = facetStatsValues.get(key);
if (facetStats == null) {
facetStats = StatsValuesFactory.createStatsValues(field_sf, calcDistinct);
facetStats = StatsValuesFactory.createStatsValues(qcontext, field_sf, calcDistinct);
facetStatsValues.put(key, facetStats);
}
Integer count = (Integer) pairs.getValue();
solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java
@@ -38,6 +38,7 @@
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.QueryContext;
import org.apache.solr.search.SolrIndexSearcher;

/**
@@ -178,7 +179,8 @@ void parse(SolrParams params, ResponseBuilder rb) {
for (String field : statsFs) {
boolean calcDistinct = params.getFieldBool(field, StatsParams.STATS_CALC_DISTINCT, false);
SchemaField sf = rb.req.getSchema().getField(field);
statsFields.put(field, StatsValuesFactory.createStatsValues(sf, calcDistinct));
QueryContext qcontext = QueryContext.newContext(rb.req.getSearcher());
statsFields.put(field, StatsValuesFactory.createStatsValues(qcontext, sf, calcDistinct));
}
}
}
@@ -247,7 +249,8 @@ public NamedList<?> getFieldCacheStats(String fieldName, boolean calcDistinct, S
IndexSchema schema = searcher.getSchema();
final SchemaField sf = schema.getField(fieldName);

final StatsValues allstats = StatsValuesFactory.createStatsValues(sf, calcDistinct);
QueryContext qcontext = QueryContext.newContext(searcher);
final StatsValues allstats = StatsValuesFactory.createStatsValues(qcontext, sf, calcDistinct);

List<FieldFacetStats> facetStats = new ArrayList<FieldFacetStats>();
for( String facetField : facet ) {
solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java
@@ -42,7 +42,7 @@ public class StatsValuesFactory {
* @param sf SchemaField for the field whose statistics will be created by the resulting StatsValues
* @return Instance of StatsValues that will create statistics from values from a field of the given type
*/
public static StatsValues createStatsValues(SchemaField sf, boolean calcDistinct) {
public static StatsValues createStatsValues(QueryContext qcontext, SchemaField sf, boolean calcDistinct) {
// TODO: allow for custom field types
FieldType fieldType = sf.getType();
if (DoubleField.class.isInstance(fieldType) ||
@@ -54,13 +54,13 @@ public static StatsValues createStatsValues(SchemaField sf, boolean calcDistinct
SortableIntField.class.isInstance(fieldType) ||
SortableLongField.class.isInstance(fieldType) ||
SortableFloatField.class.isInstance(fieldType)) {
return new NumericStatsValues(sf, calcDistinct);
return new NumericStatsValues(qcontext, sf, calcDistinct);
} else if (DateField.class.isInstance(fieldType)) {
return new DateStatsValues(sf, calcDistinct);
return new DateStatsValues(qcontext, sf, calcDistinct);
} else if (StrField.class.isInstance(fieldType)) {
return new StringStatsValues(sf, calcDistinct);
return new StringStatsValues(qcontext, sf, calcDistinct);
} else if (sf.getType().getClass().equals(EnumField.class)) {
return new EnumStatsValues(sf, calcDistinct);
return new EnumStatsValues(qcontext, sf, calcDistinct);
} else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Field type " + fieldType + " is not currently supported");
}
@@ -85,17 +85,20 @@ abstract class AbstractStatsValues<T> implements StatsValues {
protected long countDistinct;
protected Set<T> distinctValues;
private ValueSource valueSource;
private final QueryContext qcontext;
protected FuncValues values;
protected boolean calcDistinct = false;

// facetField facetValue
protected Map<String, Map<String, StatsValues>> facets = new HashMap<String, Map<String, StatsValues>>();

protected AbstractStatsValues(SchemaField sf, boolean calcDistinct) {
protected AbstractStatsValues(QueryContext qcontext, SchemaField sf, boolean calcDistinct) {
this.sf = sf;
this.ft = sf.getType();
this.distinctValues = new TreeSet<T>();
this.calcDistinct = calcDistinct;
// this.valueSource = ft.getValueSource(sf, null); // FIXME - throws exception for multi-valued fields.... why doesn't it later on? setNextReader must never be called?
this.qcontext = qcontext;
}

/**
@@ -130,7 +133,7 @@ public void accumulate(NamedList stv) {
String val = vals.getName(j);
StatsValues vvals = addTo.get(val);
if (vvals == null) {
vvals = StatsValuesFactory.createStatsValues(sf, calcDistinct);
vvals = StatsValuesFactory.createStatsValues(qcontext, sf, calcDistinct);
addTo.put(val, vvals);
}
vvals.accumulate((NamedList) vals.getVal(j));
@@ -216,7 +219,7 @@ public void setNextReader(AtomicReaderContext ctx) throws IOException {
if (valueSource == null) {
valueSource = ft.getValueSource(sf, null);
}
values = valueSource.getValues(new QueryContext(null), ctx); // TODO: FIXME: get real context
values = valueSource.getValues(qcontext, ctx);
}

/**
Expand Down Expand Up @@ -258,8 +261,8 @@ class NumericStatsValues extends AbstractStatsValues<Number> {
double sum;
double sumOfSquares;

public NumericStatsValues(SchemaField sf, boolean calcDistinct) {
super(sf, calcDistinct);
public NumericStatsValues(QueryContext qcontext, SchemaField sf, boolean calcDistinct) {
super(qcontext, sf, calcDistinct);
min = Double.POSITIVE_INFINITY;
max = Double.NEGATIVE_INFINITY;
}
@@ -333,8 +336,8 @@ private double getStandardDeviation() {
*/
class EnumStatsValues extends AbstractStatsValues<EnumFieldValue> {

public EnumStatsValues(SchemaField sf, boolean calcDistinct) {
super(sf, calcDistinct);
public EnumStatsValues(QueryContext qcontext, SchemaField sf, boolean calcDistinct) {
super(qcontext, sf, calcDistinct);
}

/**
@@ -402,8 +405,8 @@ class DateStatsValues extends AbstractStatsValues<Date> {
private long sum = -1;
double sumOfSquares = 0;

public DateStatsValues(SchemaField sf, boolean calcDistinct) {
super(sf, calcDistinct);
public DateStatsValues(QueryContext qcontext, SchemaField sf, boolean calcDistinct) {
super(qcontext, sf, calcDistinct);
}

@Override
@@ -488,8 +491,8 @@ private double getStandardDeviation() {
*/
class StringStatsValues extends AbstractStatsValues<String> {

public StringStatsValues(SchemaField sf, boolean calcDistinct) {
super(sf, calcDistinct);
public StringStatsValues(QueryContext qcontext, SchemaField sf, boolean calcDistinct) {
super(qcontext, sf, calcDistinct);
}

@Override
solr/core/src/java/org/apache/solr/request/SimpleFacets.java
@@ -234,6 +234,7 @@ protected void parseParams(String type, String param) throws SyntaxError, IOExce
this.docs = searcher.getDocSet(qlist);
return;
}
grouping.getCommands().get(0).prepare();
AbstractAllGroupHeadsCollector allGroupHeadsCollector = grouping.getCommands().get(0).createAllGroupCollector();
try (DocSet base = searcher.getDocSet(qlist)) {
searcher.search(new MatchAllDocsQuery(), base.getTopFilter(), allGroupHeadsCollector);
solr/core/src/java/org/apache/solr/request/SolrRequestInfo.java
@@ -27,6 +27,7 @@

import java.io.Closeable;
import java.util.Date;
import java.util.Deque;
import java.util.TimeZone;
import java.util.LinkedList;
import java.util.List;
@@ -40,7 +41,7 @@ public class SolrRequestInfo {
protected Date now;
protected TimeZone tz;
protected ResponseBuilder rb;
protected List<Closeable> closeHooks;
protected Deque<Closeable> closeHooks;


public static SolrRequestInfo getRequestInfo() {
@@ -152,7 +153,8 @@ public void addCloseHook(Closeable hook) {
if (closeHooks == null) {
closeHooks = new LinkedList<Closeable>();
}
closeHooks.add(hook);
// addFirst so we will close in reverse order
closeHooks.addFirst(hook);
}
}
}
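Switching closeHooks from List to Deque and registering with addFirst() makes the hooks close in reverse registration order (LIFO), so resources acquired later, which may depend on earlier ones, are released first. A standalone sketch of the ordering (hook labels are illustrative, not code from this commit):

import java.io.Closeable;
import java.util.Deque;
import java.util.LinkedList;

public class CloseOrderSketch {
    public static void main(String[] args) throws Exception {
        Deque<Closeable> closeHooks = new LinkedList<>();

        closeHooks.addFirst(() -> System.out.println("closing hook registered first"));
        closeHooks.addFirst(() -> System.out.println("closing hook registered second"));

        // A Deque iterates head -> tail, so addFirst() yields last-registered-first close order.
        for (Closeable hook : closeHooks) {
            hook.close();   // prints the "second" hook, then the "first" hook
        }
    }
}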