From cbe5714f687f1a6aa3b63e2d7eefde359071341a Mon Sep 17 00:00:00 2001 From: Mike Drob Date: Wed, 15 Sep 2021 15:43:27 -0500 Subject: [PATCH 1/8] SOLR-15555 Fix cache test that assumed no autowarming --- .../org/apache/solr/query/SolrRangeQuery.java | 2 +- .../org/apache/solr/util/TestInjection.java | 4 +- .../apache/solr/search/TestRangeQuery.java | 57 +++++++++++-------- 3 files changed, 36 insertions(+), 27 deletions(-) diff --git a/solr/core/src/java/org/apache/solr/query/SolrRangeQuery.java b/solr/core/src/java/org/apache/solr/query/SolrRangeQuery.java index c400d6615b0..d1e5f2360ac 100644 --- a/solr/core/src/java/org/apache/solr/query/SolrRangeQuery.java +++ b/solr/core/src/java/org/apache/solr/query/SolrRangeQuery.java @@ -168,7 +168,7 @@ public DocSet createDocSet(SolrIndexSearcher searcher) throws IOException { } private DocSet createDocSet(SolrIndexSearcher searcher, long cost) throws IOException { - assert TestInjection.injectDocSetDelay(); + assert TestInjection.injectDocSetDelay(this); int maxDoc = searcher.maxDoc(); BitDocSet liveDocs = searcher.getLiveDocSet(); FixedBitSet liveBits = liveDocs.size() == maxDoc ? null : liveDocs.getBits(); diff --git a/solr/core/src/java/org/apache/solr/util/TestInjection.java b/solr/core/src/java/org/apache/solr/util/TestInjection.java index 0e62c994714..81c2e5a6714 100644 --- a/solr/core/src/java/org/apache/solr/util/TestInjection.java +++ b/solr/core/src/java/org/apache/solr/util/TestInjection.java @@ -582,11 +582,11 @@ public static boolean injectUIFOutOfMemoryError() { return true; } - public static boolean injectDocSetDelay() { + public static boolean injectDocSetDelay(Object query) { if (delayBeforeCreatingNewDocSet != null) { countDocSetDelays.incrementAndGet(); try { - log.info("Pausing DocSet for {}ms", delayBeforeCreatingNewDocSet); + log.info("Pausing DocSet for {}ms: {}", delayBeforeCreatingNewDocSet, query); if (log.isDebugEnabled()) { log.debug("", new Exception("Stack Trace")); } diff --git a/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java b/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java index fc2689f7f42..d5259670510 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java +++ b/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java @@ -57,7 +57,8 @@ public class TestRangeQuery extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml", "schema11.xml"); + // use a solrconfig that does not have autowarming + initCore("solrconfig_perf.xml", "schema11.xml"); } @Override @@ -375,8 +376,6 @@ public void testRandomRangeQueries() throws Exception { @Test public void testRangeQueryWithFilterCache() throws Exception { - TestInjection.delayBeforeCreatingNewDocSet = 500; - // sometimes a very small index, sometimes a very large index // final int numDocs = random().nextBoolean() ? random().nextInt(50) : atLeast(1000); final int numDocs = 99; @@ -384,30 +383,40 @@ public void testRangeQueryWithFilterCache() throws Exception { addInt(doc, 0, 0, "foo_i"); }); - ExecutorService queryService = ExecutorUtil.newMDCAwareFixedThreadPool(4, new SolrNamedThreadFactory("TestRangeQuery")); - try (SolrCore core = h.getCoreInc()) { - SolrRequestHandler defaultHandler = core.getRequestHandler(""); + // ensure delay comes after createIndex - so we don't affect/count any cache warming from queries left over by other test methods + TestInjection.delayBeforeCreatingNewDocSet = TEST_NIGHTLY ? 
50 : 500; // Run more queries nightly, so use shorter delay + + final int MAX_QUERY_RANGE = 222; // Arbitrary number in the middle of the value range + final int QUERY_START = TEST_NIGHTLY ? 1 : MAX_QUERY_RANGE; // Either run queries for the full range, or just the last one + final int NUM_QUERIES = TEST_NIGHTLY ? 101 : 10; + for (int j = QUERY_START ; j <= MAX_QUERY_RANGE; j++) { + ExecutorService queryService = ExecutorUtil.newMDCAwareFixedThreadPool(4, new SolrNamedThreadFactory("TestRangeQuery-" + j)); + try (SolrCore core = h.getCoreInc()) { + SolrRequestHandler defaultHandler = core.getRequestHandler(""); + + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("q", "*:*"); + params.add("fq", "id:[0 TO " + j + "]"); // These should all come from FilterCache + + // Regular: 10 threads with 4 executors would be enough for 3 waves, or approximately 1500ms of delay + // Nightly: 101 threads with 4 executors is 26 waves, approximately 1300ms delay + CountDownLatch atLeastOnceCompleted = new CountDownLatch(TEST_NIGHTLY ? 30 : 1); + for (int i = 0; i < NUM_QUERIES; i++) { + queryService.submit(() -> { + try (SolrQueryRequest req = req(params)) { + core.execute(defaultHandler, req, new SolrQueryResponse()); + } + atLeastOnceCompleted.countDown(); + }); + } - ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("q", "*:*"); - params.add("fq", "id:[0 TO 222]"); // These should all come from FilterCache + queryService.shutdown(); // No more requests will be queued up + atLeastOnceCompleted.await(); // Wait for the first batch of queries to complete + assertTrue(queryService.awaitTermination(1, TimeUnit.SECONDS)); // All queries after should be very fast - // 10 threads with 4 executors would be enough for 3 waves, or approximately 1500ms of delay - CountDownLatch atLeastOnceCompleted = new CountDownLatch(1); - for (int i = 0; i < 10; i++) { - queryService.submit(() -> { - try (SolrQueryRequest req = req(params)) { - core.execute(defaultHandler, req, new SolrQueryResponse()); - } - atLeastOnceCompleted.countDown(); - }); + assertEquals("Create only one DocSet outside of cache", 1, TestInjection.countDocSetDelays.get()); } - - queryService.shutdown(); // No more requests will be queued up - atLeastOnceCompleted.await(); // Wait for the first query to complete - assertTrue(queryService.awaitTermination(1, TimeUnit.SECONDS)); // All queries after should be very fast - - assertEquals("Create only one DocSet outside of cache", 1, TestInjection.countDocSetDelays.get()); + TestInjection.countDocSetDelays.set(0); } } From c94657554a5c6c0a4a299859d9dddbaabe66d677 Mon Sep 17 00:00:00 2001 From: Chris Hostetter Date: Wed, 15 Sep 2021 14:54:26 -0700 Subject: [PATCH 2/8] SOLR-15622: delete SolrLogLayout (dead code, orig meant for tests but never used) --- .../org/apache/solr/util/SolrLogLayout.java | 381 ------------------ 1 file changed, 381 deletions(-) delete mode 100644 solr/core/src/java/org/apache/solr/util/SolrLogLayout.java diff --git a/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java b/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java deleted file mode 100644 index ded3a546818..00000000000 --- a/solr/core/src/java/org/apache/solr/util/SolrLogLayout.java +++ /dev/null @@ -1,381 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.util; - -import java.nio.charset.Charset; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.WeakHashMap; - -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.core.LogEvent; -import org.apache.logging.log4j.core.config.plugins.Plugin; -import org.apache.logging.log4j.core.config.plugins.PluginAttribute; -import org.apache.logging.log4j.core.config.plugins.PluginFactory; -import org.apache.logging.log4j.core.layout.AbstractStringLayout; -import org.apache.solr.cloud.ZkController; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.StringUtils; -import org.apache.solr.common.cloud.DocCollection; -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.util.SuppressForbidden; -import org.apache.solr.core.SolrCore; -import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.request.SolrRequestInfo; -import org.slf4j.MDC; - -import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP; -import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP; -import static org.apache.solr.common.cloud.ZkStateReader.NODE_NAME_PROP; -import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP; -import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP; - -@SuppressForbidden(reason = "class is specific to log4j2") -@Plugin(name = "SolrLogLayout", category = "Core", elementType = "layout", printObject = true) -public class SolrLogLayout extends AbstractStringLayout { - - protected SolrLogLayout(Charset charset) { - super(charset); - } - - @PluginFactory - public static SolrLogLayout createLayout(@PluginAttribute(value = "charset", defaultString = "UTF-8") Charset charset) { - return new SolrLogLayout(charset); - } - - /** - * Add this interface to a thread group and the string returned by getTag() - * will appear in log statements of any threads under that group. - */ - public static interface TG { - public String getTag(); - } - - @SuppressForbidden(reason = "Need currentTimeMillis to compare against log event timestamp. 
" + - "This is inaccurate but unavoidable due to interface limitations, in any case this is just for logging.") - final long startTime = System.currentTimeMillis(); - - long lastTime = startTime; - Map methodAlias = new HashMap<>(); - - public static class Method { - public String className; - public String methodName; - - public Method(String className, String methodName) { - this.className = className; - this.methodName = methodName; - } - - @Override - public int hashCode() { - return className.hashCode() + methodName.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (!(obj instanceof Method)) return false; - Method other = (Method) obj; - return (className.equals(other.className) && methodName - .equals(other.methodName)); - } - - @Override - public String toString() { - return className + '.' + methodName; - } - } - - public static class CoreInfo { - static int maxCoreNum; - String shortId; - String url; - Map coreProps; - } - - Map coreInfoMap = new WeakHashMap<>(); - - public void appendThread(StringBuilder sb) { - Thread th = Thread.currentThread(); - - // NOTE: LogRecord.getThreadID is *not* equal to Thread.getId() - sb.append(" T"); - sb.append(th.getId()); - } - - @Override - public String toSerializable(LogEvent event) { - return _format(event); - } - - public String _format(LogEvent event) { - String message = event.getMessage().getFormattedMessage(); - if (message == null) { - message = ""; - } - StringBuilder sb = new StringBuilder(message.length() + 80); - - long now = event.getTimeMillis(); - long timeFromStart = now - startTime; - lastTime = now; - String shortClassName = getShortClassName(event.getSource().getClassName(), event.getSource().getMethodName()); - - /*** - * sb.append(timeFromStart).append(' ').append(timeSinceLast); - * sb.append(' '); - * sb.append(record.getSourceClassName()).append('.').append( - * record.getSourceMethodName()); sb.append(' '); - * sb.append(record.getLevel()); - ***/ - - SolrRequestInfo requestInfo = SolrRequestInfo.getRequestInfo(); - - SolrCore core; - try (SolrQueryRequest req = (requestInfo == null) ? null : requestInfo.getReq()) { - core = (req == null) ? 
null : req.getCore(); - } - ZkController zkController; - CoreInfo info = null; - - if (core != null) { - info = coreInfoMap.get(core.hashCode()); - if (info == null) { - info = new CoreInfo(); - info.shortId = "C" + Integer.toString(CoreInfo.maxCoreNum++); - coreInfoMap.put(core.hashCode(), info); - - if (sb.length() == 0) sb.append("ASYNC "); - sb.append(" NEW_CORE ").append(info.shortId); - sb.append(" name=").append(core.getName()); - sb.append(" ").append(core); - } - - zkController = core.getCoreContainer().getZkController(); - if (zkController != null) { - if (info.url == null) { - info.url = zkController.getBaseUrl() + "/" + core.getName(); - sb.append(" url=").append(info.url).append(" node=").append(zkController.getNodeName()); - } - - Map coreProps = getReplicaProps(zkController, core); - if (info.coreProps == null || !coreProps.equals(info.coreProps)) { - info.coreProps = coreProps; - final String corePropsString = "coll:" - + core.getCoreDescriptor().getCloudDescriptor() - .getCollectionName() + " core:" + core.getName() + " props:" - + coreProps; - sb.append(" ").append(info.shortId).append("_STATE=").append(corePropsString); - } - } - } - - if (sb.length() > 0) sb.append('\n'); - sb.append(timeFromStart); - - // sb.append("\nL").append(record.getSequenceNumber()); // log number is - // useful for sequencing when looking at multiple parts of a log file, but - // ms since start should be fine. - appendThread(sb); - - appendMDC(sb); - - // todo: should be able to get port from core container for non zk tests - - if (info != null) { - sb.append(' ').append(info.shortId); // core - } - - if (shortClassName.length() > 0) { - sb.append(' ').append(shortClassName); - } - - if (event.getLevel() != Level.INFO) { - sb.append(' ').append(event.getLevel()); - } - - sb.append(' '); - appendMultiLineString(sb, message); - Throwable th = event.getThrown(); - - if (th != null) { - sb.append(' '); - String err = SolrException.toStr(th); - String ignoredMsg = SolrException.doIgnore(th, err); - if (ignoredMsg != null) { - sb.append(ignoredMsg); - } else { - sb.append(err); - } - } - - - sb.append('\n'); - - /*** - * Isn't core specific... prob better logged from zkController if (info != - * null) { ClusterState clusterState = zkController.getClusterState(); if - * (info.clusterState != clusterState) { // something has changed in the - * matrix... 
sb.append(zkController.getBaseUrl() + - * " sees new ClusterState:"); } } - ***/ - - return sb.toString(); - } - - private Map getReplicaProps(ZkController zkController, SolrCore core) { - final String collectionName = core.getCoreDescriptor().getCloudDescriptor().getCollectionName(); - DocCollection collection = zkController.getClusterState().getCollectionOrNull(collectionName); - Replica replica = collection.getReplica(zkController.getCoreNodeName(core.getCoreDescriptor())); - if (replica != null) { - return replica.getProperties(); - } - return Collections.emptyMap(); - } - - private void addFirstLine(StringBuilder sb, String msg) { - // INFO: [] webapp=/solr path=/select params={q=foobarbaz} hits=0 status=0 - // QTime=1 - - if (!shorterFormat || !msg.startsWith("[")) { - sb.append(msg); - return; - } - - int idx = msg.indexOf(']'); - if (idx < 0 || !msg.startsWith(" webapp=", idx + 1)) { - sb.append(msg); - return; - } - - idx = msg.indexOf(' ', idx + 8); // space after webapp= - if (idx < 0) { - sb.append(msg); - return; - } - idx = msg.indexOf('=', idx + 1); // = in path= - if (idx < 0) { - sb.append(msg); - return; - } - - int idx2 = msg.indexOf(' ', idx + 1); - if (idx2 < 0) { - sb.append(msg); - return; - } - - sb.append(msg.substring(idx + 1, idx2 + 1)); // path - - idx = msg.indexOf("params=", idx2); - if (idx < 0) { - sb.append(msg.substring(idx2)); - } else { - sb.append(msg.substring(idx + 7)); - } - } - - private void appendMultiLineString(StringBuilder sb, String msg) { - int idx = msg.indexOf('\n'); - if (idx < 0) { - addFirstLine(sb, msg); - return; - } - - int lastIdx = -1; - for (;;) { - if (idx < 0) { - if (lastIdx == -1) { - addFirstLine(sb, msg.substring(lastIdx + 1)); - } else { - sb.append(msg.substring(lastIdx + 1)); - } - break; - } - if (lastIdx == -1) { - addFirstLine(sb, msg.substring(lastIdx + 1, idx)); - } else { - sb.append(msg.substring(lastIdx + 1, idx)); - } - - sb.append("\n\t"); - lastIdx = idx; - idx = msg.indexOf('\n', lastIdx + 1); - } - } - - // TODO: name this better... it's only for cloud tests where every core - // container has just one solr server so Port/Core are fine - public boolean shorterFormat = false; - - public void setShorterFormat() { - shorterFormat = true; - // looking at /update is enough... we don't need "UPDATE /update" - methodAlias.put(new Method( - "org.apache.solr.update.processor.LogUpdateProcessor", "finish"), ""); - } - - private Method classAndMethod = new Method(null, null); // don't need to be - // thread safe - - private String getShortClassName(String name, String method) { - classAndMethod.className = name; - classAndMethod.methodName = method; - - String out = methodAlias.get(classAndMethod); - if (out != null) return out; - - StringBuilder sb = new StringBuilder(); - - int lastDot = name.lastIndexOf('.'); - if (lastDot < 0) return name + '.' + method; - - int prevIndex = -1; - for (;;) { - char ch = name.charAt(prevIndex + 1); - sb.append(ch); - int idx = name.indexOf('.', prevIndex + 1); - ch = name.charAt(idx + 1); - if (idx >= lastDot || Character.isUpperCase(ch)) { - sb.append(name.substring(idx)); - break; - } - prevIndex = idx; - } - - return sb.toString() + '.' 
+ method; - } - - - private void appendMDC(StringBuilder sb) { - if (!StringUtils.isEmpty(MDC.get(NODE_NAME_PROP))) { - sb.append(" n:").append(MDC.get(NODE_NAME_PROP)); - } - if (!StringUtils.isEmpty(MDC.get(COLLECTION_PROP))) { - sb.append(" c:").append(MDC.get(COLLECTION_PROP)); - } - if (!StringUtils.isEmpty(MDC.get(SHARD_ID_PROP))) { - sb.append(" s:").append(MDC.get(SHARD_ID_PROP)); - } - if (!StringUtils.isEmpty(MDC.get(REPLICA_PROP))) { - sb.append(" r:").append(MDC.get(REPLICA_PROP)); - } - if (!StringUtils.isEmpty(MDC.get(CORE_NAME_PROP))) { - sb.append(" x:").append(MDC.get(CORE_NAME_PROP)); - } - } -} From df903cbdc1de7fb2c3469b9773d6f9d23d591a5d Mon Sep 17 00:00:00 2001 From: Eric Pugh Date: Thu, 16 Sep 2021 08:43:31 -0400 Subject: [PATCH 3/8] SOLR-10887: Append .xml to managed-schema file VERSION 2 (#279) Managed schema file is now called "managed-schema.xml", however there is a fallback to "managed-schema" since that is a common legacy name for the file. Updated the name of the files used in running tests. --- dev-docs/overseer/overseer.adoc | 2 +- solr/CHANGES.txt | 2 + .../apache/solr/core/ConfigSetService.java | 2 +- .../handler/designer/SchemaDesignerAPI.java | 2 +- .../schema/ManagedIndexSchemaFactory.java | 93 +++++++-- .../org/apache/solr/schema/SchemaManager.java | 18 +- .../solr/schema/ZkIndexSchemaReader.java | 14 +- .../conf/solrconfig-managed-schema.xml | 2 +- .../managed-schema | 0 .../legacy-managed-schema/solrconfig.xml | 60 ++++++ .../managed-schema.xml} | 0 .../managed-schema.xml} | 0 .../with-script-processor/managed-schema.xml | 25 +++ .../apache/solr/cloud/TestConfigSetsAPI.java | 71 +++++-- .../solr/schema/SpatialRPTFieldTypeTest.java | 52 ++--- .../solr/schema/TestCloudManagedSchema.java | 4 +- .../apache/solr/schema/TestManagedSchema.java | 87 ++++----- .../{managed-schema => managed-schema.xml} | 178 +++++++++--------- solr/solr-ref-guide/src/config-sets.adoc | 4 +- .../src/configuration-files.adoc | 6 +- solr/solr-ref-guide/src/coreadmin-api.adoc | 2 +- .../src/documents-fields-schema-design.adoc | 2 +- solr/solr-ref-guide/src/schema-api.adoc | 6 +- solr/solr-ref-guide/src/schema-elements.adoc | 2 +- solr/solr-ref-guide/src/schema-factory.adoc | 10 +- solr/solr-ref-guide/src/schemaless-mode.adoc | 2 +- solr/solr-ref-guide/src/solr-glossary.adoc | 4 +- ...cloud-with-legacy-configuration-files.adoc | 2 +- solr/solr-ref-guide/src/tutorial-diy.adoc | 2 +- .../src/tutorial-techproducts.adoc | 2 +- .../src/user-managed-index-replication.adoc | 2 +- .../conf/solrconfig-managed-schema.xml | 7 +- 32 files changed, 431 insertions(+), 234 deletions(-) rename solr/core/src/test-files/solr/configsets/upload/{regular => legacy-managed-schema}/managed-schema (100%) create mode 100644 solr/core/src/test-files/solr/configsets/upload/legacy-managed-schema/solrconfig.xml rename solr/core/src/test-files/solr/configsets/upload/{with-lib-directive/managed-schema => regular/managed-schema.xml} (100%) rename solr/core/src/test-files/solr/configsets/upload/{with-script-processor/managed-schema => with-lib-directive/managed-schema.xml} (100%) create mode 100644 solr/core/src/test-files/solr/configsets/upload/with-script-processor/managed-schema.xml rename solr/server/solr/configsets/_default/conf/{managed-schema => managed-schema.xml} (97%) diff --git a/dev-docs/overseer/overseer.adoc b/dev-docs/overseer/overseer.adoc index 6929270a6de..350096b6633 100644 --- a/dev-docs/overseer/overseer.adoc +++ b/dev-docs/overseer/overseer.adoc @@ -322,7 +322,7 @@ These messages are 
received from the Collection API queue (there’s a single qu The `createConfigSet()` call implementing `CREATE` copies all the files of an existing config set (by default the `_default` config set) into a new config set, merges the existing config set properties if any with new ones specified in the message (config set properties in the message are properties that start with `"configSetProp."`, for example `configSetProp.immutable` is a property that prevents the config set from ever being deleted) and writes the resulting properties into `/configs//configsetprops.json` (note: creating a config set based on an `immutable` config set makes the new one immutable as well unless it explicitly specifies that `configSetProp.immutable` is false). -Note the `_default` config set is defined in the SolrCloud distribution and copied if absent into Zookeeper when SolrCloud starts (`ZkController.bootstrapDefaultConfigSet`) then used only from Zookeeper. This config set has a directory `lang` with language specific stop words, contractions and other, and it contains files `managed-schema`, `params.json`, `protwords.txt`, `solrconfig.xml`, `stopwords.txt` and `synonyms.txt`. +Note the `_default` config set is defined in the SolrCloud distribution and copied if absent into Zookeeper when SolrCloud starts (`ZkController.bootstrapDefaultConfigSet`) then used only from Zookeeper. This config set has a directory `lang` with language specific stop words, contractions and other, and it contains files `managed-schema.xml`, `params.json`, `protwords.txt`, `solrconfig.xml`, `stopwords.txt` and `synonyms.txt`. deleteConfigSet() deletes the whole znode structure at `/configs/__` assuming the config set is not used by any collection and is not immutable (the only case where an immutable config set can be deleted is when its creation has failed midway). diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 4d53d9c8f06..43e0b366777 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -143,6 +143,8 @@ when told to. The admin UI now tells it to. (Nazerke Seidan, David Smiley) This was already working for XML & "javabin"/SolrJ. Previously, omitting the ID would be confused for a partial/atomic update. (David Smiley) +* SOLR-10887: Migrate "managed-schema" file naming to "managed-schema.xml" file name, with a fallback to the legacy "managed-schema". (Eric Pugh, David Smiley) + Build --------------------- diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java index a4b246a03fc..de80fbaf41f 100644 --- a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java +++ b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java @@ -265,7 +265,7 @@ protected IndexSchema createIndexSchema(CoreDescriptor cd, SolrConfig solrConfig // This is the schema name that we think will actually be used. In the case of a managed schema, // we don't know for sure without examining what files exists in the configSet, and we don't // want to pay the overhead of that at this juncture. If we guess wrong, no schema sharing. - // The fix is usually to name your schema managed-schema instead of schema.xml. + // The fix is usually to name your schema managed-schema.xml instead of schema.xml. 
IndexSchemaFactory indexSchemaFactory = IndexSchemaFactory.newIndexSchemaFactory(solrConfig); String configSet = cd.getConfigSet(); diff --git a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java index 1108065a2fb..4eb26b2125c 100644 --- a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java +++ b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java @@ -1042,7 +1042,7 @@ protected Map buildResponse(String configSet, final String prefix = configPathInZk + "/"; final int prefixLen = prefix.length(); Set stripPrefix = files.stream().map(f -> f.startsWith(prefix) ? f.substring(prefixLen) : f).collect(Collectors.toSet()); - stripPrefix.remove(DEFAULT_MANAGED_SCHEMA_RESOURCE_NAME); + stripPrefix.remove(schema.getResourceName()); stripPrefix.remove("lang"); stripPrefix.remove(CONFIGOVERLAY_JSON); // treat this file as private diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java index 75bdd3a7fa5..7c3e670f13b 100644 --- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java +++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchemaFactory.java @@ -20,6 +20,9 @@ import java.io.IOException; import java.io.InputStream; import java.lang.invoke.MethodHandles; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import org.apache.commons.io.IOUtils; import org.apache.solr.cloud.ZkController; @@ -34,6 +37,7 @@ import org.apache.solr.core.SolrConfig; import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrResourceLoader; +import org.apache.solr.core.SolrResourceNotFoundException; import org.apache.solr.util.SystemIdResolver; import org.apache.solr.util.plugin.SolrCoreAware; import org.apache.zookeeper.CreateMode; @@ -49,7 +53,8 @@ public class ManagedIndexSchemaFactory extends IndexSchemaFactory implements Sol public static final String UPGRADED_SCHEMA_EXTENSION = ".bak"; private static final String SCHEMA_DOT_XML = "schema.xml"; - public static final String DEFAULT_MANAGED_SCHEMA_RESOURCE_NAME = "managed-schema"; + public static final String DEFAULT_MANAGED_SCHEMA_RESOURCE_NAME = "managed-schema.xml"; + public static final String LEGACY_MANAGED_SCHEMA_RESOURCE_NAME = "managed-schema"; public static final String MANAGED_SCHEMA_RESOURCE_NAME = "managedSchemaResourceName"; private boolean isMutable = true; @@ -79,7 +84,6 @@ public void init(NamedList args) { log.error(msg); throw new SolrException(ErrorCode.SERVER_ERROR, msg); } - if (args.size() > 0) { String msg = "Unexpected arg(s): " + args; log.error(msg); @@ -92,6 +96,65 @@ public String getSchemaResourceName(String cdResourceName) { return managedSchemaResourceName; // actually a guess; reality depends on the actual files in the config set :-( } + /** + * Lookup the path to the managed schema, dealing with falling back to the + * legacy managed-schema file, instead of the expected managed-schema.xml file if the legacy file exists. + * + * This method is duplicated in ManagedIndexSchema. 
+   */
+  public String lookupZKManagedSchemaPath() {
+    final ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader)loader;
+    final ZkController zkController = zkLoader.getZkController();
+    final SolrZkClient zkClient = zkController.getZkClient();
+    String managedSchemaPath = zkLoader.getConfigSetZkPath() + "/" + managedSchemaResourceName;
+    final String legacyManagedSchemaPath = zkLoader.getConfigSetZkPath() + "/" + ManagedIndexSchemaFactory.LEGACY_MANAGED_SCHEMA_RESOURCE_NAME;
+    try {
+      // check if we are using the legacy managed-schema file name.
+      if (zkClient.exists(legacyManagedSchemaPath, true)){
+        log.debug("Legacy managed schema resource {} found - loading legacy managed schema instead of {} file."
+            , ManagedIndexSchemaFactory.LEGACY_MANAGED_SCHEMA_RESOURCE_NAME, managedSchemaResourceName);
+        managedSchemaPath = legacyManagedSchemaPath;
+      }
+    } catch (KeeperException e) {
+      throw new RuntimeException(e);
+    } catch (InterruptedException e) {
+      // Restore the interrupted status
+      Thread.currentThread().interrupt();
+      throw new RuntimeException(e);
+
+    }
+    return managedSchemaPath;
+  }
+
+  /**
+   * Lookup the path to the managed schema, dealing with falling back to the
+   * legacy managed-schema file, instead of the expected managed-schema.xml file if the legacy file exists.
+   */
+  public Path lookupLocalManagedSchemaPath() {
+    final Path legacyManagedSchemaPath = Paths.get(loader.getConfigPath().toString(), ManagedIndexSchemaFactory.LEGACY_MANAGED_SCHEMA_RESOURCE_NAME);
+
+    Path managedSchemaPath = Paths.get(loader.getConfigPath().toString(), managedSchemaResourceName);
+
+    // check if we are using the legacy managed-schema file name.
+    if (Files.exists(legacyManagedSchemaPath)){
+      log.debug("Legacy managed schema resource {} found - loading legacy managed schema instead of {} file.", ManagedIndexSchemaFactory.LEGACY_MANAGED_SCHEMA_RESOURCE_NAME, managedSchemaResourceName);
+      managedSchemaPath = legacyManagedSchemaPath;
+    }
+
+    Path parentPath = managedSchemaPath.getParent();
+    if (!Files.isDirectory(parentPath)) {
+      try {
+        Files.createDirectories(parentPath);
+      }
+      catch (IOException ioe) {
+        final String msg = "Can't create managed schema directory " + parentPath;
+        log.error(msg);
+        throw new SolrException(ErrorCode.SERVER_ERROR, msg);
+      }
+    }
+
+    return managedSchemaPath;
+  }
   /**
    * First, try to locate the managed schema file named in the managedSchemaResourceName
    * param. If the managed schema file exists and is accessible, it is used to instantiate
    *
    * Once the IndexSchema is instantiated, if the managed schema file does not exist,
    * the instantiated IndexSchema is persisted to the managed schema file named in the
-   * managedSchemaResourceName param, in the directory given by 
+   * managedSchemaResourceName param, in the directory given by
    * {@link org.apache.solr.core.SolrResourceLoader#getConfigDir()}, or if configs are
    * in ZooKeeper, under {@link org.apache.solr.cloud.ZkSolrResourceLoader#getConfigSetZkPath()}. 
* @@ -126,7 +189,8 @@ public ManagedIndexSchema create(String resourceName, SolrConfig config, ConfigS } else { // ZooKeeper final ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader)loader; final SolrZkClient zkClient = zkLoader.getZkController().getZkClient(); - final String managedSchemaPath = zkLoader.getConfigSetZkPath() + "/" + managedSchemaResourceName; + final String managedSchemaPath = lookupZKManagedSchemaPath(); + managedSchemaResourceName = managedSchemaPath.substring(managedSchemaPath.lastIndexOf("/")+1); // not loving this Stat stat = new Stat(); try { // Attempt to load the managed schema @@ -153,7 +217,7 @@ public ManagedIndexSchema create(String resourceName, SolrConfig config, ConfigS schemaInputStream = loader.openResource(resourceName); loadedResource = resourceName; shouldUpgrade = true; - } catch (Exception e) { + } catch (IOException e) { try { // Retry to load the managed schema, in case it was created since the first attempt byte[] data = zkClient.getData(managedSchemaPath, null, stat, true); @@ -195,6 +259,8 @@ private InputStream readSchemaLocally() { InputStream schemaInputStream = null; try { // Attempt to load the managed schema + final Path managedSchemaPath = lookupLocalManagedSchemaPath(); + managedSchemaResourceName = managedSchemaPath.getName(managedSchemaPath.getNameCount()-1).toString(); schemaInputStream = loader.openResource(managedSchemaResourceName); loadedResource = managedSchemaResourceName; warnIfNonManagedSchemaExists(); @@ -219,7 +285,7 @@ private InputStream readSchemaLocally() { } /** - * Return whether a non-managed schema exists, either in local storage or on ZooKeeper. + * Return whether a non-managed schema exists, either in local storage or on ZooKeeper. */ private void warnIfNonManagedSchemaExists() { if ( ! resourceName.equals(managedSchemaResourceName)) { @@ -232,7 +298,7 @@ private void warnIfNonManagedSchemaExists() { exists = zkLoader.getZkController().pathExists(nonManagedSchemaPath); } catch (InterruptedException e) { Thread.currentThread().interrupt(); // Restore the interrupted status - log.warn("", e); // Log as warning and suppress the exception + log.warn("", e); // Log as warning and suppress the exception } catch (KeeperException e) { // log as warning and suppress the exception log.warn("Error checking for the existence of the non-managed schema {}", resourceName, e); @@ -244,7 +310,10 @@ private void warnIfNonManagedSchemaExists() { if (null != nonManagedSchemaInputStream) { exists = true; } - } catch (IOException e) { + } catch (SolrResourceNotFoundException e) { + // This is expected when the non-managed schema does not exist + } catch (IOException e) { + throw new RuntimeException(e); // This is expected when the non-managed schema does not exist } finally { IOUtils.closeQuietly(nonManagedSchemaInputStream); @@ -258,7 +327,7 @@ private void warnIfNonManagedSchemaExists() { } /** - * Persist the managed schema and rename the non-managed schema + * Persist the managed schema and rename the non-managed schema * by appending {@link #UPGRADED_SCHEMA_EXTENSION}. 
* * Failure to rename the non-managed schema will be logged as a warning, @@ -290,7 +359,7 @@ private void upgradeToManagedSchema() { } else { File upgradedSchemaFile = new File(nonManagedSchemaFile + UPGRADED_SCHEMA_EXTENSION); if (nonManagedSchemaFile.renameTo(upgradedSchemaFile)) { - // Set the resource name to the managed schema so that the CoreAdminHandler returns a findable filename + // Set the resource name to the managed schema so that the CoreAdminHandler returns a findable filename schema.setResourceName(managedSchemaResourceName); log.info("After upgrading to managed schema, renamed the non-managed schema {} to {}" @@ -320,7 +389,7 @@ private File locateConfigFile(String resource) { } /** - * Persist the managed schema to ZooKeeper and rename the non-managed schema + * Persist the managed schema to ZooKeeper and rename the non-managed schema * by appending {@link #UPGRADED_SCHEMA_EXTENSION}. * * Failure to rename the non-managed schema will be logged as a warning, @@ -434,7 +503,7 @@ public void setSchema(ManagedIndexSchema schema) { this.schema = schema; core.setLatestSchema(schema); } - + public boolean isMutable() { return isMutable; } diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java index cc9e1503a85..03bdde24fe0 100644 --- a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java +++ b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java @@ -427,31 +427,33 @@ private ManagedIndexSchema getFreshManagedSchema(SolrCore core) throws IOExcepti KeeperException, InterruptedException { SolrResourceLoader resourceLoader = core.getResourceLoader(); - String name = core.getLatestSchema().getResourceName(); + String schemaResourceName = core.getLatestSchema().getResourceName(); if (resourceLoader instanceof ZkSolrResourceLoader) { final ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader)resourceLoader; SolrZkClient zkClient = zkLoader.getZkController().getZkClient(); + String managedSchemaPath = zkLoader.getConfigSetZkPath() + "/" + schemaResourceName; try { - if (!zkClient.exists(zkLoader.getConfigSetZkPath() + "/" + name, true)) { - String backupName = name + ManagedIndexSchemaFactory.UPGRADED_SCHEMA_EXTENSION; + if (!zkClient.exists(managedSchemaPath, true)) { + String backupName = schemaResourceName + ManagedIndexSchemaFactory.UPGRADED_SCHEMA_EXTENSION; if (!zkClient.exists(zkLoader.getConfigSetZkPath() + "/" + backupName, true)) { - log.warn("Unable to retrieve fresh managed schema, neither {} nor {} exist.", name, backupName); + log.warn("Unable to retrieve fresh managed schema, neither {} nor {} exist.", schemaResourceName, backupName); // use current schema return (ManagedIndexSchema) core.getLatestSchema(); } else { - name = backupName; + schemaResourceName = backupName; } } } catch (Exception e) { - log.warn("Unable to retrieve fresh managed schema {}", name, e); + log.warn("Unable to retrieve fresh managed schema {}", schemaResourceName, e); // use current schema return (ManagedIndexSchema) core.getLatestSchema(); } - InputStream in = resourceLoader.openResource(name); + schemaResourceName = managedSchemaPath.substring(managedSchemaPath.lastIndexOf("/")+1); + InputStream in = resourceLoader.openResource(schemaResourceName); if (in instanceof ZkSolrResourceLoader.ZkByteArrayInputStream) { int version = ((ZkSolrResourceLoader.ZkByteArrayInputStream) in).getStat().getVersion(); log.info("managed schema loaded . 
version : {} ", version); - return new ManagedIndexSchema(core.getSolrConfig(), name, () -> IndexSchemaFactory.getParsedSchema(in, zkLoader, core.getLatestSchema().getResourceName()), true, name, version, + return new ManagedIndexSchema(core.getSolrConfig(), schemaResourceName, () -> IndexSchemaFactory.getParsedSchema(in, zkLoader, core.getLatestSchema().getResourceName()), true, schemaResourceName, version, core.getLatestSchema().getSchemaUpdateLock()); } else { return (ManagedIndexSchema) core.getLatestSchema(); diff --git a/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java b/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java index e20a62c3362..3a4ba52d929 100644 --- a/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java +++ b/solr/core/src/java/org/apache/solr/schema/ZkIndexSchemaReader.java @@ -48,8 +48,8 @@ public ZkIndexSchemaReader(ManagedIndexSchemaFactory managedIndexSchemaFactory, this.managedIndexSchemaFactory = managedIndexSchemaFactory; zkLoader = (ZkSolrResourceLoader)managedIndexSchemaFactory.getResourceLoader(); this.zkClient = zkLoader.getZkController().getZkClient(); - this.managedSchemaPath = zkLoader.getConfigSetZkPath() + "/" + managedIndexSchemaFactory.getManagedSchemaResourceName(); - this.uniqueCoreId = solrCore.getName()+":"+solrCore.getStartNanoTime(); + this.managedSchemaPath = managedIndexSchemaFactory.lookupZKManagedSchemaPath(); + this.uniqueCoreId = solrCore.getName() + ":" + solrCore.getStartNanoTime(); // register a CloseHook for the core this reader is linked to, so that we can de-register the listener solrCore.addCloseHook(new CloseHook() { @@ -172,10 +172,10 @@ void updateSchema(Watcher watcher, int expectedZkVersion) throws KeeperException } long start = System.nanoTime(); String resourceName = managedIndexSchemaFactory.getManagedSchemaResourceName(); - ManagedIndexSchema newSchema = new ManagedIndexSchema - (managedIndexSchemaFactory.getConfig(), resourceName, - () -> IndexSchemaFactory.getParsedSchema(new ByteArrayInputStream(data),zkLoader , resourceName), managedIndexSchemaFactory.isMutable(), - resourceName, stat.getVersion(), oldSchema.getSchemaUpdateLock()); + ManagedIndexSchema newSchema = new ManagedIndexSchema(managedIndexSchemaFactory.getConfig(), resourceName, + () -> IndexSchemaFactory.getParsedSchema(new ByteArrayInputStream(data), zkLoader, resourceName), + managedIndexSchemaFactory.isMutable(), + resourceName, stat.getVersion(), oldSchema.getSchemaUpdateLock()); managedIndexSchemaFactory.setSchema(newSchema); long stop = System.nanoTime(); log.info("Finished refreshing schema in {} ms", TimeUnit.MILLISECONDS.convert(stop - start, TimeUnit.NANOSECONDS)); @@ -198,7 +198,7 @@ public void command() { // force update now as the schema may have changed while our zk session was expired updateSchema(null, -1); } catch (Exception exc) { - log.error("Failed to update managed-schema watcher after session expiration due to: {}", exc); + log.error("Failed to update managed schema watcher after session expiration due to: {}", exc); } } diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml index 46158cd77cf..e00b0c10de8 100644 --- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml +++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-managed-schema.xml @@ -24,7 +24,7 @@ ${managed.schema.mutable} - ${managed.schema.resourceName:managed-schema} + 
${managed.schema.resourceName:managed-schema.xml} diff --git a/solr/core/src/test-files/solr/configsets/upload/regular/managed-schema b/solr/core/src/test-files/solr/configsets/upload/legacy-managed-schema/managed-schema similarity index 100% rename from solr/core/src/test-files/solr/configsets/upload/regular/managed-schema rename to solr/core/src/test-files/solr/configsets/upload/legacy-managed-schema/managed-schema diff --git a/solr/core/src/test-files/solr/configsets/upload/legacy-managed-schema/solrconfig.xml b/solr/core/src/test-files/solr/configsets/upload/legacy-managed-schema/solrconfig.xml new file mode 100644 index 00000000000..827b5bd7253 --- /dev/null +++ b/solr/core/src/test-files/solr/configsets/upload/legacy-managed-schema/solrconfig.xml @@ -0,0 +1,60 @@ + + + + + + + + + ${solr.data.dir:} + + + + ${tests.luceneMatchVersion:LATEST} + + + + ${solr.commitwithin.softcommit:true} + + + + + + explicit + true + text + + + + + + + + diff --git a/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/managed-schema b/solr/core/src/test-files/solr/configsets/upload/regular/managed-schema.xml similarity index 100% rename from solr/core/src/test-files/solr/configsets/upload/with-lib-directive/managed-schema rename to solr/core/src/test-files/solr/configsets/upload/regular/managed-schema.xml diff --git a/solr/core/src/test-files/solr/configsets/upload/with-script-processor/managed-schema b/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/managed-schema.xml similarity index 100% rename from solr/core/src/test-files/solr/configsets/upload/with-script-processor/managed-schema rename to solr/core/src/test-files/solr/configsets/upload/with-lib-directive/managed-schema.xml diff --git a/solr/core/src/test-files/solr/configsets/upload/with-script-processor/managed-schema.xml b/solr/core/src/test-files/solr/configsets/upload/with-script-processor/managed-schema.xml new file mode 100644 index 00000000000..9e2f9471026 --- /dev/null +++ b/solr/core/src/test-files/solr/configsets/upload/with-script-processor/managed-schema.xml @@ -0,0 +1,25 @@ + + + + + + + + + + diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java index d5550800aa6..532b2fd28a3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java @@ -32,6 +32,7 @@ import java.lang.invoke.MethodHandles; import java.net.URI; import java.nio.ByteBuffer; +import java.nio.charset.Charset; import java.security.Principal; import java.util.Arrays; import java.util.Collection; @@ -403,6 +404,42 @@ public void testUploadDisabled(boolean v2) throws Exception { } } + public void testUploadLegacyManagedSchemaFile() throws Exception { + String configSetName = "legacy-managed-schema"; + SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), + AbstractZkTestCase.TIMEOUT, 45000, null); + try { + long statusCode = uploadConfigSet(configSetName, "", null, zkClient, true); + assertEquals(0l, statusCode); + + assertTrue("managed-schema file should have been uploaded", + zkClient.exists("/configs/"+configSetName+"/managed-schema", true)); + } finally { + zkClient.close(); + } + + // try to create a collection with the uploaded configset + createCollection("newcollection", configSetName, 1, 1, cluster.getSolrClient()); + + String payload = "{\n" + + " 'add-field' : {\n" + + " 'name':'a1',\n" + + " 'type': 'string',\n" + + " 
'stored':true,\n" + + " 'indexed':false\n" + + " },\n" + + " }"; + + ByteBuffer buff = Charset.forName("UTF-8").encode(payload); + Map map = postDataAndGetResponse(cluster.getSolrClient(), + cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + + "/newcollection/schema?wt=js" + + "on", buff, null, false); + Map responseHeader = (Map)map.get("responseHeader"); + Long status = (Long)responseHeader.get("status"); + assertEquals((long)status, 0L); + } + @Test public void testOverwriteV1() throws Exception { testOverwrite(false); @@ -829,7 +866,7 @@ public void testUploadWithLibDirective() throws Exception { // try to create a collection with the uploaded configset CollectionAdminResponse resp = createCollection("newcollection3", "with-lib-directive" + trustedSuffix, 1, 1, cluster.getSolrClient()); - + SolrInputDocument doc = sdoc("id", "4055", "subject", "Solr"); cluster.getSolrClient().add("newcollection3", doc); cluster.getSolrClient().commit("newcollection3"); @@ -858,11 +895,11 @@ private void uploadConfigSetWithAssertions(String configSetName, String suffix, } } private void assertConfigsetFiles(String configSetName, String suffix, SolrZkClient zkClient) throws KeeperException, InterruptedException, IOException { - assertTrue("managed-schema file should have been uploaded", - zkClient.exists("/configs/"+configSetName+suffix+"/managed-schema", true)); - assertTrue("managed-schema file contents on zookeeper are not exactly same as that of the file uploaded in config", - Arrays.equals(zkClient.getData("/configs/"+configSetName+suffix+"/managed-schema", null, null, true), - readFile("solr/configsets/upload/"+configSetName+"/managed-schema"))); + assertTrue("managed-schema.xml file should have been uploaded", + zkClient.exists("/configs/"+configSetName+suffix+"/managed-schema.xml", true)); + assertTrue("managed-schema.xml file contents on zookeeper are not exactly same as that of the file uploaded in config", + Arrays.equals(zkClient.getData("/configs/"+configSetName+suffix+"/managed-schema.xml", null, null, true), + readFile("solr/configsets/upload/"+configSetName+"/managed-schema.xml"))); assertTrue("solrconfig.xml file should have been uploaded", zkClient.exists("/configs/"+configSetName+suffix+"/solrconfig.xml", true)); @@ -1001,7 +1038,7 @@ private static void zip(File directory, File zipfile) throws IOException { zout.close(); } } - + public void scriptRequest(String collection) throws SolrServerException, IOException { SolrClient client = cluster.getSolrClient(); SolrInputDocument doc = sdoc("id", "4055", "subject", "Solr"); @@ -1026,21 +1063,21 @@ protected CollectionAdminResponse createCollection(String collectionName, String res.setResponse(client.request(request)); return res; } - + public static Map postDataAndGetResponse(CloudSolrClient cloudClient, String uri, ByteBuffer bytarr, String username, boolean usePut) throws IOException { HttpEntityEnclosingRequestBase httpRequest = null; HttpEntity entity; String response = null; Map m = null; - + try { if (usePut) { httpRequest = new HttpPut(uri); } else { httpRequest = new HttpPost(uri); } - + if (username != null) { httpRequest.addHeader(new BasicHeader("user", username)); } @@ -1093,7 +1130,7 @@ private byte[] readFile(String fname) throws IOException { } return buf; } - + @Test public void testDeleteErrors() throws Exception { final String baseUrl = cluster.getJettySolrRunners().get(0).getBaseUrl().toString(); @@ -1188,15 +1225,15 @@ public void testList() throws Exception { } /** - * A simple sanity check that the 
test-framework hueristic logic for setting 
-   * {@link ExternalPaths#DEFAULT_CONFIGSET} is working as it should 
+   * A simple sanity check that the test-framework heuristic logic for setting
+   * {@link ExternalPaths#DEFAULT_CONFIGSET} is working as it should
    * in the current test env, and finding the real directory which matches what {@link ZkController}
    * finds and uses to bootstrap ZK in cloud based tests.
    *
    *
- * This assumes the {@link SolrDispatchFilter#SOLR_DEFAULT_CONFDIR_ATTRIBUTE} system property - * has not been externally set in the environment where this test is being run -- which should - * never be the case, since it would prevent the test-framework from using + * This assumes the {@link SolrDispatchFilter#SOLR_DEFAULT_CONFDIR_ATTRIBUTE} system property + * has not been externally set in the environment where this test is being run -- which should + * never be the case, since it would prevent the test-framework from using * {@link ExternalPaths#DEFAULT_CONFIGSET} * * @see SolrDispatchFilter#SOLR_DEFAULT_CONFDIR_ATTRIBUTE @@ -1208,7 +1245,7 @@ public void testUserAndTestDefaultConfigsetsAreSame() throws IOException { final File extPath = new File(ExternalPaths.DEFAULT_CONFIGSET); assertTrue("_default dir doesn't exist: " + ExternalPaths.DEFAULT_CONFIGSET, extPath.exists()); assertTrue("_default dir isn't a dir: " + ExternalPaths.DEFAULT_CONFIGSET, extPath.isDirectory()); - + final String zkBootStrap = ConfigSetService.getDefaultConfigDirPath(); assertEquals("extPath _default configset dir vs zk bootstrap path", ExternalPaths.DEFAULT_CONFIGSET, zkBootStrap); diff --git a/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java b/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java index 8eade38a943..02e6d20861b 100644 --- a/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java +++ b/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java @@ -28,13 +28,13 @@ import org.locationtech.spatial4j.shape.Shape; public class SpatialRPTFieldTypeTest extends AbstractBadConfigTestBase { - + private static File tmpSolrHome; private static File tmpConfDir; - + private static final String collection = "collection1"; private static final String confDir = collection + "/conf"; - + @Before private void initManagedSchemaCore() throws Exception { tmpSolrHome = createTempDir().toFile(); @@ -48,14 +48,14 @@ private void initManagedSchemaCore() throws Exception { FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-minimal.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema_codec.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-bm25.xml"), tmpConfDir); - + // initCore will trigger an upgrade to managed schema, since the solrconfig has // System.setProperty("managed.schema.mutable", "false"); System.setProperty("enable.update.log", "false"); initCore("solrconfig-managed-schema.xml", "schema-minimal.xml", tmpSolrHome.getPath()); } - + @After private void afterClass() throws Exception { deleteCore(); @@ -68,47 +68,47 @@ private void afterClass() throws Exception { static final String DISTANCE_DEGREES = "1.3520328"; static final String DISTANCE_KILOMETERS = "150.33939"; static final String DISTANCE_MILES = "93.416565"; - + public void testDistanceUnitsDegrees() throws Exception { setupRPTField("degrees", "true"); - + assertU(adoc("str", "X", "geo", INDEXED_COORDINATES)); assertU(commit()); String q; - + q = "geo:{!geofilt score=distance filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}"; assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']"); - + q = "geo:{!geofilt score=degrees filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}"; assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']"); - + q = "geo:{!geofilt score=kilometers filter=false sfield=geo 
pt="+QUERY_COORDINATES+" d=180}"; assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']"); - + q = "geo:{!geofilt score=miles filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}"; assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_MILES+"']"); } - + public void testDistanceUnitsKilometers() throws Exception { setupRPTField("kilometers", "true"); - + assertU(adoc("str", "X", "geo", INDEXED_COORDINATES)); assertU(commit()); String q; - + q = "geo:{!geofilt score=distance filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}"; assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']"); - + q = "geo:{!geofilt score=degrees filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}"; assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']"); - + q = "geo:{!geofilt score=kilometers filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}"; assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']"); - + q = "geo:{!geofilt score=miles filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}"; assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_MILES+"']"); } - + public void testJunkValuesForDistanceUnits() throws Exception { Exception ex = expectThrows(Exception.class, () -> setupRPTField("rose", "true")); assertTrue(ex.getMessage().startsWith("Must specify distanceUnits as one of")); @@ -116,17 +116,17 @@ public void testJunkValuesForDistanceUnits() throws Exception { public void testMaxDistErrConversion() throws Exception { deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); - + String fieldName = "new_text_field"; assertNull("Field '" + fieldName + "' is present in the schema", h.getCore().getLatestSchema().getFieldOrNull(fieldName)); - + IndexSchema oldSchema = h.getCore().getLatestSchema(); - + SpatialRecursivePrefixTreeFieldType rptFieldType = new SpatialRecursivePrefixTreeFieldType(); Map rptMap = new HashMap(); @@ -232,15 +232,15 @@ public void testShapeToFromStringGeoJSON() throws Exception { private void setupRPTField(String distanceUnits, String geo, String format, FieldType fieldType) throws Exception { deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); String fieldName = "new_text_field"; assertNull("Field '" + fieldName + "' is present in the schema", h.getCore().getLatestSchema().getFieldOrNull(fieldName)); - + IndexSchema oldSchema = h.getCore().getLatestSchema(); if (fieldType == 
null) { diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java index 652063b9988..54c679d096a 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java +++ b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java @@ -65,12 +65,12 @@ public void test() throws Exception { NamedList collectionStatus = (NamedList)status.getVal(0); String collectionSchema = (String)collectionStatus.get(CoreAdminParams.SCHEMA); // Make sure the upgrade to managed schema happened - assertEquals("Schema resource name differs from expected name", "managed-schema", collectionSchema); + assertEquals("Schema resource name differs from expected name", "managed-schema.xml", collectionSchema); SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 30000); try { // Make sure "DO NOT EDIT" is in the content of the managed schema - String fileContent = getFileContentFromZooKeeper(zkClient, "/solr/configs/conf1/managed-schema"); + String fileContent = getFileContentFromZooKeeper(zkClient, "/solr/configs/conf1/managed-schema.xml"); assertTrue("Managed schema is missing", fileContent.contains("DO NOT EDIT")); // Make sure the original non-managed schema is no longer in ZooKeeper diff --git a/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java b/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java index d209f9d9edb..42a5a79a321 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java +++ b/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java @@ -48,7 +48,7 @@ public class TestManagedSchema extends AbstractBadConfigTestBase { private static final String collection = "collection1"; private static final String confDir = collection + "/conf"; - + @Before private void initManagedSchemaCore() throws Exception { tmpSolrHome = createTempDir().toFile(); @@ -64,6 +64,7 @@ private void initManagedSchemaCore() throws Exception { FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema_codec.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-bm25.xml"), tmpConfDir); + // initCore will trigger an upgrade to managed schema, since the solrconfig has // System.setProperty("managed.schema.mutable", "false"); @@ -77,30 +78,30 @@ private void afterClass() throws Exception { System.clearProperty("managed.schema.mutable"); System.clearProperty("enable.update.log"); } - + public void testUpgrade() throws Exception { - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); assertTrue(managedSchemaFile.exists()); String managedSchema = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); assertTrue(managedSchema.contains("DO NOT EDIT")); File upgradedOriginalSchemaFile = new File(tmpConfDir, "schema-minimal.xml.bak"); assertTrue(upgradedOriginalSchemaFile.exists()); - assertSchemaResource(collection, "managed-schema"); + assertSchemaResource(collection, "managed-schema.xml"); } - + public void testUpgradeThenRestart() throws Exception { - assertSchemaResource(collection, "managed-schema"); + assertSchemaResource(collection, "managed-schema.xml"); deleteCore(); File nonManagedSchemaFile = new File(tmpConfDir, "schema-minimal.xml"); assertFalse(nonManagedSchemaFile.exists()); initCore("solrconfig-managed-schema.xml", "schema-minimal.xml", tmpSolrHome.getPath()); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); + 
File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); assertTrue(managedSchemaFile.exists()); String managedSchema = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); assertTrue(managedSchema.contains("DO NOT EDIT")); File upgradedOriginalSchemaFile = new File(tmpConfDir, "schema-minimal.xml.bak"); assertTrue(upgradedOriginalSchemaFile.exists()); - assertSchemaResource(collection, "managed-schema"); + assertSchemaResource(collection, "managed-schema.xml"); } public void testUpgradeThenRestartNonManaged() throws Exception { @@ -112,13 +113,13 @@ public void testUpgradeThenRestartNonManaged() throws Exception { } public void testUpgradeThenRestartNonManagedAfterPuttingBackNonManagedSchema() throws Exception { - assertSchemaResource(collection, "managed-schema"); + assertSchemaResource(collection, "managed-schema.xml"); deleteCore(); File nonManagedSchemaFile = new File(tmpConfDir, "schema-minimal.xml"); assertFalse(nonManagedSchemaFile.exists()); File upgradedOriginalSchemaFile = new File(tmpConfDir, "schema-minimal.xml.bak"); assertTrue(upgradedOriginalSchemaFile.exists()); - + // After upgrade to managed schema, downgrading to non-managed should work after putting back the non-managed schema. FileUtils.moveFile(upgradedOriginalSchemaFile, nonManagedSchemaFile); initCore("solrconfig-basic.xml", "schema-minimal.xml", tmpSolrHome.getPath()); @@ -135,9 +136,9 @@ public void testDefaultSchemaFactory() throws Exception { SolrQueryResponse response = new SolrQueryResponse(); admin.handleRequestBody(request, response); assertNull("Exception on create", response.getException()); - assertSchemaResource(collection, "managed-schema"); + assertSchemaResource(collection, "managed-schema.xml"); } - + private void assertSchemaResource(String collection, String expectedSchemaResource) throws Exception { final CoreContainer cores = h.getCoreContainer(); final CoreAdminHandler admin = new CoreAdminHandler(cores); @@ -153,7 +154,7 @@ private void assertSchemaResource(String collection, String expectedSchemaResour } public void testAddFieldWhenNotMutable() throws Exception { - assertSchemaResource(collection, "managed-schema"); + assertSchemaResource(collection, "managed-schema.xml"); String errString = "This ManagedIndexSchema is not mutable."; ignoreException(Pattern.quote(errString)); try { @@ -177,20 +178,20 @@ public void testAddFieldWhenNotMutable() throws Exception { resetExceptionIgnores(); } } - + public void testAddFieldPersistence() throws Exception { - assertSchemaResource(collection, "managed-schema"); + assertSchemaResource(collection, "managed-schema.xml"); deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); - + assertTrue(managedSchemaFile.exists()); String managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); assertFalse(managedSchemaContents.contains("\"new_field\"")); - + Map options = new HashMap<>(); options.put("stored", "false"); IndexSchema oldSchema = h.getCore().getLatestSchema(); @@ -206,12 +207,12 @@ public void testAddFieldPersistence() throws Exception { 
stream.close(); // Explicitly close so that Windows can delete this file assertTrue(managedSchemaContents.contains("")); } - + public void testAddedFieldIndexableAndQueryable() throws Exception { - assertSchemaResource(collection, "managed-schema"); + assertSchemaResource(collection, "managed-schema.xml"); deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); @@ -254,16 +255,16 @@ public void testAddedFieldIndexableAndQueryable() throws Exception { assertQ(req("new_field:thing1"), "//*[@numFound='1']"); } - + public void testAddFieldWhenItAlreadyExists() throws Exception{ deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); assertNotNull("Field 'str' is not present in the schema", h.getCore().getLatestSchema().getFieldOrNull("str")); - + String errString = "Field 'str' already exists."; ignoreException(Pattern.quote(errString)); try { @@ -290,8 +291,8 @@ public void testAddFieldWhenItAlreadyExists() throws Exception{ public void testAddSameFieldTwice() throws Exception{ deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); @@ -325,8 +326,8 @@ public void testAddSameFieldTwice() throws Exception{ public void testAddDynamicField() throws Exception{ deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); @@ -355,20 +356,20 @@ public void testAddDynamicField() throws Exception{ resetExceptionIgnores(); } } - + public void testAddWithSchemaCodecFactory() throws Exception { deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, 
"managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema_codec.xml", tmpSolrHome.getPath()); String uniqueKey = "string_f"; - assertNotNull("Unique key field '" + uniqueKey + "' is not present in the schema", + assertNotNull("Unique key field '" + uniqueKey + "' is not present in the schema", h.getCore().getLatestSchema().getFieldOrNull(uniqueKey)); String fieldName = "string_disk_new_field"; - assertNull("Field '" + fieldName + "' is present in the schema", + assertNull("Field '" + fieldName + "' is present in the schema", h.getCore().getLatestSchema().getFieldOrNull(fieldName)); Map options = new HashMap<>(); @@ -386,8 +387,8 @@ public void testAddWithSchemaCodecFactory() throws Exception { public void testAddWithSchemaSimilarityFactory() throws Exception { deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema-bm25.xml", tmpSolrHome.getPath()); @@ -413,10 +414,10 @@ public void testAddWithSchemaSimilarityFactory() throws Exception { } public void testPersistUniqueKey() throws Exception { - assertSchemaResource(collection, "managed-schema"); + assertSchemaResource(collection, "managed-schema.xml"); deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field-unique-key.xml", tmpSolrHome.getPath()); @@ -451,8 +452,8 @@ public void testPersistUniqueKey() throws Exception { public void testAddFieldThenReload() throws Exception { deleteCore(); - File managedSchemaFile = new File(tmpConfDir, "managed-schema"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema + File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); + Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); diff --git a/solr/server/solr/configsets/_default/conf/managed-schema b/solr/server/solr/configsets/_default/conf/managed-schema.xml similarity index 97% rename from solr/server/solr/configsets/_default/conf/managed-schema rename to solr/server/solr/configsets/_default/conf/managed-schema.xml index 7d46d789a1f..5be18eb5d80 100644 --- a/solr/server/solr/configsets/_default/conf/managed-schema +++ b/solr/server/solr/configsets/_default/conf/managed-schema.xml @@ -40,34 +40,34 @@ @@ -130,7 +130,7 @@ for fields via the specification of patterns to match field names. 
EXAMPLE: name="*_i" will match any field ending in _i (like myid_i, z_i)
   RESTRICTION: the glob-like pattern in the name attribute must have a "*" only at the start or the end.  -->

[hunk bodies elided: the XML markup in the remainder of this patch's diffs was stripped during extraction and cannot be reconstructed; only hunk headers and comment fragments survive (e.g. "removes stop words from case-insensitive 'stopwords.txt' (empty by default), and down cases. At query time only, it also applies synonyms."). The last legible change updates a managed-schema resource name:]

@@ -1018,8 +1018,8 @@
     ${tests.luceneMatchVersion:LATEST}
     true
-    managed-schema
+    managed-schema.xml

From b0a4b0213016bef59fcc6a280c498cbf35e64230 Mon Sep 17 00:00:00 2001
From: Chris Hostetter
Date: Thu, 16 Sep 2021 15:41:45 -0700
Subject: [PATCH 4/8] SOLR-15630: Logging MDC values no longer include a
 hardcoded prefix, allowing custom logging configurations access to the plain
 values. The default log4j2.xml PatternLayout has been updated to ensure the
 values are formatted with the existing prefixes.

---
 solr/CHANGES.txt                                       |  3 +++
 solr/benchmark/log4j2-bench.xml                        |  8 ++++----
 solr/benchmark/src/test-files/log4j2.xml               |  8 ++++----
 solr/contrib/clustering/src/test-files/log4j2.xml      |  2 +-
 .../contrib/gcs-repository/src/test-files/log4j2.xml   |  4 ++--
 solr/contrib/s3-repository/src/test-files/log4j2.xml   |  4 ++--
 .../org/apache/solr/logging/MDCLoggingContext.java     | 12 ++++++------
 solr/core/src/test-files/log4j2.xml                    |  2 +-
 .../metrics/reporters/SolrSlf4jReporterTest.java       |  2 +-
 solr/docker/tests/cases/test_log4j/log4j2.xml          |  6 +++---
 solr/server/resources/log4j2.xml                       |  6 +++---
 solr/solr-ref-guide/src/major-changes-in-solr-9.adoc   |  7 +++++++
 solr/solrj/src/test-files/log4j2.xml                   |  2 +-
 13 files changed, 38 insertions(+), 28 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 43e0b366777..12bfdde6f6e 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -145,6 +145,9 @@ when told to. The admin UI now tells it to. (Nazerke Seidan, David Smiley)
 * SOLR-10887: Migrate "managed-schema" file naming to "managed-schema.xml" file name, with a fallback
   to the legacy "managed-schema".  (Eric Pugh, David Smiley)
 
+* SOLR-15630: Logging MDC values no longer include a hardcoded prefix, allowing custom logging configurations access to the plain values.
+  The default log4j2.xml PatternLayout has been updated to ensure the values are formatted with the existing prefixes.
(hossman) + Build --------------------- diff --git a/solr/benchmark/log4j2-bench.xml b/solr/benchmark/log4j2-bench.xml index ae4cecb5030..f0fd8766816 100644 --- a/solr/benchmark/log4j2-bench.xml +++ b/solr/benchmark/log4j2-bench.xml @@ -23,7 +23,7 @@ - %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} + %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n @@ -35,7 +35,7 @@ filePattern="${sys:solr.log.dir:-work/solr-logs}/${sys:solr.log.name:-solr}.log.%i"> - %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} + %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n @@ -52,7 +52,7 @@ filePattern="${sys:solr.log.dir}/${sys:solr.log.name}_slow_requests.log.%i"> - %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} + %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n @@ -69,7 +69,7 @@ fileName="${sys:solr.log.dir:-build/work/solr-logs}/${sys:solr.log.name:-solr}_random_counts.log"> - %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} + %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n diff --git a/solr/benchmark/src/test-files/log4j2.xml b/solr/benchmark/src/test-files/log4j2.xml index b77dd4a08ff..e54e11b474b 100644 --- a/solr/benchmark/src/test-files/log4j2.xml +++ b/solr/benchmark/src/test-files/log4j2.xml @@ -23,7 +23,7 @@ - %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} + %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n @@ -35,7 +35,7 @@ filePattern="${sys:solr.log.dir:-build/work/solr-logs}/${sys:solr.log.name:-solr}.log.%i"> - %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} + %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n @@ -52,7 +52,7 @@ filePattern="${sys:solr.log.dir:-build/work/solr-logs}/${sys:solr.log.name:-solr}_slow_requests.log.%i"> - %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} + %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n @@ -68,7 +68,7 @@ fileName="${sys:solr.log.dir:-build/work/solr-logs}/${sys:solr.log.name:-solr}_random_counts.log"> - %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} + %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n diff --git 
a/solr/contrib/clustering/src/test-files/log4j2.xml b/solr/contrib/clustering/src/test-files/log4j2.xml index ed01a6b2c76..5e696935965 100644 --- a/solr/contrib/clustering/src/test-files/log4j2.xml +++ b/solr/contrib/clustering/src/test-files/log4j2.xml @@ -21,7 +21,7 @@ - %maxLen{%-4r %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core} %X{trace_id}] %c{1.} %m%notEmpty{ + %maxLen{%-4r %-5p (%t) [%notEmpty{n:%X{node_name}}%notEmpty{ c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}%notEmpty{ t:%X{trace_id}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n diff --git a/solr/contrib/gcs-repository/src/test-files/log4j2.xml b/solr/contrib/gcs-repository/src/test-files/log4j2.xml index 46ad20c18ee..6b02a3b06b4 100644 --- a/solr/contrib/gcs-repository/src/test-files/log4j2.xml +++ b/solr/contrib/gcs-repository/src/test-files/log4j2.xml @@ -21,7 +21,7 @@ - %maxLen{%-4r %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core} %X{trace_id}] %c{1.} %m%notEmpty{ + %maxLen{%-4r %-5p (%t) [%notEmpty{n:%X{node_name}}%notEmpty{ c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}%notEmpty{ t:%X{trace_id}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n @@ -50,7 +50,7 @@ - %-4r %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%n + %-4r %-5p (%t) [%notEmpty{n:%X{node_name}}%notEmpty{ c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%n diff --git a/solr/contrib/s3-repository/src/test-files/log4j2.xml b/solr/contrib/s3-repository/src/test-files/log4j2.xml index 229a9316418..950caea970d 100644 --- a/solr/contrib/s3-repository/src/test-files/log4j2.xml +++ b/solr/contrib/s3-repository/src/test-files/log4j2.xml @@ -21,7 +21,7 @@ - %maxLen{%-4r %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core} %X{trace_id}] %c{1.} %m%notEmpty{ + %maxLen{%-4r %-5p (%t) [%notEmpty{n:%X{node_name}}%notEmpty{ c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}%notEmpty{ t:%X{trace_id}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n @@ -50,7 +50,7 @@ - %-4r %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%n + %-4r %-5p (%t) [%notEmpty{n:%X{node_name}}%notEmpty{ c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%n diff --git a/solr/core/src/java/org/apache/solr/logging/MDCLoggingContext.java b/solr/core/src/java/org/apache/solr/logging/MDCLoggingContext.java index 492735be6e7..613f351d631 100644 --- a/solr/core/src/java/org/apache/solr/logging/MDCLoggingContext.java +++ b/solr/core/src/java/org/apache/solr/logging/MDCLoggingContext.java @@ -42,7 +42,7 @@ public class MDCLoggingContext { public static void setCollection(String collection) { if (collection != null) { - MDC.put(COLLECTION_PROP, "c:" + collection); + MDC.put(COLLECTION_PROP, collection); } else { MDC.remove(COLLECTION_PROP); } @@ -50,7 +50,7 @@ public static void setCollection(String collection) { public static void setTracerId(String traceId) { if (!StringUtils.isEmpty(traceId)) { - MDC.put(TRACE_ID, "t:" + traceId); + MDC.put(TRACE_ID, traceId); } else { MDC.remove(TRACE_ID); } @@ -58,7 +58,7 @@ public static void setTracerId(String traceId) { public static void setShard(String shard) { if (shard != null) { - MDC.put(SHARD_ID_PROP, "s:" + shard); + MDC.put(SHARD_ID_PROP, shard); } else { MDC.remove(SHARD_ID_PROP); } @@ -66,7 +66,7 @@ public static void 
setShard(String shard) { public static void setReplica(String replica) { if (replica != null) { - MDC.put(REPLICA_PROP, "r:" + replica); + MDC.put(REPLICA_PROP, replica); } else { MDC.remove(REPLICA_PROP); } @@ -74,7 +74,7 @@ public static void setReplica(String replica) { public static void setCoreName(String core) { if (core != null) { - MDC.put(CORE_NAME_PROP, "x:" + core); + MDC.put(CORE_NAME_PROP, core); } else { MDC.remove(CORE_NAME_PROP); } @@ -100,7 +100,7 @@ public static void setNode(String node) { private static void setNodeName(String node) { if (node != null) { - MDC.put(NODE_NAME_PROP, "n:" + node); + MDC.put(NODE_NAME_PROP, node); } else { MDC.remove(NODE_NAME_PROP); } diff --git a/solr/core/src/test-files/log4j2.xml b/solr/core/src/test-files/log4j2.xml index 53dcae5c748..0e8f08c0c61 100644 --- a/solr/core/src/test-files/log4j2.xml +++ b/solr/core/src/test-files/log4j2.xml @@ -21,7 +21,7 @@ - %maxLen{%-4r %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core} %X{trace_id}] %c{1.} %m%notEmpty{ + %maxLen{%-4r %-5p (%t) [%notEmpty{n:%X{node_name}}%notEmpty{ c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}%notEmpty{ t:%X{trace_id}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java index b78e45bb211..65b6a090d9d 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java @@ -94,7 +94,7 @@ public void testReporter() throws Exception { if (history.stream().filter(d -> "foobar".equals(d.getFirstValue("logger"))).count() == 0) { fail("No 'foobar' logs in: " + history.toString()); } - if (history.stream().filter(d -> "x:collection1".equals(d.getFirstValue("core"))).count() == 0) { + if (history.stream().filter(d -> "collection1".equals(d.getFirstValue("core"))).count() == 0) { fail("No 'solr.core' or MDC context in logs: " + history.toString()); } } diff --git a/solr/docker/tests/cases/test_log4j/log4j2.xml b/solr/docker/tests/cases/test_log4j/log4j2.xml index 9a629595b13..5e319cc12be 100644 --- a/solr/docker/tests/cases/test_log4j/log4j2.xml +++ b/solr/docker/tests/cases/test_log4j/log4j2.xml @@ -22,7 +22,7 @@ - %d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%n + %d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%n @@ -33,7 +33,7 @@ filePattern="${sys:solr.log.dir}/solr.log.%i" > - %d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%n + %d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%n @@ -61,7 +61,7 @@ filePattern="${sys:solr.log.dir}/solr_slow_requests.log.%i" > - %d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%n + %d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%n diff --git a/solr/server/resources/log4j2.xml b/solr/server/resources/log4j2.xml index 0a545b2a68c..4ec2fa2ecfb 100644 --- a/solr/server/resources/log4j2.xml +++ b/solr/server/resources/log4j2.xml @@ -23,7 +23,7 @@ - %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} 
%-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n
+ %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n
@@ -34,7 +34,7 @@ filePattern="${sys:solr.log.dir}/solr.log.%i" >
- %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n
+ %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n
@@ -50,7 +50,7 @@ filePattern="${sys:solr.log.dir}/solr_slow_requests.log.%i" >
- %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n
+ %maxLen{%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p (%t) [%notEmpty{c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n
diff --git a/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc b/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc
index 360edc421b3..c2e6933513c 100644
--- a/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc
+++ b/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc
@@ -201,6 +201,13 @@ Users who upgrade from prior versions of Solr may find that some requests involv
 Users in this situation are advised to consider the complexity of their queries/configuration, and increase the value of `<>` if warranted.
 
+=== Log4J configuration & Solr MDC values
+
+link:http://www.slf4j.org/apidocs/org/slf4j/MDC.html[MDC] values that Solr sets for use by logging calls (such as the collection name, shard name, replica name, etc.) have been modified to now be "bare" values, without the special single-character prefixes that were included in past versions. For example: in 8.x, log messages for a collection named "gettingstarted" would have an MDC value with the key `collection` mapped to a value of `c:gettingstarted`; in 9.x the value will simply be `gettingstarted`.
+
+Solr's default `log4j2.xml` configuration file has been modified to prepend these same prefixes to MDC values when they are included in log messages as part of the `<PatternLayout/>`. Users who have custom logging configurations and wish to ensure Solr 9.x logs are consistently formatted after upgrading will need to make similar changes to their logging configuration files. See link:https://issues.apache.org/jira/browse/SOLR-15630[SOLR-15630] for more details.
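
A minimal sketch of the difference, not part of this patch, assuming only the plain slf4j MDC API (the "collection" key and "c:" prefix mirror the MDCLoggingContext diff above):

    import org.slf4j.MDC;

    // Sketch only: what Solr effectively stores for a collection named
    // "gettingstarted" before and after SOLR-15630.
    public class MdcPrefixSketch {
      public static void main(String[] args) {
        // 8.x: the prefix was baked into the stored MDC value.
        MDC.put("collection", "c:" + "gettingstarted");

        // 9.x: the bare value is stored; the log4j2.xml layout re-adds the
        // prefix via %notEmpty{c:%X{collection}} instead.
        MDC.put("collection", "gettingstarted");

        MDC.remove("collection");
      }
    }
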
+ + === base_url removed from stored state If you're able to upgrade SolrJ to 8.8.x for all of your client applications, then you can set `-Dsolr.storeBaseUrl=false` (introduced in Solr 8.8.1) diff --git a/solr/solrj/src/test-files/log4j2.xml b/solr/solrj/src/test-files/log4j2.xml index 57956155e27..96f69f1dc8b 100644 --- a/solr/solrj/src/test-files/log4j2.xml +++ b/solr/solrj/src/test-files/log4j2.xml @@ -21,7 +21,7 @@ - %maxLen{%-4r %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core} %X{trace_id}] %c{1.} %m%notEmpty{ + %maxLen{%-4r %-5p (%t) [%notEmpty{n:%X{node_name}}%notEmpty{ c:%X{collection}}%notEmpty{ s:%X{shard}}%notEmpty{ r:%X{replica}}%notEmpty{ x:%X{core}}%notEmpty{ t:%X{trace_id}}] %c{1.} %m%notEmpty{ =>%ex{short}}}{10240}%n From ba6a347bba107fe515493114d9ed6ae87d812cc4 Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Mon, 20 Sep 2021 17:51:57 +0100 Subject: [PATCH 5/8] SOLR-15173: replace 'master' with 'main' in release wizard (#297) --- dev-tools/scripts/releaseWizard.py | 12 +++---- dev-tools/scripts/releaseWizard.yaml | 52 ++++++++++++++-------------- dev-tools/scripts/scriptutil.py | 2 +- 3 files changed, 33 insertions(+), 33 deletions(-) diff --git a/dev-tools/scripts/releaseWizard.py b/dev-tools/scripts/releaseWizard.py index 9d885fb9b0c..b10fada4ff5 100755 --- a/dev-tools/scripts/releaseWizard.py +++ b/dev-tools/scripts/releaseWizard.py @@ -116,7 +116,7 @@ def expand_jinja(text, vars=None): 'set_java_home': set_java_home, 'latest_version': state.get_latest_version(), 'latest_lts_version': state.get_latest_lts_version(), - 'master_version': state.get_master_version(), + 'main_version': state.get_main_version(), 'mirrored_versions': state.get_mirrored_versions(), 'mirrored_versions_to_delete': state.get_mirrored_versions_to_delete(), 'home': os.path.expanduser("~") @@ -365,7 +365,7 @@ def get_mirrored_versions_to_delete(self): raise Exception("Release version %s must have same major version as current minor or lts release") return [ver for ver in versions if ver not in to_keep] - def get_master_version(self): + def get_main_version(self): v = Version.parse(self.get_latest_version()) return "%s.%s.%s" % (v.major + 1, 0, 0) @@ -394,10 +394,10 @@ def validate_release_version(self, branch_type, branch, release_version): if not ver.is_minor_release(): sys.exit("You can only release minor releases from an existing stable branch") elif branch_type == BranchType.unstable: - if not branch == 'master': + if not branch == 'main': sys.exit("Incompatible branch and branch_type") if not ver.is_major_release(): - sys.exit("You can only release a new major version from master branch") + sys.exit("You can only release a new major version from main branch") if not getScriptVersion() == release_version: print("WARNING: Expected release version %s when on branch %s, but got %s" % ( getScriptVersion(), branch, release_version)) @@ -405,7 +405,7 @@ def validate_release_version(self, branch_type, branch, release_version): def get_base_branch_name(self): v = Version.parse(self.release_version) if v.is_major_release(): - return 'master' + return 'main' elif v.is_minor_release(): return self.get_stable_branch_name() elif v.major == Version.parse(self.get_latest_version()).major: @@ -573,7 +573,7 @@ def get_minor_branch_name(self): def get_stable_branch_name(self): if self.release_type == 'major': - v = Version.parse(self.get_master_version()) + v = Version.parse(self.get_main_version()) else: v = Version.parse(self.get_latest_version()) return "branch_%sx" % v.major diff 
--git a/dev-tools/scripts/releaseWizard.yaml b/dev-tools/scripts/releaseWizard.yaml index 71966fd005b..4b6d31bd9fa 100644 --- a/dev-tools/scripts/releaseWizard.yaml +++ b/dev-tools/scripts/releaseWizard.yaml @@ -410,7 +410,7 @@ groups: cmd: "{{ gradle_cmd }} clean check -x test" - !Todo id: create_stable_branch - title: Create a new stable branch, off from master + title: Create a new stable branch, off from main description: In our case we'll create {{ stable_branch }} types: - major @@ -420,7 +420,7 @@ groups: commands_text: Run these commands to create a stable branch commands: - !Command - cmd: git checkout master + cmd: git checkout main tee: true - !Command cmd: git pull --ff-only @@ -467,7 +467,7 @@ groups: tee: true - !Todo id: add_version_major - title: Add a new major version on master branch + title: Add a new major version on main branch types: - major depends: clean_git_checkout @@ -475,10 +475,10 @@ groups: next_version: "{{ release_version_major + 1 }}.0.0" commands: !Commands root_folder: '{{ git_checkout_folder }}' - commands_text: Run these commands to add the new major version {{ next_version }} to the master branch + commands_text: Run these commands to add the new major version {{ next_version }} to the main branch commands: - !Command - cmd: git checkout master + cmd: git checkout main tee: true - !Command cmd: python3 -u dev-tools/scripts/addVersion.py {{ next_version }} @@ -632,7 +632,7 @@ groups: Go to the JIRA "Manage Versions" Administration pages and add the new version: {% if release_type == 'major' -%} - . Change name of version `master ({{ release_version_major }}.0)` into `{{ release_version_major }}.0` + . Change name of version `main ({{ release_version_major }}.0)` into `{{ release_version_major }}.0` {%- endif %} . Create a new (unreleased) version `{{ get_next_version }}` @@ -972,7 +972,7 @@ groups: logfile: svn_rm_containing.log comment: Clean up containing folder on the staging repo tee: true - post_description: 'Note at this point you will see the Jenkins job "Lucene-Solr-SmokeRelease-master" begin to fail, until you run the "Generate Backcompat Indexes" ' + post_description: 'Note at this point you will see the Jenkins job "Lucene-Solr-SmokeRelease-main" begin to fail, until you run the "Generate Backcompat Indexes" ' - !Todo id: stage_maven title: Stage the maven artifacts for publishing @@ -1190,7 +1190,7 @@ groups: depends: - prepare_announce_solr description: | - Push the website changes to 'master' branch, and check the staging site. + Push the website changes to 'main' branch, and check the staging site. You will get a chance to preview the diff of all changes before you push. If you need to do changes, do the changes (e.g. by re-running previous step 'Update rest of webpage') and commit your changes. Then re-run this step and push when everything is OK. @@ -1201,7 +1201,7 @@ groups: You have to exit the editor after review to continue. 
commands: - !Command - cmd: git checkout master && git status + cmd: git checkout main && git status stdout: true - !Command cmd: git diff @@ -1240,7 +1240,7 @@ groups: cmd: git checkout production && git pull --ff-only stdout: true - !Command - cmd: git merge master + cmd: git merge main stdout: true - !Command cmd: git push origin @@ -1269,9 +1269,9 @@ groups: commands_text: Edit DOAP files commands: - !Command - cmd: git checkout master && git pull --ff-only + cmd: git checkout main && git pull --ff-only stdout: true - comment: Goto master branch + comment: Goto main branch - !Command cmd: "{{ editor }} dev-tools/doap/lucene.rdf" comment: Edit Lucene DOAP, add version {{ release_version }} @@ -1288,16 +1288,16 @@ groups: cmd: git push origin logfile: push.log stdout: true - comment: Push the master branch + comment: Push the main branch - !Command cmd: "git checkout {{ stable_branch }} && git pull --ff-only" stdout: true comment: Checkout the stable branch - !Command - cmd: "git cherry-pick master" + cmd: "git cherry-pick main" logfile: commit.log stdout: true - comment: Cherrypick the DOAP changes from master onto the stable branch. + comment: Cherrypick the DOAP changes from main onto the stable branch. - !Command cmd: git show HEAD stdout: true @@ -1407,24 +1407,24 @@ groups: commands: !Commands root_folder: '{{ git_checkout_folder }}' commands_text: | - Update versions on master and stable branch. + Update versions on main and stable branch. You may have to hand-edit some files before commit, so go slowly :) confirm_each_command: true commands: - !Command - cmd: git checkout master && git pull --ff-only && git clean -df && git checkout -- . - comment: Go to master branch - logfile: checkout-master.log + cmd: git checkout main && git pull --ff-only && git clean -df && git checkout -- . + comment: Go to main branch + logfile: checkout-main.log - !Command cmd: python3 -u dev-tools/scripts/addVersion.py {{ release_version }} - logfile: addversion-master.log + logfile: addversion-main.log - !Command cmd: git diff - logfile: diff-master.log + logfile: diff-main.log tee: true - !Command cmd: git add -u . && git commit -m "Add bugfix version {{ release_version }}" && git push - logfile: commit-master.log + logfile: commit-main.log - !Command cmd: git checkout {{ stable_branch }} && git pull --ff-only && git clean -df && git checkout -- . logfile: checkout-stable.log @@ -1465,9 +1465,9 @@ groups: tee: true comment: Find version regexes - !Command - cmd: git checkout master && git pull --ff-only && git clean -df && git checkout -- . - comment: Go to master branch - logfile: checkout-master.log + cmd: git checkout main && git pull --ff-only && git clean -df && git checkout -- . + comment: Go to main branch + logfile: checkout-main.log - !Command cmd: "{{ editor }} solr/CHANGES.txt" comment: Edit Solr CHANGES, do necessary changes @@ -1478,7 +1478,7 @@ groups: stdout: true - !Command cmd: git add -u . && git commit -m "Sync CHANGES for {{ release_version }}" && git push - logfile: commit-master.log + logfile: commit-main.log - !Command cmd: git checkout {{ stable_branch }} && git pull --ff-only && git clean -df && git checkout -- . 
comment: Go to stable branch diff --git a/dev-tools/scripts/scriptutil.py b/dev-tools/scripts/scriptutil.py index 04983c0ea98..97992e116f7 100644 --- a/dev-tools/scripts/scriptutil.py +++ b/dev-tools/scripts/scriptutil.py @@ -124,7 +124,7 @@ def find_branch_type(): else: raise Exception('git status missing branch name') - if branchName == b'master': + if branchName == b'main': return BranchType.unstable if re.match(r'branch_(\d+)x', branchName.decode('UTF-8')): return BranchType.stable From 2291d90f9c678cbfd5e8fc6bc72206e8215ac745 Mon Sep 17 00:00:00 2001 From: Mike Drob Date: Mon, 20 Sep 2021 12:32:15 -0500 Subject: [PATCH 6/8] LUCENE-10107 Read multi-line commit from Manifest (#2575) --- dev-tools/scripts/buildAndPushRelease.py | 6 +++++- dev-tools/scripts/smokeTestRelease.py | 12 ++++++++---- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/dev-tools/scripts/buildAndPushRelease.py b/dev-tools/scripts/buildAndPushRelease.py index 21861cb5811..62010f06653 100755 --- a/dev-tools/scripts/buildAndPushRelease.py +++ b/dev-tools/scripts/buildAndPushRelease.py @@ -344,7 +344,11 @@ def main(): print('Next run the smoker tester:') p = re.compile(".*/") m = p.match(sys.argv[0]) - print('%s -u %ssmokeTestRelease.py %s' % (sys.executable, m.group(), url)) + if not c.sign: + signed = "--not-signed" + else: + signed = "" + print('%s -u %ssmokeTestRelease.py %s %s' % (sys.executable, m.group(), signed, url)) if __name__ == '__main__': try: diff --git a/dev-tools/scripts/smokeTestRelease.py b/dev-tools/scripts/smokeTestRelease.py index eca6e65d821..3db19ead344 100755 --- a/dev-tools/scripts/smokeTestRelease.py +++ b/dev-tools/scripts/smokeTestRelease.py @@ -169,11 +169,15 @@ def checkJARMetaData(desc, jarFile, gitRevision, version): if gitRevision != 'skip': # Make sure this matches the version and git revision we think we are releasing: - # TODO: LUCENE-7023: is it OK that Implementation-Version's value now spans two lines? - verifyRevision = 'Implementation-Version: %s %s' % (version, gitRevision) - if s.find(verifyRevision) == -1: - raise RuntimeError('%s is missing "%s" inside its META-INF/MANIFEST.MF (wrong git revision?)' % \ + match = re.search("Implementation-Version: (.+\r\n .+)", s, re.MULTILINE) + if match: + implLine = match.group(1).replace("\r\n ", "") + verifyRevision = '%s %s' % (version, gitRevision) + if implLine.find(verifyRevision) == -1: + raise RuntimeError('%s is missing "%s" inside its META-INF/MANIFEST.MF (wrong git revision?)' % \ (desc, verifyRevision)) + else: + raise RuntimeError('%s is missing Implementation-Version inside its META-INF/MANIFEST.MF' % desc) notice = decodeUTF8(z.read(NOTICE_FILE_NAME)) license = decodeUTF8(z.read(LICENSE_FILE_NAME)) From 223080d592b7685385b17d46553cf2d02bca3543 Mon Sep 17 00:00:00 2001 From: Mike Drob Date: Mon, 20 Sep 2021 13:45:20 -0500 Subject: [PATCH 7/8] Revert "LUCENE-10107 Read multi-line commit from Manifest (#2575)" This reverts commit 2291d90f9c678cbfd5e8fc6bc72206e8215ac745. 
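
For context, MANIFEST.MF folds long attribute values across 72-byte continuation lines, which is why Implementation-Version can span two lines in the first place. A hedged alternative sketch, not part of either commit: java.util.jar.Manifest unfolds continuation lines itself, avoiding a hand-rolled regex entirely (the class name and argument layout are illustrative only):

    import java.util.jar.JarFile;
    import java.util.jar.Manifest;

    public class ManifestVersionCheck {
      public static void main(String[] args) throws Exception {
        String jarPath = args[0];                   // path to a release jar
        String expected = args[1] + " " + args[2];  // "<version> <gitRevision>"
        try (JarFile jar = new JarFile(jarPath)) {
          Manifest mf = jar.getManifest();
          // getValue() returns the unfolded, single-line attribute value
          String impl = mf.getMainAttributes().getValue("Implementation-Version");
          if (impl == null || !impl.contains(expected)) {
            throw new RuntimeException(
                jarPath + " is missing \"" + expected + "\" in META-INF/MANIFEST.MF");
          }
        }
      }
    }
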
--- dev-tools/scripts/buildAndPushRelease.py | 6 +----- dev-tools/scripts/smokeTestRelease.py | 12 ++++-------- 2 files changed, 5 insertions(+), 13 deletions(-) diff --git a/dev-tools/scripts/buildAndPushRelease.py b/dev-tools/scripts/buildAndPushRelease.py index 62010f06653..21861cb5811 100755 --- a/dev-tools/scripts/buildAndPushRelease.py +++ b/dev-tools/scripts/buildAndPushRelease.py @@ -344,11 +344,7 @@ def main(): print('Next run the smoker tester:') p = re.compile(".*/") m = p.match(sys.argv[0]) - if not c.sign: - signed = "--not-signed" - else: - signed = "" - print('%s -u %ssmokeTestRelease.py %s %s' % (sys.executable, m.group(), signed, url)) + print('%s -u %ssmokeTestRelease.py %s' % (sys.executable, m.group(), url)) if __name__ == '__main__': try: diff --git a/dev-tools/scripts/smokeTestRelease.py b/dev-tools/scripts/smokeTestRelease.py index 3db19ead344..eca6e65d821 100755 --- a/dev-tools/scripts/smokeTestRelease.py +++ b/dev-tools/scripts/smokeTestRelease.py @@ -169,15 +169,11 @@ def checkJARMetaData(desc, jarFile, gitRevision, version): if gitRevision != 'skip': # Make sure this matches the version and git revision we think we are releasing: - match = re.search("Implementation-Version: (.+\r\n .+)", s, re.MULTILINE) - if match: - implLine = match.group(1).replace("\r\n ", "") - verifyRevision = '%s %s' % (version, gitRevision) - if implLine.find(verifyRevision) == -1: - raise RuntimeError('%s is missing "%s" inside its META-INF/MANIFEST.MF (wrong git revision?)' % \ + # TODO: LUCENE-7023: is it OK that Implementation-Version's value now spans two lines? + verifyRevision = 'Implementation-Version: %s %s' % (version, gitRevision) + if s.find(verifyRevision) == -1: + raise RuntimeError('%s is missing "%s" inside its META-INF/MANIFEST.MF (wrong git revision?)' % \ (desc, verifyRevision)) - else: - raise RuntimeError('%s is missing Implementation-Version inside its META-INF/MANIFEST.MF' % desc) notice = decodeUTF8(z.read(NOTICE_FILE_NAME)) license = decodeUTF8(z.read(LICENSE_FILE_NAME)) From b50cdf2a28ced922b2fa01195bef8c75f9a0dda7 Mon Sep 17 00:00:00 2001 From: Mark Miller Date: Sat, 9 Oct 2021 23:24:35 -0500 Subject: [PATCH 8/8] SOLR-15560: Optimize JavaBinCodec and related Jetty end points. SOLR-12555 WIP WIP WIP WIP wip3 SOLR-15653: Fix collection creation race that assumes a local clusterstate when the collection has only just been created. 
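
For orientation, a minimal round-trip sketch of the codec this commit optimizes — not part of the patch, and using only the long-standing two-argument marshal/unmarshal API (the three-argument marshal and the Bytes*Stream classes seen in the new JavaBinBasicPerf benchmark below are additions from this commit):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import org.apache.solr.common.util.JavaBinCodec;
    import org.apache.solr.common.util.NamedList;

    public class JavaBinRoundTrip {
      public static void main(String[] args) throws Exception {
        NamedList<Object> nl = new NamedList<>();
        nl.add("status", 0);
        nl.add("msg", "hello");

        // encode to the compact binary javabin wire format
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (JavaBinCodec encoder = new JavaBinCodec()) {
          encoder.marshal(nl, out);
        }

        // decode back into the original object graph
        try (JavaBinCodec decoder = new JavaBinCodec()) {
          Object decoded = decoder.unmarshal(new ByteArrayInputStream(out.toByteArray()));
          System.out.println(decoded);  // e.g. {status=0,msg=hello}
        }
      }
    }
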
--- solr/benchmark/log4j2-bench.xml | 10 +- .../solr/bench/generators/StringsDSL.java | 44 +- .../solr/bench/index/CloudIndexing.java | 21 +- .../solr/bench/javabin/JavaBinBasicPerf.java | 542 +++++ .../solr/bench/javabin/package-info.java | 19 + .../LegacyAbstractAnalyticsFacetTest.java | 2 + .../apache/solr/s3/AbstractS3ClientTest.java | 10 +- .../solr/s3/S3BackupRepositoryTest.java | 9 +- .../solr/s3/S3IncrementalBackupTest.java | 20 +- .../src/test/org/apache/solr/s3/S3Mock.java | 36 + .../apache/solr/s3/S3OutputStreamTest.java | 10 +- .../solrj/embedded/EmbeddedSolrServer.java | 132 +- .../solrj/embedded/JettySolrRunner.java | 13 +- .../solr/cloud/api/collections/Assign.java | 18 +- .../api/collections/CreateCollectionCmd.java | 4 +- .../component/ActiveTasksListComponent.java | 149 +- .../solr/handler/export/ExportWriter.java | 13 +- .../handler/export/ExportWriterStream.java | 41 +- .../solr/handler/loader/JavabinLoader.java | 178 +- .../solr/handler/loader/JsonLoader.java | 4 +- .../apache/solr/handler/loader/XMLLoader.java | 113 +- .../solr/response/BinaryResponseWriter.java | 101 +- .../transform/RawValueTransformerFactory.java | 85 +- .../response/transform/WriteableGeoJSON.java | 2 +- .../solr/update/HdfsTransactionLog.java | 262 +- .../apache/solr/update/TransactionLog.java | 368 +-- .../java/org/apache/solr/util/ExportTool.java | 352 +-- .../org/apache/solr/util/PackageTool.java | 9 +- .../apache/solr/util/RecordingJSONParser.java | 2 +- .../apache/solr/util/StartupLoggingUtils.java | 2 +- .../solr/cloud/TestRebalanceLeaders.java | 402 +++- .../solr/cloud/TestStressLiveNodes.java | 8 +- .../HdfsCloudIncrementalBackupTest.java | 202 +- .../TestHdfsCloudBackupRestore.java | 6 +- .../solr/cloud/hdfs/HdfsNNFailoverTest.java | 2 + .../solr/cloud/hdfs/HdfsRecoverLeaseTest.java | 144 +- .../apache/solr/cloud/hdfs/HdfsTestUtil.java | 13 +- .../solr/cloud/hdfs/HdfsThreadLeakTest.java | 9 +- .../core/CachingDirectoryFactoryTest.java | 91 +- .../solr/core/HdfsDirectoryFactoryTest.java | 134 +- .../HdfsBackupRepositoryIntegrationTest.java | 6 +- .../handler/TestHdfsBackupRestoreCore.java | 156 +- .../handler/admin/DaemonStreamApiTest.java | 9 + .../solr/store/hdfs/HdfsDirectoryTest.java | 12 +- solr/server/etc/jetty-http.xml | 2 + solr/server/etc/jetty-https.xml | 1 + solr/solrj/build.gradle | 4 +- .../apache/solr/client/solrj/SolrRequest.java | 14 +- .../solr/client/solrj/impl/AsyncTracker.java | 161 ++ .../solrj/impl/BinaryRequestWriter.java | 6 +- .../solrj/impl/BinaryResponseParser.java | 15 +- .../impl/ConcurrentUpdateHttp2SolrClient.java | 30 +- .../client/solrj/impl/Http2SolrClient.java | 580 +++-- .../impl/SolrOutputStreamContentProvider.java | 124 + .../impl/StreamingBinaryResponseParser.java | 176 +- .../io/stream/JavabinTupleStreamParser.java | 119 +- .../request/ContentStreamUpdateRequest.java | 5 +- .../request/JavaBinUpdateRequestCodec.java | 178 +- .../client/solrj/request/UpdateRequest.java | 57 +- .../solr/common/ConditionalKeyMapWriter.java | 3 +- .../solr/common/ConditionalMapWriter.java | 2 +- .../org/apache/solr/common/MapWriter.java | 130 +- .../apache/solr/common/SolrDocumentList.java | 8 + .../util/BufferedBytesOutputStream.java | 106 + .../util/ByteArrayUtf8CharSequence.java | 11 +- .../apache/solr/common/util/ByteUtils.java | 183 +- .../apache/solr/common/util/BytesBlock.java | 4 +- .../solr/common/util/BytesInputStream.java | 109 + .../solr/common/util/BytesOutputStream.java | 72 +- .../solr/common/util/ContentStreamBase.java | 187 +- 
.../solr/common/util/FastInputStream.java | 32 +- .../solr/common/util/FastJavaBinDecoder.java | 215 +- .../apache/solr/common/util/JavaBinCodec.java | 2105 +++++++++++------ .../solr/common/util/JavaBinCodecOld.java | 1258 ++++++++++ .../solr/common/util/JavaBinInputStream.java | 311 +++ .../solr/common/util/JavaBinOutputStream.java | 359 +++ .../apache/solr/common/util/NamedList.java | 526 ++-- .../solr/common/util/Utf8CharSequence.java | 9 +- .../org/apache/solr/common/util/Utils.java | 412 ++-- solr/solrj/src/java/org/noggit/CharArr.java | 114 +- .../solrj/request/TestUpdateRequestCodec.java | 173 +- .../cloud/TestDocCollectionWatcher.java | 4 +- .../solr/common/util/TestFastInputStream.java | 50 +- .../common/util/TestFastJavabinDecoder.java | 303 +-- .../solr/common/util/TestJavaBinCodec.java | 374 +-- .../common/util/Utf8CharSequenceTest.java | 2 +- .../apache/solr/SolrIgnoredThreadsFilter.java | 24 +- .../java/org/apache/solr/SolrTestCase.java | 250 +- .../java/org/apache/solr/SolrTestCaseJ4.java | 365 ++- .../AbstractIncrementalBackupTest.java | 12 +- .../solr/util/NotSecurePseudoRandom.java | 2 + .../solr/util/NotSecurePseudoRandomSpi.java | 0 .../util/SolrTestNonSecureRandomProvider.java | 25 + versions.props | 1 + 94 files changed, 8751 insertions(+), 4227 deletions(-) create mode 100755 solr/benchmark/src/java/org/apache/solr/bench/javabin/JavaBinBasicPerf.java create mode 100644 solr/benchmark/src/java/org/apache/solr/bench/javabin/package-info.java create mode 100644 solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3Mock.java create mode 100644 solr/solrj/src/java/org/apache/solr/client/solrj/impl/AsyncTracker.java create mode 100644 solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrOutputStreamContentProvider.java create mode 100644 solr/solrj/src/java/org/apache/solr/common/util/BufferedBytesOutputStream.java create mode 100644 solr/solrj/src/java/org/apache/solr/common/util/BytesInputStream.java create mode 100644 solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodecOld.java create mode 100644 solr/solrj/src/java/org/apache/solr/common/util/JavaBinInputStream.java create mode 100644 solr/solrj/src/java/org/apache/solr/common/util/JavaBinOutputStream.java create mode 100644 solr/test-framework/src/java/org/apache/solr/util/NotSecurePseudoRandom.java create mode 100644 solr/test-framework/src/java/org/apache/solr/util/NotSecurePseudoRandomSpi.java create mode 100644 solr/test-framework/src/java/org/apache/solr/util/SolrTestNonSecureRandomProvider.java diff --git a/solr/benchmark/log4j2-bench.xml b/solr/benchmark/log4j2-bench.xml index f0fd8766816..c422685c8c9 100644 --- a/solr/benchmark/log4j2-bench.xml +++ b/solr/benchmark/log4j2-bench.xml @@ -31,7 +31,7 @@ @@ -48,7 +48,7 @@ @@ -74,9 +74,6 @@ - - - @@ -92,6 +89,9 @@ + + + diff --git a/solr/benchmark/src/java/org/apache/solr/bench/generators/StringsDSL.java b/solr/benchmark/src/java/org/apache/solr/bench/generators/StringsDSL.java index 33d573c173e..fb32b21b2d1 100644 --- a/solr/benchmark/src/java/org/apache/solr/bench/generators/StringsDSL.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/generators/StringsDSL.java @@ -19,13 +19,14 @@ import static org.apache.solr.bench.generators.SourceDSL.checkArguments; import static org.apache.solr.bench.generators.SourceDSL.integers; +import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.Collections; import java.util.List; +import java.util.ListIterator; import 
java.util.Objects; -import java.util.Random; +import java.util.RandomAccess; import java.util.Scanner; import java.util.SplittableRandom; import org.apache.solr.bench.BaseBenchState; @@ -51,14 +52,18 @@ public class StringsDSL { // english word list via https://github.com/dwyl/english-words words = new ArrayList<>(1000); - InputStream inputStream = StringsDSL.class.getClassLoader().getResourceAsStream("words.txt"); - try (Scanner scanner = - new Scanner(Objects.requireNonNull(inputStream), StandardCharsets.UTF_8.name())) { + + try (InputStream inputStream = + StringsDSL.class.getClassLoader().getResourceAsStream("words.txt"); + Scanner scanner = + new Scanner(Objects.requireNonNull(inputStream), StandardCharsets.UTF_8.name())) { while (scanner.hasNextLine()) { words.add(scanner.nextLine()); } + } catch (IOException e) { + throw new RuntimeException(e); } - Collections.shuffle(words, new Random(BaseBenchState.getRandomSeed())); + shuffle(words, new SplittableRandom(BaseBenchState.getRandomSeed())); WORD_SIZE = words.size(); } @@ -439,6 +444,33 @@ public String generate(RandomnessSource in) { } } + private static void shuffle(List list, SplittableRandom random) { + @SuppressWarnings("unchecked") // we won't put foreign objects in + final List objectList = (List) list; + + if (list instanceof RandomAccess) { + for (int i = objectList.size() - 1; i > 0; i--) { + int index = random.nextInt(i + 1); + objectList.set(index, objectList.set(i, objectList.get(index))); + } + } else { + Object[] array = objectList.toArray(); + for (int i = array.length - 1; i > 0; i--) { + int index = random.nextInt(i + 1); + Object temp = array[i]; + array[i] = array[index]; + array[index] = temp; + } + + int i = 0; + ListIterator it = objectList.listIterator(); + while (it.hasNext()) { + it.next(); + it.set(array[i++]); + } + } + } + private static final int[] blockStarts = { 0x0000, 0x0080, 0x0100, 0x0180, 0x0250, 0x02B0, 0x0300, 0x0370, 0x0400, 0x0500, 0x0530, 0x0590, 0x0600, 0x0700, 0x0750, 0x0780, 0x07C0, 0x0800, 0x0900, 0x0980, 0x0A00, 0x0A80, 0x0B00, 0x0B80, diff --git a/solr/benchmark/src/java/org/apache/solr/bench/index/CloudIndexing.java b/solr/benchmark/src/java/org/apache/solr/bench/index/CloudIndexing.java index 981ce0dc455..0e7127d6206 100755 --- a/solr/benchmark/src/java/org/apache/solr/bench/index/CloudIndexing.java +++ b/solr/benchmark/src/java/org/apache/solr/bench/index/CloudIndexing.java @@ -37,11 +37,9 @@ import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; -import org.openjdk.jmh.annotations.TearDown; import org.openjdk.jmh.annotations.Threads; import org.openjdk.jmh.annotations.Timeout; import org.openjdk.jmh.annotations.Warmup; -import org.openjdk.jmh.infra.BenchmarkParams; @BenchmarkMode(Mode.Throughput) @OutputTimeUnit(TimeUnit.SECONDS) @@ -70,12 +68,6 @@ public static class BenchState { @Param({"1", "3"}) int numReplicas; - @Param({"0", "15", "30", "70", "100", "500", "1000"}) - int useStringUtf8Over; - - @Param({"true", "false"}) - boolean directBuffer; - private final org.apache.solr.bench.Docs largeDocs; private Iterator largeDocIterator; @@ -101,7 +93,7 @@ public BenchState() { smallDocs = docs() .field("id", integers().incrementing()) - .field("text", strings().basicLatinAlphabet().multi(2).ofLengthBetween(20, 32)) + .field("text", strings().basicLatinAlphabet().multi(3).ofLengthBetween(20, 32)) .field("int1_i", integers().all()) .field("int2_i", integers().all()) .field("long1_l", longs().all()); @@ -129,21 
+121,10 @@ public SolrInputDocument getSmallDoc() { @Setup(Level.Trial) public void doSetup(MiniClusterState.MiniClusterBenchState miniClusterState) throws Exception { - System.setProperty("useStringUtf8Over", Integer.toString(useStringUtf8Over)); - System.setProperty("httpClientDirectBuffer", Boolean.toString(directBuffer)); - System.setProperty("mergePolicyFactory", "org.apache.solr.index.NoMergePolicyFactory"); miniClusterState.startMiniCluster(nodeCount); miniClusterState.createCollection(COLLECTION, numShards, numReplicas); } - - @TearDown(Level.Trial) - public void doTearDown( - MiniClusterState.MiniClusterBenchState miniClusterState, BenchmarkParams benchmarkParams) - throws Exception { - - // miniClusterState.shutdownMiniCluster(benchmarkParams); - } } @Benchmark diff --git a/solr/benchmark/src/java/org/apache/solr/bench/javabin/JavaBinBasicPerf.java b/solr/benchmark/src/java/org/apache/solr/bench/javabin/JavaBinBasicPerf.java new file mode 100755 index 00000000000..feadd11bbca --- /dev/null +++ b/solr/benchmark/src/java/org/apache/solr/bench/javabin/JavaBinBasicPerf.java @@ -0,0 +1,542 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.bench.javabin; + +import static org.apache.solr.bench.Docs.docs; +import static org.apache.solr.bench.generators.SourceDSL.dates; +import static org.apache.solr.bench.generators.SourceDSL.doubles; +import static org.apache.solr.bench.generators.SourceDSL.floats; +import static org.apache.solr.bench.generators.SourceDSL.integers; +import static org.apache.solr.bench.generators.SourceDSL.longs; +import static org.apache.solr.bench.generators.SourceDSL.maps; +import static org.apache.solr.bench.generators.SourceDSL.strings; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import org.apache.solr.bench.BaseBenchState; +import org.apache.solr.bench.Docs; +import org.apache.solr.bench.SolrGenerate; +import org.apache.solr.bench.SplittableRandomGenerator; +import org.apache.solr.bench.generators.LazyGen; +import org.apache.solr.bench.generators.NamedListGen; +import org.apache.solr.bench.generators.SolrGen; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; +import org.apache.solr.common.util.BytesInputStream; +import org.apache.solr.common.util.BytesOutputStream; +import org.apache.solr.common.util.JavaBinCodec; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.util.SolrNamedThreadFactory; +import org.apache.solr.common.util.SuppressForbidden; +import org.eclipse.jetty.io.RuntimeIOException; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Timeout; +import org.openjdk.jmh.annotations.Warmup; +import org.quicktheories.api.Pair; +import org.quicktheories.core.Gen; +import org.quicktheories.impl.BenchmarkRandomSource; + +@BenchmarkMode(Mode.Throughput) +@OutputTimeUnit(TimeUnit.SECONDS) +@Threads(1) +@Warmup(time = 15, iterations = 10) +@Measurement(time = 30, iterations = 5) +@Fork(value = 1) +@Timeout(time = 60) +public class JavaBinBasicPerf { + + public static final int COUNT = 10; + + @State(Scope.Thread) + public static class ThreadState { + private final BytesOutputStream baos = new BytesOutputStream(1024 * 1024 * 24); + } + + @State(Scope.Benchmark) + public static class BenchState { + + @Param({"1.0"}) + public float scale; + + @Param({ + "default" + }) // nested, numeric, large_strings, very_large_text_and_strings, many_token_field, + // small_strings + public String content; + + private final Queue responseByteArrays = new ConcurrentLinkedQueue<>(); + private final Queue responses = new ConcurrentLinkedQueue<>(); + + private volatile Iterator responseiterator; + private volatile Iterator 
responseByteArrayIterator; + + @SuppressForbidden(reason = "NoMDCAwareNecessary") + @Setup(Level.Trial) + public void doSetup(BaseBenchState baseBenchState) throws Exception { + + BaseBenchState.log("scale=" + scale); + ExecutorService executorService = + Executors.newFixedThreadPool( + Runtime.getRuntime().availableProcessors(), + new SolrNamedThreadFactory("JavaBinPerf DataGen")); + + responseByteArrays.clear(); + responses.clear(); + + AtomicBoolean stop = new AtomicBoolean(false); + AtomicReference failed = new AtomicReference<>(); + for (int i = 0; i < 100 && !stop.get(); i++) { + int finalI = i; + executorService.submit( + () -> { + try { + Object response; + switch (content) { + case "default": + if (scale > 2 && finalI >= 50) { + stop.set(true); + } + + response = defaultContent(COUNT, scale); + break; + case "numeric": + response = numericsContent((int) (COUNT * scale)); + break; + case "large_strings": + if (scale > 2 && finalI >= 10) { + stop.set(true); + } + response = largeStringsContent(COUNT, scale); + break; + case "very_large_text_and_strings": + if (finalI >= 10) { + stop.set(true); + } + response = veryLargeTextAndStrings(COUNT, scale); + break; + case "many_token_field": + response = manyTokenFieldContent(COUNT, scale); + break; + case "small_strings": + response = smallStrings(COUNT, scale); + break; + case "nested": + response = nested(baseBenchState, scale); + break; + default: + BaseBenchState.log( + String.format(Locale.ENGLISH, "Unknown content type: %s", content)); + throw new IllegalArgumentException("Unknown content type: " + content); + } + + try (final JavaBinCodec jbc = new JavaBinCodec()) { + BytesOutputStream baos = new BytesOutputStream(1024 << 8); + jbc.marshal(response, baos, true); + responseByteArrays.add(baos.toBytes()); + responses.add(response); + } catch (IOException e) { + BaseBenchState.log("IOException " + e.getMessage()); + throw new RuntimeIOException(e); + } + } catch (Exception e) { + e.printStackTrace(); + failed.set(e); + executorService.shutdownNow(); + } + }); + } + + if (failed.get() != null) { + throw failed.get(); + } + + executorService.shutdown(); + boolean result = false; + while (!result) { + result = executorService.awaitTermination(600, TimeUnit.MINUTES); + } + + BaseBenchState.log( + "setup responses=" + + responses.size() + + " responseByteArrays=" + + responseByteArrays.size()); + + responseiterator = responses.iterator(); + responseByteArrayIterator = responseByteArrays.iterator(); + } + + public Object getResponse() { + if (!responseiterator.hasNext()) { + responseiterator = responses.iterator(); + } + while (true) { + try { + return responseiterator.next(); + } catch (NoSuchElementException e) { + responseiterator = responses.iterator(); + } + } + } + + public byte[] getResponseByteArray() { + Iterator rbai = responseByteArrayIterator; + if (!rbai.hasNext()) { + rbai = responseByteArrays.iterator(); + responseByteArrayIterator = rbai; + } + while (true) { + try { + byte[] array = rbai.next(); + if (array == null) { + throw new NoSuchElementException(); + } + return array; + } catch (NoSuchElementException e) { + rbai = responseByteArrays.iterator(); + responseByteArrayIterator = rbai; + } + } + } + + private Object nested(BaseBenchState baseBenchState, float scale) { + SplittableRandomGenerator random = + new SplittableRandomGenerator(BaseBenchState.getRandomSeed()); + + Gen> mapGen = + maps().of(getKey(), getValue(10)).ofSizeBetween((int) (20 * scale), (int) (30 * scale)); + + // BaseBenchState.log("map:" + map); + + 
return mapGen.generate(new BenchmarkRandomSource(random)); + } + + private static SolrGen getKey() { + return strings().betweenCodePoints('a', 'z' + 1).ofLengthBetween(1, 257); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private static SolrGen getValue(int depth) { + if (depth == 0) { + return integers().from(1).upToAndIncluding(5000); + } + List values = new ArrayList(4); + values.add( + Pair.of( + 4, maps().of(getKey(), new LazyGen(() -> getValue(depth - 1))).ofSizeBetween(1, 25))); + values.add( + Pair.of( + 4, + new NamedListGen( + maps() + .of(getKey(), new LazyGen(() -> getValue(depth - 1))) + .ofSizeBetween(1, 35)))); + values.add(Pair.of(COUNT, integers().all())); + values.add(Pair.of(16, longs().all())); + values.add(Pair.of(8, doubles().all())); + values.add(Pair.of(5, floats().all())); + values.add(Pair.of(9, dates().all())); + return SolrGenerate.frequency(values); + } + + private static Object numericsContent(int count) { + List topLevel = new ArrayList<>(16); + for (int i = 0; i < count; i++) { + List types = new ArrayList<>(16); + + types.add((short) 2); + types.add((double) 3); + + types.add(-4); + types.add(4); + types.add(42); + + types.add((long) -56547532); + types.add((long) 578675675); + types.add((long) 500000); + topLevel.add(types); + } + + return topLevel; + } + + private static Object defaultContent(int count, float scale) { + NamedList response = new NamedList<>(); + + NamedList header = new NamedList<>(); + header.add("status", 0); + header.add("headerStuff", "values"); + response.add("header", header); + + Docs docs = + docs() + .field("id", integers().incrementing()) + .field( + "facet_s", + strings() + .basicLatinAlphabet() + .maxCardinality(5) + .ofLengthBetween(50, (int) (64 * scale))) + .field( + "facet2_s", + strings().basicLatinAlphabet().maxCardinality(100).ofLengthBetween(12, 16)) + .field( + "facet3_s", + strings().basicLatinAlphabet().maxCardinality(1200).ofLengthBetween(110, 128)) + .field( + "text", + strings() + .basicLatinAlphabet() + .multi((int) (50 * scale)) + .ofLengthBetween(10, (int) (100 * scale))) + .field( + "text2_s", + strings() + .basicLatinAlphabet() + .multi((int) (150 * scale)) + .ofLengthBetween(6, (int) (25 * scale))) + .field( + "text3_t", + strings() + .basicLatinAlphabet() + .multi((int) (1000 * scale)) + .ofLengthBetween(4, (int) (COUNT * scale))) + .field("int_i", integers().all()) + .field("long1_l", longs().all()) + .field("long2_l", longs().all()) + .field("long3_l", longs().all()) + .field("int2_i", integers().allWithMaxCardinality(500)); + + SolrDocumentList docList = new SolrDocumentList(); + for (int i = 0; i < count; i++) { + SolrDocument doc = docs.document(); + docList.add(doc); + } + docList.setNumFound((long) scale); + docList.setMaxScore(1.0f); + docList.setStart(0); + + response.add("docs", docList); + + response.add("int", 42); + response.add("long", 5000_023L); + response.add("date", new Date()); + + return response; + } + } + + @Benchmark + @Timeout(time = 300) + public Object encode(BenchState state, ThreadState threadState) throws Exception { + try (final JavaBinCodec jbc = new JavaBinCodec()) { + jbc.marshal(state.getResponse(), threadState.baos, false); + return threadState.baos; + } finally { + threadState.baos.reset(); + } + } + + @Benchmark + @Timeout(time = 300) + public Object decode(BenchState state) throws Exception { + try (JavaBinCodec jbc = new JavaBinCodec()) { + return jbc.unmarshal(new BytesInputStream(state.getResponseByteArray())); + } + } + + private static Object 
largeStringsContent(int count, float scale) { + Docs docs = + docs() + .field( + "string_s", + strings().basicLatinAlphabet().ofLengthBetween(2000, (int) (2800 * scale))); + + SolrDocumentList docList = new SolrDocumentList(); + for (int i = 0; i < count * scale; i++) { + SolrDocument doc = docs.document(); + docList.add(doc); + } + docList.setNumFound((long) (count * scale)); + docList.setMaxScore(1.0f); + docList.setStart(0); + + return docList; + } + + private static Object manyTokenFieldContent(int count, float scale) { + Docs docs = + docs() + .field( + "string_s", + strings() + .basicLatinAlphabet() + .multi(Math.round(1500 * scale)) + .ofLengthBetween(50, 100)); + SolrDocumentList docList = new SolrDocumentList(); + for (int i = 0; i < count; i++) { + SolrDocument doc = docs.document(); + docList.add(doc); + } + docList.setNumFound(count); + docList.setMaxScore(1.0f); + docList.setStart(0); + + return docList; + } + + private static Object smallStrings(int count, float scale) { + NamedList response = new NamedList<>(); + + NamedList header = new NamedList<>(); + header.add("status", 0); + header.add("headerStuff", "values"); + response.add("header", header); + + Docs docs = + docs() + .field("id", integers().incrementing()) + .field( + "facet_s", + strings() + .basicLatinAlphabet() + .maxCardinality(5) + .ofLengthBetween(10, (int) (25 * scale))) + .field( + "facet2_s", + strings().basicLatinAlphabet().maxCardinality(100).ofLengthBetween(6, 12)) + .field( + "facet3_s", + strings().basicLatinAlphabet().maxCardinality(1200).ofLengthBetween(15, 35)) + .field( + "text", + strings() + .basicLatinAlphabet() + .multi((int) (80 * scale)) + .ofLengthBetween(100, (int) (200 * scale))) + .field( + "text2_s", + strings() + .basicLatinAlphabet() + .multi((int) (800 * scale)) + .ofLengthBetween(50, (int) (150 * scale))); + + SolrDocumentList docList = new SolrDocumentList(); + for (int i = 0; i < count; i++) { + SolrDocument doc = docs.document(); + docList.add(doc); + } + docList.setNumFound((long) scale); + docList.setMaxScore(1.0f); + docList.setStart(0); + + response.add("docs", docList); + + return response; + } + + private static Object veryLargeTextAndStrings(int count, float scale) { + // BaseBenchState.log("count=" + count + ' ' + "scale=" + scale + ' ' + "count * scale=" + count + // * scale); + NamedList response = new NamedList<>(); + + NamedList header = new NamedList<>(); + header.add("status", 0); + header.add("headerStuff", "values"); + response.add("header", header); + + Docs docs = + docs() + .field("id", integers().incrementing()) + .field( + "facet_s", + strings() + .basicLatinAlphabet() + .maxCardinality(5) + .ofLengthBetween(50, (int) (64 * scale))) + .field( + "facet2_s", + strings().basicLatinAlphabet().maxCardinality(100).ofLengthBetween(12, 16)) + .field( + "facet3_s", + strings().basicLatinAlphabet().maxCardinality(1200).ofLengthBetween(110, 128)) + .field( + "text", + strings() + .basicLatinAlphabet() + .multi((int) (800 * scale)) + .ofLengthBetween(400, (int) (500 * scale))) + .field( + "text2_s", + strings() + .basicLatinAlphabet() + .multi((int) (800 * scale)) + .ofLengthBetween(1000, (int) (1500 * scale))) + .field( + "text3_t", + strings() + .basicLatinAlphabet() + .multi((int) (800 * scale)) + .ofLengthBetween(1500, (int) (2000 * scale))) + .field("int_i", integers().all()) + .field("long1_l", longs().all()) + .field("long2_l", longs().all()) + .field("long3_l", longs().all()) + .field("int2_i", integers().allWithMaxCardinality(500)); + + SolrDocumentList docList 
= new SolrDocumentList(); + for (int i = 0; i < count; i++) { + SolrDocument doc = docs.document(); + docList.add(doc); + } + docList.setNumFound(count); + docList.setMaxScore(1.0f); + docList.setStart(0); + + response.add("docs", docList); + + response.add("int", 42); + response.add("long", 5000_023L); + response.add("date", new Date()); + + return response; + } +} diff --git a/solr/benchmark/src/java/org/apache/solr/bench/javabin/package-info.java b/solr/benchmark/src/java/org/apache/solr/bench/javabin/package-info.java new file mode 100644 index 00000000000..acedda15fd5 --- /dev/null +++ b/solr/benchmark/src/java/org/apache/solr/bench/javabin/package-info.java @@ -0,0 +1,19 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Solr JMH benchmarks focused on JavaBinCodec */ +package org.apache.solr.bench.javabin; diff --git a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetTest.java b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetTest.java index 94f679d1ba1..b82872ee570 100644 --- a/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetTest.java +++ b/solr/contrib/analytics/src/test/org/apache/solr/analytics/legacy/facet/LegacyAbstractAnalyticsFacetTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.analytics.legacy.facet; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.ByteArrayInputStream; import java.io.FileNotFoundException; import java.io.IOException; @@ -53,6 +54,7 @@ import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; +@ThreadLeakLingering(linger = 5000) public class LegacyAbstractAnalyticsFacetTest extends SolrTestCaseJ4 { protected static final HashMap defaults = new HashMap<>(); diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/AbstractS3ClientTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/AbstractS3ClientTest.java index 70a63444798..4c033daa8c9 100644 --- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/AbstractS3ClientTest.java +++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/AbstractS3ClientTest.java @@ -23,15 +23,16 @@ import org.apache.commons.io.IOUtils; import org.apache.solr.SolrTestCaseJ4; import org.junit.After; +import org.junit.AfterClass; import org.junit.Before; -import org.junit.ClassRule; +import org.junit.Rule; /** Abstract class for test with S3Mock. 
*/ public class AbstractS3ClientTest extends SolrTestCaseJ4 { private static final String BUCKET_NAME = "test-bucket"; - @ClassRule + @Rule public static final S3MockRule S3_MOCK_RULE = S3MockRule.builder().silent().withInitialBuckets(BUCKET_NAME).build(); @@ -52,6 +53,11 @@ public void tearDownClient() { client.close(); } + @AfterClass + public static void afterS3OutputStreamTest() { + interruptThreadsOnTearDown(); // not closed properly + } + /** * Helper method to push a string to S3. * diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java index 2a5828c1c6a..73af8d50ae0 100644 --- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java +++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java @@ -39,7 +39,7 @@ import org.apache.solr.cloud.api.collections.AbstractBackupRepositoryTest; import org.apache.solr.common.util.NamedList; import org.apache.solr.core.backup.repository.BackupRepository; -import org.junit.ClassRule; +import org.junit.AfterClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -53,10 +53,15 @@ public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest { @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); - @ClassRule + @Rule public static final S3MockRule S3_MOCK_RULE = S3MockRule.builder().silent().withInitialBuckets(BUCKET_NAME).build(); + @AfterClass + public static void afterS3OutputStreamTest() { + interruptThreadsOnTearDown(); // not closed properly + } + /** * Sent by {@link org.apache.solr.handler.ReplicationHandler}, ensure we don't choke on the bare * URI. diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IncrementalBackupTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IncrementalBackupTest.java index 60ee8a84325..1b5ce3aedda 100644 --- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IncrementalBackupTest.java +++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IncrementalBackupTest.java @@ -17,12 +17,11 @@ package org.apache.solr.s3; -import com.adobe.testing.s3mock.junit4.S3MockRule; import java.lang.invoke.MethodHandles; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.cloud.api.collections.AbstractIncrementalBackupTest; +import org.junit.AfterClass; import org.junit.BeforeClass; -import org.junit.ClassRule; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.awssdk.regions.Region; @@ -35,9 +34,8 @@ public class S3IncrementalBackupTest extends AbstractIncrementalBackupTest { private static final String BUCKET_NAME = S3IncrementalBackupTest.class.getSimpleName(); - @ClassRule - public static final S3MockRule S3_MOCK_RULE = - S3MockRule.builder().silent().withInitialBuckets(BUCKET_NAME).build(); + public static final S3Mock S3_MOCK = + S3Mock.builder().silent().withInitialBuckets(BUCKET_NAME).build(); public static final String SOLR_XML = "\n" @@ -84,20 +82,28 @@ public static void ensureCompatibleLocale() { } @BeforeClass - public static void setupClass() throws Exception { + public static void beforeS3IncrementalBackupTest() throws Exception { System.setProperty("aws.accessKeyId", "foo"); System.setProperty("aws.secretAccessKey", "bar"); + S3_MOCK.start(); + configureCluster(NUM_SHARDS) // nodes .addConfig("conf1", getFile("conf/solrconfig.xml").getParentFile().toPath()) .withSolrXml( 
SOLR_XML .replace("BUCKET", BUCKET_NAME) .replace("REGION", Region.US_EAST_1.id()) - .replace("ENDPOINT", "http://localhost:" + S3_MOCK_RULE.getHttpPort())) + .replace("ENDPOINT", "http://localhost:" + S3_MOCK.getHttpPort())) .configure(); } + @AfterClass + public static void afterS3IncrementalBackupTest() throws Exception { + cluster.shutdown(); + S3_MOCK.stop(); + } + @Override public String getCollectionNamePrefix() { return "backuprestore"; diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3Mock.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3Mock.java new file mode 100644 index 00000000000..bc6437089e0 --- /dev/null +++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3Mock.java @@ -0,0 +1,36 @@ +package org.apache.solr.s3; + +import com.adobe.testing.s3mock.testsupport.common.S3MockStarter; +import java.util.Map; + +public class S3Mock extends S3MockStarter { + + /** Creates an instance with the default configuration. */ + public S3Mock() { + super(null); + } + + public static Builder builder() { + return new Builder(); + } + + private S3Mock(final Map properties) { + super(properties); + } + + public void start() { + super.start(); + } + + public void stop() { + super.stop(); + } + + public static class Builder extends S3MockStarter.BaseBuilder { + + @Override + public S3Mock build() { + return new S3Mock(arguments); + } + } +} diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3OutputStreamTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3OutputStreamTest.java index 3fcd5a3d781..8bcfa78ec78 100644 --- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3OutputStreamTest.java +++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3OutputStreamTest.java @@ -24,8 +24,9 @@ import org.apache.commons.lang3.RandomStringUtils; import org.apache.solr.SolrTestCaseJ4; import org.junit.After; +import org.junit.AfterClass; import org.junit.Before; -import org.junit.ClassRule; +import org.junit.Rule; import org.junit.Test; import software.amazon.awssdk.services.s3.S3Client; @@ -33,7 +34,7 @@ public class S3OutputStreamTest extends SolrTestCaseJ4 { private static final String BUCKET = S3OutputStreamTest.class.getSimpleName(); - @ClassRule + @Rule public static final S3MockRule S3_MOCK_RULE = S3MockRule.builder().silent().withInitialBuckets(BUCKET).build(); @@ -49,6 +50,11 @@ public void tearDownClient() { s3.close(); } + @AfterClass + public static void afterS3OutputStreamTest() { + interruptThreadsOnTearDown(); // not closed properly + } + /** * Basic check writing content byte-by-byte. They should be kept in the internal buffer and * flushed to S3 only once. 
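The S3Mock helper above wraps S3MockStarter so the tests in this patch can drive the mock S3 endpoint's lifecycle explicitly with start() and stop(), instead of through the JUnit @ClassRule used before. A minimal usage sketch, assuming only the builder(), start(), stop(), and getHttpPort() members shown in this patch; the test class, field names, and bucket name below are hypothetical:

    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    public class ExampleS3MockLifecycleTest {

      // Built once for the class; started and stopped explicitly rather than via @ClassRule.
      private static final S3Mock S3_MOCK =
          S3Mock.builder().silent().withInitialBuckets("example-bucket").build();

      // Where test clients would point once the mock endpoint is up.
      private static String endpoint;

      @BeforeClass
      public static void startS3Mock() {
        S3_MOCK.start(); // bring the endpoint up before any S3 clients are created
        // Mirrors the ENDPOINT substitution performed by S3IncrementalBackupTest above.
        endpoint = "http://localhost:" + S3_MOCK.getHttpPort();
      }

      @AfterClass
      public static void stopS3Mock() {
        S3_MOCK.stop(); // tear the endpoint down once every test in the class has run
      }
    }
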
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java index 1066c3ef7e7..7a8374a4d52 100644 --- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java +++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java @@ -28,8 +28,6 @@ import java.util.Properties; import java.util.Set; import java.util.function.Supplier; - -import org.apache.commons.io.output.ByteArrayOutputStream; import org.apache.lucene.search.TotalHits.Relation; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; @@ -45,6 +43,7 @@ import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.BytesOutputStream; import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.JavaBinCodec; @@ -74,7 +73,8 @@ public class EmbeddedSolrServer extends SolrClient { private boolean containerIsLocal = false; public enum RequestWriterSupplier { - JavaBin(() -> new BinaryRequestWriter()), XML(() -> new RequestWriter()); + JavaBin(() -> new BinaryRequestWriter()), + XML(() -> new RequestWriter()); private Supplier supplier; @@ -90,7 +90,7 @@ public RequestWriter newRequestWriter() { /** * Create an EmbeddedSolrServer using a given solr home directory * - * @param solrHome the solr home directory + * @param solrHome the solr home directory * @param defaultCoreName the core to route requests to by default (optional) */ public EmbeddedSolrServer(Path solrHome, String defaultCoreName) { @@ -101,7 +101,7 @@ public EmbeddedSolrServer(Path solrHome, String defaultCoreName) { /** * Create an EmbeddedSolrServer using a NodeConfig * - * @param nodeConfig the configuration + * @param nodeConfig the configuration * @param defaultCoreName the core to route requests to by default (optional) */ public EmbeddedSolrServer(NodeConfig nodeConfig, String defaultCoreName) { @@ -114,9 +114,7 @@ private static CoreContainer load(CoreContainer cc) { return cc; } - /** - * Create an EmbeddedSolrServer wrapping a particular SolrCore - */ + /** Create an EmbeddedSolrServer wrapping a particular SolrCore */ public EmbeddedSolrServer(SolrCore core) { this(core.getCoreContainer(), core.getName()); } @@ -125,7 +123,7 @@ public EmbeddedSolrServer(SolrCore core) { * Create an EmbeddedSolrServer wrapping a CoreContainer. * * @param coreContainer the core container - * @param coreName the core to route requests to by default (optional) + * @param coreName the core to route requests to by default (optional) */ public EmbeddedSolrServer(CoreContainer coreContainer, String coreName) { this(coreContainer, coreName, RequestWriterSupplier.JavaBin); @@ -134,15 +132,12 @@ public EmbeddedSolrServer(CoreContainer coreContainer, String coreName) { /** * Create an EmbeddedSolrServer wrapping a CoreContainer. 
* - * @param coreContainer - * the core container - * @param coreName - * the core to route requests to by default - * @param supplier - * the supplier used to create a {@link RequestWriter} + * @param coreContainer the core container + * @param coreName the core to route requests to by default + * @param supplier the supplier used to create a {@link RequestWriter} */ - public EmbeddedSolrServer(CoreContainer coreContainer, String coreName, - RequestWriterSupplier supplier) { + public EmbeddedSolrServer( + CoreContainer coreContainer, String coreName, RequestWriterSupplier supplier) { if (coreContainer == null) { throw new NullPointerException("CoreContainer instance required"); } @@ -156,7 +151,8 @@ public EmbeddedSolrServer(CoreContainer coreContainer, String coreName, // It *should* be able to convert the response directly into a named list. @Override - public NamedList request(SolrRequest request, String coreName) throws SolrServerException, IOException { + public NamedList request(SolrRequest request, String coreName) + throws SolrServerException, IOException { String path = request.getPath(); if (path == null || !path.startsWith("/")) { @@ -166,7 +162,8 @@ public NamedList request(SolrRequest request, String coreName) throws SolrRequestHandler handler = coreContainer.getRequestHandler(path); if (handler != null) { try { - SolrQueryRequest req = _parser.buildRequestFrom(null, request.getParams(), getContentStreams(request)); + SolrQueryRequest req = + _parser.buildRequestFrom(null, request.getParams(), getContentStreams(request)); req.getContext().put("httpMethod", request.getMethod().name()); req.getContext().put(PATH, path); SolrQueryResponse resp = new SolrQueryResponse(); @@ -183,7 +180,8 @@ public NamedList request(SolrRequest request, String coreName) throws if (coreName == null) { coreName = this.coreName; if (coreName == null) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + throw new SolrException( + SolrException.ErrorCode.BAD_REQUEST, "No core specified on request and no default core has been set."); } } @@ -247,13 +245,15 @@ public void writeResults(ResultContext ctx, JavaBinCodec codec) throws IOExcepti } }; + try (BytesOutputStream out = new BytesOutputStream(128)) { + createJavaBinCodec(callback, resolver) + .setWritableDocFields(resolver) + .marshal(rsp.getValues(), out, true); - try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - createJavaBinCodec(callback, resolver).setWritableDocFields(resolver).marshal(rsp.getValues(), out); - - try (InputStream in = out.toInputStream()) { + try (InputStream in = out.inputStream()) { @SuppressWarnings({"unchecked"}) - NamedList resolved = (NamedList) new JavaBinCodec(resolver).unmarshal(in); + NamedList resolved = + (NamedList) new JavaBinCodec(resolver).unmarshal(in); return resolved; } } @@ -300,46 +300,27 @@ private Set getContentStreams(SolrRequest request) throws IOEx final byte[] buf = baos.toByteArray(); if (buf.length > 0) { - return Collections.singleton(new ContentStreamBase() { + return Collections.singleton( + new ContentStreamBase() { - @Override - public InputStream getStream() throws IOException { - return new ByteArrayInputStream(buf); - } + @Override + public InputStream getStream() throws IOException { + return new ByteArrayInputStream(buf); + } - @Override - public String getContentType() { - return cType; - } - }); + @Override + public String getContentType() { + return cType; + } + }); } return null; } - private JavaBinCodec createJavaBinCodec(final StreamingResponseCallback 
callback, final BinaryResponseWriter.Resolver resolver) { - return new JavaBinCodec(resolver) { - - @Override - public void writeSolrDocument(SolrDocument doc) { - callback.streamSolrDocument(doc); - //super.writeSolrDocument( doc, fields ); - } - - @Override - public void writeSolrDocumentList(SolrDocumentList docs) throws IOException { - if (docs.size() > 0) { - SolrDocumentList tmp = new SolrDocumentList(); - tmp.setMaxScore(docs.getMaxScore()); - tmp.setNumFound(docs.getNumFound()); - tmp.setStart(docs.getStart()); - docs = tmp; - } - callback.streamDocListInfo(docs.getNumFound(), docs.getStart(), docs.getMaxScore()); - super.writeSolrDocumentList(docs); - } - - }; + private static JavaBinCodec createJavaBinCodec( + final StreamingResponseCallback callback, final BinaryResponseWriter.Resolver resolver) { + return new EmbeddedJavaBinCodec(resolver, callback); } private static void checkForExceptions(SolrQueryResponse rsp) throws Exception { @@ -349,12 +330,9 @@ private static void checkForExceptions(SolrQueryResponse rsp) throws Exception { } throw new SolrServerException(rsp.getException()); } - } - /** - * Closes any resources created by this instance - */ + /** Closes any resources created by this instance */ @Override public void close() throws IOException { if (containerIsLocal) { @@ -370,4 +348,34 @@ public void close() throws IOException { public CoreContainer getCoreContainer() { return coreContainer; } + + private static class EmbeddedJavaBinCodec extends JavaBinCodec { + + private final StreamingResponseCallback callback; + + public EmbeddedJavaBinCodec( + BinaryResponseWriter.Resolver resolver, StreamingResponseCallback callback) { + super(resolver); + this.callback = callback; + } + + @Override + public void writeSolrDocument(SolrDocument doc) { + callback.streamSolrDocument(doc); + // super.writeSolrDocument( doc, fields ); + } + + @Override + public void writeSolrDocumentList(SolrDocumentList docs) throws IOException { + if (docs.size() > 0) { + SolrDocumentList tmp = new SolrDocumentList(); + tmp.setMaxScore(docs.getMaxScore()); + tmp.setNumFound(docs.getNumFound()); + tmp.setStart(docs.getStart()); + docs = tmp; + } + callback.streamDocListInfo(docs.getNumFound(), docs.getStart(), docs.getMaxScore()); + super.writeSolrDocumentList(docs); + } + } } diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java index 5f481c48e92..0a752419e79 100644 --- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java +++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java @@ -270,7 +270,7 @@ public JettySolrRunner(String solrHome, Properties nodeProperties, JettyConfig c } catch (Exception e) { throw new RuntimeException(e); } - setProxyPort(proxy.getListenPort()); + proxyPort = proxy.getListenPort(); } this.init(this.config.port); @@ -299,6 +299,7 @@ private void init(int port) { final SslContextFactory.Server sslcontext = SSLConfig.createContextFactory(config.sslConfig); HttpConfiguration configuration = new HttpConfiguration(); + configuration.setOutputBufferSize(32 * 1024); // jetty 10/11 default ServerConnector connector; if (sslcontext != null) { configuration.setSecureScheme("https"); @@ -318,6 +319,8 @@ private void init(int port) { connector.setDefaultProtocol(sslConnectionFactory.getProtocol()); HTTP2ServerConnectionFactory http2ConnectionFactory = new HTTP2ServerConnectionFactory(configuration); + 
http2ConnectionFactory.setInputBufferSize(16384 + 9); // Jetty 10/11 default - max frame len + head len + http2ConnectionFactory.setMaxConcurrentStreams(512); ALPNServerConnectionFactory alpn = new ALPNServerConnectionFactory( http2ConnectionFactory.getProtocol(), @@ -340,11 +343,17 @@ private void init(int port) { connector.setHost("127.0.0.1"); connector.setIdleTimeout(THREAD_POOL_MAX_IDLE_TIME_MS); + server.setConnectors(new Connector[] {connector}); server.setSessionIdManager(new DefaultSessionIdManager(server, new Random())); } else { HttpConfiguration configuration = new HttpConfiguration(); - ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(configuration), new HTTP2CServerConnectionFactory(configuration)); + configuration.setOutputBufferSize(32 * 1024); // jetty 10/11 default + HTTP2CServerConnectionFactory http2ConnectionFactory = new HTTP2CServerConnectionFactory(configuration); + ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(configuration), http2ConnectionFactory); + http2ConnectionFactory.setInputBufferSize(16384 + 9); // Jetty 10/11 default - max frame len + head len + http2ConnectionFactory.setMaxConcurrentStreams(512); + connector.setReuseAddress(true); connector.setPort(port); connector.setHost("127.0.0.1"); diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java index 87d1dc1350d..25c612d3ea3 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/Assign.java @@ -180,7 +180,7 @@ private static int defaultCounterValue(DocCollection collection, boolean newColl if (newCollection) return 0; int defaultValue; - if (collection.getSlice(shard) != null && collection.getSlice(shard).getReplicas().isEmpty()) { + if (collection == null || collection.getSlice(shard) != null && collection.getSlice(shard).getReplicas().isEmpty()) { return 0; } else { defaultValue = collection.getReplicas().size() * 2; @@ -201,20 +201,20 @@ private static int defaultCounterValue(DocCollection collection, boolean newColl return defaultValue; } - public static String buildSolrCoreName(DistribStateManager stateManager, DocCollection collection, String shard, Replica.Type type, boolean newCollection) { - Slice slice = collection.getSlice(shard); + public static String buildSolrCoreName(DistribStateManager stateManager, String collectionName, DocCollection collection, String shard, Replica.Type type, boolean newCollection) { + int defaultValue = defaultCounterValue(collection, newCollection, shard); - int replicaNum = incAndGetId(stateManager, collection.getName(), defaultValue); - String coreName = buildSolrCoreName(collection.getName(), shard, type, replicaNum); - while (existCoreName(coreName, slice)) { - replicaNum = incAndGetId(stateManager, collection.getName(), defaultValue); - coreName = buildSolrCoreName(collection.getName(), shard, type, replicaNum); + int replicaNum = incAndGetId(stateManager, collectionName, defaultValue); + String coreName = buildSolrCoreName(collectionName, shard, type, replicaNum); + while (collection != null && existCoreName(coreName, collection.getSlice(shard))) { + replicaNum = incAndGetId(stateManager, collectionName, defaultValue); + coreName = buildSolrCoreName(collectionName, shard, type, replicaNum); } return coreName; } public static String buildSolrCoreName(DistribStateManager stateManager, DocCollection collection, 
String shard, Replica.Type type) { - return buildSolrCoreName(stateManager, collection, shard, type, false); + return buildSolrCoreName(stateManager, collection.getName(), collection, shard, type, false); } private static boolean existCoreName(String coreName, Slice slice) { diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java index 04cb7b846ed..45211d4513a 100644 --- a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java @@ -231,8 +231,8 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList { - Iterator> iterator = rb.req.getCore().getCancellableQueryTracker().getActiveQueriesGenerated(); + rb.rsp.add( + "taskList", + (MapWriter) + ew -> { + Iterator> iterator = + rb.req.getCore().getCancellableQueryTracker().getActiveQueriesGenerated(); - while (iterator.hasNext()) { + while (iterator.hasNext()) { Map.Entry entry = iterator.next(); ew.put(entry.getKey(), entry.getValue()); - } - }); + } + }); + } + + @Override + @SuppressWarnings("unchecked") + public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { + if (!shouldProcess) { + return; } - @Override - @SuppressWarnings("unchecked") - public void handleResponses(ResponseBuilder rb, ShardRequest sreq) { - if (!shouldProcess) { - return; - } + NamedList resultList = new NamedList<>(); - NamedList resultList = new NamedList<>(); + for (ShardResponse r : sreq.responses) { - for (ShardResponse r : sreq.responses) { + if (rb.getTaskStatusCheckUUID() != null) { + boolean isTaskActiveOnShard = r.getSolrResponse().getResponse().getBooleanArg("taskStatus"); - if (rb.getTaskStatusCheckUUID() != null) { - boolean isTaskActiveOnShard = r.getSolrResponse().getResponse().getBooleanArg("taskStatus"); - - if (isTaskActiveOnShard) { - rb.rsp.getValues().add("taskStatus", "id:" + rb.getTaskStatusCheckUUID() + ", status: active"); - return; - } else { - continue; - } - } - - LinkedHashMap result = (LinkedHashMap) r.getSolrResponse() - .getResponse().get("taskList"); - - Iterator> iterator = result.entrySet().iterator(); + if (isTaskActiveOnShard) { + rb.rsp + .getValues() + .add("taskStatus", "id:" + rb.getTaskStatusCheckUUID() + ", status: active"); + return; + } else { + continue; + } + } - while (iterator.hasNext()) { - Map.Entry entry = iterator.next(); + Map result = + (Map) r.getSolrResponse().getResponse().get("taskList"); - resultList.add(entry.getKey(), entry.getValue()); - } - } + Iterator> iterator = result.entrySet().iterator(); - if (rb.getTaskStatusCheckUUID() != null) { - // We got here with the specific taskID check being specified -- this means that the taskID was not - // found in active tasks on any shard - rb.rsp.getValues().add("taskStatus", "id:" + rb.getTaskStatusCheckUUID() + ", status: inactive"); - return; - } + while (iterator.hasNext()) { + Map.Entry entry = iterator.next(); - rb.rsp.getValues().add("taskList", resultList); + resultList.add(entry.getKey(), entry.getValue()); + } } - @Override - public String getDescription() { - return "Responsible for listing all active cancellable tasks and also supports checking the status of " + - "a particular task"; + if (rb.getTaskStatusCheckUUID() != null) { + // We got here with the specific taskID check being specified -- this means that the taskID + // was not + // found in active tasks on any shard + rb.rsp + 
.getValues() + .add("taskStatus", "id:" + rb.getTaskStatusCheckUUID() + ", status: inactive"); + return; } - @Override - public Category getCategory() { - return Category.OTHER; - } + rb.rsp.getValues().add("taskList", resultList); + } + + @Override + public String getDescription() { + return "Responsible for listing all active cancellable tasks and also supports checking the status of " + + "a particular task"; + } + + @Override + public Category getCategory() { + return Category.OTHER; + } } diff --git a/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java b/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java index 72380fb4d74..c1f3e3adbd7 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java +++ b/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java @@ -17,6 +17,9 @@ package org.apache.solr.handler.export; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; + import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; @@ -27,7 +30,6 @@ import java.util.List; import java.util.Map; import java.util.TreeSet; - import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedDocValues; @@ -78,9 +80,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Collections.singletonList; -import static java.util.Collections.singletonMap; - /** * Prepares and writes the documents requested by /export requests * @@ -184,8 +183,8 @@ public void write(OutputStream os) throws IOException { private void _write(OutputStream os) throws IOException { QueryResponseWriter rw = req.getCore().getResponseWriters().get(wt); if (rw instanceof BinaryResponseWriter) { - //todo add support for other writers after testing - writer = new JavaBinCodec(os, null); + // todo add support for other writers after testing + writer = new JavaBinCodec(os, null, false); } else { respWriter = new OutputStreamWriter(os, StandardCharsets.UTF_8); writer = JSONResponseWriter.getPushWriter(respWriter, req, res); @@ -214,7 +213,7 @@ private void _write(OutputStream os) throws IOException { return; } - if (sort != null && sort.needsScores()) { + if (sort.needsScores()) { writeException((new IOException(new SyntaxError("Scoring is not currently supported with xsort."))), writer, true); return; } diff --git a/solr/core/src/java/org/apache/solr/handler/export/ExportWriterStream.java b/solr/core/src/java/org/apache/solr/handler/export/ExportWriterStream.java index 3d0b3b13ada..3bb3c29ba78 100644 --- a/solr/core/src/java/org/apache/solr/handler/export/ExportWriterStream.java +++ b/solr/core/src/java/org/apache/solr/handler/export/ExportWriterStream.java @@ -24,7 +24,6 @@ import java.util.Map; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.TimeoutException; - import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.ComparatorOrder; import org.apache.solr.client.solrj.io.comp.FieldComparator; @@ -45,8 +44,11 @@ /** * Stream implementation that helps supporting 'expr' streaming in export writer. - *

<p>Note: this class is made public only to allow access from {@link org.apache.solr.handler.ExportHandler}, - * it should be treated as an internal detail of implementation. + * + * <p>
Note: this class is made public only to allow access from {@link + * org.apache.solr.handler.ExportHandler}, it should be treated as an internal detail of + * implementation. + * * @lucene.experimental */ public class ExportWriterStream extends TupleStream implements Expressible { @@ -59,18 +61,18 @@ public class ExportWriterStream extends TupleStream implements Expressible { ExportBuffers exportBuffers; ExportBuffers.Buffer buffer; - private static final class TupleEntryWriter implements EntryWriter { + private static final class TupleEntryWriter extends EntryWriter { Tuple tuple; @Override public EntryWriter put(CharSequence k, Object v) throws IOException { if (v instanceof IteratorWriter) { List lst = new ArrayList<>(); - ((IteratorWriter)v).toList(lst); + ((IteratorWriter) v).toList(lst); v = lst; } else if (v instanceof MapWriter) { Map map = new HashMap<>(); - ((MapWriter)v).toMap(map); + ((MapWriter) v).toMap(map); v = map; } tuple.put(k.toString(), v); @@ -83,8 +85,8 @@ public ExportWriterStream(StreamExpression expression, StreamFactory factory) th } /** - * NOTE: this context must contain an instance of {@link ExportBuffers} under the - * {@link ExportBuffers#EXPORT_BUFFERS_KEY} key. + * NOTE: this context must contain an instance of {@link ExportBuffers} under the {@link + * ExportBuffers#EXPORT_BUFFERS_KEY} key. */ @Override public void setStreamContext(StreamContext context) { @@ -103,7 +105,8 @@ private StreamComparator parseComp(String sort) throws IOException { for (int i = 0; i < sorts.length; i++) { String s = sorts[i]; - String[] spec = s.trim().split("\\s+"); //This should take into account spaces in the sort spec. + String[] spec = + s.trim().split("\\s+"); // This should take into account spaces in the sort spec. if (spec.length != 2) { throw new IOException("Invalid sort spec:" + s); @@ -112,7 +115,12 @@ private StreamComparator parseComp(String sort) throws IOException { String fieldName = spec[0].trim(); String order = spec[1].trim(); - comps[i] = new FieldComparator(fieldName, order.equalsIgnoreCase("asc") ? ComparatorOrder.ASCENDING : ComparatorOrder.DESCENDING); + comps[i] = + new FieldComparator( + fieldName, + order.equalsIgnoreCase("asc") + ? ComparatorOrder.ASCENDING + : ComparatorOrder.DESCENDING); } if (comps.length > 1) { @@ -141,15 +149,17 @@ public Tuple read() throws IOException { try { buffer.outDocsIndex = ExportBuffers.Buffer.EMPTY; - //log.debug("--- ews exchange empty buffer {}", buffer); + // log.debug("--- ews exchange empty buffer {}", buffer); boolean exchanged = false; while (!exchanged) { try { long startExchangeBuffers = System.nanoTime(); exportBuffers.exchangeBuffers(); long endExchangeBuffers = System.nanoTime(); - if(log.isDebugEnabled()) { - log.debug("Waited for reader thread:{}", Long.toString(((endExchangeBuffers - startExchangeBuffers) / 1000000))); + if (log.isDebugEnabled()) { + log.debug( + "Waited for reader thread:{}", + Long.toString(((endExchangeBuffers - startExchangeBuffers) / 1000000))); } exchanged = true; } catch (TimeoutException e) { @@ -193,7 +203,7 @@ public Tuple read() throws IOException { res = Tuple.EOF(); } else { pos = buffer.outDocsIndex; - index = -1; //restart index. + index = -1; // restart index. 
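+ // pos counts down the documents remaining in the freshly exchanged buffer, while
+ // index walks forward from -1 as each SortDoc is converted into a Tuple below.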
log.debug("--- ews new pos={}", pos); } } @@ -209,7 +219,8 @@ public Tuple read() throws IOException { SortDoc sortDoc = buffer.outDocs[++index]; tupleEntryWriter.tuple = new Tuple(); - exportBuffers.exportWriter.writeDoc(sortDoc, exportBuffers.leaves, tupleEntryWriter, exportBuffers.exportWriter.fieldWriters); + exportBuffers.exportWriter.writeDoc( + sortDoc, exportBuffers.leaves, tupleEntryWriter, exportBuffers.exportWriter.fieldWriters); pos--; return tupleEntryWriter.tuple; } diff --git a/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java index 5671359f707..cdec0bffe30 100644 --- a/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java +++ b/solr/core/src/java/org/apache/solr/handler/loader/JavabinLoader.java @@ -24,7 +24,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; - import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.common.SolrException; @@ -34,8 +33,6 @@ import org.apache.solr.common.params.UpdateParams; import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ContentStreamBase; -import org.apache.solr.common.util.DataInputInputStream; -import org.apache.solr.common.util.FastInputStream; import org.apache.solr.common.util.JavaBinCodec; import org.apache.solr.common.util.NamedList; import org.apache.solr.request.SolrQueryRequest; @@ -63,63 +60,76 @@ public JavabinLoader(ContentStreamLoader contentStreamLoader) { } @Override - public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream, UpdateRequestProcessor processor) throws Exception { + public void load( + SolrQueryRequest req, + SolrQueryResponse rsp, + ContentStream stream, + UpdateRequestProcessor processor) + throws Exception { InputStream is = null; try { is = stream.getStream(); parseAndLoadDocs(req, rsp, is, processor); } finally { - if(is != null) { + if (is != null) { is.close(); } } } - - private void parseAndLoadDocs(final SolrQueryRequest req, SolrQueryResponse rsp, InputStream stream, - final UpdateRequestProcessor processor) throws IOException { + + private void parseAndLoadDocs( + final SolrQueryRequest req, + SolrQueryResponse rsp, + InputStream stream, + final UpdateRequestProcessor processor) + throws IOException { if (req.getParams().getBool("multistream", false)) { handleMultiStream(req, rsp, stream, processor); return; } UpdateRequest update = null; - JavaBinUpdateRequestCodec.StreamingUpdateHandler handler = new JavaBinUpdateRequestCodec.StreamingUpdateHandler() { - private AddUpdateCommand addCmd = null; + JavaBinUpdateRequestCodec.StreamingUpdateHandler handler = + new JavaBinUpdateRequestCodec.StreamingUpdateHandler() { + private AddUpdateCommand addCmd = null; - @Override - public void update(SolrInputDocument document, UpdateRequest updateRequest, Integer commitWithin, Boolean overwrite) { - if (document == null) { - return; - } - if (addCmd == null) { - addCmd = getAddCommand(req, updateRequest.getParams()); - } - addCmd.solrDoc = document; - if (commitWithin != null) { - addCmd.commitWithin = commitWithin; - } - if (overwrite != null) { - addCmd.overwrite = overwrite; - } + @Override + public void update( + SolrInputDocument document, + UpdateRequest updateRequest, + Integer commitWithin, + Boolean overwrite) { + if (document == null) { + return; + } + if (addCmd == null) { + addCmd = getAddCommand(req, 
updateRequest.getParams()); + } + addCmd.solrDoc = document; + if (commitWithin != null) { + addCmd.commitWithin = commitWithin; + } + if (overwrite != null) { + addCmd.overwrite = overwrite; + } - if (updateRequest.isLastDocInBatch()) { - // this is a hint to downstream code that indicates we've sent the last doc in a batch - addCmd.isLastDocInBatch = true; - } + if (updateRequest.isLastDocInBatch()) { + // this is a hint to downstream code that indicates we've sent the last doc in a batch + addCmd.isLastDocInBatch = true; + } + + try { + processor.processAdd(addCmd); + addCmd.clear(); + } catch (IOException e) { + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, "ERROR adding document " + document, e); + } + } + }; - try { - processor.processAdd(addCmd); - addCmd.clear(); - } catch (IOException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "ERROR adding document " + document, e); - } - } - }; - FastInputStream in = FastInputStream.wrap(stream); for (; ; ) { - if (in.peek() == -1) return; try { - update = new JavaBinUpdateRequestCodec() - .unmarshal(in, handler); + update = new JavaBinUpdateRequestCodec().unmarshal(stream, handler); } catch (EOFException e) { break; // this is expected } @@ -129,43 +139,48 @@ public void update(SolrInputDocument document, UpdateRequest updateRequest, Inte } } - private void handleMultiStream(SolrQueryRequest req, SolrQueryResponse rsp, InputStream stream, UpdateRequestProcessor processor) + private void handleMultiStream( + SolrQueryRequest req, + SolrQueryResponse rsp, + InputStream stream, + UpdateRequestProcessor processor) throws IOException { - FastInputStream in = FastInputStream.wrap(stream); + SolrParams old = req.getParams(); - try (JavaBinCodec jbc = new JavaBinCodec() { - SolrParams params; - AddUpdateCommand addCmd = null; - - @Override - public List readIterator(DataInputInputStream fis) throws IOException { - while (true) { - Object o = readVal(fis); - if (o == END_OBJ) break; - if (o instanceof NamedList) { - params = ((NamedList) o).toSolrParams(); - } else { - try { - if (o instanceof byte[]) { - if (params != null) req.setParams(params); - byte[] buf = (byte[]) o; - contentStreamLoader.load(req, rsp, new ContentStreamBase.ByteArrayStream(buf, null), processor); + try (JavaBinCodec jbc = + new JavaBinCodec() { + SolrParams params; + AddUpdateCommand addCmd = null; + + @Override + public List readIterator(JavaBinCodec javaBinCodec) throws IOException { + while (true) { + Object o = readVal(this); + if (o == END_OBJ) break; + if (o instanceof NamedList) { + params = ((NamedList) o).toSolrParams(); } else { - throw new RuntimeException("unsupported type "); + try { + if (o instanceof byte[]) { + if (params != null) req.setParams(params); + byte[] buf = (byte[]) o; + contentStreamLoader.load( + req, rsp, new ContentStreamBase.ByteArrayStream(buf, null), processor); + } else { + throw new RuntimeException("unsupported type "); + } + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + params = null; + req.setParams(old); + } } - } catch (Exception e) { - throw new RuntimeException(e); - } finally { - params = null; - req.setParams(old); } + return Collections.emptyList(); } - } - return Collections.emptyList(); - } - - }) { - jbc.unmarshal(in); + }) { + jbc.unmarshal(stream); } } @@ -176,18 +191,19 @@ private AddUpdateCommand getAddCommand(SolrQueryRequest req, SolrParams params) return addCmd; } - private void delete(SolrQueryRequest req, UpdateRequest update, UpdateRequestProcessor 
processor) throws IOException { + private void delete(SolrQueryRequest req, UpdateRequest update, UpdateRequestProcessor processor) + throws IOException { SolrParams params = update.getParams(); DeleteUpdateCommand delcmd = new DeleteUpdateCommand(req); - if(params != null) { + if (params != null) { delcmd.commitWithin = params.getInt(UpdateParams.COMMIT_WITHIN, -1); } - - if(update.getDeleteByIdMap() != null) { - Set>> entries = update.getDeleteByIdMap().entrySet(); - for (Entry> e : entries) { + + if (update.getDeleteByIdMap() != null) { + Set>> entries = update.getDeleteByIdMap().entrySet(); + for (Entry> e : entries) { delcmd.id = e.getKey(); - Map map = e.getValue(); + Map map = e.getValue(); if (map != null) { Long version = (Long) map.get("ver"); if (version != null) { @@ -204,8 +220,8 @@ private void delete(SolrQueryRequest req, UpdateRequest update, UpdateRequestPro delcmd.clear(); } } - - if(update.getDeleteQuery() != null) { + + if (update.getDeleteQuery() != null) { for (String s : update.getDeleteQuery()) { delcmd.query = s; processor.processDelete(delcmd); diff --git a/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java index 0295ad0fc87..c253725c7fd 100644 --- a/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java +++ b/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java @@ -170,7 +170,7 @@ void processUpdate(Reader reader) throws IOException { handleSplitMode(split, f, reader); return; } - parser = new JSONParser(reader); + parser = new JSONParser(reader, new char[16834]); int ev = parser.nextEvent(); while (ev != JSONParser.EOF) { @@ -247,7 +247,7 @@ private void handleSplitMode(String split, String[] fields, final Reader reader) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Raw data can be stored only if split=/"); parser = new RecordingJSONParser(reader); } else { - parser = new JSONParser(reader); + parser = new JSONParser(reader, new char[16834]); } diff --git a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java index 551959c9bab..f2a20126846 100644 --- a/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java +++ b/solr/core/src/java/org/apache/solr/handler/loader/XMLLoader.java @@ -19,6 +19,7 @@ import static org.apache.solr.common.params.CommonParams.ID; import static org.apache.solr.common.params.CommonParams.NAME; +import com.google.common.collect.Lists; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -35,8 +36,6 @@ import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; -import com.google.common.collect.Lists; -import org.apache.commons.io.IOUtils; import org.apache.solr.common.EmptyEntityResolver; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -46,6 +45,7 @@ import org.apache.solr.common.params.UpdateParams; import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ContentStreamBase; +import org.apache.solr.common.util.IOUtils; import org.apache.solr.common.util.StrUtils; import org.apache.solr.common.util.XMLErrorLogger; import org.apache.solr.handler.RequestHandlerUtils; @@ -85,9 +85,10 @@ public XMLLoader init(SolrParams args) { } catch (IllegalArgumentException ex) { // Other implementations will likely throw this exception since "reuse-instance" // 
isimplementation specific. - log.debug("Unable to set the 'reuse-instance' property for the input chain: {}", inputFactory); + log.debug( + "Unable to set the 'reuse-instance' property for the input chain: {}", inputFactory); } - + // Init SAX parser (for XSL): saxFactory = SAXParserFactory.newInstance(); saxFactory.setNamespaceAware(true); // XSL needs this! @@ -102,9 +103,14 @@ public String getDefaultWT() { } @Override - public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream, UpdateRequestProcessor processor) throws Exception { + public void load( + SolrQueryRequest req, + SolrQueryResponse rsp, + ContentStream stream, + UpdateRequestProcessor processor) + throws Exception { final String charset = ContentStreamBase.getCharsetFromContentType(stream.getContentType()); - + InputStream is = null; XMLStreamReader parser = null; @@ -112,18 +118,21 @@ public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stre try { is = stream.getStream(); if (log.isTraceEnabled()) { - final byte[] body = IOUtils.toByteArray(is); + final byte[] body = org.apache.commons.io.IOUtils.toByteArray(is); // TODO: The charset may be wrong, as the real charset is later // determined by the XML parser, the content-type is only used as a hint! if (log.isTraceEnabled()) { - log.trace("body: {}", new String(body, (charset == null) ? - ContentStreamBase.DEFAULT_CHARSET : charset)); + log.trace( + "body: {}", + new String(body, (charset == null) ? ContentStreamBase.DEFAULT_CHARSET : charset)); } IOUtils.closeQuietly(is); is = new ByteArrayInputStream(body); } - parser = (charset == null) ? - inputFactory.createXMLStreamReader(is) : inputFactory.createXMLStreamReader(is, charset); + parser = + (charset == null) + ? inputFactory.createXMLStreamReader(is) + : inputFactory.createXMLStreamReader(is, charset); this.processUpdate(req, processor, parser); } catch (XMLStreamException e) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e.getMessage(), e); @@ -133,11 +142,10 @@ public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stre } } - /** - * @since solr 1.2 - */ - protected void processUpdate(SolrQueryRequest req, UpdateRequestProcessor processor, XMLStreamReader parser) - throws XMLStreamException, IOException, FactoryConfigurationError { + /** @since solr 1.2 */ + protected void processUpdate( + SolrQueryRequest req, UpdateRequestProcessor processor, XMLStreamReader parser) + throws XMLStreamException, IOException, FactoryConfigurationError { AddUpdateCommand addCmd = null; SolrParams params = req.getParams(); while (true) { @@ -154,10 +162,11 @@ protected void processUpdate(SolrQueryRequest req, UpdateRequestProcessor proces addCmd = new AddUpdateCommand(req); - // First look for commitWithin parameter on the request, will be overwritten for individual 's + // First look for commitWithin parameter on the request, will be overwritten for + // individual 's addCmd.commitWithin = params.getInt(UpdateParams.COMMIT_WITHIN, -1); addCmd.overwrite = params.getBool(UpdateParams.OVERWRITE, true); - + for (int i = 0; i < parser.getAttributeCount(); i++) { String attrName = parser.getAttributeLocalName(i); String attrVal = parser.getAttributeValue(i); @@ -171,20 +180,24 @@ protected void processUpdate(SolrQueryRequest req, UpdateRequestProcessor proces } } else if ("doc".equals(currTag)) { - if(addCmd != null) { + if (addCmd != null) { log.trace("adding doc..."); addCmd.clear(); addCmd.solrDoc = readDoc(parser); processor.processAdd(addCmd); } else { - 
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unexpected tag without an tag surrounding it."); + throw new SolrException( + SolrException.ErrorCode.BAD_REQUEST, + "Unexpected tag without an tag surrounding it."); } - } else if (UpdateRequestHandler.COMMIT.equals(currTag) || UpdateRequestHandler.OPTIMIZE.equals(currTag)) { + } else if (UpdateRequestHandler.COMMIT.equals(currTag) + || UpdateRequestHandler.OPTIMIZE.equals(currTag)) { log.trace("parsing {}", currTag); - CommitUpdateCommand cmd = new CommitUpdateCommand(req, UpdateRequestHandler.OPTIMIZE.equals(currTag)); + CommitUpdateCommand cmd = + new CommitUpdateCommand(req, UpdateRequestHandler.OPTIMIZE.equals(currTag)); ModifiableSolrParams mp = new ModifiableSolrParams(); - + for (int i = 0; i < parser.getAttributeCount(); i++) { String attrName = parser.getAttributeLocalName(i); String attrVal = parser.getAttributeValue(i); @@ -192,7 +205,9 @@ protected void processUpdate(SolrQueryRequest req, UpdateRequestProcessor proces } RequestHandlerUtils.validateCommitParams(mp); - SolrParams p = SolrParams.wrapDefaults(mp, req.getParams()); // default to the normal request params for commit options + SolrParams p = + SolrParams.wrapDefaults( + mp, req.getParams()); // default to the normal request params for commit options RequestHandlerUtils.updateCommit(cmd, p); processor.processCommit(cmd); @@ -213,14 +228,14 @@ else if (UpdateRequestHandler.DELETE.equals(currTag)) { } } - /** - * @since solr 1.3 - */ - void processDelete(SolrQueryRequest req, UpdateRequestProcessor processor, XMLStreamReader parser) throws XMLStreamException, IOException { + /** @since solr 1.3 */ + void processDelete(SolrQueryRequest req, UpdateRequestProcessor processor, XMLStreamReader parser) + throws XMLStreamException, IOException { // Parse the command DeleteUpdateCommand deleteCmd = new DeleteUpdateCommand(req); - // First look for commitWithin parameter on the request, will be overwritten for individual 's + // First look for commitWithin parameter on the request, will be overwritten for individual + // 's SolrParams params = req.getParams(); deleteCmd.commitWithin = params.getInt(UpdateParams.COMMIT_WITHIN, -1); @@ -247,11 +262,10 @@ void processDelete(SolrQueryRequest req, UpdateRequestProcessor processor, XMLSt if (!(ID.equals(mode) || "query".equals(mode))) { String msg = "XML element has invalid XML child element: " + mode; log.warn(msg); - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - msg); + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, msg); } text.setLength(0); - + if (ID.equals(mode)) { for (int i = 0; i < parser.getAttributeCount(); i++) { String attrName = parser.getAttributeLocalName(i); @@ -269,7 +283,7 @@ void processDelete(SolrQueryRequest req, UpdateRequestProcessor processor, XMLSt case XMLStreamConstants.END_ELEMENT: String currTag = parser.getLocalName(); if (ID.equals(currTag)) { - deleteCmd.setId(text.toString()); + deleteCmd.setId(text.toString()); } else if ("query".equals(currTag)) { deleteCmd.setQuery(text.toString()); } else if ("delete".equals(currTag)) { @@ -277,8 +291,7 @@ void processDelete(SolrQueryRequest req, UpdateRequestProcessor processor, XMLSt } else { String msg = "XML element has invalid XML (closing) child element: " + currTag; log.warn(msg); - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - msg); + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, msg); } processor.processDelete(deleteCmd); deleteCmd.clear(); @@ -294,7 +307,6 @@ void 
processDelete(SolrQueryRequest req, UpdateRequestProcessor processor, XMLSt } } - /** * Given the input stream, read a document * @@ -308,7 +320,10 @@ public SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamExcepti for (int i = 0; i < parser.getAttributeCount(); i++) { attrName = parser.getAttributeLocalName(i); if ("boost".equals(attrName)) { - String message = "Ignoring document boost: " + parser.getAttributeValue(i) + " as index-time boosts are not supported anymore"; + String message = + "Ignoring document boost: " + + parser.getAttributeValue(i) + + " as index-time boosts are not supported anymore"; if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) { log.warn(message); } else { @@ -330,7 +345,7 @@ public SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamExcepti while (!complete) { int event = parser.next(); switch (event) { - // Add everything to the text + // Add everything to the text case XMLStreamConstants.SPACE: case XMLStreamConstants.CDATA: case XMLStreamConstants.CHARACTERS: @@ -373,7 +388,7 @@ public SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamExcepti } break; } - if(!isLabeledChildDoc){ + if (!isLabeledChildDoc) { // only add data if this is not a childDoc, since it was added already doc.addField(name, v); } else { @@ -389,25 +404,22 @@ public SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamExcepti text.setLength(0); String localName = parser.getLocalName(); if ("doc".equals(localName)) { - if(name != null) { + if (name != null) { // flag to prevent spaces after doc from being added isLabeledChildDoc = true; - if(!doc.containsKey(name)) { + if (!doc.containsKey(name)) { doc.setField(name, Lists.newArrayList()); } doc.addField(name, readDoc(parser)); break; } - if (subDocs == null) - subDocs = Lists.newArrayList(); + if (subDocs == null) subDocs = Lists.newArrayList(); subDocs.add(readDoc(parser)); - } - else { + } else { if (!"field".equals(localName)) { String msg = "XML element has invalid XML child element: " + localName; log.warn(msg); - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - msg); + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, msg); } update = null; isNull = false; @@ -418,7 +430,10 @@ public SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamExcepti if (NAME.equals(attrName)) { name = attrVal; } else if ("boost".equals(attrName)) { - String message = "Ignoring field boost: " + attrVal + " as index-time boosts are not supported anymore"; + String message = + "Ignoring field boost: " + + attrVal + + " as index-time boosts are not supported anymore"; if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) { log.warn(message); } else { @@ -437,7 +452,7 @@ public SolrInputDocument readDoc(XMLStreamReader parser) throws XMLStreamExcepti } } - if (updateMap != null) { + if (updateMap != null) { for (Map.Entry> entry : updateMap.entrySet()) { name = entry.getKey(); Map value = entry.getValue(); diff --git a/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java b/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java index 34299116157..feb1011b9d4 100644 --- a/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java +++ b/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java @@ -16,6 +16,8 @@ */ package org.apache.solr.response; +import static org.apache.solr.common.util.ByteArrayUtf8CharSequence.convertCharSeq; + import java.io.FileOutputStream; import java.io.IOException; 
import java.io.InputStream; @@ -27,7 +29,6 @@ import java.util.Iterator; import java.util.List; import java.util.function.Consumer; - import org.apache.commons.io.output.ByteArrayOutputStream; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexableField; @@ -46,15 +47,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.util.ByteArrayUtf8CharSequence.convertCharSeq; - - public class BinaryResponseWriter implements BinaryQueryResponseWriter { -// public static boolean useUtf8CharSeq = true; + // public static boolean useUtf8CharSeq = true; private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @Override - public void write(OutputStream out, SolrQueryRequest req, SolrQueryResponse response) throws IOException { + public void write(OutputStream out, SolrQueryRequest req, SolrQueryResponse response) + throws IOException { Resolver resolver = new Resolver(req, response.getReturnFields()); if (req.getParams().getBool(CommonParams.OMIT_HEADER, false)) response.removeResponseHeader(); try (JavaBinCodec jbc = new JavaBinCodec(resolver)) { @@ -62,16 +61,18 @@ public void write(OutputStream out, SolrQueryRequest req, SolrQueryResponse resp } } - private static void serialize(SolrQueryResponse response,Resolver resolver, String f) throws IOException { - try (JavaBinCodec jbc = new JavaBinCodec(resolver); FileOutputStream fos = new FileOutputStream(f)) { + private static void serialize(SolrQueryResponse response, Resolver resolver, String f) + throws IOException, java.io.FileNotFoundException { + try (JavaBinCodec jbc = new JavaBinCodec(resolver); + FileOutputStream fos = new FileOutputStream(f)) { jbc.setWritableDocFields(resolver).marshal(response.getValues(), fos); fos.flush(); } - } @Override - public void write(Writer writer, SolrQueryRequest request, SolrQueryResponse response) throws IOException { + public void write(Writer writer, SolrQueryRequest request, SolrQueryResponse response) + throws IOException { throw new RuntimeException("This is a binary writer , Cannot write to a characterstream"); } @@ -80,7 +81,8 @@ public String getContentType(SolrQueryRequest request, SolrQueryResponse respons return BinaryResponseParser.BINARY_CONTENT_TYPE; } - public static class Resolver implements JavaBinCodec.ObjectResolver , JavaBinCodec.WritableDocFields { + public static class Resolver + implements JavaBinCodec.ObjectResolver, JavaBinCodec.WritableDocFields { protected final SolrQueryRequest solrQueryRequest; protected IndexSchema schema; protected ReturnFields returnFields; @@ -95,21 +97,22 @@ public Object resolve(Object o, JavaBinCodec codec) throws IOException { if (o instanceof StoredField) { CharSequence val = ((StoredField) o).getCharSequenceValue(); if (val instanceof Utf8CharSequence) { - codec.writeUTF8Str((Utf8CharSequence) val); + JavaBinCodec.writeUTF8Str(codec, (Utf8CharSequence) val); return null; } } if (o instanceof ResultContext) { ReturnFields orig = returnFields; - ResultContext res = (ResultContext)o; - if(res.getReturnFields()!=null) { + ResultContext res = (ResultContext) o; + if (res.getReturnFields() != null) { returnFields = res.getReturnFields(); } -// if (useUtf8CharSeq) { - ResultContext.READASBYTES.set(fieldName -> { - SchemaField fld = res.getRequest().getSchema().getFieldOrNull(fieldName); - return fld != null && fld.getType().isUtf8Field(); - }); + + ResultContext.READASBYTES.set( + fieldName -> { + SchemaField fld = 
res.getRequest().getSchema().getFieldOrNull(fieldName); + return fld != null && fld.getType().isUtf8Field(); + }); try { writeResults(res, codec); @@ -121,14 +124,15 @@ public Object resolve(Object o, JavaBinCodec codec) throws IOException { return null; // null means we completely handled it } if (o instanceof DocList) { - ResultContext ctx = new BasicResultContext((DocList)o, returnFields, null, null, solrQueryRequest); + ResultContext ctx = + new BasicResultContext((DocList) o, returnFields, null, null, solrQueryRequest); writeResults(ctx, codec); return null; // null means we completely handled it } - if( o instanceof IndexableField ) { - if(schema == null) schema = solrQueryRequest.getSchema(); + if (o instanceof IndexableField) { + if (schema == null) schema = solrQueryRequest.getSchema(); - IndexableField f = (IndexableField)o; + IndexableField f = (IndexableField) o; SchemaField sf = schema.getFieldOrNull(f.name()); try { o = DocsStreamer.getValue(sf, f); @@ -149,8 +153,8 @@ public boolean wantsAllFields() { return returnFields.wantsAllFields(); } - protected void writeResultsBody( ResultContext res, JavaBinCodec codec ) throws IOException { - codec.writeTag(JavaBinCodec.ARR, res.getDocList().size()); + protected void writeResultsBody(ResultContext res, JavaBinCodec codec) throws IOException { + JavaBinCodec.writeTag(codec, JavaBinCodec.ARR, res.getDocList().size()); Iterator docStreamer = res.getProcessedDocuments(); while (docStreamer.hasNext()) { SolrDocument doc = docStreamer.next(); @@ -159,32 +163,29 @@ protected void writeResultsBody( ResultContext res, JavaBinCodec codec ) throws } public void writeResults(ResultContext ctx, JavaBinCodec codec) throws IOException { - codec.writeTag(JavaBinCodec.SOLRDOCLST); + JavaBinCodec.writeTag(codec, JavaBinCodec.SOLRDOCLST); List l = new ArrayList<>(4); - l.add( ctx.getDocList().matches()); + l.add(ctx.getDocList().matches()); l.add((long) ctx.getDocList().offset()); - + Float maxScore = null; if (ctx.wantsScores()) { maxScore = ctx.getDocList().maxScore(); } l.add(maxScore); l.add(ctx.getDocList().hitCountRelation() == TotalHits.Relation.EQUAL_TO); - codec.writeArray(l); - + JavaBinCodec.writeArray(codec, l); + // this is a seprate function so that streaming responses can use just that part - writeResultsBody( ctx, codec ); + writeResultsBody(ctx, codec); } - } - /** * TODO -- there may be a way to do this without marshal at all... * - * @return a response object equivalent to what you get from the XML/JSON/javabin parser. Documents become - * SolrDocuments, DocList becomes SolrDocumentList etc. - * + * @return a response object equivalent to what you get from the XML/JSON/javabin parser. + * Documents become SolrDocuments, DocList becomes SolrDocumentList etc. * @since solr 1.4 */ @SuppressWarnings("unchecked") @@ -197,31 +198,26 @@ public static NamedList getParsedResponse(SolrQueryRequest req, SolrQuer ByteArrayOutputStream out = new ByteArrayOutputStream(); try (JavaBinCodec jbc = new JavaBinCodec(resolver)) { - jbc.setWritableDocFields(resolver).marshal(rsp.getValues(), out); + jbc.setWritableDocFields(resolver).marshal(rsp.getValues(), out, true); } InputStream in = out.toInputStream(); try (JavaBinCodec jbc = new JavaBinCodec(resolver)) { return (NamedList) jbc.unmarshal(in); } - } - catch (Exception ex) { + } catch (Exception ex) { throw new RuntimeException(ex); } } static class MaskCharSeqSolrDocument extends SolrDocument { - /** - * Get the value or collection of values for a given field. 
- */ + /** Get the value or collection of values for a given field. */ @Override public Object getFieldValue(String name) { return convertCharSeq(_fields.get(name)); } - /** - * Get a collection of values for a given field name - */ + /** Get a collection of values for a given field name */ @SuppressWarnings("unchecked") @Override public Collection getFieldValues(String name) { @@ -251,14 +247,11 @@ public Collection getRawFieldValues(String name) { return null; } - - /** - * Iterate of String->Object keys - */ + /** Iterate of String->Object keys */ @Override public Iterator> iterator() { Iterator> it = _fields.entrySet().iterator(); - return new Iterator>() { + return new Iterator<>() { @Override public boolean hasNext() { return it.hasNext(); @@ -272,18 +265,15 @@ public Entry next() { }; } - /////////////////////////////////////////////////////////////////// // Get the field values /////////////////////////////////////////////////////////////////// - /** - * returns the first value for a field - */ + /** returns the first value for a field */ @Override public Object getFirstValue(String name) { Object v = _fields.get(name); - if (v == null || !(v instanceof Collection)) return convertCharSeq(v); + if (!(v instanceof Collection)) return convertCharSeq(v); Collection c = (Collection) v; if (c.size() > 0) { return convertCharSeq(c.iterator().next()); @@ -305,5 +295,4 @@ public void forEach(Consumer> action) { super.forEach(action); } } - } diff --git a/solr/core/src/java/org/apache/solr/response/transform/RawValueTransformerFactory.java b/solr/core/src/java/org/apache/solr/response/transform/RawValueTransformerFactory.java index 5838e10520e..91e36638b36 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/RawValueTransformerFactory.java +++ b/solr/core/src/java/org/apache/solr/response/transform/RawValueTransformerFactory.java @@ -16,11 +16,10 @@ */ package org.apache.solr.response.transform; +import com.google.common.base.Strings; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; - -import com.google.common.base.Strings; import org.apache.lucene.index.IndexableField; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.params.CommonParams; @@ -33,94 +32,85 @@ import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.QueryResponseWriter; -/** - * @since solr 5.2 - */ -public class RawValueTransformerFactory extends TransformerFactory -{ +/** @since solr 5.2 */ +public class RawValueTransformerFactory extends TransformerFactory { String applyToWT = null; - - public RawValueTransformerFactory() { - - } + + public RawValueTransformerFactory() {} public RawValueTransformerFactory(String wt) { this.applyToWT = wt; } - + @Override public void init(NamedList args) { super.init(args); - if(defaultUserArgs!=null&&defaultUserArgs.startsWith("wt=")) { + if (defaultUserArgs != null && defaultUserArgs.startsWith("wt=")) { applyToWT = defaultUserArgs.substring(3); } } - + @Override public DocTransformer create(String display, SolrParams params, SolrQueryRequest req) { String field = params.get("f"); - if(Strings.isNullOrEmpty(field)) { + if (Strings.isNullOrEmpty(field)) { field = display; } // When a 'wt' is specified in the transformer, only apply it to the same wt boolean apply = true; - if(applyToWT!=null) { + if (applyToWT != null) { String qwt = req.getParams().get(CommonParams.WT); - if(qwt==null) { + if (qwt == null) { QueryResponseWriter qw = req.getCore().getQueryResponseWriter(req); 
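// Note (illustrative, not part of this patch): when the request has no explicit wt,
// the check below compares writer instances. Response writers are registered per
// core, so getQueryResponseWriter() is expected to return the same object for
// equivalent names, and qw != dw means this request will not be rendered by the
// writer the transformer targets. A hypothetical usage, assuming the stock [json]
// registration of this factory and an illustrative field named 'payload':
//   /select?q=*:*&wt=json&fl=payload:[json f=payload]
// which would emit the stored value verbatim instead of re-encoding it as a string.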
QueryResponseWriter dw = req.getCore().getQueryResponseWriter(applyToWT); - if(qw!=dw) { + if (qw != dw) { apply = false; } - } - else { + } else { apply = applyToWT.equals(qwt); } } - if(apply) { - return new RawTransformer( field, display ); + if (apply) { + return new RawTransformer(field, display); } - + if (field.equals(display)) { // we have to ensure the field is returned return new DocTransformer.NoopFieldTransformer(field); } - return new RenameFieldTransformer( field, display, false ); + return new RenameFieldTransformer(field, display, false); } - - static class RawTransformer extends DocTransformer - { + + static class RawTransformer extends DocTransformer { final String field; final String display; - public RawTransformer( String field, String display ) - { + public RawTransformer(String field, String display) { this.field = field; this.display = display; } @Override - public String getName() - { + public String getName() { return display; } @Override public void transform(SolrDocument doc, int docid) { Object val = doc.remove(field); - if(val==null) { + if (val == null) { return; } - if(val instanceof Collection) { - Collection current = (Collection)val; - ArrayList vals = new ArrayList(); - for(Object v : current) { + if (val instanceof Collection) { + Collection current = (Collection) val; + ArrayList vals = + new ArrayList(); + for (Object v : current) { vals.add(new WriteableStringValue(v)); } doc.setField(display, vals); - } - else { + } else { doc.setField(display, new WriteableStringValue(val)); } } @@ -130,21 +120,20 @@ public String[] getExtraRequestFields() { return new String[] {this.field}; } } - + public static class WriteableStringValue extends WriteableValue { public final Object val; - + public WriteableStringValue(Object val) { this.val = val; } - + @Override public void write(String name, TextWriter writer) throws IOException { String str = null; - if(val instanceof IndexableField) { // delays holding it in memory - str = ((IndexableField)val).stringValue(); - } - else { + if (val instanceof IndexableField) { // delays holding it in memory + str = ((IndexableField) val).stringValue(); + } else { str = val.toString(); } writer.getWriter().write(str); @@ -153,13 +142,11 @@ public void write(String name, TextWriter writer) throws IOException { @Override public Object resolve(Object o, JavaBinCodec codec) throws IOException { ObjectResolver orig = codec.getResolver(); - if(orig != null) { - codec.writeVal(orig.resolve(val, codec)); + if (orig != null) { + JavaBinCodec.writeVal(codec, orig.resolve(val, codec)); return null; } return val.toString(); } } } - - diff --git a/solr/core/src/java/org/apache/solr/response/transform/WriteableGeoJSON.java b/solr/core/src/java/org/apache/solr/response/transform/WriteableGeoJSON.java index f89f5c847f5..107e01279ac 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/WriteableGeoJSON.java +++ b/solr/core/src/java/org/apache/solr/response/transform/WriteableGeoJSON.java @@ -39,7 +39,7 @@ public WriteableGeoJSON(Shape shape, ShapeWriter jsonWriter) { @Override public Object resolve(Object o, JavaBinCodec codec) throws IOException { - codec.writeStr(jsonWriter.toString(shape)); + JavaBinCodec.writeStr(codec, jsonWriter.toString(shape)); return null; // this means we wrote it } diff --git a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java index 8982b6fe323..0965d0c82f5 100644 --- 
a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java +++ b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java @@ -24,14 +24,12 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; - import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.common.util.DataInputInputStream; import org.apache.solr.common.util.FastInputStream; import org.apache.solr.common.util.FastOutputStream; import org.apache.solr.common.util.JavaBinCodec; @@ -42,20 +40,18 @@ import org.slf4j.LoggerFactory; /** - * Log Format: List{Operation, Version, ...} - * ADD, VERSION, DOC - * DELETE, VERSION, ID_BYTES - * DELETE_BY_QUERY, VERSION, String + * Log Format: List{Operation, Version, ...} ADD, VERSION, DOC DELETE, VERSION, ID_BYTES + * DELETE_BY_QUERY, VERSION, String * - * TODO: keep two files, one for [operation, version, id] and the other for the actual - * document data. That way we could throw away document log files more readily - * while retaining the smaller operation log files longer (and we can retrieve - * the stored fields from the latest documents from the index). + *
<p>
TODO: keep two files, one for [operation, version, id] and the other for the actual document + * data. That way we could throw away document log files more readily while retaining the smaller + * operation log files longer (and we can retrieve the stored fields from the latest documents from + * the index). * - * This would require keeping all source fields stored of course. + *
<p>
This would require keeping all source fields stored of course. * - * This would also allow to not log document data for requests with commit=true - * in them (since we know that if the request succeeds, all docs will be committed) + *
<p>
This would also allow to not log document data for requests with commit=true in them (since we + * know that if the request succeeds, all docs will be committed) * * @deprecated since 8.6 */ @@ -65,7 +61,6 @@ public class HdfsTransactionLog extends TransactionLog { private static boolean debug = log.isDebugEnabled(); private static boolean trace = log.isTraceEnabled(); - Path tlogFile; private long finalLogSize; @@ -74,53 +69,65 @@ public class HdfsTransactionLog extends TransactionLog { private volatile boolean isClosed = false; - HdfsTransactionLog(FileSystem fs, Path tlogFile, Collection globalStrings, Integer tlogDfsReplication) { + HdfsTransactionLog( + FileSystem fs, Path tlogFile, Collection globalStrings, Integer tlogDfsReplication) { this(fs, tlogFile, globalStrings, false, tlogDfsReplication); } - HdfsTransactionLog(FileSystem fs, Path tlogFile, Collection globalStrings, boolean openExisting, Integer tlogDfsReplication) { + HdfsTransactionLog( + FileSystem fs, + Path tlogFile, + Collection globalStrings, + boolean openExisting, + Integer tlogDfsReplication) { super(); boolean success = false; this.fs = fs; try { this.tlogFile = tlogFile; - + if (fs.exists(tlogFile) && openExisting) { - FSHDFSUtils.recoverFileLease(fs, tlogFile, fs.getConf(), new CallerInfo(){ + FSHDFSUtils.recoverFileLease( + fs, + tlogFile, + fs.getConf(), + new CallerInfo() { + + @Override + public boolean isCallerClosed() { + return isClosed; + } + }); - @Override - public boolean isCallerClosed() { - return isClosed; - }}); - tlogOutStream = fs.append(tlogFile); } else { fs.delete(tlogFile, false); - - tlogOutStream = fs.create(tlogFile, (short)tlogDfsReplication.intValue()); + + tlogOutStream = fs.create(tlogFile, (short) tlogDfsReplication.intValue()); tlogOutStream.hsync(); } fos = new FastOutputStream(tlogOutStream, new byte[65536], 0); - long start = tlogOutStream.getPos(); + long start = tlogOutStream.getPos(); if (openExisting) { if (start > 0) { readHeader(null); - - // we should already be at the end - // raf.seek(start); - // assert channel.position() == start; - fos.setWritten(start); // reflect that we aren't starting at the beginning - //assert fos.size() == channel.size(); + // we should already be at the end + // raf.seek(start); + + // assert channel.position() == start; + fos.setWritten(start); // reflect that we aren't starting at the beginning + // assert fos.size() == channel.size(); } else { addGlobalStrings(globalStrings); } } else { if (start > 0) { - log.error("New transaction log already exists:{} size={}", tlogFile, tlogOutStream.size()); + log.error( + "New transaction log already exists:{} size={}", tlogFile, tlogOutStream.size()); } addGlobalStrings(globalStrings); @@ -130,7 +137,7 @@ public boolean isCallerClosed() { assert ObjectReleaseTracker.track(this); log.debug("Opening new tlog {}", this); - + } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); } finally { @@ -148,16 +155,16 @@ public boolean isCallerClosed() { public boolean endsWithCommit() throws IOException { ensureFlushed(); long size = getLogSize(); - + // the end of the file should have the end message (added during a commit) plus a 4 byte size - byte[] buf = new byte[ END_MESSAGE.length() ]; + byte[] buf = new byte[END_MESSAGE.length()]; long pos = size - END_MESSAGE.length() - 4; if (pos < 0) return false; - + FSDataFastInputStream dis = new FSDataFastInputStream(fs.open(tlogFile), pos); try { dis.read(buf); - for (int i=0; i header = null; + Map header = null; try { try 
(LogCodec codec = new LogCodec(resolver)) { - header = (Map) codec.unmarshal(fis); + header = (Map) codec.unmarshal(fis); } - + fis.readInt(); // skip size } finally { if (fis != null && closeFis) { @@ -187,10 +194,10 @@ private void readHeader(FastInputStream fis) throws IOException { // needed to read other records synchronized (this) { - globalStringList = (List)header.get("strings"); + globalStringList = (List) header.get("strings"); globalStringMap = new HashMap<>(globalStringList.size()); - for (int i=0; i= sz) { log.info("Read available inputstream data, opening new inputstream pos={} sz={}", pos, sz); - + fis.close(); initStream(pos); } - + if (pos == 0) { readHeader(fis); - // shouldn't currently happen - header and first record are currently written at the same time + // shouldn't currently happen - header and first record are currently written at the same + // time synchronized (HdfsTransactionLog.this) { if (fis.position() >= getLogSize()) { return null; @@ -434,7 +444,7 @@ public Object next() throws IOException, InterruptedException { } } - Object o = codec.readVal(fis); + Object o = JavaBinCodec.readVal(codec); // skip over record size int size = fis.readInt(); @@ -455,23 +465,29 @@ public void close() { @Override public String toString() { synchronized (HdfsTransactionLog.this) { - return "LogReader{" + "file=" + tlogFile + ", position=" + fis.position() + ", end=" + getLogSize() + "}"; + return "LogReader{" + + "file=" + + tlogFile + + ", position=" + + fis.position() + + ", end=" + + getLogSize() + + "}"; } } - + @Override public long currentPos() { return fis.position(); } - + @Override public long currentSize() { return getLogSize(); } - } - public class HDFSSortedLogReader extends HDFSLogReader{ + public class HDFSSortedLogReader extends HDFSLogReader { private long startingPos; private boolean inOrder = true; private TreeMap versionToPos; @@ -490,7 +506,7 @@ public Object next() throws IOException, InterruptedException { long pos = startingPos; long lastVersion = Long.MIN_VALUE; - while ( (o = super.next()) != null) { + while ((o = super.next()) != null) { List entry = (List) o; long version = (Long) entry.get(UpdateLog.VERSION_IDX); version = Math.abs(version); @@ -517,17 +533,19 @@ public Object next() throws IOException, InterruptedException { public class HDFSReverseReader extends ReverseReader { FSDataFastInputStream fis; - private LogCodec codec = new LogCodec(resolver) { - @Override - public SolrInputDocument readSolrInputDocument(DataInputInputStream dis) { - // Given that the SolrInputDocument is last in an add record, it's OK to just skip - // reading it completely. - return null; - } - }; + private LogCodec codec = + new LogCodec(resolver) { + @Override + public SolrInputDocument readSolrInputDocument(JavaBinCodec javaBinCodec) { + // Given that the SolrInputDocument is last in an add record, it's OK to just skip + // reading it completely. 
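// Note (illustrative, not part of this patch): skipping the payload is safe because
// the reverse readers navigate purely by frame boundaries. Every record carries a
// trailing 4-byte length, so the document bytes are never needed to locate the
// previous record.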
+ return null; + } + }; - int nextLength; // length of the next record (the next one closer to the start of the log file) - long prevPos; // where we started reading from last time (so prevPos - nextLength == start of next record) + int nextLength; // length of the next record (the next one closer to the start of the log file) + long prevPos; // where we started reading from last time (so prevPos - nextLength == start of + // next record) public HDFSReverseReader() throws IOException { incref(); @@ -539,8 +557,8 @@ public HDFSReverseReader() throws IOException { } fis = new FSDataFastInputStream(fs.open(tlogFile), 0); - - if (sz >=4) { + codec.initRead(fis); + if (sz >= 4) { // readHeader(fis); // should not be needed prevPos = sz - 4; fis.seek(prevPos); @@ -548,8 +566,8 @@ public HDFSReverseReader() throws IOException { } } - - /** Returns the next object from the log, or null if none available. + /** + * Returns the next object from the log, or null if none available. * * @return The log record, or null if EOF * @throws IOException If there is a low-level I/O error. @@ -561,10 +579,10 @@ public Object next() throws IOException { int thisLength = nextLength; - long recordStart = prevPos - thisLength; // back up to the beginning of the next record - prevPos = recordStart - 4; // back up 4 more to read the length of the next record + long recordStart = prevPos - thisLength; // back up to the beginning of the next record + prevPos = recordStart - 4; // back up 4 more to read the length of the next record - if (prevPos <= 0) return null; // this record is the header + if (prevPos <= 0) return null; // this record is the header long bufferPos = fis.getBufferPos(); if (prevPos >= bufferPos) { @@ -572,26 +590,31 @@ public Object next() throws IOException { } else { // Position buffer so that this record is at the end. // For small records, this will cause subsequent calls to next() to be within the buffer. - long seekPos = endOfThisRecord - fis.getBufferSize(); - seekPos = Math.min(seekPos, prevPos); // seek to the start of the record if it's larger then the block size. + long seekPos = endOfThisRecord - fis.getBufferSize(); + seekPos = + Math.min( + seekPos, + prevPos); // seek to the start of the record if it's larger then the block size. seekPos = Math.max(seekPos, 0); fis.seek(seekPos); - fis.peek(); // cause buffer to be filled + fis.peek(); // cause buffer to be filled } fis.seek(prevPos); - nextLength = fis.readInt(); // this is the length of the *next* record (i.e. closer to the beginning) + nextLength = + fis.readInt(); // this is the length of the *next* record (i.e. 
closer to the beginning) // TODO: optionally skip document data - Object o = codec.readVal(fis); + Object o = JavaBinCodec.readVal(codec); - // assert fis.position() == prevPos + 4 + thisLength; // this is only true if we read all the data (and we currently skip reading SolrInputDocument + // assert fis.position() == prevPos + 4 + thisLength; // this is only true if we read all the + // data (and we currently skip reading SolrInputDocument return o; } /* returns the position in the log file of the last record returned by next() */ public long position() { - return prevPos + 4; // skip the length + return prevPos + 4; // skip the length } public void close() { @@ -606,17 +629,19 @@ public void close() { @Override public String toString() { synchronized (HdfsTransactionLog.this) { - return "LogReader{" + "file=" + tlogFile + ", position=" + fis.position() + ", end=" + getLogSize() + "}"; + return "LogReader{" + + "file=" + + tlogFile + + ", position=" + + fis.position() + + ", end=" + + getLogSize() + + "}"; } } - - } - } - - class FSDataFastInputStream extends FastInputStream { private FSDataInputStream fis; @@ -635,10 +660,12 @@ public int readWrappedStream(byte[] target, int offset, int len) throws IOExcept public void seek(long position) throws IOException { if (position <= readFromStream && position >= getBufferPos()) { // seek within buffer - pos = (int)(position - getBufferPos()); + pos = (int) (position - getBufferPos()); } else { - // long currSize = ch.size(); // not needed - underlying read should handle (unless read never done) - // if (position > currSize) throw new EOFException("Read past EOF: seeking to " + position + " on file of size " + currSize + " file=" + ch); + // long currSize = ch.size(); // not needed - underlying read should handle (unless read + // never done) + // if (position > currSize) throw new EOFException("Read past EOF: seeking to " + position + " + // on file of size " + currSize + " file=" + ch); readFromStream = position; end = pos = 0; } @@ -658,9 +685,18 @@ public int getBufferSize() { public void close() throws IOException { fis.close(); } - + @Override public String toString() { - return "readFromStream="+readFromStream +" pos="+pos +" end="+end + " bufferPos="+getBufferPos() + " position="+position() ; + return "readFromStream=" + + readFromStream + + " pos=" + + pos + + " end=" + + end + + " bufferPos=" + + getBufferPos() + + " position=" + + position(); } } diff --git a/solr/core/src/java/org/apache/solr/update/TransactionLog.java b/solr/core/src/java/org/apache/solr/update/TransactionLog.java index 651a0fb0482..b0b49186e43 100644 --- a/solr/core/src/java/org/apache/solr/update/TransactionLog.java +++ b/solr/core/src/java/org/apache/solr/update/TransactionLog.java @@ -35,71 +35,74 @@ import java.util.Map; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.lucene.util.BytesRef; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.common.util.DataInputInputStream; import org.apache.solr.common.util.FastInputStream; import org.apache.solr.common.util.FastOutputStream; import org.apache.solr.common.util.JavaBinCodec; import org.apache.solr.common.util.ObjectReleaseTracker; +import org.eclipse.jetty.io.RuntimeIOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Log Format: List{Operation, Version, ...} - * ADD, VERSION, DOC - * DELETE, VERSION, ID_BYTES - * DELETE_BY_QUERY, VERSION, String - * - * TODO: keep two files, 
one for [operation, version, id] and the other for the actual - * document data. That way we could throw away document log files more readily - * while retaining the smaller operation log files longer (and we can retrieve - * the stored fields from the latest documents from the index). + * Log Format: List{Operation, Version, ...} ADD, VERSION, DOC DELETE, VERSION, ID_BYTES + * DELETE_BY_QUERY, VERSION, String * - * This would require keeping all source fields stored of course. + *
<p>
TODO: keep two files, one for [operation, version, id] and the other for the actual document + * data. That way we could throw away document log files more readily while retaining the smaller + * operation log files longer (and we can retrieve the stored fields from the latest documents from + * the index). * - * This would also allow to not log document data for requests with commit=true - * in them (since we know that if the request succeeds, all docs will be committed) + *
<p>
This would require keeping all source fields stored of course. * + *
<p>
This would also allow to not log document data for requests with commit=true in them (since we + * know that if the request succeeds, all docs will be committed) */ public class TransactionLog implements Closeable { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private boolean debug = log.isDebugEnabled(); private boolean trace = log.isTraceEnabled(); - public final static String END_MESSAGE = "SOLR_TLOG_END"; + public static final String END_MESSAGE = "SOLR_TLOG_END"; long id; File tlogFile; RandomAccessFile raf; FileChannel channel; OutputStream os; - FastOutputStream fos; // all accesses to this stream should be synchronized on "this" (The TransactionLog) + FastOutputStream + fos; // all accesses to this stream should be synchronized on "this" (The TransactionLog) int numRecords; boolean isBuffer; - protected volatile boolean deleteOnClose = true; // we can delete old tlogs since they are currently only used for real-time-get (and in the future, recovery) + protected volatile boolean deleteOnClose = + true; // we can delete old tlogs since they are currently only used for real-time-get (and in + // the future, recovery) AtomicInteger refcount = new AtomicInteger(1); Map globalStringMap = new HashMap<>(); List globalStringList = new ArrayList<>(); // write a BytesRef as a byte array - static final JavaBinCodec.ObjectResolver resolver = new JavaBinCodec.ObjectResolver() { - @Override - public Object resolve(Object o, JavaBinCodec codec) throws IOException { - if (o instanceof BytesRef) { - BytesRef br = (BytesRef) o; - codec.writeByteArray(br.bytes, br.offset, br.length); - return null; - } - // Fallback: we have no idea how to serialize this. Be noisy to prevent insidious bugs - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "TransactionLog doesn't know how to serialize " + o.getClass() + "; try implementing ObjectResolver?"); - } - }; + static final JavaBinCodec.ObjectResolver resolver = + new JavaBinCodec.ObjectResolver() { + @Override + public Object resolve(Object o, JavaBinCodec codec) throws IOException { + if (o instanceof BytesRef) { + BytesRef br = (BytesRef) o; + JavaBinCodec.writeByteArray(codec, br.bytes, br.offset, br.length); + return null; + } + // Fallback: we have no idea how to serialize this. 
Be noisy to prevent insidious bugs + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, + "TransactionLog doesn't know how to serialize " + + o.getClass() + + "; try implementing ObjectResolver?"); + } + }; public class LogCodec extends JavaBinCodec { @@ -108,50 +111,52 @@ public LogCodec(JavaBinCodec.ObjectResolver resolver) { } @Override - public void writeExternString(CharSequence s) throws IOException { - if (s == null) { - writeTag(NULL); + public void writeExternString(CharSequence str) throws IOException { + if (str == null) { + writeTag(this, NULL); return; } - // no need to synchronize globalStringMap - it's only updated before the first record is written to the log - Integer idx = globalStringMap.get(s.toString()); + // no need to synchronize globalStringMap - it's only updated before the first record is + // written to the log + Integer idx = globalStringMap.get(str.toString()); if (idx == null) { // write a normal string - writeStr(s); + writeStr(this, str); } else { // write the extern string - writeTag(EXTERN_STRING, idx); + writeTag(this, EXTERN_STRING, idx); } } @Override - public CharSequence readExternString(DataInputInputStream fis) throws IOException { - int idx = readSize(fis); - if (idx != 0) {// idx != 0 is the index of the extern string - // no need to synchronize globalStringList - it's only updated before the first record is written to the log + public CharSequence readExternString(JavaBinCodec javaBinCodec) throws IOException { + int idx = readSize(javaBinCodec); + if (idx != 0) { // idx != 0 is the index of the extern string + // no need to synchronize globalStringList - it's only updated before the first record is + // written to the log return globalStringList.get(idx - 1); - } else {// idx == 0 means it has a string value + } else { // idx == 0 means it has a string value // this shouldn't happen with this codec subclass. 
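// Note (illustrative, not part of this patch): the dictionary behind EXTERN_STRING
// lives in the log header. writeLogHeader() marshals {"SOLR_TLOG": 1, "strings":
// globalStringList} as the first record, readHeader() rebuilds globalStringMap and
// globalStringList from it, and readExternString() resolves a tag to
// globalStringList.get(idx - 1). Since writeExternString() only emits EXTERN_STRING
// for strings already in the map, an index of 0 (an inline value in the base codec)
// can only mean a corrupt log.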
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Corrupt transaction log"); } } @Override - protected Object readObject(DataInputInputStream dis) throws IOException { + protected Object readObject() throws IOException { if (UUID == tagByte) { - return new java.util.UUID(dis.readLong(), dis.readLong()); + return new java.util.UUID(readLong(this), readLong(this)); } - return super.readObject(dis); + return super.readObject(); } @Override public boolean writePrimitive(Object val) throws IOException { if (val instanceof java.util.UUID) { java.util.UUID uuid = (java.util.UUID) val; - daos.writeByte(UUID); - daos.writeLong(uuid.getMostSignificantBits()); - daos.writeLong(uuid.getLeastSignificantBits()); + writeByteToOS(this, UUID); + writeLongToOS(this, uuid.getMostSignificantBits()); + writeLongToOS(this, uuid.getLeastSignificantBits()); return true; } return super.writePrimitive(val); @@ -166,8 +171,12 @@ public boolean writePrimitive(Object val) throws IOException { boolean success = false; try { if (debug) { - log.debug("New TransactionLog file= {}, exists={}, size={} openExisting={}" - , tlogFile, tlogFile.exists(), tlogFile.length(), openExisting); + log.debug( + "New TransactionLog file= {}, exists={}, size={} openExisting={}", + tlogFile, + tlogFile.exists(), + tlogFile.length(), + openExisting); } // Parse tlog id from the filename @@ -187,7 +196,7 @@ public boolean writePrimitive(Object val) throws IOException { readHeader(null); raf.seek(start); assert channel.position() == start; - fos.setWritten(start); // reflect that we aren't starting at the beginning + fos.setWritten(start); // reflect that we aren't starting at the beginning assert fos.size() == channel.size(); } else { addGlobalStrings(globalStrings); @@ -222,11 +231,11 @@ public boolean writePrimitive(Object val) throws IOException { } // for subclasses - protected TransactionLog() { - } + protected TransactionLog() {} - /** Returns the number of records in the log (currently includes the header and an optional commit). - * Note: currently returns 0 for reopened existing log files. + /** + * Returns the number of records in the log (currently includes the header and an optional + * commit). Note: currently returns 0 for reopened existing log files. 
*/ public int numRecords() { synchronized (this) { @@ -245,7 +254,8 @@ public boolean endsWithCommit() throws IOException { byte[] buf = new byte[END_MESSAGE.length()]; long pos = size - END_MESSAGE.length() - 4; if (pos < 0) return false; - @SuppressWarnings("resource") final ChannelFastInputStream is = new ChannelFastInputStream(channel, pos); + @SuppressWarnings("resource") + final ChannelFastInputStream is = new ChannelFastInputStream(channel, pos); is.read(buf); for (int i = 0; i < buf.length; i++) { if (buf[i] != END_MESSAGE.charAt(i)) return false; @@ -254,24 +264,26 @@ public boolean endsWithCommit() throws IOException { } public long writeData(Object o) { - @SuppressWarnings("resource") final LogCodec codec = new LogCodec(resolver); + @SuppressWarnings("resource") + final LogCodec codec = new LogCodec(resolver); try { - long pos = fos.size(); // if we had flushed, this should be equal to channel.position() + long pos = fos.size(); // if we had flushed, this should be equal to channel.position() codec.init(fos); - codec.writeVal(o); + JavaBinCodec.writeVal(codec, o); return pos; } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); } } - @SuppressWarnings({"unchecked"}) private void readHeader(FastInputStream fis) throws IOException { // read existing header fis = fis != null ? fis : new ChannelFastInputStream(channel, 0); - @SuppressWarnings("resource") final LogCodec codec = new LogCodec(resolver); - Map header = (Map) codec.unmarshal(fis); + @SuppressWarnings("resource") + final LogCodec codec = new LogCodec(resolver); + codec.initRead(fis); + Map header = (Map) JavaBinCodec.readVal(codec); fis.readInt(); // skip size @@ -294,7 +306,7 @@ protected void addGlobalStrings(Collection strings) { if (origSize > 0) { idx = globalStringMap.get(s); } - if (idx != null) continue; // already in list + if (idx != null) continue; // already in list globalStringList.add(s); globalStringMap.put(s, globalStringList.size()); } @@ -314,7 +326,7 @@ protected void writeLogHeader(LogCodec codec) throws IOException { Map header = new LinkedHashMap<>(); header.put("SOLR_TLOG", 1); // a magic string + version number header.put("strings", globalStringList); - codec.marshal(header, fos); + codec.marshal(header, fos, true); endRecord(pos); } @@ -332,7 +344,7 @@ protected void checkWriteHeader(LogCodec codec, SolrInputDocument optional) thro if (fos.size() != 0) return; synchronized (this) { - if (fos.size() != 0) return; // check again while synchronized + if (fos.size() != 0) return; // check again while synchronized if (optional != null) { addGlobalStrings(optional.getFieldNames()); } @@ -343,13 +355,12 @@ protected void checkWriteHeader(LogCodec codec, SolrInputDocument optional) thro int lastAddSize; /** - * Writes an add update command to the transaction log. This is not applicable for - * in-place updates; use {@link #write(AddUpdateCommand, long)}. - * (The previous pointer (applicable for in-place updates) is set to -1 while writing - * the command to the transaction log.) + * Writes an add update command to the transaction log. This is not applicable for in-place + * updates; use {@link #write(AddUpdateCommand, long)}. (The previous pointer (applicable for + * in-place updates) is set to -1 while writing the command to the transaction log.) 
+ * * @param cmd The add update command to be written * @return Returns the position pointer of the written update command - * * @see #write(AddUpdateCommand, long) */ public long write(AddUpdateCommand cmd) { @@ -357,11 +368,12 @@ public long write(AddUpdateCommand cmd) { } /** - * Writes an add update command to the transaction log. This should be called only for - * writing in-place updates, or else pass -1 as the prevPointer. - * @param cmd The add update command to be written - * @param prevPointer The pointer in the transaction log which this update depends - * on (applicable for in-place updates) + * Writes an add update command to the transaction log. This should be called only for writing + * in-place updates, or else pass -1 as the prevPointer. + * + * @param cmd The add update command to be written + * @param prevPointer The pointer in the transaction log which this update depends on (applicable + * for in-place updates) * @return Returns the position pointer of the written update command */ public long write(AddUpdateCommand cmd, long prevPointer) { @@ -374,36 +386,36 @@ public long write(AddUpdateCommand cmd, long prevPointer) { checkWriteHeader(codec, sdoc); // adaptive buffer sizing - int bufSize = lastAddSize; // unsynchronized access of lastAddSize should be fine + int bufSize = lastAddSize; // unsynchronized access of lastAddSize should be fine // at least 256 bytes and at most 1 MB bufSize = Math.min(1024 * 1024, Math.max(256, bufSize + (bufSize >> 3) + 256)); MemOutputStream out = new MemOutputStream(new byte[bufSize]); codec.init(out); if (cmd.isInPlaceUpdate()) { - codec.writeTag(JavaBinCodec.ARR, 5); - codec.writeInt(UpdateLog.UPDATE_INPLACE); // should just take one byte - codec.writeLong(cmd.getVersion()); - codec.writeLong(prevPointer); - codec.writeLong(cmd.prevVersion); + JavaBinCodec.writeTag(codec, JavaBinCodec.ARR, 5); + JavaBinCodec.writeInt(codec, UpdateLog.UPDATE_INPLACE); // should just take one byte + JavaBinCodec.writeLong(codec, cmd.getVersion()); + JavaBinCodec.writeLong(codec, prevPointer); + JavaBinCodec.writeLong(codec, cmd.prevVersion); codec.writeSolrInputDocument(cmd.getSolrInputDocument()); } else { - codec.writeTag(JavaBinCodec.ARR, 3); - codec.writeInt(UpdateLog.ADD); // should just take one byte - codec.writeLong(cmd.getVersion()); + JavaBinCodec.writeTag(codec, JavaBinCodec.ARR, 3); + JavaBinCodec.writeInt(codec, UpdateLog.ADD); // should just take one byte + JavaBinCodec.writeLong(codec, cmd.getVersion()); codec.writeSolrInputDocument(cmd.getSolrInputDocument()); } lastAddSize = (int) out.size(); synchronized (this) { - long pos = fos.size(); // if we had flushed, this should be equal to channel.position() + long pos = fos.size(); // if we had flushed, this should be equal to channel.position() assert pos != 0; /*** - System.out.println("###writing at " + pos + " fos.size()=" + fos.size() + " raf.length()=" + raf.length()); - if (pos != fos.size()) { - throw new RuntimeException("ERROR" + "###writing at " + pos + " fos.size()=" + fos.size() + " raf.length()=" + raf.length()); - } + * System.out.println("###writing at " + pos + " fos.size()=" + fos.size() + " raf.length()=" + raf.length()); + * if (pos != fos.size()) { + * throw new RuntimeException("ERROR" + "###writing at " + pos + " fos.size()=" + fos.size() + " raf.length()=" + raf.length()); + * } ***/ out.writeAll(fos); @@ -428,13 +440,13 @@ public long writeDelete(DeleteUpdateCommand cmd) { MemOutputStream out = new MemOutputStream(new byte[20 + br.length]); codec.init(out); - 
codec.writeTag(JavaBinCodec.ARR, 3); - codec.writeInt(UpdateLog.DELETE); // should just take one byte - codec.writeLong(cmd.getVersion()); - codec.writeByteArray(br.bytes, br.offset, br.length); + JavaBinCodec.writeTag(codec, JavaBinCodec.ARR, 3); + JavaBinCodec.writeInt(codec, UpdateLog.DELETE); // should just take one byte + JavaBinCodec.writeLong(codec, cmd.getVersion()); + JavaBinCodec.writeByteArray(codec, br.bytes, br.offset, br.length); synchronized (this) { - long pos = fos.size(); // if we had flushed, this should be equal to channel.position() + long pos = fos.size(); // if we had flushed, this should be equal to channel.position() assert pos != 0; out.writeAll(fos); endRecord(pos); @@ -445,7 +457,6 @@ public long writeDelete(DeleteUpdateCommand cmd) { } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); } - } public long writeDeleteByQuery(DeleteUpdateCommand cmd) { @@ -455,13 +466,13 @@ public long writeDeleteByQuery(DeleteUpdateCommand cmd) { MemOutputStream out = new MemOutputStream(new byte[20 + (cmd.query.length())]); codec.init(out); - codec.writeTag(JavaBinCodec.ARR, 3); - codec.writeInt(UpdateLog.DELETE_BY_QUERY); // should just take one byte - codec.writeLong(cmd.getVersion()); - codec.writeStr(cmd.query); + JavaBinCodec.writeTag(codec, JavaBinCodec.ARR, 3); + JavaBinCodec.writeInt(codec, UpdateLog.DELETE_BY_QUERY); // should just take one byte + JavaBinCodec.writeLong(codec, cmd.getVersion()); + JavaBinCodec.writeStr(codec, cmd.query); synchronized (this) { - long pos = fos.size(); // if we had flushed, this should be equal to channel.position() + long pos = fos.size(); // if we had flushed, this should be equal to channel.position() out.writeAll(fos); endRecord(pos); // fos.flushBuffer(); // flush later @@ -470,29 +481,28 @@ public long writeDeleteByQuery(DeleteUpdateCommand cmd) { } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); } - } - public long writeCommit(CommitUpdateCommand cmd) { LogCodec codec = new LogCodec(resolver); synchronized (this) { try { - long pos = fos.size(); // if we had flushed, this should be equal to channel.position() + long pos = fos.size(); // if we had flushed, this should be equal to channel.position() if (pos == 0) { writeLogHeader(codec); pos = fos.size(); } codec.init(fos); - codec.writeTag(JavaBinCodec.ARR, 3); - codec.writeInt(UpdateLog.COMMIT); // should just take one byte - codec.writeLong(cmd.getVersion()); - codec.writeStr(END_MESSAGE); // ensure these bytes are (almost) last in the file + JavaBinCodec.writeTag(codec, JavaBinCodec.ARR, 3); + JavaBinCodec.writeInt(codec, UpdateLog.COMMIT); // should just take one byte + JavaBinCodec.writeLong(codec, cmd.getVersion()); + JavaBinCodec.writeStr( + codec, END_MESSAGE); // ensure these bytes are (almost) last in the file endRecord(pos); - fos.flush(); // flush since this will be the last record in a log fill + fos.flush(); // flush since this will be the last record in a log fill assert fos.size() == channel.size(); return pos; @@ -502,7 +512,6 @@ public long writeCommit(CommitUpdateCommand cmd) { } } - /* This method is thread safe */ public Object lookup(long pos) { @@ -516,16 +525,17 @@ public Object lookup(long pos) { // TODO: optimize this by keeping track of what we have flushed up to fos.flushBuffer(); /*** - System.out.println("###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos); - if (fos.size() != raf.length() || pos >= fos.size() ) { - throw new 
RuntimeException("ERROR" + "###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos); - } + * System.out.println("###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos); + * if (fos.size() != raf.length() || pos >= fos.size() ) { + * throw new RuntimeException("ERROR" + "###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos); + * } ***/ } ChannelFastInputStream fis = new ChannelFastInputStream(channel, pos); try (LogCodec codec = new LogCodec(resolver)) { - return codec.readVal(fis); + codec.init(fis); + return JavaBinCodec.readVal(codec); } } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); @@ -535,7 +545,8 @@ public Object lookup(long pos) { public void incref() { int result = refcount.incrementAndGet(); if (result <= 1) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "incref on a closed log: " + this); + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, "incref on a closed log: " + this); } } @@ -556,10 +567,11 @@ public long position() { } } - /** Move to a read-only state, closing and releasing resources while keeping the log available for reads */ - public void closeOutput() { - - } + /** + * Move to a read-only state, closing and releasing resources while keeping the log available for + * reads + */ + public void closeOutput() {} public void finish(UpdateLog.SyncLevel syncLevel) { if (syncLevel == UpdateLog.SyncLevel.NONE) return; @@ -626,16 +638,14 @@ public long getLogSize() { return 0; } - /** - * @return the FastOutputStream size - */ + /** @return the FastOutputStream size */ public synchronized long getLogSizeFromStream() { return fos.size(); } - /** Returns a reader that can be used while a log is still in use. - * Currently only *one* LogReader may be outstanding, and that log may only - * be used from a single thread. + /** + * Returns a reader that can be used while a log is still in use. Currently only *one* LogReader + * may be outstanding, and that log may only be used from a single thread. */ public LogReader getReader(long startingPos) { return new LogReader(startingPos); @@ -657,12 +667,18 @@ public class LogReader { public LogReader(long startingPos) { incref(); fis = new ChannelFastInputStream(channel, startingPos); + try { + codec.init(fis); + } catch (IOException e) { + throw new RuntimeIOException(e); + } } // for classes that extend protected LogReader() {} - /** Returns the next object from the log, or null if none available. + /** + * Returns the next object from the log, or null if none available. * * @return The log record, or null if EOF * @throws IOException If there is a low-level I/O error. 
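For orientation while reviewing the remaining TransactionLog hunks: every tlog entry is a javabin value followed by a 4-byte length suffix, which is what lets LogReader.next() skip forward and the reverse readers walk backward. A minimal forward-read sketch in the patch's new static-call style; channel, resolver, and startingPos are assumed to come from the enclosing TransactionLog, and it is not runnable standalone:

    ChannelFastInputStream fis = new ChannelFastInputStream(channel, startingPos);
    LogCodec codec = new LogCodec(resolver);
    codec.init(fis);                             // may throw IOException; the patch wraps it in RuntimeIOException
    Object record = JavaBinCodec.readVal(codec); // e.g. [ADD, version, SolrInputDocument]
    int size = fis.readInt();                    // trailing 4-byte record size, skipped on forward reads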
@@ -685,7 +701,8 @@ public Object next() throws IOException, InterruptedException { if (pos == 0) { readHeader(fis); - // shouldn't currently happen - header and first record are currently written at the same time + // shouldn't currently happen - header and first record are currently written at the same + // time synchronized (TransactionLog.this) { if (fis.position() >= fos.size()) { return null; @@ -694,7 +711,7 @@ public Object next() throws IOException, InterruptedException { } } - Object o = codec.readVal(fis); + Object o = JavaBinCodec.readVal(codec); // skip over record size int size = fis.readInt(); @@ -710,7 +727,14 @@ public void close() { @Override public String toString() { synchronized (TransactionLog.this) { - return "LogReader{" + "file=" + tlogFile + ", position=" + fis.position() + ", end=" + fos.size() + "}"; + return "LogReader{" + + "file=" + + tlogFile + + ", position=" + + fis.position() + + ", end=" + + fos.size() + + "}"; } } @@ -725,7 +749,6 @@ public long currentPos() { public long currentSize() throws IOException { return channel.size(); } - } public class SortedLogReader extends LogReader { @@ -774,7 +797,8 @@ public Object next() throws IOException, InterruptedException { public abstract class ReverseReader { - /** Returns the next object from the log, or null if none available. + /** + * Returns the next object from the log, or null if none available. * * @return The log record, or null if EOF * @throws IOException If there is a low-level I/O error. @@ -788,22 +812,23 @@ public abstract class ReverseReader { @Override public abstract String toString(); - } public class FSReverseReader extends ReverseReader { ChannelFastInputStream fis; - private LogCodec codec = new LogCodec(resolver) { - @Override - public SolrInputDocument readSolrInputDocument(DataInputInputStream dis) { - // Given that the SolrInputDocument is last in an add record, it's OK to just skip - // reading it completely. - return null; - } - }; + private LogCodec codec = + new LogCodec(resolver) { + @Override + public SolrInputDocument readSolrInputDocument(JavaBinCodec javaBinCodec) { + // Given that the SolrInputDocument is last in an add record, it's OK to just skip + // reading it completely. + return null; + } + }; - int nextLength; // length of the next record (the next one closer to the start of the log file) - long prevPos; // where we started reading from last time (so prevPos - nextLength == start of next record) + int nextLength; // length of the next record (the next one closer to the start of the log file) + long prevPos; // where we started reading from last time (so prevPos - nextLength == start of + // next record) public FSReverseReader() throws IOException { incref(); @@ -816,6 +841,7 @@ public FSReverseReader() throws IOException { } fis = new ChannelFastInputStream(channel, 0); + codec.init(fis); if (sz >= 4) { // readHeader(fis); // should not be needed prevPos = sz - 4; @@ -824,7 +850,8 @@ public FSReverseReader() throws IOException { } } - /** Returns the next object from the log, or null if none available. + /** + * Returns the next object from the log, or null if none available. * * @return The log record, or null if EOF * @throws IOException If there is a low-level I/O error. 
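The FSReverseReader hunk below is the densest part of this refactor, so here is its frame arithmetic paraphrased from the patch (buffering elided): each record is laid out as [javabin body][4-byte body length], and prevPos always rests on a trailing length field.

    long recordStart = prevPos - thisLength; // hop back over this record's body
    prevPos = recordStart - 4;               // land on the previous record's trailing length
    if (prevPos <= 0) return null;           // nothing earlier except the header record
    fis.seek(prevPos);
    nextLength = fis.readInt();              // the previous record's length, used on the next call
    Object o = JavaBinCodec.readVal(codec);  // stream now sits at recordStart: decode this record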
@@ -836,10 +863,10 @@ public Object next() throws IOException { int thisLength = nextLength; - long recordStart = prevPos - thisLength; // back up to the beginning of the next record - prevPos = recordStart - 4; // back up 4 more to read the length of the next record + long recordStart = prevPos - thisLength; // back up to the beginning of the next record + prevPos = recordStart - 4; // back up 4 more to read the length of the next record - if (prevPos <= 0) return null; // this record is the header + if (prevPos <= 0) return null; // this record is the header long bufferPos = fis.getBufferPos(); if (prevPos >= bufferPos) { @@ -848,26 +875,31 @@ public Object next() throws IOException { // Position buffer so that this record is at the end. // For small records, this will cause subsequent calls to next() to be within the buffer. long seekPos = endOfThisRecord - fis.getBufferSize(); - seekPos = Math.min(seekPos, prevPos); // seek to the start of the record if it's larger then the block size. + seekPos = + Math.min( + seekPos, + prevPos); // seek to the start of the record if it's larger then the block size. seekPos = Math.max(seekPos, 0); fis.seek(seekPos); - fis.peek(); // cause buffer to be filled + fis.peek(); // cause buffer to be filled } fis.seek(prevPos); - nextLength = fis.readInt(); // this is the length of the *next* record (i.e. closer to the beginning) + nextLength = + fis.readInt(); // this is the length of the *next* record (i.e. closer to the beginning) // TODO: optionally skip document data - Object o = codec.readVal(fis); + Object o = JavaBinCodec.readVal(codec); - // assert fis.position() == prevPos + 4 + thisLength; // this is only true if we read all the data (and we currently skip reading SolrInputDocument + // assert fis.position() == prevPos + 4 + thisLength; // this is only true if we read all the + // data (and we currently skip reading SolrInputDocument return o; } /* returns the position in the log file of the last record returned by next() */ public long position() { - return prevPos + 4; // skip the length + return prevPos + 4; // skip the length } public void close() { @@ -877,11 +909,16 @@ public void close() { @Override public String toString() { synchronized (TransactionLog.this) { - return "LogReader{" + "file=" + tlogFile + ", position=" + fis.position() + ", end=" + fos.size() + "}"; + return "LogReader{" + + "file=" + + tlogFile + + ", position=" + + fis.position() + + ", end=" + + fos.size() + + "}"; } } - - } static class ChannelFastInputStream extends FastInputStream { @@ -906,15 +943,17 @@ public void seek(long position) throws IOException { // seek within buffer pos = (int) (position - getBufferPos()); } else { - // long currSize = ch.size(); // not needed - underlying read should handle (unless read never done) - // if (position > currSize) throw new EOFException("Read past EOF: seeking to " + position + " on file of size " + currSize + " file=" + ch); + // long currSize = ch.size(); // not needed - underlying read should handle (unless read + // never done) + // if (position > currSize) throw new EOFException("Read past EOF: seeking to " + position + + // " on file of size " + currSize + " file=" + ch); readFromStream = position; end = pos = 0; } assert position() == position; } - /** where is the start of the buffer relative to the whole file */ + /** where is the start of the buffer relative to the whole file */ public long getBufferPos() { return readFromStream - end; } @@ -930,9 +969,16 @@ public void close() throws IOException { @Override 
public String toString() { - return "readFromStream=" + readFromStream + " pos=" + pos + " end=" + end + " bufferPos=" + getBufferPos() + " position=" + position(); + return "readFromStream=" + + readFromStream + + " pos=" + + pos + + " end=" + + end + + " bufferPos=" + + getBufferPos() + + " position=" + + position(); } } } - - diff --git a/solr/core/src/java/org/apache/solr/util/ExportTool.java b/solr/core/src/java/org/apache/solr/util/ExportTool.java index 9767a92f093..dc8b4a61267 100644 --- a/solr/core/src/java/org/apache/solr/util/ExportTool.java +++ b/solr/core/src/java/org/apache/solr/util/ExportTool.java @@ -17,6 +17,13 @@ package org.apache.solr.util; +import static org.apache.solr.common.params.CommonParams.FL; +import static org.apache.solr.common.params.CommonParams.JAVABIN; +import static org.apache.solr.common.params.CommonParams.Q; +import static org.apache.solr.common.params.CommonParams.SORT; +import static org.apache.solr.common.util.JavaBinCodec.SOLRINPUTDOC; + +import com.google.common.collect.ImmutableSet; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; @@ -46,8 +53,6 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.zip.GZIPOutputStream; - -import com.google.common.collect.ImmutableSet; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.lucene.util.SuppressForbidden; @@ -77,12 +82,6 @@ import org.noggit.CharArr; import org.noggit.JSONWriter; -import static org.apache.solr.common.params.CommonParams.FL; -import static org.apache.solr.common.params.CommonParams.JAVABIN; -import static org.apache.solr.common.params.CommonParams.Q; -import static org.apache.solr.common.params.CommonParams.SORT; -import static org.apache.solr.common.util.JavaBinCodec.SOLRINPUTDOC; - public class ExportTool extends SolrCLI.ToolBase { @Override public String getName() { @@ -94,7 +93,7 @@ public Option[] getOptions() { return OPTIONS; } - public static abstract class Info { + public abstract static class Info { String baseurl; String format; String query; @@ -109,11 +108,9 @@ public static abstract class Info { CloudSolrClient solrClient; DocsSink sink; - public Info(String url) { setUrl(url); setOutFormat(null, "jsonl"); - } public void setUrl(String url) { @@ -137,11 +134,8 @@ public void setOutFormat(String out, String format) { this.out = out; if (this.out == null) { - this.out = JAVABIN.equals(format) ? - coll + ".javabin" : - coll + ".json"; + this.out = JAVABIN.equals(format) ? 
coll + ".javabin" : coll + ".json"; } - } DocsSink getSink() { @@ -152,8 +146,12 @@ DocsSink getSink() { void fetchUniqueKey() throws SolrServerException, IOException { solrClient = new CloudSolrClient.Builder(Collections.singletonList(baseurl)).build(); - NamedList response = solrClient.request(new GenericSolrRequest(SolrRequest.METHOD.GET, "/schema/uniquekey", - new MapSolrParams(Collections.singletonMap("collection", coll)))); + NamedList response = + solrClient.request( + new GenericSolrRequest( + SolrRequest.METHOD.GET, + "/schema/uniquekey", + new MapSolrParams(Collections.singletonMap("collection", coll)))); uniqueKey = (String) response.get("uniqueKey"); } @@ -169,12 +167,9 @@ public void streamSolrDocument(SolrDocument doc) { } @Override - public void streamDocListInfo(long numFound, long start, Float maxScore) { - - } + public void streamDocListInfo(long numFound, long start, Float maxScore) {} }; } - } static Set formats = ImmutableSet.of(JAVABIN, "jsonl"); @@ -191,11 +186,11 @@ protected void runImpl(CommandLine cli) throws Exception { info.exportDocs(); } - static abstract class DocsSink { + abstract static class DocsSink { Info info; OutputStream fos; - abstract void start() throws IOException ; + abstract void start() throws IOException; @SuppressForbidden(reason = "Command line tool prints out to console") void accept(SolrDocument document) throws IOException { @@ -204,45 +199,43 @@ void accept(SolrDocument document) throws IOException { if (count % 100000 == 0) { System.out.println("\nDOCS: " + count); } - - } - void end() throws IOException { - } + void end() throws IOException {} } private static final Option[] OPTIONS = { - Option.builder("url") - .hasArg() - .required() - .desc("Address of the collection, example http://localhost:8983/solr/gettingstarted.") - .build(), - Option.builder("out") - .hasArg() - .required(false) - .desc("File name, defaults to 'collection-name.'.") - .build(), - Option.builder("format") - .hasArg() - .required(false) - .desc("Output format for exported docs (json or javabin), defaulting to json. File extension would be .json.") - .build(), - Option.builder("limit") - .hasArg() - .required(false) - .desc("Maximum number of docs to download. Default is 100, use -1 for all docs.") - .build(), - Option.builder("query") - .hasArg() - .required(false) - .desc("A custom query, default is '*:*'.") - .build(), - Option.builder("fields") - .hasArg() - .required(false) - .desc("Comma separated list of fields to export. By default all fields are fetched.") - .build() + Option.builder("url") + .hasArg() + .required() + .desc("Address of the collection, example http://localhost:8983/solr/gettingstarted.") + .build(), + Option.builder("out") + .hasArg() + .required(false) + .desc("File name, defaults to 'collection-name.'.") + .build(), + Option.builder("format") + .hasArg() + .required(false) + .desc( + "Output format for exported docs (json or javabin), defaulting to json. File extension would be .json.") + .build(), + Option.builder("limit") + .hasArg() + .required(false) + .desc("Maximum number of docs to download. Default is 100, use -1 for all docs.") + .build(), + Option.builder("query") + .hasArg() + .required(false) + .desc("A custom query, default is '*:*'.") + .build(), + Option.builder("fields") + .hasArg() + .required(false) + .desc("Comma separated list of fields to export. 
By default all fields are fetched.") + .build() }; static class JsonSink extends DocsSink { @@ -257,12 +250,12 @@ public JsonSink(Info info) { @Override public void start() throws IOException { fos = new FileOutputStream(info.out); - if(info.out.endsWith(".json.gz") || info.out.endsWith(".json.")) fos = new GZIPOutputStream(fos); + if (info.out.endsWith(".json.gz") || info.out.endsWith(".json.")) + fos = new GZIPOutputStream(fos); if (info.bufferSize > 0) { fos = new BufferedOutputStream(fos, info.bufferSize); } writer = new OutputStreamWriter(fos, StandardCharsets.UTF_8); - } @Override @@ -276,24 +269,25 @@ public void end() throws IOException { public synchronized void accept(SolrDocument doc) throws IOException { charArr.reset(); Map m = new LinkedHashMap<>(doc.size()); - doc.forEach((s, field) -> { - if (s.equals("_version_") || s.equals("_roor_")) return; - if (field instanceof List) { - if (((List) field).size() == 1) { - field = ((List) field).get(0); - } - } - field = constructDateStr(field); - if (field instanceof List) { - List list = (List) field; - if (hasdate(list)) { - ArrayList listCopy = new ArrayList<>(list.size()); - for (Object o : list) listCopy.add(constructDateStr(o)); - field = listCopy; - } - } - m.put(s, field); - }); + doc.forEach( + (s, field) -> { + if (s.equals("_version_") || s.equals("_roor_")) return; + if (field instanceof List) { + if (((List) field).size() == 1) { + field = ((List) field).get(0); + } + } + field = constructDateStr(field); + if (field instanceof List) { + List list = (List) field; + if (hasdate(list)) { + ArrayList listCopy = new ArrayList<>(list.size()); + for (Object o : list) listCopy.add(constructDateStr(o)); + field = listCopy; + } + } + m.put(s, field); + }); jsonWriter.write(m); writer.write(charArr.getArray(), charArr.getStart(), charArr.getEnd()); writer.append('\n'); @@ -303,7 +297,7 @@ public synchronized void accept(SolrDocument doc) throws IOException { private boolean hasdate(List list) { boolean hasDate = false; for (Object o : list) { - if(o instanceof Date){ + if (o instanceof Date) { hasDate = true; break; } @@ -313,7 +307,8 @@ private boolean hasdate(List list) { private Object constructDateStr(Object field) { if (field instanceof Date) { - field = DateTimeFormatter.ISO_INSTANT.format(Instant.ofEpochMilli(((Date) field).getTime())); + field = + DateTimeFormatter.ISO_INSTANT.format(Instant.ofEpochMilli(((Date) field).getTime())); } return field; } @@ -329,46 +324,47 @@ public JavabinSink(Info info) { @Override public void start() throws IOException { fos = new FileOutputStream(info.out); - if(info.out.endsWith(".json.gz") || info.out.endsWith(".json.")) fos = new GZIPOutputStream(fos); + if (info.out.endsWith(".json.gz") || info.out.endsWith(".json.")) + fos = new GZIPOutputStream(fos); if (info.bufferSize > 0) { fos = new BufferedOutputStream(fos, info.bufferSize); } - codec = new JavaBinCodec(fos, null); - codec.writeTag(JavaBinCodec.NAMED_LST, 2); - codec.writeStr("params"); - codec.writeNamedList(new NamedList<>()); - codec.writeStr("docs"); - codec.writeTag(JavaBinCodec.ITERATOR); - + codec = new JavaBinCodec(fos, null, false); + JavaBinCodec.writeTag(codec, JavaBinCodec.NAMED_LST, 2); + JavaBinCodec.writeStr(codec, "params"); + JavaBinCodec.writeNamedList(codec, new NamedList<>()); + JavaBinCodec.writeStr(codec, "docs"); + JavaBinCodec.writeTag(codec, JavaBinCodec.ITERATOR); } @Override public void end() throws IOException { - codec.writeTag(JavaBinCodec.END); + JavaBinCodec.writeTag(codec, JavaBinCodec.END); 
codec.close(); fos.flush(); fos.close(); - } - private BiConsumer bic= new BiConsumer<>() { - @Override - public void accept(String s, Object o) { - try { - if (s.equals("_version_") || s.equals("_root_")) return; - codec.writeExternString(s); - codec.writeVal(o); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - }; + + private final BiConsumer bic = + new BiConsumer<>() { + @Override + public void accept(String s, Object o) { + try { + if (s.equals("_version_") || s.equals("_root_")) return; + codec.writeExternString(s); + JavaBinCodec.writeVal(codec, o); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + }; @Override public synchronized void accept(SolrDocument doc) throws IOException { int sz = doc.size(); - if(doc.containsKey("_version_")) sz--; - if(doc.containsKey("_root_")) sz--; - codec.writeTag(SOLRINPUTDOC, sz); + if (doc.containsKey("_version_")) sz--; + if (doc.containsKey("_root_")) sz--; + JavaBinCodec.writeTag(codec, SOLRINPUTDOC, sz); codec.writeFloat(1f); // document boost doc.forEach(bic); super.accept(doc); @@ -381,15 +377,14 @@ static class MultiThreadedRunner extends Info { SolrDocument EOFDOC = new SolrDocument(); volatile boolean failed = false; Map corehandlers = new HashMap<>(); - private long startTime ; + private long startTime; @SuppressForbidden(reason = "Need to print out time") public MultiThreadedRunner(String url) { super(url); - startTime= System.currentTimeMillis(); + startTime = System.currentTimeMillis(); } - @Override @SuppressForbidden(reason = "Need to print out time") void exportDocs() throws Exception { @@ -398,10 +393,12 @@ void exportDocs() throws Exception { ClusterStateProvider stateProvider = solrClient.getClusterStateProvider(); DocCollection coll = stateProvider.getCollection(this.coll); Map m = coll.getSlicesMap(); - producerThreadpool = ExecutorUtil.newMDCAwareFixedThreadPool(m.size(), - new SolrNamedThreadFactory("solrcli-exporter-producers")); - consumerThreadpool = ExecutorUtil.newMDCAwareFixedThreadPool(1, - new SolrNamedThreadFactory("solrcli-exporter-consumer")); + producerThreadpool = + ExecutorUtil.newMDCAwareFixedThreadPool( + m.size(), new SolrNamedThreadFactory("solrcli-exporter-producers")); + consumerThreadpool = + ExecutorUtil.newMDCAwareFixedThreadPool( + 1, new SolrNamedThreadFactory("solrcli-exporter-consumer")); sink.start(); CountDownLatch consumerlatch = new CountDownLatch(1); try { @@ -411,15 +408,17 @@ void exportDocs() throws Exception { output.println("NO: of shards : " + corehandlers.size()); } CountDownLatch producerLatch = new CountDownLatch(corehandlers.size()); - corehandlers.forEach((s, coreHandler) -> producerThreadpool.submit(() -> { - try { - coreHandler.exportDocsFromCore(); - } catch (Exception e) { - if(output != null) output.println("Error exporting docs from : "+s); - - } - producerLatch.countDown(); - })); + corehandlers.forEach( + (s, coreHandler) -> + producerThreadpool.submit( + () -> { + try { + coreHandler.exportDocsFromCore(); + } catch (Exception e) { + if (output != null) output.println("Error exporting docs from : " + s); + } + producerLatch.countDown(); + })); producerLatch.await(); queue.offer(EOFDOC, 10, TimeUnit.SECONDS); @@ -433,11 +432,15 @@ void exportDocs() throws Exception { try { Files.delete(new File(out).toPath()); } catch (IOException e) { - //ignore + // ignore } } - System.out.println("\nTotal Docs exported: "+ (docsWritten.get() -1)+ - ". 
Time taken: "+( (System.currentTimeMillis() - startTime)/1000) + "secs"); + System.out.println( + "\nTotal Docs exported: " + + (docsWritten.get() - 1) + + ". Time taken: " + + ((System.currentTimeMillis() - startTime) / 1000) + + "secs"); } } @@ -445,37 +448,38 @@ private void addProducers(Map m) { for (Map.Entry entry : m.entrySet()) { Slice slice = entry.getValue(); Replica replica = slice.getLeader(); - if (replica == null) replica = slice.getReplicas().iterator().next();// get a random replica + if (replica == null) + replica = slice.getReplicas().iterator().next(); // get a random replica CoreHandler coreHandler = new CoreHandler(replica); corehandlers.put(replica.getCoreName(), coreHandler); } } private void addConsumer(CountDownLatch consumerlatch) { - consumerThreadpool.submit(() -> { - while (true) { - SolrDocument doc = null; - try { - doc = queue.poll(30, TimeUnit.SECONDS); - } catch (InterruptedException e) { - if (output != null) output.println("Consumer interrupted"); - failed = true; - break; - } - if (doc == EOFDOC) break; - try { - if (docsWritten.get() > limit) continue; - sink.accept(doc); - } catch (Exception e) { - if (output != null) output.println("Failed to write to file " + e.getMessage()); - failed = true; - } - } - consumerlatch.countDown(); - }); + consumerThreadpool.submit( + () -> { + while (true) { + SolrDocument doc = null; + try { + doc = queue.poll(30, TimeUnit.SECONDS); + } catch (InterruptedException e) { + if (output != null) output.println("Consumer interrupted"); + failed = true; + break; + } + if (doc == EOFDOC) break; + try { + if (docsWritten.get() > limit) continue; + sink.accept(doc); + } catch (Exception e) { + if (output != null) output.println("Failed to write to file " + e.getMessage()); + failed = true; + } + } + consumerlatch.countDown(); + }); } - class CoreHandler { final Replica replica; long expectedDocs; @@ -485,8 +489,7 @@ class CoreHandler { this.replica = replica; } - boolean exportDocsFromCore() - throws IOException, SolrServerException { + boolean exportDocsFromCore() throws IOException, SolrServerException { HttpSolrClient client = new HttpSolrClient.Builder(baseurl).build(); try { expectedDocs = getDocCount(replica.getCoreName(), client); @@ -498,33 +501,41 @@ boolean exportDocsFromCore() params.add(CommonParams.DISTRIB, "false"); params.add(CommonParams.ROWS, "1000"); String cursorMark = CursorMarkParams.CURSOR_MARK_START; - Consumer wrapper = doc -> { - try { - queue.offer(doc, 10, TimeUnit.SECONDS); - receivedDocs.incrementAndGet(); - } catch (InterruptedException e) { - failed = true; - if (output != null) output.println("Failed to write docs from" + e.getMessage()); - } - }; - StreamingBinaryResponseParser responseParser = new StreamingBinaryResponseParser(getStreamer(wrapper)); + Consumer wrapper = + doc -> { + try { + queue.offer(doc, 10, TimeUnit.SECONDS); + receivedDocs.incrementAndGet(); + } catch (InterruptedException e) { + failed = true; + if (output != null) output.println("Failed to write docs from" + e.getMessage()); + } + }; + StreamingBinaryResponseParser responseParser = + new StreamingBinaryResponseParser(getStreamer(wrapper)); while (true) { if (failed) return false; if (docsWritten.get() > limit) return true; params.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark); - request = new GenericSolrRequest(SolrRequest.METHOD.GET, - "/" + replica.getCoreName() + "/select", params); + request = + new GenericSolrRequest( + SolrRequest.METHOD.GET, "/" + replica.getCoreName() + "/select", params); 
request.setResponseParser(responseParser); try { NamedList rsp = client.request(request); String nextCursorMark = (String) rsp.get(CursorMarkParams.CURSOR_MARK_NEXT); if (nextCursorMark == null || Objects.equals(cursorMark, nextCursorMark)) { if (output != null) - output.println(StrUtils.formatString("\nExport complete for : {0}, docs : {1}", replica.getCoreName(), receivedDocs.get())); + output.println( + StrUtils.formatString( + "\nExport complete for : {0}, docs : {1}", + replica.getCoreName(), receivedDocs.get())); if (expectedDocs != receivedDocs.get()) { if (output != null) { - output.println(StrUtils.formatString("Could not download all docs for core {0} , expected: {1} , actual", - replica.getCoreName(), expectedDocs, receivedDocs)); + output.println( + StrUtils.formatString( + "Could not download all docs for core {0} , expected: {1} , actual", + replica.getCoreName(), expectedDocs, receivedDocs)); return false; } } @@ -533,7 +544,12 @@ boolean exportDocsFromCore() cursorMark = nextCursorMark; if (output != null) output.print("."); } catch (SolrServerException e) { - if(output != null) output.println("Error reading from server "+ replica.getBaseUrl()+"/"+ replica.getCoreName()); + if (output != null) + output.println( + "Error reading from server " + + replica.getBaseUrl() + + "/" + + replica.getCoreName()); failed = true; return false; } @@ -545,13 +561,13 @@ boolean exportDocsFromCore() } } - - static long getDocCount(String coreName, HttpSolrClient client) throws SolrServerException, IOException { + static long getDocCount(String coreName, HttpSolrClient client) + throws SolrServerException, IOException { SolrQuery q = new SolrQuery("*:*"); q.setRows(0); q.add("distrib", "false"); - GenericSolrRequest request = new GenericSolrRequest(SolrRequest.METHOD.GET, - "/" + coreName + "/select", q); + GenericSolrRequest request = + new GenericSolrRequest(SolrRequest.METHOD.GET, "/" + coreName + "/select", q); NamedList res = client.request(request); SolrDocumentList sdl = (SolrDocumentList) res.get("response"); return sdl.getNumFound(); diff --git a/solr/core/src/java/org/apache/solr/util/PackageTool.java b/solr/core/src/java/org/apache/solr/util/PackageTool.java index 89aa244945e..34f66275157 100644 --- a/solr/core/src/java/org/apache/solr/util/PackageTool.java +++ b/solr/core/src/java/org/apache/solr/util/PackageTool.java @@ -16,11 +16,13 @@ */ package org.apache.solr.util; +import static org.apache.solr.packagemanager.PackageUtils.print; +import static org.apache.solr.packagemanager.PackageUtils.printGreen; + import java.io.File; import java.lang.invoke.MethodHandles; import java.nio.file.Paths; import java.util.Map; - import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.commons.io.FileUtils; @@ -43,9 +45,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.packagemanager.PackageUtils.print; -import static org.apache.solr.packagemanager.PackageUtils.printGreen; - public class PackageTool extends SolrCLI.ToolBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -55,7 +54,7 @@ public PackageTool() { // Need a logging free, clean output going through to the user. 
Configurator.setRootLevel(Level.OFF); } - + @Override public String getName() { return "package"; diff --git a/solr/core/src/java/org/apache/solr/util/RecordingJSONParser.java b/solr/core/src/java/org/apache/solr/util/RecordingJSONParser.java index 932ae5fbffd..bb2ba75dfdd 100644 --- a/solr/core/src/java/org/apache/solr/util/RecordingJSONParser.java +++ b/solr/core/src/java/org/apache/solr/util/RecordingJSONParser.java @@ -36,7 +36,7 @@ public class RecordingJSONParser extends JSONParser { private boolean objectStarted = false; private long lastMarkedPosition = 0; private long lastGlobalPosition = 0; - private static final int BUFFER_SIZE = 8192; + private static final int BUFFER_SIZE = 16384; public RecordingJSONParser(Reader in) { diff --git a/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java b/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java index 53f19b9fff4..c3c749d2414 100644 --- a/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java +++ b/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java @@ -114,7 +114,7 @@ public static boolean changeLogLevel(String logLevel) { private static boolean isLog4jActive() { try { // Make sure we have log4j LogManager in classpath - Class.forName("org.apache.log4j.LogManager"); + Class.forName("org.apache.logging.log4j.LogManager"); // Make sure that log4j is really selected as logger in slf4j - we could have LogManager in the bridge class :) return binder.getLoggerFactoryClassStr().contains("Log4jLoggerFactory"); } catch (Exception e) { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java index 3e9956fbe7a..3f90fadd950 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java @@ -18,9 +18,16 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.TreeSet; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -61,74 +68,83 @@ public static void setupCluster() throws Exception { numReplicas = random().nextInt(2) + 2; useAdminToSetProps = random().nextBoolean(); - configureCluster(numNodes) - .addConfig(COLLECTION_NAME, configset("cloud-minimal")) - .configure(); + configureCluster(numNodes).addConfig(COLLECTION_NAME, configset("cloud-minimal")).configure(); - CollectionAdminResponse resp = CollectionAdminRequest.createCollection(COLLECTION_NAME, COLLECTION_NAME, - numShards, numReplicas, 0, 0) - .process(cluster.getSolrClient()); + CollectionAdminResponse resp = + CollectionAdminRequest.createCollection( + COLLECTION_NAME, COLLECTION_NAME, numShards, numReplicas, 0, 0) + .process(cluster.getSolrClient()); assertEquals("Admin request failed; ", 0, resp.getStatus()); cluster.waitForActiveCollection(COLLECTION_NAME, numShards, numShards * numReplicas); - } @Before public void removeAllProperties() throws KeeperException, InterruptedException { forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + 
DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); for (Slice slice : docCollection.getSlices()) { for (Replica rep : slice.getReplicas()) { - rep.getProperties().forEach((key, value) -> { - if (key.startsWith("property.")) { - try { - delProp(slice, rep, key); - } catch (IOException | SolrServerException e) { - fail("Caught unexpected exception in @Before " + e.getMessage()); - } - } - }); + rep.getProperties() + .forEach( + (key, value) -> { + if (key.startsWith("property.")) { + try { + delProp(slice, rep, key); + } catch (IOException | SolrServerException e) { + fail("Caught unexpected exception in @Before " + e.getMessage()); + } + } + }); } } } int timeoutMs = 60000; - - // test that setting an arbitrary "slice unique" property un-sets the property if it's on another replica in the - // slice. This is testing when the property is set on an _individual_ replica whereas testBalancePropertySliceUnique - // tests whether changing an individual _replica_ un-sets the property on other replicas _in that slice_. + // test that setting an arbitrary "slice unique" property un-sets the property if it's on another + // replica in the + // slice. This is testing when the property is set on an _individual_ replica whereas + // testBalancePropertySliceUnique + // tests whether changing an individual _replica_ un-sets the property on other replicas _in that + // slice_. // // NOTE: There were significant problems because at one point the code implicitly defined - // shardUnique=true for the special property preferredLeader. That was removed at one point so we're explicitly + // shardUnique=true for the special property preferredLeader. That was removed at one point so + // we're explicitly // testing that as well. @Test - public void testSetArbitraryPropertySliceUnique() throws IOException, SolrServerException, InterruptedException, KeeperException { + public void testSetArbitraryPropertySliceUnique() + throws IOException, SolrServerException, InterruptedException, KeeperException { // Check both special (preferredLeader) and something arbitrary. doTestSetArbitraryPropertySliceUnique("foo" + random().nextInt(1_000_000)); removeAllProperties(); doTestSetArbitraryPropertySliceUnique("preferredleader"); } - - // Test that automatically distributing a slice unique property un-sets that property if it's in any other replica + // Test that automatically distributing a slice unique property un-sets that property if it's in + // any other replica // on that slice. - // This is different than the test above. The test above sets individual properties on individual nodes. This one + // This is different than the test above. The test above sets individual properties on individual + // nodes. 
This one // relies on Solr to pick which replicas to set the property on @Test - public void testBalancePropertySliceUnique() throws KeeperException, InterruptedException, IOException, SolrServerException { + public void testBalancePropertySliceUnique() + throws KeeperException, InterruptedException, IOException, SolrServerException { // Check both cases of "special" property preferred(Ll)eader doTestBalancePropertySliceUnique("foo" + random().nextInt(1_000_000)); removeAllProperties(); doTestBalancePropertySliceUnique("preferredleader"); } - // We've moved on from a property being tested, we need to check if rebalancing the leaders actually chantges the + // We've moved on from a property being tested, we need to check if rebalancing the leaders + // actually chantges the // leader appropriately. @Test public void testRebalanceLeaders() throws Exception { - // First let's unbalance the preferredLeader property, do all the leaders get reassigned properly? + // First let's unbalance the preferredLeader property, do all the leaders get reassigned + // properly? concentrateProp("preferredLeader"); sendRebalanceCommand(); checkPreferredsAreLeaders(); @@ -138,19 +154,23 @@ public void testRebalanceLeaders() throws Exception { sendRebalanceCommand(); checkPreferredsAreLeaders(); - // Now check the condition we saw "in the wild" where you could not rebalance properly when Jetty was restarted. + // Now check the condition we saw "in the wild" where you could not rebalance properly when + // Jetty was restarted. concentratePropByRestartingJettys(); sendRebalanceCommand(); checkPreferredsAreLeaders(); } - // Insure that the property is set on only one replica per slice when changing a unique property on an individual + // Insure that the property is set on only one replica per slice when changing a unique property + // on an individual // replica. - private void doTestSetArbitraryPropertySliceUnique(String propIn) throws InterruptedException, KeeperException, IOException, SolrServerException { + private void doTestSetArbitraryPropertySliceUnique(String propIn) + throws InterruptedException, KeeperException, IOException, SolrServerException { final String prop = (random().nextBoolean()) ? propIn : propIn.toUpperCase(Locale.ROOT); // First set the property in some replica in some slice forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); Slice[] slices = docCollection.getSlices().toArray(new Slice[0]); Slice slice = slices[random().nextInt(slices.length)]; @@ -171,25 +191,41 @@ private void doTestSetArbitraryPropertySliceUnique(String propIn) throws Interru // insure that no other replica in that slice has the property when we return. while (timeout.hasTimedOut() == false) { forceUpdateCollectionStatus(); - modColl = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + modColl = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME); modSlice = modColl.getSlice(slice.getName()); - rightRep = modSlice.getReplica(rep.getName()).getBool("property." + prop.toLowerCase(Locale.ROOT), false); - count = modSlice.getReplicas().stream().filter(thisRep -> thisRep.getBool("property." 
+ prop.toLowerCase(Locale.ROOT), false))
+              .count();
       if (count == 1 && rightRep) {
         break;
       }
-      TimeUnit.MILLISECONDS.sleep(100);
+      TimeUnit.MILLISECONDS.sleep(200);
     }
     if (count != 1 || rightRep == false) {
-      fail("The property " + prop + " was not uniquely distributed in slice " + slice.getName()
-          + " " + modColl.toString());
+      fail(
+          "The property "
+              + prop
+              + " was not uniquely distributed in slice "
+              + slice.getName()
+              + " "
+              + modColl.toString());
      }
    }
  }
 
-
   // Fail if we the replicas with the preferredLeader property are _not_ also the leaders.
   private void checkPreferredsAreLeaders() throws InterruptedException, KeeperException {
     // Make sure that the shard unique are where you expect.
@@ -197,7 +233,8 @@ private void checkPreferredsAreLeaders() throws InterruptedExce
 
     while (timeout.hasTimedOut() == false) {
       if (checkPreferredsAreLeaders(false)) {
-        // Ok, all preferreds are leaders. Just for Let's also get the election queue and guarantee that every
+        // Ok, all preferreds are leaders. Let's also get the election queue and guarantee
+        // that every
         // live replica is in the queue and none are repeated.
         checkElectionQueues();
         return;
@@ -205,40 +242,67 @@ private void checkPreferredsAreLeaders() throws InterruptedExce
       }
       TimeUnit.MILLISECONDS.sleep(100);
     }
-    log.error("Leaders are not all preferres {}", cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME));
+    log.error(
+        "Leaders are not all preferred {}",
+        cluster
+            .getSolrClient()
+            .getZkStateReader()
+            .getClusterState()
+            .getCollection(COLLECTION_NAME));
     // Show the errors
     checkPreferredsAreLeaders(true);
   }
 
   // Do all active nodes in each slice appear exactly once in the slice's leader election queue?
-  // Since we assert that the number of live replicas is the same size as the leader election queue, we only
+  // Since we assert that the number of live replicas is the same size as the leader election queue,
+  // we only
   // have to compare one way.
   private void checkElectionQueues() throws KeeperException, InterruptedException {
-    DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME);
-    Set<String> liveNodes = cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes();
+    DocCollection docCollection =
+        cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME);
+    Set<String> liveNodes =
+        cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes();
 
     for (Slice slice : docCollection.getSlices()) {
       Set<Replica> liveReplicas = new HashSet<>();
-      slice.getReplicas().forEach(replica -> {
-        if (replica.isActive(liveNodes)) {
-          liveReplicas.add(replica);
-        }
-      });
+      slice
+          .getReplicas()
+          .forEach(
+              replica -> {
+                if (replica.isActive(liveNodes)) {
+                  liveReplicas.add(replica);
+                }
+              });
       checkOneQueue(docCollection, slice, liveReplicas);
     }
   }
 
   // Helper method to check one leader election queue's consistency.
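These cluster-state assertions (and checkOneQueue, whose hunk follows) all share one waiting idiom: poll ZooKeeper-derived state under a TimeOut until the condition holds, sleep briefly between checks, and fail() with the last observed state on expiry. Distilled as a sketch, where conditionHolds() and the failure message are hypothetical placeholders and TimeOut/TimeSource are the Solr test utilities this class already imports:

    // Poll-until-true skeleton used by checkPreferredsAreLeaders and friends.
    TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME);
    while (timeout.hasTimedOut() == false) {
      forceUpdateCollectionStatus();
      if (conditionHolds()) { // hypothetical predicate; each caller inlines its own
        return;
      }
      TimeUnit.MILLISECONDS.sleep(100);
    }
    fail("timed out waiting for the expected cluster state");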
- private void checkOneQueue(DocCollection coll, Slice slice, Set liveReplicas) throws KeeperException, InterruptedException { - - List leaderQueue = cluster.getSolrClient().getZkStateReader().getZkClient().getChildren("/collections/" + COLLECTION_NAME + - "/leader_elect/" + slice.getName() + "/election", null, true); + private void checkOneQueue(DocCollection coll, Slice slice, Set liveReplicas) + throws KeeperException, InterruptedException { + + List leaderQueue = + cluster + .getSolrClient() + .getZkStateReader() + .getZkClient() + .getChildren( + "/collections/" + + COLLECTION_NAME + + "/leader_elect/" + + slice.getName() + + "/election", + null, + true); if (leaderQueue.size() != liveReplicas.size()) { - log.error("One or more replicas is missing from the leader election queue! Slice {}, election queue: {}, collection: {}" - , slice.getName(), leaderQueue, coll); + log.error( + "One or more replicas is missing from the leader election queue! Slice {}, election queue: {}, collection: {}", + slice.getName(), + leaderQueue, + coll); fail("One or more replicas is missing from the leader election queue"); } // Check that each election node has a corresponding live replica. @@ -252,10 +316,13 @@ private void checkOneQueue(DocCollection coll, Slice slice, Set liveRep } // Just an encapsulation for checkPreferredsAreLeaders to make returning easier. - // the doAsserts var is to actually print the problem and fail the test if the condition is not met. - private boolean checkPreferredsAreLeaders(boolean doAsserts) throws KeeperException, InterruptedException { + // the doAsserts var is to actually print the problem and fail the test if the condition is not + // met. + private boolean checkPreferredsAreLeaders(boolean doAsserts) + throws KeeperException, InterruptedException { forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); for (Slice slice : docCollection.getSlices()) { for (Replica rep : slice.getReplicas()) { if (rep.getBool("property.preferredleader", false)) { @@ -272,7 +339,8 @@ private boolean checkPreferredsAreLeaders(boolean doAsserts) throws KeeperExcept } // Arbitrarily send the rebalance command either with the SolrJ interface or with an HTTP request. - private void sendRebalanceCommand() throws SolrServerException, InterruptedException, IOException { + private void sendRebalanceCommand() + throws SolrServerException, InterruptedException, IOException { if (random().nextBoolean()) { rebalanceLeaderUsingSolrJAPI(); } else { @@ -280,9 +348,11 @@ private void sendRebalanceCommand() throws SolrServerException, InterruptedExcep } } - // Helper method to make sure the property is _unbalanced_ first, then it gets properly re-assigned with the + // Helper method to make sure the property is _unbalanced_ first, then it gets properly + // re-assigned with the // BALANCESHARDUNIQUE command. - private void doTestBalancePropertySliceUnique(String propIn) throws InterruptedException, IOException, KeeperException, SolrServerException { + private void doTestBalancePropertySliceUnique(String propIn) + throws InterruptedException, IOException, KeeperException, SolrServerException { final String prop = (random().nextBoolean()) ? 
propIn : propIn.toUpperCase(Locale.ROOT); // Concentrate the properties on as few replicas a possible @@ -296,10 +366,10 @@ private void doTestBalancePropertySliceUnique(String propIn) throws InterruptedE // Verify that the property is reasonably evenly distributed verifyPropCorrectlyDistributed(prop); - } - private void verifyPropCorrectlyDistributed(String prop) throws KeeperException, InterruptedException { + private void verifyPropCorrectlyDistributed(String prop) + throws KeeperException, InterruptedException { TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); @@ -307,7 +377,12 @@ private void verifyPropCorrectlyDistributed(String prop) throws KeeperException, DocCollection docCollection = null; while (timeout.hasTimedOut() == false) { forceUpdateCollectionStatus(); - docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + docCollection = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME); int maxPropCount = Integer.MAX_VALUE; int minPropCount = Integer.MIN_VALUE; for (Slice slice : docCollection.getSlices()) { @@ -327,7 +402,9 @@ private void verifyPropCorrectlyDistributed(String prop) throws KeeperException, } // Used when we concentrate the leader on a few nodes. - private void verifyPropDistributedAsExpected(Map expectedShardReplicaMap, String prop) throws InterruptedException, KeeperException { + private void verifyPropDistributedAsExpected( + Map expectedShardReplicaMap, String prop) + throws InterruptedException, KeeperException { // Make sure that the shard unique are where you expect. TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); @@ -336,7 +413,12 @@ private void verifyPropDistributedAsExpected(Map expectedShardRe DocCollection docCollection = null; while (timeout.hasTimedOut() == false) { forceUpdateCollectionStatus(); - docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + docCollection = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME); failure = false; for (Map.Entry ent : expectedShardReplicaMap.entrySet()) { Replica rep = docCollection.getSlice(ent.getKey()).getReplica(ent.getValue()); @@ -350,12 +432,18 @@ private void verifyPropDistributedAsExpected(Map expectedShardRe TimeUnit.MILLISECONDS.sleep(100); } - fail(prop + " properties are not on the expected replicas: " + docCollection.toString() - + System.lineSeparator() + "Expected " + expectedShardReplicaMap.toString()); + fail( + prop + + " properties are not on the expected replicas: " + + docCollection.toString() + + System.lineSeparator() + + "Expected " + + expectedShardReplicaMap.toString()); } // Just check that the property is distributed as expectecd. 
This does _not_ rebalance the leaders - private void rebalancePropAndCheck(String prop) throws IOException, SolrServerException, InterruptedException, KeeperException { + private void rebalancePropAndCheck(String prop) + throws IOException, SolrServerException, InterruptedException, KeeperException { if (random().nextBoolean()) { rebalancePropUsingSolrJAPI(prop); @@ -364,12 +452,13 @@ private void rebalancePropAndCheck(String prop) throws IOException, SolrServerEx } } - - private void rebalanceLeaderUsingSolrJAPI() throws IOException, SolrServerException, InterruptedException { - CollectionAdminResponse resp = CollectionAdminRequest - .rebalanceLeaders(COLLECTION_NAME) - .process(cluster.getSolrClient()); - assertTrue("All leaders should have been verified", resp.getResponse().get("Summary").toString().contains("Success")); + private void rebalanceLeaderUsingSolrJAPI() + throws IOException, SolrServerException, InterruptedException { + CollectionAdminResponse resp = + CollectionAdminRequest.rebalanceLeaders(COLLECTION_NAME).process(cluster.getSolrClient()); + assertTrue( + "All leaders should have been verified", + resp.getResponse().get("Summary").toString().contains("Success")); assertEquals("Admin request failed; ", 0, resp.getStatus()); } @@ -380,31 +469,33 @@ private void rebalanceLeaderUsingStandardRequest() throws IOException, SolrServe QueryRequest request = new QueryRequest(params); request.setPath("/admin/collections"); QueryResponse resp = request.process(cluster.getSolrClient()); - assertTrue("All leaders should have been verified", resp.getResponse().get("Summary").toString().contains("Success")); + assertTrue( + "All leaders should have been verified", + resp.getResponse().get("Summary").toString().contains("Success")); assertEquals("Call to rebalanceLeaders failed ", 0, resp.getStatus()); } - - private void rebalancePropUsingSolrJAPI(String prop) throws IOException, SolrServerException, InterruptedException { + private void rebalancePropUsingSolrJAPI(String prop) + throws IOException, SolrServerException, InterruptedException { // Don't set the value, that should be done automatically. CollectionAdminResponse resp; if (prop.toLowerCase(Locale.ROOT).contains("preferredleader")) { - resp = CollectionAdminRequest - .balanceReplicaProperty(COLLECTION_NAME, prop) - .process(cluster.getSolrClient()); + resp = + CollectionAdminRequest.balanceReplicaProperty(COLLECTION_NAME, prop) + .process(cluster.getSolrClient()); } else { - resp = CollectionAdminRequest - .balanceReplicaProperty(COLLECTION_NAME, prop) - .setShardUnique(true) - .process(cluster.getSolrClient()); - + resp = + CollectionAdminRequest.balanceReplicaProperty(COLLECTION_NAME, prop) + .setShardUnique(true) + .process(cluster.getSolrClient()); } assertEquals("Admin request failed; ", 0, resp.getStatus()); } - private void rebalancePropUsingStandardRequest(String prop) throws IOException, SolrServerException { + private void rebalancePropUsingStandardRequest(String prop) + throws IOException, SolrServerException { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString()); params.set("property", prop); @@ -419,11 +510,13 @@ private void rebalancePropUsingStandardRequest(String prop) throws IOException, assertEquals("Call to rebalanceLeaders failed ", 0, resp.getStatus()); } - // This important. I've (Erick Erickson) run across a situation where the "standard request" causes failures, but + // This important. 
I've (Erick Erickson) run across a situation where the "standard request" + // causes failures, but // never the Admin request. So let's test both all the time for a given test. // // This sets an _individual_ replica to have the property, not collection-wide - private void setProp(Slice slice, Replica rep, String prop) throws IOException, SolrServerException { + private void setProp(Slice slice, Replica rep, String prop) + throws IOException, SolrServerException { if (useAdminToSetProps) { setPropWithAdminRequest(slice, rep, prop); } else { @@ -431,7 +524,8 @@ private void setProp(Slice slice, Replica rep, String prop) throws IOException, } } - void setPropWithStandardRequest(Slice slice, Replica rep, String prop) throws IOException, SolrServerException { + void setPropWithStandardRequest(Slice slice, Replica rep, String prop) + throws IOException, SolrServerException { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString()); @@ -449,48 +543,62 @@ void setPropWithStandardRequest(Slice slice, Replica rep, String prop) throws IO request.setPath("/admin/collections"); cluster.getSolrClient().request(request); String propLC = prop.toLowerCase(Locale.ROOT); - waitForState("Expecting property '" + prop + "'to appear on replica " + rep.getName(), COLLECTION_NAME, + waitForState( + "Expecting property '" + prop + "'to appear on replica " + rep.getName(), + COLLECTION_NAME, (n, c) -> "true".equals(c.getReplica(rep.getName()).getProperty(propLC))); - } - void setPropWithAdminRequest(Slice slice, Replica rep, String prop) throws IOException, SolrServerException { + void setPropWithAdminRequest(Slice slice, Replica rep, String prop) + throws IOException, SolrServerException { boolean setUnique = (prop.toLowerCase(Locale.ROOT).equals("preferredleader") == false); CollectionAdminRequest.AddReplicaProp addProp = - CollectionAdminRequest.addReplicaProperty(COLLECTION_NAME, slice.getName(), rep.getName(), prop, "true"); + CollectionAdminRequest.addReplicaProperty( + COLLECTION_NAME, slice.getName(), rep.getName(), prop, "true"); if (setUnique) { addProp.setShardUnique(true); } CollectionAdminResponse resp = addProp.process(cluster.getSolrClient()); assertEquals(0, resp.getStatus()); String propLC = prop.toLowerCase(Locale.ROOT); - waitForState("Expecting property '" + prop + "'to appear on replica " + rep.getName(), COLLECTION_NAME, + waitForState( + "Expecting property '" + prop + "'to appear on replica " + rep.getName(), + COLLECTION_NAME, (n, c) -> "true".equals(c.getReplica(rep.getName()).getProperty(propLC))); - } - private void delProp(Slice slice, Replica rep, String prop) throws IOException, SolrServerException { + private void delProp(Slice slice, Replica rep, String prop) + throws IOException, SolrServerException { String propLC = prop.toLowerCase(Locale.ROOT); - CollectionAdminResponse resp = CollectionAdminRequest.deleteReplicaProperty(COLLECTION_NAME, slice.getName(), rep.getName(), propLC) - .process(cluster.getSolrClient()); + CollectionAdminResponse resp = + CollectionAdminRequest.deleteReplicaProperty( + COLLECTION_NAME, slice.getName(), rep.getName(), propLC) + .process(cluster.getSolrClient()); assertEquals("Admin request failed; ", 0, resp.getStatus()); - waitForState("Expecting property '" + prop + "' to be removed from replica " + rep.getName(), COLLECTION_NAME, + waitForState( + "Expecting property '" + prop + "' to be removed from replica " + rep.getName(), + COLLECTION_NAME, (n, c) -> 
c.getReplica(rep.getName()).getProperty(prop) == null);
   }
 
-  // Intentionally un-balance the property to insure that BALANCESHARDUNIQUE does its job. There was an odd case
-  // where rebalancing didn't work very well if the Solr nodes were stopped and restarted that worked perfectly
+  // Intentionally un-balance the property to ensure that BALANCESHARDUNIQUE does its job. There was
+  // an odd case
+  // where rebalancing didn't work very well if the Solr nodes were stopped and restarted that
+  // worked perfectly
   // when if the nodes were _not_ restarted in the test. So we have to test that too.
   private void concentratePropByRestartingJettys() throws Exception {
 
     List<JettySolrRunner> jettys = new ArrayList<>(cluster.getJettySolrRunners());
     Collections.shuffle(jettys, random());
     jettys.remove(random().nextInt(jettys.size()));
 
-    // Now we have a list of jettys, and there is one missing. Stop all of the remaining jettys, then start them again
+    // Now we have a list of jettys, and there is one missing. Stop all of the remaining jettys,
+    // then start them again
     // to concentrate the leaders. It's not necessary that all shards have a leader.
     for (JettySolrRunner jetty : jettys) {
       cluster.stopJettySolrRunner(jetty);
+    }
+    for (JettySolrRunner jetty : jettys) {
       cluster.waitForJettyToStop(jetty);
     }
     checkReplicasInactive(jettys);
@@ -503,26 +611,35 @@ private void concentratePropByRestartingJettys() throws Exception {
     checkAllReplicasActive();
   }
 
-  // while banging my nead against a wall, I put a lot of force refresh statements in. Want to leave them in
+  // while banging my head against a wall, I put a lot of force refresh statements in. Want to leave
+  // them in
   // but have this be a no-op so if we start to get failures, we can re-enable with minimal effort.
   private void forceUpdateCollectionStatus() throws KeeperException, InterruptedException {
     // cluster.getSolrClient().getZkStateReader().forceUpdateCollection(COLLECTION_NAME);
   }
 
-  // Since we have to restart jettys, we don't want to try rebalancing etc. until we're sure all jettys that should
+  // Since we have to restart jettys, we don't want to try rebalancing etc. until we're sure all
+  // jettys that should
   // be up are up and all replicas are active.
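One behavioral detail in the concentratePropByRestartingJettys() hunk above, before checkReplicasInactive's own hunk below: the old code stopped and awaited each Jetty inside a single loop, so each wait gated the next stop; the patch splits this into two passes so all stops are issued first and only then awaited, presumably letting the slow parts of shutdown overlap. The shape of the change, with names exactly as in the test:

    // Before: stop + wait per instance, serializing every shutdown
    for (JettySolrRunner jetty : jettys) {
      cluster.stopJettySolrRunner(jetty);
      cluster.waitForJettyToStop(jetty);
    }

    // After: issue all stops, then wait for each, so waits overlap
    for (JettySolrRunner jetty : jettys) {
      cluster.stopJettySolrRunner(jetty);
    }
    for (JettySolrRunner jetty : jettys) {
      cluster.waitForJettyToStop(jetty);
    }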
- private void checkReplicasInactive(List downJettys) throws KeeperException, InterruptedException { + private void checkReplicasInactive(List downJettys) + throws KeeperException, InterruptedException { TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); DocCollection docCollection = null; Set liveNodes = null; Set downJettyNodes = new TreeSet<>(); for (JettySolrRunner jetty : downJettys) { - downJettyNodes.add(jetty.getBaseUrl().getHost() + ":" + jetty.getBaseUrl().getPort() + "_solr"); + downJettyNodes.add( + jetty.getBaseUrl().getHost() + ":" + jetty.getBaseUrl().getPort() + "_solr"); } while (timeout.hasTimedOut() == false) { forceUpdateCollectionStatus(); - docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + docCollection = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME); liveNodes = cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes(); boolean expectedInactive = true; @@ -542,18 +659,28 @@ private void checkReplicasInactive(List downJettys) throws Keep } TimeUnit.MILLISECONDS.sleep(100); } - fail("timed out waiting for all replicas to become inactive: livenodes: " + liveNodes + - " Collection state: " + docCollection.toString()); + fail( + "timed out waiting for all replicas to become inactive: livenodes: " + + liveNodes + + " Collection state: " + + docCollection.toString()); } - // We need to wait around until all replicas are active before expecting rebalancing or distributing shard-unique + // We need to wait around until all replicas are active before expecting rebalancing or + // distributing shard-unique // properties to work. private void checkAllReplicasActive() throws KeeperException, InterruptedException { TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); while (timeout.hasTimedOut() == false) { forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); - Set liveNodes = cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes(); + DocCollection docCollection = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME); + Set liveNodes = + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes(); boolean allActive = true; for (Slice slice : docCollection.getSlices()) { for (Replica rep : slice.getReplicas()) { @@ -570,17 +697,23 @@ private void checkAllReplicasActive() throws KeeperException, InterruptedExcepti fail("timed out waiting for all replicas to become active"); } - // use a simple heuristic to put as many replicas with the property on as few nodes as possible. The point is that + // use a simple heuristic to put as many replicas with the property on as few nodes as possible. 
+ // The point is that // then we can execute BALANCESHARDUNIQUE and be sure it worked correctly - private void concentrateProp(String prop) throws KeeperException, InterruptedException, IOException, SolrServerException { + private void concentrateProp(String prop) + throws KeeperException, InterruptedException, IOException, SolrServerException { // find all the live nodes - // for each slice, assign the leader to the first replica that is in the lowest position on live_nodes - List liveNodes = new ArrayList<>(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes()); + // for each slice, assign the leader to the first replica that is in the lowest position on + // live_nodes + List liveNodes = + new ArrayList<>( + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes()); Collections.shuffle(liveNodes, random()); Map uniquePropMap = new TreeMap<>(); forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); for (Slice slice : docCollection.getSlices()) { Replica changedRep = null; int livePos = Integer.MAX_VALUE; @@ -592,7 +725,9 @@ private void concentrateProp(String prop) throws KeeperException, InterruptedExc } } if (livePos == Integer.MAX_VALUE) { - fail("Invalid state! We should have a replica to add the property to! " + docCollection.toString()); + fail( + "Invalid state! We should have a replica to add the property to! " + + docCollection.toString()); } uniquePropMap.put(slice.getName(), changedRep.getName()); @@ -603,7 +738,8 @@ private void concentrateProp(String prop) throws KeeperException, InterruptedExc } // make sure that the property in question is unique per shard. - private Map verifyPropUniquePerShard(String prop) throws InterruptedException, KeeperException { + private Map verifyPropUniquePerShard(String prop) + throws InterruptedException, KeeperException { Map uniquePropMaps = new TreeMap<>(); TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); @@ -614,15 +750,25 @@ private Map verifyPropUniquePerShard(String prop) throws Interru } TimeUnit.MILLISECONDS.sleep(100); } - fail("There should be exactly one replica with value " + prop + " set to true per shard: " - + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).toString()); + fail( + "There should be exactly one replica with value " + + prop + + " set to true per shard: " + + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME) + .toString()); return null; // keeps IDE happy. 
} // return true if every shard has exactly one replica with the unique property set to "true" - private boolean checkdUniquePropPerShard(Map uniques, String prop) throws KeeperException, InterruptedException { + private boolean checkdUniquePropPerShard(Map uniques, String prop) + throws KeeperException, InterruptedException { forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); for (Slice slice : docCollection.getSlices()) { int propfCount = 0; @@ -638,4 +784,4 @@ private boolean checkdUniquePropPerShard(Map uniques, String pro } return true; } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java b/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java index 06be968d124..5d2753834f8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java @@ -116,7 +116,7 @@ private static List getCachedLiveNodesFromLocalState(final int expectedC log.info("sleeping #{} to give watchers a chance to finish: {} != {}", i, expectedCount, result.size()); } - Thread.sleep(200); + Thread.sleep(100); } else { break; } @@ -132,7 +132,7 @@ private static List getCachedLiveNodesFromLocalState(final int expectedC public void testStress() throws Exception { // do many iters, so we have "bursts" of adding nodes that we then check - final int numIters = atLeast(TEST_NIGHTLY ? 1000 : 100); + final int numIters = atLeast(TEST_NIGHTLY ? 1000 : 10); for (int iter = 0; iter < numIters; iter++) { // sanity check that ZK says there is in fact 1 live node @@ -157,7 +157,7 @@ public void testStress() throws Exception { // odds of concurrent watchers firing regardless of the num CPUs or load on the machine running // the test (but we deliberately don't look at availableProcessors() since we want randomization // consistency across all machines for a given seed) - final int numThreads = TestUtil.nextInt(random(), 2, 5); + final int numThreads = TestUtil.nextInt(random(), 2, TEST_NIGHTLY ? 5 : 3); // use same num for all thrashers, to increase likely hood of them all competing // (diff random number would mean heavy concurrency only for ~ the first N=lowest num requests) @@ -165,7 +165,7 @@ public void testStress() throws Exception { // this does not need to be a large number -- in fact, the higher it is, the more // likely we are to see a mistake in early watcher triggers get "corrected" by a later one // and overlook a possible bug - final int numNodesPerThrasher = TestUtil.nextInt(random(), 1, 5); + final int numNodesPerThrasher = TestUtil.nextInt(random(), 1, TEST_NIGHTLY ? 
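        // nightly runs exercise the larger per-thrasher node count; local runs
        // deliberately stay small so the iteration loop finishes quickly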
5 : 2); log.info("preparing parallel adds to live nodes: iter={}, numThreads={} numNodesPerThread={}", iter, numThreads, numNodesPerThrasher); diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/HdfsCloudIncrementalBackupTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/HdfsCloudIncrementalBackupTest.java index c0d51d958e8..5de30607d42 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/HdfsCloudIncrementalBackupTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/HdfsCloudIncrementalBackupTest.java @@ -17,11 +17,11 @@ package org.apache.solr.cloud.api.collections; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; - -import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hdfs.DistributedFileSystem; @@ -34,109 +34,117 @@ import org.junit.AfterClass; import org.junit.BeforeClass; -@LuceneTestCase.SuppressCodecs({"SimpleText"}) // Backups do checksum validation against a footer value not present in 'SimpleText' -@ThreadLeakFilters(defaultFilters = true, filters = { - BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) -}) -public class HdfsCloudIncrementalBackupTest extends AbstractIncrementalBackupTest{ - public static final String SOLR_XML = "\n" + - "\n" + - " ${shareSchema:false}\n" + - " ${configSetBaseDir:configsets}\n" + - " ${coreRootDirectory:.}\n" + - "\n" + - " \n" + - " ${urlScheme:}\n" + - " ${socketTimeout:90000}\n" + - " ${connTimeout:15000}\n" + - " \n" + - "\n" + - " \n" + - " 127.0.0.1\n" + - " ${hostPort:8983}\n" + - " ${hostContext:solr}\n" + - " ${solr.zkclienttimeout:30000}\n" + - " ${genericCoreNodeNames:true}\n" + - " 10000\n" + - " ${distribUpdateConnTimeout:45000}\n" + - " ${distribUpdateSoTimeout:340000}\n" + - " \n" + - " \n" + - " \n" + - " \n" + - " hdfs\n" + - " \n" + - " \n" + - " ${solr.hdfs.default.backup.path}\n" + - " ${solr.hdfs.home:}\n" + - " ${solr.hdfs.confdir:}\n" + - " \n" + - " \n" + - " \n" + - "\n"; +@LuceneTestCase.SuppressCodecs({ + "SimpleText" +}) // Backups do checksum validation against a footer value not present in 'SimpleText' +@ThreadLeakFilters( + defaultFilters = true, + filters = { + BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) + }) +@ThreadLeakLingering(linger = 5000) +public class HdfsCloudIncrementalBackupTest extends AbstractIncrementalBackupTest { + public static final String SOLR_XML = + "\n" + + "\n" + + " ${shareSchema:false}\n" + + " ${configSetBaseDir:configsets}\n" + + " ${coreRootDirectory:.}\n" + + "\n" + + " \n" + + " ${urlScheme:}\n" + + " ${socketTimeout:90000}\n" + + " ${connTimeout:15000}\n" + + " \n" + + "\n" + + " \n" + + " 127.0.0.1\n" + + " ${hostPort:8983}\n" + + " ${hostContext:solr}\n" + + " ${solr.zkclienttimeout:30000}\n" + + " ${genericCoreNodeNames:true}\n" + + " 10000\n" + + " ${distribUpdateConnTimeout:45000}\n" + + " ${distribUpdateSoTimeout:340000}\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " hdfs\n" + + " \n" + + " \n" + + " ${solr.hdfs.default.backup.path}\n" + + " ${solr.hdfs.home:}\n" + + " ${solr.hdfs.confdir:}\n" + + " \n" + + " \n" + + " \n" + + "\n"; - private static MiniDFSCluster dfsCluster; - private static String hdfsUri; - private static FileSystem fs; + private static 
MiniDFSCluster dfsCluster; + private static String hdfsUri; + private static FileSystem fs; - @BeforeClass - public static void setupClass() throws Exception { - dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath()); - hdfsUri = HdfsTestUtil.getURI(dfsCluster); - try { - URI uri = new URI(hdfsUri); - Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster); - fs = FileSystem.get(uri, conf); + @BeforeClass + public static void beforeHdfsCloudIncrementalBackupTest() throws Exception { + dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath()); + hdfsUri = HdfsTestUtil.getURI(dfsCluster); + try { + URI uri = new URI(hdfsUri); + Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster); + fs = FileSystem.get(uri, conf); - if (fs instanceof DistributedFileSystem) { - // Make sure dfs is not in safe mode - while (((DistributedFileSystem) fs).setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_GET, true)) { - try { - Thread.sleep(5000); - } catch (InterruptedException e) { - Thread.interrupted(); - // continue - } - } - } - - fs.mkdirs(new org.apache.hadoop.fs.Path("/backup")); - } catch (IOException | URISyntaxException e) { - throw new RuntimeException(e); + if (fs instanceof DistributedFileSystem) { + // Make sure dfs is not in safe mode + while (((DistributedFileSystem) fs) + .setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_GET, true)) { + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + Thread.interrupted(); + // continue + } } + } - System.setProperty("solr.hdfs.default.backup.path", "/backup"); - System.setProperty("solr.hdfs.home", hdfsUri + "/solr"); - useFactory("solr.StandardDirectoryFactory"); - - configureCluster(NUM_SHARDS)// nodes - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) - .withSolrXml(SOLR_XML) - .configure(); + fs.mkdirs(new org.apache.hadoop.fs.Path("/backup")); + } catch (IOException | URISyntaxException e) { + throw new RuntimeException(e); } - @AfterClass - public static void teardownClass() throws Exception { - IOUtils.closeQuietly(fs); - fs = null; - try { - HdfsTestUtil.teardownClass(dfsCluster); - } finally { - dfsCluster = null; - System.clearProperty("solr.hdfs.home"); - System.clearProperty("solr.hdfs.default.backup.path"); - System.clearProperty("test.build.data"); - System.clearProperty("test.cache.data"); - } - } + System.setProperty("solr.hdfs.default.backup.path", "/backup"); + System.setProperty("solr.hdfs.home", hdfsUri + "/solr"); + useFactory("solr.StandardDirectoryFactory"); - @Override - public String getCollectionNamePrefix() { - return "hdfsbackuprestore"; - } + configureCluster(NUM_SHARDS) // nodes + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .withSolrXml(SOLR_XML) + .configure(); + } - @Override - public String getBackupLocation() { - return null; + @AfterClass + public static void afterHdfsCloudIncrementalBackupTest() throws Exception { + IOUtils.closeQuietly(fs); + fs = null; + try { + HdfsTestUtil.teardownClass(dfsCluster); + } finally { + dfsCluster = null; + System.clearProperty("solr.hdfs.home"); + System.clearProperty("solr.hdfs.default.backup.path"); + System.clearProperty("test.build.data"); + System.clearProperty("test.cache.data"); } + } + + @Override + public String getCollectionNamePrefix() { + return "hdfsbackuprestore"; + } + + @Override + public String getBackupLocation() { + return null; + } } diff --git 
a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java index a7f7c6daf99..6dade0da6a1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java @@ -16,6 +16,7 @@ */ package org.apache.solr.cloud.api.collections; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.net.URI; @@ -67,6 +68,7 @@ QuickPatchThreadsFilter.class, BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) }) +@ThreadLeakLingering(linger = 5000) public class TestHdfsCloudBackupRestore extends AbstractCloudBackupRestoreTestCase { public static final String SOLR_XML = "\n" + "\n" + @@ -110,7 +112,7 @@ public class TestHdfsCloudBackupRestore extends AbstractCloudBackupRestoreTestCa private static FileSystem fs; @BeforeClass - public static void setupClass() throws Exception { + public static void beforeTestHdfsCloudBackupRestore() throws Exception { dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath()); hdfsUri = HdfsTestUtil.getURI(dfsCluster); try { @@ -149,7 +151,7 @@ public static void setupClass() throws Exception { } @AfterClass - public static void teardownClass() throws Exception { + public static void afterTestHdfsCloudBackupRestore() throws Exception { IOUtils.closeQuietly(fs); fs = null; try { diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsNNFailoverTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsNNFailoverTest.java index f70335d14d3..15f58b2abf6 100644 --- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsNNFailoverTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsNNFailoverTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.cloud.hdfs; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.IOException; import org.apache.hadoop.hdfs.MiniDFSCluster; @@ -36,6 +37,7 @@ QuickPatchThreadsFilter.class, BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) }) +@ThreadLeakLingering(linger = 5000) public class HdfsNNFailoverTest extends BasicDistributedZkTest { private static final String COLLECTION = "collection"; private static MiniDFSCluster dfsCluster; diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoverLeaseTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoverLeaseTest.java index 8d8833f027e..97f8d20c467 100644 --- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoverLeaseTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsRecoverLeaseTest.java @@ -16,11 +16,12 @@ */ package org.apache.solr.cloud.hdfs; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.IOException; import java.net.URI; import java.util.HashSet; import java.util.Set; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -29,6 +30,7 @@ import org.apache.lucene.util.QuickPatchThreadsFilter; import org.apache.solr.SolrIgnoredThreadsFilter; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.util.ObjectReleaseTracker; import org.apache.solr.util.BadHdfsThreadsFilter; import 
org.apache.solr.util.FSHDFSUtils; import org.apache.solr.util.FSHDFSUtils.CallerInfo; @@ -38,72 +40,76 @@ import org.junit.BeforeClass; import org.junit.Test; -import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; - -@ThreadLeakFilters(defaultFilters = true, filters = { - SolrIgnoredThreadsFilter.class, - QuickPatchThreadsFilter.class, - BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) -}) +@ThreadLeakFilters( + defaultFilters = true, + filters = { + SolrIgnoredThreadsFilter.class, + QuickPatchThreadsFilter.class, + BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) + }) +@ThreadLeakLingering(linger = 10000) public class HdfsRecoverLeaseTest extends SolrTestCaseJ4 { - + private static MiniDFSCluster dfsCluster; @BeforeClass - public static void beforeClass() throws Exception { + public static void beforeHdfsRecoverLeaseTest() throws Exception { dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath(), false); } @AfterClass - public static void afterClass() throws Exception { + public static void afterHdfsRecoverLeaseTest() throws Exception { try { HdfsTestUtil.teardownClass(dfsCluster); } finally { dfsCluster = null; } } - + @Before public void setUp() throws Exception { super.setUp(); } - + @After public void tearDown() throws Exception { super.tearDown(); } - + @Test public void testBasic() throws IOException { long startRecoverLeaseSuccessCount = FSHDFSUtils.RECOVER_LEASE_SUCCESS_COUNT.get(); - + URI uri = dfsCluster.getURI(); Path path = new Path(uri); Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster); FileSystem fs1 = FileSystem.get(path.toUri(), conf); Path testFile = new Path(uri.toString() + "/testfile"); FSDataOutputStream out = fs1.create(testFile); - + out.write(5); out.hflush(); out.close(); - FSHDFSUtils.recoverFileLease(fs1, testFile, conf, new CallerInfo() { - - @Override - public boolean isCallerClosed() { - return false; - } - }); + FSHDFSUtils.recoverFileLease( + fs1, + testFile, + conf, + new CallerInfo() { + + @Override + public boolean isCallerClosed() { + return false; + } + }); assertEquals(0, FSHDFSUtils.RECOVER_LEASE_SUCCESS_COUNT.get() - startRecoverLeaseSuccessCount); - + fs1.close(); - FileSystem fs2 = FileSystem.get(path.toUri(), conf); Path testFile2 = new Path(uri.toString() + "/testfile2"); FSDataOutputStream out2 = fs2.create(testFile2); - + if (random().nextBoolean()) { int cnt = random().nextInt(100); for (int i = 0; i < cnt; i++) { @@ -112,38 +118,41 @@ public boolean isCallerClosed() { out2.hflush(); } - // closing the fs will close the file it seems // fs2.close(); - + FileSystem fs3 = FileSystem.get(path.toUri(), conf); - FSHDFSUtils.recoverFileLease(fs3, testFile2, conf, new CallerInfo() { - - @Override - public boolean isCallerClosed() { - return false; - } - }); + FSHDFSUtils.recoverFileLease( + fs3, + testFile2, + conf, + new CallerInfo() { + + @Override + public boolean isCallerClosed() { + return false; + } + }); assertEquals(1, FSHDFSUtils.RECOVER_LEASE_SUCCESS_COUNT.get() - startRecoverLeaseSuccessCount); - + fs3.close(); fs2.close(); } - + @Test public void testMultiThreaded() throws Exception { long startRecoverLeaseSuccessCount = FSHDFSUtils.RECOVER_LEASE_SUCCESS_COUNT.get(); - + final URI uri = dfsCluster.getURI(); final Path path = new Path(uri); final Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster); - + // n threads create files class WriterThread extends Thread { private FileSystem fs; private int id; - + public WriterThread(int id) { 
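      // remember which file this writer owns; the FileSystem handle is opened
      // eagerly in the constructor so setup failures surface before run()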
this.id = id; try { @@ -152,14 +161,14 @@ public WriterThread(int id) { throw new RuntimeException(e); } } - + @Override public void run() { Path testFile = new Path(uri.toString() + "/file-" + id); FSDataOutputStream out; try { out = fs.create(testFile); - + if (random().nextBoolean()) { int cnt = random().nextInt(100); for (int i = 0; i < cnt; i++) { @@ -171,20 +180,20 @@ public void run() { throw new RuntimeException(); } } - + public void close() throws IOException { fs.close(); } - + public int getFileId() { return id; } } - + class RecoverThread extends Thread { private FileSystem fs; private int id; - + public RecoverThread(int id) { this.id = id; try { @@ -193,60 +202,63 @@ public RecoverThread(int id) { throw new RuntimeException(e); } } - + @Override public void run() { Path testFile = new Path(uri.toString() + "/file-" + id); try { - FSHDFSUtils.recoverFileLease(fs, testFile, conf, new CallerInfo() { - - @Override - public boolean isCallerClosed() { - return false; - } - }); + FSHDFSUtils.recoverFileLease( + fs, + testFile, + conf, + new CallerInfo() { + + @Override + public boolean isCallerClosed() { + return false; + } + }); } catch (IOException e) { throw new RuntimeException(e); } } - + public void close() throws IOException { fs.close(); } } - + Set writerThreads = new HashSet(); Set recoverThreads = new HashSet(); - + int threadCount = 3; for (int i = 0; i < threadCount; i++) { WriterThread wt = new WriterThread(i); writerThreads.add(wt); wt.run(); } - + for (WriterThread wt : writerThreads) { wt.join(); } - - Thread.sleep(2000); - + + Thread.sleep(1000); + for (WriterThread wt : writerThreads) { RecoverThread rt = new RecoverThread(wt.getFileId()); recoverThreads.add(rt); rt.run(); } - + for (WriterThread wt : writerThreads) { wt.close(); } - + for (RecoverThread rt : recoverThreads) { rt.close(); } - assertEquals(threadCount, FSHDFSUtils.RECOVER_LEASE_SUCCESS_COUNT.get() - startRecoverLeaseSuccessCount); - + assertEquals( + threadCount, FSHDFSUtils.RECOVER_LEASE_SUCCESS_COUNT.get() - startRecoverLeaseSuccessCount); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java index 3872167d5ca..dc01f0aede8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java +++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java @@ -16,6 +16,8 @@ */ package org.apache.solr.cloud.hdfs; +import io.netty.channel.EventLoopGroup; +import io.netty.util.concurrent.GlobalEventExecutor; import java.io.File; import java.lang.invoke.MethodHandles; import java.net.URI; @@ -30,6 +32,7 @@ import java.util.TimerTask; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.ForkJoinWorkerThread; +import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; import org.apache.commons.lang3.time.FastDateFormat; @@ -61,6 +64,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY; import static org.apache.lucene.util.LuceneTestCase.random; public class HdfsTestUtil { @@ -80,11 +84,11 @@ public class HdfsTestUtil { private static FileSystem badTlogOutStreamFs; public static MiniDFSCluster setupClass(String dir) throws Exception { - return setupClass(dir, true, true); + return setupClass(dir, TEST_NIGHTLY, true); } public static MiniDFSCluster setupClass(String dir, boolean haTesting) throws Exception { - return setupClass(dir, haTesting, true); + return setupClass(dir, TEST_NIGHTLY, 
haTesting); } public static void checkAssumptions() { @@ -278,6 +282,10 @@ private static Configuration getBasicConfiguration(Configuration conf) { conf.setBoolean("dfs.permissions.enabled", false); conf.set("hadoop.security.authentication", "simple"); conf.setBoolean("fs.hdfs.impl.disable.cache", true); + conf.setInt("solr.hdfs.lease.recovery.timeout", 300); + conf.setInt("solr.hdfs.lease.recovery.first.pause", 10); + conf.setInt("solr.hdfs.lease.recovery.pause", 10); + return conf; } @@ -331,6 +339,7 @@ public static void teardownClass(MiniDFSCluster dfsCluster) throws Exception { } } } finally { + GlobalEventExecutor.INSTANCE.shutdownGracefully(0, 0, TimeUnit.SECONDS); System.clearProperty("test.build.data"); System.clearProperty("test.cache.data"); diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsThreadLeakTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsThreadLeakTest.java index 2e04ee04dc0..b12e179648d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsThreadLeakTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsThreadLeakTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.cloud.hdfs; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -31,13 +33,12 @@ import org.junit.BeforeClass; import org.junit.Test; -import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; - @ThreadLeakFilters(defaultFilters = true, filters = { SolrIgnoredThreadsFilter.class, QuickPatchThreadsFilter.class, BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) }) +@ThreadLeakLingering(linger = 5000) public class HdfsThreadLeakTest extends SolrTestCaseJ4 { private static MiniDFSCluster dfsCluster; @@ -53,8 +54,10 @@ public static void afterClass() throws Exception { } finally { dfsCluster = null; } + + interruptThreadsOnTearDown(); // not closed properly } - + @Test public void testBasic() throws IOException { String uri = HdfsTestUtil.getURI(dfsCluster); diff --git a/solr/core/src/test/org/apache/solr/core/CachingDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/CachingDirectoryFactoryTest.java index 5c6bce79028..eb8520ac4bc 100644 --- a/solr/core/src/test/org/apache/solr/core/CachingDirectoryFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/core/CachingDirectoryFactoryTest.java @@ -24,7 +24,6 @@ import java.util.Map; import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.solr.SolrTestCaseJ4; @@ -38,63 +37,62 @@ public class CachingDirectoryFactoryTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private Map dirs = new HashMap<>(); + private final Map dirs = new HashMap<>(); private volatile boolean stop = false; - + private static class Tracker { String path; AtomicInteger refCnt = new AtomicInteger(0); Directory dir; } - + @Test public void stressTest() throws Exception { doStressTest(new RAMDirectoryFactory()); doStressTest(new ByteBuffersDirectoryFactory()); } - + private void doStressTest(final CachingDirectoryFactory df) throws Exception { List threads = new ArrayList<>(); - int threadCount = 11; + int threadCount = TEST_NIGHTLY ? 
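    // the nightly stress run uses the full complement of getDir threads; local runs use a
    // handful, which still exercises the close/release races without the long runtime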
11 : 3; for (int i = 0; i < threadCount; i++) { Thread getDirThread = new GetDirThread(df); threads.add(getDirThread); getDirThread.start(); } - + for (int i = 0; i < 4; i++) { Thread releaseDirThread = new ReleaseDirThread(df); threads.add(releaseDirThread); releaseDirThread.start(); } - + for (int i = 0; i < 2; i++) { Thread incRefThread = new IncRefThread(df); threads.add(incRefThread); incRefThread.start(); } - Thread.sleep(TEST_NIGHTLY ? 30000 : 8000); - - Thread closeThread = new Thread() { - public void run() { - try { - df.close(); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - }; + Thread.sleep(TEST_NIGHTLY ? 30000 : 4000); + + Thread closeThread = + new Thread() { + public void run() { + try { + df.close(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + }; closeThread.start(); - - + stop = true; - + for (Thread thread : threads) { thread.join(); } - - + // do any remaining releases synchronized (dirs) { int sz = dirs.size(); @@ -107,38 +105,35 @@ public void run() { } } } - } - - closeThread.join(); + closeThread.join(); } - + private class ReleaseDirThread extends Thread { Random random; private CachingDirectoryFactory df; - + public ReleaseDirThread(CachingDirectoryFactory df) { this.df = df; } - + @Override public void run() { random = random(); while (!stop) { try { - Thread.sleep(random.nextInt(50) + 1); + Thread.sleep(random.nextInt(TEST_NIGHTLY ? 50 : 10) + 1); } catch (InterruptedException e1) { throw new RuntimeException(e1); } - + synchronized (dirs) { int sz = dirs.size(); List dirsList = new ArrayList<>(); dirsList.addAll(dirs.values()); if (sz > 0) { - Tracker tracker = dirsList.get(Math.min(dirsList.size() - 1, - random.nextInt(sz + 1))); + Tracker tracker = dirsList.get(Math.min(dirsList.size() - 1, random.nextInt(sz + 1))); try { if (tracker.refCnt.get() > 0) { if (random.nextInt(10) > 7) { @@ -157,19 +152,18 @@ public void run() { } } } - } } } - + private class GetDirThread extends Thread { Random random; private CachingDirectoryFactory df; - + public GetDirThread(CachingDirectoryFactory df) { this.df = df; } - + @Override public void run() { random = random(); @@ -187,7 +181,8 @@ public void run() { if (random.nextBoolean()) { path = "path" + random.nextInt(20) + "/" + random.nextInt(20); } else { - path = "path" + random.nextInt(20) + "/" + random.nextInt(20) + "/" + random.nextInt(20); + path = + "path" + random.nextInt(20) + "/" + random.nextInt(20) + "/" + random.nextInt(20); } } synchronized (dirs) { @@ -202,7 +197,7 @@ public void run() { } tracker.refCnt.incrementAndGet(); } - + } catch (AlreadyClosedException e) { log.warn("Cannot get dir, factory is already closed"); } catch (IOException e) { @@ -211,29 +206,29 @@ public void run() { } } } - + private class IncRefThread extends Thread { Random random; private CachingDirectoryFactory df; - + public IncRefThread(CachingDirectoryFactory df) { this.df = df; } - + @Override public void run() { random = random(); while (!stop) { try { - Thread.sleep(random.nextInt(300) + 1); + Thread.sleep(random.nextInt(TEST_NIGHTLY ? 
300 : 50) + 1); } catch (InterruptedException e1) { throw new RuntimeException(e1); } - + String path = "path" + random.nextInt(20); synchronized (dirs) { Tracker tracker = dirs.get(path); - + if (tracker != null && tracker.refCnt.get() > 0) { try { df.incRef(tracker.dir); @@ -241,13 +236,11 @@ public void run() { log.warn("", e); continue; } - + tracker.refCnt.incrementAndGet(); } } - } } } - } diff --git a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java index 4253176f3b7..363e162aa3d 100644 --- a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java @@ -16,6 +16,10 @@ */ package org.apache.solr.core; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; +import com.google.common.base.Strings; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.Path; @@ -26,9 +30,6 @@ import java.util.Locale; import java.util.Map; import java.util.Random; - -import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import com.google.common.base.Strings; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.lucene.store.Directory; @@ -52,23 +53,24 @@ import org.junit.BeforeClass; import org.junit.Test; -import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; - -@ThreadLeakFilters(defaultFilters = true, filters = { - SolrIgnoredThreadsFilter.class, - QuickPatchThreadsFilter.class, - BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) -}) +@ThreadLeakFilters( + defaultFilters = true, + filters = { + SolrIgnoredThreadsFilter.class, + QuickPatchThreadsFilter.class, + BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) + }) +@ThreadLeakLingering(linger = 5000) public class HdfsDirectoryFactoryTest extends SolrTestCaseJ4 { private static MiniDFSCluster dfsCluster; - + @BeforeClass - public static void setupClass() throws Exception { + public static void beforeHdfsDirectoryFactoryTest() throws Exception { dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath(), false); } - + @AfterClass - public static void teardownClass() throws Exception { + public static void afterHdfsDirectoryFactoryTest() throws Exception { try { HdfsTestUtil.teardownClass(dfsCluster); } finally { @@ -84,9 +86,10 @@ public static void teardownClass() throws Exception { @Test @SuppressWarnings({"try"}) public void testInitArgsOrSysPropConfig() throws Exception { - try(HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory()) { + try (HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory()) { // test sys prop config - System.setProperty(HdfsDirectoryFactory.HDFS_HOME, HdfsTestUtil.getURI(dfsCluster) + "/solr1"); + System.setProperty( + HdfsDirectoryFactory.HDFS_HOME, HdfsTestUtil.getURI(dfsCluster) + "/solr1"); hdfsFactory.init(new NamedList<>()); String dataHome = hdfsFactory.getDataHome(new MockCoreDescriptor()); @@ -103,7 +106,8 @@ public void testInitArgsOrSysPropConfig() throws Exception { assertTrue(dataHome.endsWith("/solr2/mock/data")); // test sys prop and init args config - init args wins - System.setProperty(HdfsDirectoryFactory.HDFS_HOME, HdfsTestUtil.getURI(dfsCluster) + "/solr1"); + System.setProperty( + 
HdfsDirectoryFactory.HDFS_HOME, HdfsTestUtil.getURI(dfsCluster) + "/solr1"); hdfsFactory.init(nl); dataHome = hdfsFactory.getDataHome(new MockCoreDescriptor()); @@ -116,8 +120,11 @@ public void testInitArgsOrSysPropConfig() throws Exception { System.setProperty(HdfsDirectoryFactory.CONFIG_DIRECTORY, confDir.toString()); - try (Directory dir = hdfsFactory - .create(HdfsTestUtil.getURI(dfsCluster) + "/solr", NoLockFactory.INSTANCE, DirContext.DEFAULT)) { + try (Directory dir = + hdfsFactory.create( + HdfsTestUtil.getURI(dfsCluster) + "/solr", + NoLockFactory.INSTANCE, + DirContext.DEFAULT)) { assertEquals(confDir.toString(), hdfsFactory.getConfDir()); } @@ -130,20 +137,23 @@ public void testInitArgsOrSysPropConfig() throws Exception { hdfsFactory.init(nl); - assertEquals(4, hdfsFactory.getConfig(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 0)); + assertEquals( + 4, hdfsFactory.getConfig(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 0)); assertTrue(hdfsFactory.getConfig(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, false)); nl = new NamedList<>(); hdfsFactory.init(nl); System.setProperty(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, "true"); - assertEquals(3, hdfsFactory.getConfig(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 0)); + assertEquals( + 3, hdfsFactory.getConfig(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 0)); assertTrue(hdfsFactory.getConfig(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, false)); System.clearProperty(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB); System.clearProperty(HdfsDirectoryFactory.BLOCKCACHE_ENABLED); - assertEquals(0, hdfsFactory.getConfig(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 0)); + assertEquals( + 0, hdfsFactory.getConfig(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 0)); assertFalse(hdfsFactory.getConfig(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, false)); } } @@ -151,26 +161,31 @@ public void testInitArgsOrSysPropConfig() throws Exception { @Test public void testCleanupOldIndexDirectories() throws Exception { try (HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory()) { - System.setProperty(HdfsDirectoryFactory.HDFS_HOME, HdfsTestUtil.getURI(dfsCluster) + "/solr1"); + System.setProperty( + HdfsDirectoryFactory.HDFS_HOME, HdfsTestUtil.getURI(dfsCluster) + "/solr1"); hdfsFactory.init(new NamedList<>()); String dataHome = hdfsFactory.getDataHome(new MockCoreDescriptor()); assertTrue(dataHome.endsWith("/solr1/mock/data")); System.clearProperty(HdfsDirectoryFactory.HDFS_HOME); - try(FileSystem hdfs = FileSystem.get(HdfsTestUtil.getClientConfiguration(dfsCluster))) { + try (FileSystem hdfs = FileSystem.get(HdfsTestUtil.getClientConfiguration(dfsCluster))) { org.apache.hadoop.fs.Path dataHomePath = new org.apache.hadoop.fs.Path(dataHome); - org.apache.hadoop.fs.Path currentIndexDirPath = new org.apache.hadoop.fs.Path(dataHomePath, "index"); - assertFalse(checkHdfsDirectory(hdfs,currentIndexDirPath)); + org.apache.hadoop.fs.Path currentIndexDirPath = + new org.apache.hadoop.fs.Path(dataHomePath, "index"); + assertFalse(checkHdfsDirectory(hdfs, currentIndexDirPath)); hdfs.mkdirs(currentIndexDirPath); assertTrue(checkHdfsDirectory(hdfs, currentIndexDirPath)); - String timestamp1 = new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date()); - org.apache.hadoop.fs.Path oldIndexDirPath = new org.apache.hadoop.fs.Path(dataHomePath, "index." 
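      // stale index directories are named "index.<timestamp>" (SnapShooter.DATE_FMT);
      // cleanup must delete those while leaving the live "index" directory intact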
+ timestamp1); - assertFalse(checkHdfsDirectory(hdfs,oldIndexDirPath)); + String timestamp1 = + new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date()); + org.apache.hadoop.fs.Path oldIndexDirPath = + new org.apache.hadoop.fs.Path(dataHomePath, "index." + timestamp1); + assertFalse(checkHdfsDirectory(hdfs, oldIndexDirPath)); hdfs.mkdirs(oldIndexDirPath); assertTrue(checkHdfsDirectory(hdfs, oldIndexDirPath)); - hdfsFactory.cleanupOldIndexDirectories(dataHomePath.toString(), currentIndexDirPath.toString(), false); + hdfsFactory.cleanupOldIndexDirectories( + dataHomePath.toString(), currentIndexDirPath.toString(), false); assertTrue(checkHdfsDirectory(hdfs, currentIndexDirPath)); assertFalse(checkHdfsDirectory(hdfs, oldIndexDirPath)); @@ -178,18 +193,19 @@ public void testCleanupOldIndexDirectories() throws Exception { } } - private boolean checkHdfsDirectory(FileSystem hdfs, org.apache.hadoop.fs.Path path) throws IOException { + private boolean checkHdfsDirectory(FileSystem hdfs, org.apache.hadoop.fs.Path path) + throws IOException { try { return hdfs.getFileStatus(path).isDirectory(); } catch (FileNotFoundException e) { return false; } } - + @Test public void testLocalityReporter() throws Exception { Random r = random(); - try(HdfsDirectoryFactory factory = new HdfsDirectoryFactory()) { + try (HdfsDirectoryFactory factory = new HdfsDirectoryFactory()) { SolrMetricManager metricManager = new SolrMetricManager(); String registry = TestUtil.randomSimpleString(r, 2, 10); String scope = TestUtil.randomSimpleString(r, 2, 10); @@ -202,13 +218,25 @@ public void testLocalityReporter() throws Exception { factory.initializeMetrics(new SolrMetricsContext(metricManager, registry, "foo"), scope); // get the metrics map for the locality bean - MetricsMap metrics = (MetricsMap) ((SolrMetricManager.GaugeWrapper) metricManager.registry(registry).getMetrics().get("OTHER." + scope + ".hdfsLocality")).getGauge(); + MetricsMap metrics = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + metricManager + .registry(registry) + .getMetrics() + .get("OTHER." 
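                      // the locality gauge is registered under OTHER.<scope>.hdfsLocality
                      // in the metrics registry chosen above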
+ scope + ".hdfsLocality")) + .getGauge(); // We haven't done anything, so there should be no data Map statistics = metrics.getValue(); - assertEquals("Saw bytes that were not written: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL), 0L, + assertEquals( + "Saw bytes that were not written: " + + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL), + 0L, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL)); assertEquals( - "Counted bytes as local when none written: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_RATIO), 0, + "Counted bytes as local when none written: " + + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_RATIO), + 0, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_RATIO)); // create a directory and a file @@ -223,13 +251,21 @@ public void testLocalityReporter() throws Exception { // no locality because hostname not set factory.setHost("bogus"); statistics = metrics.getValue(); - assertEquals("Wrong number of total bytes counted: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL), - long_bytes, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL)); - assertEquals("Wrong number of total blocks counted: " + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_TOTAL), - 1, statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_TOTAL)); assertEquals( - "Counted block as local when bad hostname set: " + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_LOCAL), - 0, statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_LOCAL)); + "Wrong number of total bytes counted: " + + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL), + long_bytes, + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_TOTAL)); + assertEquals( + "Wrong number of total blocks counted: " + + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_TOTAL), + 1, + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_TOTAL)); + assertEquals( + "Counted block as local when bad hostname set: " + + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_LOCAL), + 0, + statistics.get(HdfsLocalityReporter.LOCALITY_BLOCKS_LOCAL)); // set hostname and check again factory.setHost("127.0.0.1"); @@ -237,21 +273,23 @@ public void testLocalityReporter() throws Exception { assertEquals( "Did not count block as local after setting hostname: " + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_LOCAL), - long_bytes, statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_LOCAL)); + long_bytes, + statistics.get(HdfsLocalityReporter.LOCALITY_BYTES_LOCAL)); } } } @Test public void testIsAbsolute() throws Exception { - try(HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory()) { - String relativePath = Strings.repeat( - RandomStrings.randomAsciiAlphanumOfLength(random(), random().nextInt(10) + 1) + '/', - random().nextInt(5) + 1); + try (HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory()) { + String relativePath = + Strings.repeat( + RandomStrings.randomAsciiAlphanumOfLength(random(), random().nextInt(10) + 1) + '/', + random().nextInt(5) + 1); assertFalse(hdfsFactory.isAbsolute(relativePath)); assertFalse(hdfsFactory.isAbsolute("/" + relativePath)); - for(String rootPrefix : Arrays.asList("file://", "hdfs://", "s3a://", "foo://")) { + for (String rootPrefix : Arrays.asList("file://", "hdfs://", "s3a://", "foo://")) { assertTrue(hdfsFactory.isAbsolute(rootPrefix + relativePath)); } } diff --git a/solr/core/src/test/org/apache/solr/core/backup/repository/HdfsBackupRepositoryIntegrationTest.java 
b/solr/core/src/test/org/apache/solr/core/backup/repository/HdfsBackupRepositoryIntegrationTest.java index 3d6ab69b297..ddb71724aab 100644 --- a/solr/core/src/test/org/apache/solr/core/backup/repository/HdfsBackupRepositoryIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/core/backup/repository/HdfsBackupRepositoryIntegrationTest.java @@ -17,6 +17,7 @@ package org.apache.solr.core.backup.repository; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; @@ -41,13 +42,14 @@ @ThreadLeakFilters(defaultFilters = true, filters = { BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) }) +@ThreadLeakLingering(linger = 5000) public class HdfsBackupRepositoryIntegrationTest extends AbstractBackupRepositoryTest { private static MiniDFSCluster dfsCluster; private static String hdfsUri; private static FileSystem fs; @BeforeClass - public static void setupClass() throws Exception { + public static void beforeHdfsBackupRepositoryIntegrationTest() throws Exception { dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath()); hdfsUri = HdfsTestUtil.getURI(dfsCluster); try { @@ -78,7 +80,7 @@ public static void setupClass() throws Exception { } @AfterClass - public static void teardownClass() throws Exception { + public static void afterHdfsBackupRepositoryIntegrationTest() throws Exception { IOUtils.closeQuietly(fs); fs = null; try { diff --git a/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java b/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java index e4168e3a9bd..5a0aae7867e 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java +++ b/solr/core/src/test/org/apache/solr/handler/TestHdfsBackupRestoreCore.java @@ -17,6 +17,12 @@ package org.apache.solr.handler; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; @@ -50,53 +56,49 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Map; - -@ThreadLeakFilters(defaultFilters = true, filters = { - SolrIgnoredThreadsFilter.class, - QuickPatchThreadsFilter.class, - BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) -}) -@SolrTestCaseJ4.SuppressSSL // Currently unknown why SSL does not work with this test +@ThreadLeakFilters( + defaultFilters = true, + filters = { + SolrIgnoredThreadsFilter.class, + QuickPatchThreadsFilter.class, + BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) + }) +@SolrTestCaseJ4.SuppressSSL // Currently unknown why SSL does not work with this test public class TestHdfsBackupRestoreCore extends SolrCloudTestCase { - public static final String HDFS_REPO_SOLR_XML = "\n" + - "\n" + - " ${shareSchema:false}\n" + - " ${configSetBaseDir:configsets}\n" + - " ${coreRootDirectory:.}\n" + - "\n" + - " \n" + - " ${urlScheme:}\n" + - " ${socketTimeout:90000}\n" + - " ${connTimeout:15000}\n" + - " \n" + - "\n" + - " \n" + - " 127.0.0.1\n" + - " ${hostPort:8983}\n" + - " ${hostContext:solr}\n" + - " 
${solr.zkclienttimeout:30000}\n" + - " ${genericCoreNodeNames:true}\n" + - " 10000\n" + - " ${distribUpdateConnTimeout:45000}\n" + - " ${distribUpdateSoTimeout:340000}\n" + - " \n" + - " \n" + - " \n" + - " \n" + - " ${solr.hdfs.default.backup.path}\n" + - " ${solr.hdfs.home:}\n" + - " ${solr.hdfs.confdir:}\n" + - " ${solr.hdfs.permissions.umask-mode:000}\n" + - " \n" + - " \n" + - " \n" + - "\n"; + public static final String HDFS_REPO_SOLR_XML = + "\n" + + "\n" + + " ${shareSchema:false}\n" + + " ${configSetBaseDir:configsets}\n" + + " ${coreRootDirectory:.}\n" + + "\n" + + " \n" + + " ${urlScheme:}\n" + + " ${socketTimeout:90000}\n" + + " ${connTimeout:15000}\n" + + " \n" + + "\n" + + " \n" + + " 127.0.0.1\n" + + " ${hostPort:8983}\n" + + " ${hostContext:solr}\n" + + " ${solr.zkclienttimeout:30000}\n" + + " ${genericCoreNodeNames:true}\n" + + " 10000\n" + + " ${distribUpdateConnTimeout:45000}\n" + + " ${distribUpdateSoTimeout:340000}\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " ${solr.hdfs.default.backup.path}\n" + + " ${solr.hdfs.home:}\n" + + " ${solr.hdfs.confdir:}\n" + + " ${solr.hdfs.permissions.umask-mode:000}\n" + + " \n" + + " \n" + + " \n" + + "\n"; private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static MiniDFSCluster dfsCluster; @@ -135,11 +137,12 @@ public static void setupClass() throws Exception { System.setProperty("solr.hdfs.home", hdfsUri + "/solr"); useFactory("solr.StandardDirectoryFactory"); - configureCluster(1)// nodes - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) - .withSolrXml(HDFS_REPO_SOLR_XML) - .configure(); - + configureCluster(1) // nodes + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .withSolrXml(HDFS_REPO_SOLR_XML) + .configure(); + docsSeed = random().nextLong(); } @@ -160,6 +163,7 @@ public static void teardownClass() throws Exception { System.clearProperty("test.cache.data"); } } + interruptThreadsOnTearDown(); // not closed properly } @Test @@ -172,7 +176,8 @@ public void test() throws Exception { int nDocs = BackupRestoreUtils.indexDocs(solrClient, collectionName, docsSeed); - DocCollection collectionState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName); + DocCollection collectionState = + solrClient.getZkStateReader().getClusterState().getCollection(collectionName); assertEquals(1, collectionState.getActiveSlices().size()); Slice shard = collectionState.getActiveSlices().iterator().next(); assertEquals(1, shard.getReplicas().size()); @@ -190,39 +195,42 @@ public void test() throws Exception { // Create a backup. 
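    // (either via the ReplicationHandler backup command, polled through BackupStatusChecker,
    // or via the core admin BACKUPCORE action keyed by the shardBackupId)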
if (testViaReplicationHandler) { log.info("Running Backup via replication handler"); - BackupRestoreUtils.runReplicationHandlerCommand(baseUrl, coreName, ReplicationHandler.CMD_BACKUP, "hdfs", backupName); - final BackupStatusChecker backupStatus - = new BackupStatusChecker(leaderClient, "/" + coreName + "/replication"); + BackupRestoreUtils.runReplicationHandlerCommand( + baseUrl, coreName, ReplicationHandler.CMD_BACKUP, "hdfs", backupName); + final BackupStatusChecker backupStatus = + new BackupStatusChecker(leaderClient, "/" + coreName + "/replication"); backupStatus.waitForBackupSuccess(backupName, 30); } else { log.info("Running Backup via core admin api"); - Map params = new HashMap<>(); + Map params = new HashMap<>(); params.put("name", backupName); params.put(CoreAdminParams.BACKUP_REPOSITORY, "hdfs"); params.put(CoreAdminParams.SHARD_BACKUP_ID, shardBackupId); - BackupRestoreUtils.runCoreAdminCommand(replicaBaseUrl, coreName, CoreAdminAction.BACKUPCORE.toString(), params); + BackupRestoreUtils.runCoreAdminCommand( + replicaBaseUrl, coreName, CoreAdminAction.BACKUPCORE.toString(), params); } int numRestoreTests = nDocs > 0 ? TestUtil.nextInt(random(), 1, 5) : 1; - for (int attempts=0; attempts 0) { - //Delete a few docs + // Delete a few docs int numDeletes = TestUtil.nextInt(random(), 1, nDocs); - for(int i=0; i params = new HashMap<>(); + Map params = new HashMap<>(); params.put("name", "snapshot." + backupName); params.put(CoreAdminParams.BACKUP_REPOSITORY, "hdfs"); params.put(CoreAdminParams.SHARD_BACKUP_ID, shardBackupId); - BackupRestoreUtils.runCoreAdminCommand(replicaBaseUrl, coreName, CoreAdminAction.RESTORECORE.toString(), params); + BackupRestoreUtils.runCoreAdminCommand( + replicaBaseUrl, coreName, CoreAdminAction.RESTORECORE.toString(), params); } - //See if restore was successful by checking if all the docs are present again + // See if restore was successful by checking if all the docs are present again BackupRestoreUtils.verifyDocs(nDocs, leaderClient, coreName); // Verify the permissions on the backup folder. - final String backupPath = (testViaReplicationHandler) ? - "/backup/snapshot."+ backupName : - "/backup/shard_backup_metadata"; - final FsAction expectedPerms = (testViaReplicationHandler) ? FsAction.ALL : FsAction.READ_EXECUTE; + final String backupPath = + (testViaReplicationHandler) + ? "/backup/snapshot." + backupName + : "/backup/shard_backup_metadata"; + final FsAction expectedPerms = + (testViaReplicationHandler) ? 
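            // replication-handler snapshots inherit the permissive umask configured in
            // solr.xml, while incremental shard_backup_metadata should be read/execute only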
FsAction.ALL : FsAction.READ_EXECUTE; FileStatus status = fs.getFileStatus(new org.apache.hadoop.fs.Path(backupPath)); FsPermission perm = status.getPermission(); diff --git a/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java b/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java index 34227aea19d..c819ba25789 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java @@ -17,6 +17,7 @@ package org.apache.solr.handler.admin; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -32,14 +33,17 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.cloud.MiniSolrCloudCluster; +import org.apache.solr.cloud.hdfs.HdfsTestUtil; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.TimeSource; import org.apache.solr.handler.TestSQLHandler; import org.apache.solr.util.TimeOut; import org.junit.After; +import org.junit.AfterClass; import org.junit.Before; import org.junit.Test; +@ThreadLeakLingering(linger = 5000) public class DaemonStreamApiTest extends SolrTestCaseJ4 { private MiniSolrCloudCluster cluster; @@ -61,6 +65,11 @@ public class DaemonStreamApiTest extends SolrTestCaseJ4 { private String url; + @AfterClass + public static void afterDaemonStreamApiTest() throws Exception { + interruptThreadsOnTearDown(); + } + @Override @Before diff --git a/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java b/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java index 772004687c8..a53c038b451 100644 --- a/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java +++ b/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.store.hdfs; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.IOException; import java.nio.file.FileAlreadyExistsException; import java.util.HashSet; @@ -48,14 +49,15 @@ QuickPatchThreadsFilter.class, BadHdfsThreadsFilter.class // hdfs currently leaks thread(s) }) +@ThreadLeakLingering(linger = 5000) public class HdfsDirectoryTest extends SolrTestCaseJ4 { - private static final int MAX_NUMBER_OF_WRITES = 10000; + private static final int MAX_NUMBER_OF_WRITES = 1000; private static final int MIN_FILE_SIZE = 100; - private static final int MAX_FILE_SIZE = 100000; + private static final int MAX_FILE_SIZE = 10000; private static final int MIN_BUFFER_SIZE = 1; private static final int MAX_BUFFER_SIZE = 5000; - private static final int MAX_NUMBER_OF_READS = 10000; + private static final int MAX_NUMBER_OF_READS = 1000; private static MiniDFSCluster dfsCluster; private Configuration directoryConf; private Path directoryPath; @@ -63,12 +65,12 @@ public class HdfsDirectoryTest extends SolrTestCaseJ4 { private Random random; @BeforeClass - public static void beforeClass() throws Exception { + public static void beforeHdfsDirectoryTest() throws Exception { dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath()); } @AfterClass - public static void afterClass() throws Exception { + public static void afterHdfsDirectoryTest() throws Exception { try { HdfsTestUtil.teardownClass(dfsCluster); } finally { diff --git a/solr/server/etc/jetty-http.xml 
b/solr/server/etc/jetty-http.xml index 4793c1f3194..d4fceccc976 100644 --- a/solr/server/etc/jetty-http.xml +++ b/solr/server/etc/jetty-http.xml @@ -29,6 +29,8 @@ + 512 + 16393 diff --git a/solr/server/etc/jetty-https.xml b/solr/server/etc/jetty-https.xml index ab03ba4d867..c13cf9ed865 100644 --- a/solr/server/etc/jetty-https.xml +++ b/solr/server/etc/jetty-https.xml @@ -54,6 +54,7 @@ + 512 diff --git a/solr/solrj/build.gradle b/solr/solrj/build.gradle index a7f33154629..5ca55b33d4a 100644 --- a/solr/solrj/build.gradle +++ b/solr/solrj/build.gradle @@ -27,6 +27,8 @@ dependencies { api 'commons-io:commons-io' api 'org.apache.commons:commons-math3' + api 'it.unimi.dsi:fastutil-core:8.5.6' + api 'org.eclipse.jetty.http2:http2-client' api 'org.eclipse.jetty.http2:http2-http-client-transport' api 'org.eclipse.jetty:jetty-util' @@ -57,7 +59,7 @@ dependencies { testImplementation project(':solr:test-framework') testImplementation 'org.eclipse.jetty:jetty-webapp' - testImplementation ('org.eclipse.jetty:jetty-alpn-java-server', { + testImplementation('org.eclipse.jetty:jetty-alpn-java-server', { exclude group: "org.eclipse.jetty.alpn", module: "alpn-api" }) testImplementation 'org.objenesis:objenesis' diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java index f0ff3bf05a8..3d377800fe6 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java @@ -69,13 +69,13 @@ public enum SolrClientContext { }; public static final Set SUPPORTED_METHODS = Set.of( - METHOD.GET.toString(), - METHOD.POST.toString(), - METHOD.PUT.toString(), - METHOD.DELETE.toString()); + "GET", + "POST", + "PUT", + "DELETE"); - private METHOD method = METHOD.GET; - private String path = null; + private METHOD method; + private String path; private Map headers; private ResponseParser responseParser; @@ -259,7 +259,7 @@ public String getBasePath() { public void addHeader(String key, String value) { if (headers == null) { - headers = new HashMap<>(); + headers = new HashMap<>(4, 0.50f); } headers.put(key, value); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/AsyncTracker.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/AsyncTracker.java new file mode 100644 index 00000000000..10370e66a98 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/AsyncTracker.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.solrj.impl; + +import java.io.Closeable; +import java.lang.invoke.MethodHandles; +import java.util.concurrent.Phaser; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import org.apache.solr.common.SolrException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class AsyncTracker implements Closeable { + + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private static final long CLOSE_TIMEOUT = TimeUnit.SECONDS.convert(1, TimeUnit.HOURS); + + private final Semaphore available; + private final boolean wait; + + private volatile boolean closed = false; + + // wait for async requests + private final Phaser phaser; + // maximum outstanding requests left + + public static class ThePhaser extends Phaser { + + ThePhaser(int start) { + super(start); + } + + @Override + protected boolean onAdvance(int phase, int parties) { + return false; + } + } + + public AsyncTracker(int maxOutstandingAsyncRequests) { + this(maxOutstandingAsyncRequests, true, 0); + } + + public AsyncTracker(int maxOutstandingAsyncRequests, boolean wait, int start) { + phaser = new ThePhaser(start); + this.wait = wait; + if (maxOutstandingAsyncRequests > 0) { + available = new Semaphore(maxOutstandingAsyncRequests, false); + } else { + available = null; + } + } + + public void waitForComplete(long timeout, TimeUnit timeUnit) throws TimeoutException { + final int registeredParties = phaser.getRegisteredParties(); + int phase = phaser.getPhase(); + if (phaser.getUnarrivedParties() == 0) return; + if (log.isTraceEnabled()) { + final int unarrivedParties = phaser.getUnarrivedParties(); + final int arrivedParties = phaser.getArrivedParties(); + log.trace( + "Before wait for outstanding requests registered: {} arrived: {}, {} {}", + registeredParties, + arrivedParties, + unarrivedParties, + phaser); + } + try { + phaser.awaitAdvanceInterruptibly(phase, timeout, timeUnit); + } catch (IllegalStateException e) { + log.error("Unexpected, perhaps came after close; ?", e); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); + } + + if (log.isTraceEnabled()) { + log.trace("After wait for outstanding requests {}", phaser); + } + } + + public void close() { + try { + if (wait && available != null) { + while (true) { + final boolean hasQueuedThreads = available.hasQueuedThreads(); + if (!hasQueuedThreads) break; + available.release(available.getQueueLength()); + } + } + phaser.forceTermination(); + } catch (Exception e) { + log.error("Exception closing Http2SolrClient asyncTracker", e); + } finally { + closed = true; + } + } + + public boolean register() { + if (log.isDebugEnabled()) { + log.debug("Registered new party {}", phaser); + } + + phaser.register(); + + if (available != null) { + if (!wait) { + boolean success; + success = available.tryAcquire(); + return success; + } else { + try { + available.acquire(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, e); + } + } + } + return true; + } + + public void arrive() { + arrive(true); + } + + public void arrive(boolean releaseAvailable) { + + if (available != null && releaseAvailable) available.release(); + + try { + phaser.arriveAndDeregister(); + } catch (IllegalStateException e) { + log.info("Arrive came after close - 
not unexpected, but unusual", e); + } + + if (log.isDebugEnabled()) { + log.debug("Request complete {}", phaser); + } + } + + public int getUnArrived() { + return phaser.getUnarrivedParties(); + } +} diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java index 1fcd72ef601..d4ffab43b06 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java @@ -16,19 +16,18 @@ */ package org.apache.solr.client.solrj.impl; +import static org.apache.solr.common.params.CommonParams.JAVABIN_MIME; + import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.Collection; - import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec; import org.apache.solr.client.solrj.request.RequestWriter; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.common.util.ContentStream; -import static org.apache.solr.common.params.CommonParams.JAVABIN_MIME; - /** * A RequestWriter which writes requests in the javabin format * @@ -59,6 +58,7 @@ public String getContentType() { } } + @SuppressWarnings("deprecation") @Override public Collection getContentStreams(SolrRequest req) throws IOException { if (req instanceof UpdateRequest) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryResponseParser.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryResponseParser.java index b146dcd9ee8..ec604fae0dd 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryResponseParser.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryResponseParser.java @@ -16,19 +16,15 @@ */ package org.apache.solr.client.solrj.impl; -import org.apache.solr.client.solrj.ResponseParser; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.JavaBinCodec; - import java.io.IOException; import java.io.InputStream; import java.io.Reader; +import org.apache.solr.client.solrj.ResponseParser; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.util.JavaBinCodec; +import org.apache.solr.common.util.NamedList; -/** - * - * @since solr 1.3 - */ +/** @since solr 1.3 */ public class BinaryResponseParser extends ResponseParser { public static final String BINARY_CONTENT_TYPE = "application/octet-stream"; @@ -51,7 +47,6 @@ public NamedList processResponse(InputStream body, String encoding) { return (NamedList) createCodec().unmarshal(body); } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "parsing error", e); - } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java index a705121a884..e13eb1cae30 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java @@ -26,10 +26,9 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; -import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.LinkedTransferQueue; import 
java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; @@ -70,6 +69,7 @@ public class ConcurrentUpdateHttp2SolrClient extends SolrClient { private final boolean streamDeletes; private volatile boolean closed; private volatile CountDownLatch lock = null; // used to block everything + private volatile boolean waitingForFinish; private static class CustomBlockingQueue implements Iterable{ private final BlockingQueue queue; @@ -78,7 +78,7 @@ private static class CustomBlockingQueue implements Iterable{ private final E backdoorE; public CustomBlockingQueue(int queueSize, int maxConsumers, E backdoorE) { - queue = new LinkedBlockingQueue<>(); + queue = new LinkedTransferQueue(); available = new Semaphore(queueSize); this.queueSize = queueSize; this.backdoorE = backdoorE; @@ -174,6 +174,7 @@ class Runner implements Runnable { @Override public void run() { log.debug("starting runner: {}", this); + waitingForFinish = false; // This loop is so we can continue if an element was added to the queue after the last runner exited. for (;;) { try { @@ -225,8 +226,9 @@ void sendUpdateStream() throws Exception { } InputStreamResponseListener responseListener = null; + try (Http2SolrClient.OutStream out = client.initOutStream(basePath, update.getRequest(), - update.getCollection())) { + update.getCollection())) { Update upd = update; while (upd != null) { UpdateRequest req = upd.getRequest(); @@ -235,14 +237,23 @@ void sendUpdateStream() throws Exception { break; } client.send(out, upd.getRequest(), upd.getCollection()); - out.flush(); - notifyQueueAndRunnersIfEmptyQueue(); - upd = queue.poll(pollQueueTime, TimeUnit.MILLISECONDS); + upd = queue.poll(0, TimeUnit.MILLISECONDS); + + if (upd == null) { + out.flush(); + if (waitingForFinish) { + notifyQueueAndRunnersIfEmptyQueue(); + break; + } + notifyQueueAndRunnersIfEmptyQueue(); + upd = queue.poll(pollQueueTime, TimeUnit.MILLISECONDS); + } } responseListener = out.getResponseListener(); } + Response response = responseListener.get(client.getIdleTimeout(), TimeUnit.MILLISECONDS); rspBody = responseListener.getInputStream(); @@ -348,6 +359,7 @@ private void addRunner() { @Override public NamedList request(final SolrRequest request, String collection) throws SolrServerException, IOException { + waitingForFinish = false; if (!(request instanceof UpdateRequest)) { request.setBasePath(basePath); return client.request(request, collection); @@ -367,6 +379,7 @@ public NamedList request(final SolrRequest request, String collection } else { if ((req.getDocuments() == null || req.getDocuments().isEmpty())) { blockUntilFinished(); + waitingForFinish = false; return client.request(request, collection); } } @@ -468,7 +481,7 @@ public synchronized void blockUntilFinished() throws IOException { int lastQueueSize = -1; synchronized (runners) { - + waitingForFinish = true; // NOTE: if the executor is shut down, runners may never become empty (a scheduled task may never be run, // which means it would never remove itself from the runners list. This is why we don't wait forever // and periodically check if the scheduler is shutting down. @@ -538,6 +551,7 @@ private void waitForEmptyQueue() throws IOException { long lastStallTime = -1; int lastQueueSize = -1; while (!queue.isEmpty()) { + if (scheduler.isTerminated()) { log.warn("The task queue still has elements but the update scheduler {} is terminated. 
Can't process any more tasks. Queue size: {}, Runners: {}. Current thread Interrupted? {}" , scheduler, queue.size(), runners.size(), threadInterrupted); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java index 768cb1fae80..3609daa291d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java @@ -16,6 +16,10 @@ */ package org.apache.solr.client.solrj.impl; +import static org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteExecutionException; +import static org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException; +import static org.apache.solr.common.util.Utils.getObjectByPath; + import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.IOException; @@ -25,6 +29,8 @@ import java.net.ConnectException; import java.net.MalformedURLException; import java.net.URL; +import java.net.URLDecoder; +import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Arrays; @@ -39,11 +45,8 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Phaser; -import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; - import org.apache.commons.io.IOUtils; import org.apache.http.HttpStatus; import org.apache.http.entity.ContentType; @@ -56,10 +59,10 @@ import org.apache.solr.client.solrj.request.RequestWriter; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.client.solrj.request.V2Request; +import org.apache.solr.client.solrj.util.AsyncListener; import org.apache.solr.client.solrj.util.Cancellable; import org.apache.solr.client.solrj.util.ClientUtils; import org.apache.solr.client.solrj.util.Constants; -import org.apache.solr.client.solrj.util.AsyncListener; import org.apache.solr.common.SolrException; import org.apache.solr.common.StringUtils; import org.apache.solr.common.params.CommonParams; @@ -71,7 +74,6 @@ import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.ObjectReleaseTracker; import org.apache.solr.common.util.SolrNamedThreadFactory; -import org.apache.solr.common.util.Utils; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.HttpClientTransport; import org.eclipse.jetty.client.ProtocolHandlers; @@ -83,7 +85,6 @@ import org.eclipse.jetty.client.util.InputStreamContentProvider; import org.eclipse.jetty.client.util.InputStreamResponseListener; import org.eclipse.jetty.client.util.MultiPartContentProvider; -import org.eclipse.jetty.client.util.OutputStreamContentProvider; import org.eclipse.jetty.client.util.StringContentProvider; import org.eclipse.jetty.http.HttpField; import org.eclipse.jetty.http.HttpFields; @@ -91,57 +92,91 @@ import org.eclipse.jetty.http.HttpMethod; import org.eclipse.jetty.http2.client.HTTP2Client; import org.eclipse.jetty.http2.client.http.HttpClientTransportOverHTTP2; +import org.eclipse.jetty.io.ByteBufferPool; import org.eclipse.jetty.util.BlockingArrayQueue; import org.eclipse.jetty.util.Fields; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException; -import static 
org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteExecutionException; -import static org.apache.solr.common.util.Utils.getObjectByPath; - /** * Difference between this {@link Http2SolrClient} and {@link HttpSolrClient}: + * *
 * <ul>
- *   <li>{@link Http2SolrClient} sends requests in HTTP/2</li>
- *   <li>{@link Http2SolrClient} can point to multiple urls</li>
- *   <li>{@link Http2SolrClient} does not expose its internal httpClient like {@link HttpSolrClient#getHttpClient()},
- * sharing connection pools should be done by {@link Http2SolrClient.Builder#withHttpClient(Http2SolrClient)}</li>
+ *   <li>{@link Http2SolrClient} sends requests in HTTP/2
+ *   <li>{@link Http2SolrClient} can point to multiple urls
+ *   <li>{@link Http2SolrClient} does not expose its internal httpClient like {@link
+ *       HttpSolrClient#getHttpClient()}, sharing connection pools should be done by {@link
+ *       Http2SolrClient.Builder#withHttpClient(Http2SolrClient)}
 * </ul>
+ * * @lucene.experimental */ public class Http2SolrClient extends SolrClient { public static final String REQ_PRINCIPAL_KEY = "solr-req-principal"; - - private static volatile SSLConfig defaultSSLConfig; + public static final String SOLR = "/solr"; + public static final String API = "/api"; + private static final String DESTINATION_NODE_IS_NOT_PROVIDED = "Destination node is not provided!"; + private static final String GET_CAN_T_SEND_STREAMS = "GET can't send streams!"; + private static final String SOLR_V_2_REAL_PATH = "solr.v2RealPath"; + private static final String V_2 = "/____v2"; + private static final String UNSUPPORTED_METHOD = "Unsupported method: "; + public static final String STREAM = "stream"; + private static final String EXPECTED_MIME_TYPE = "Expected mime type "; + private static final String BUT_GOT = " but got "; + private static final String COULD_NOT_PARSE_RESPONSE_WITH_ENCODING = + "Could not parse response with encoding "; + private static final String EXCEPTION_WITH_ERR_OBJECT = "ExceptionWithErrObject"; + public static final String ERROR = "error"; + private static final String MSG = "msg"; + public static final String TRACE = "trace"; + public static final String REQUEST = "request: "; + private static final String JAVAX_NET_SSL_KEY_STORE = "javax.net.ssl.keyStore"; + private static final String JAVAX_NET_SSL_KEY_STORE_PASSWORD = "javax.net.ssl.keyStorePassword"; + private static final String JAVAX_NET_SSL_KEY_STORE_TYPE = "javax.net.ssl.keyStoreType"; + private static final String JAVAX_NET_SSL_TRUST_STORE = "javax.net.ssl.trustStore"; + private static final String JAVAX_NET_SSL_TRUST_STORE_PASSWORD = + "javax.net.ssl.trustStorePassword"; + private static final String JAVAX_NET_SSL_TRUST_STORE_TYPE = "javax.net.ssl.trustStoreType"; + private static final String SOLR_JETTY_SSL_VERIFY_CLIENT_HOST_NAME = + "solr.jetty.ssl.verifyClientHostName"; + + private static final String STREAM_TAG = ""; + private static final String CLOSE_STREAM_TAG = ""; + public static final String UPDATE = "update"; + private static final String AUTHORIZATION = "Authorization"; + private static final String BASIC = "Basic "; + + private static SSLConfig defaultSSLConfig; private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String AGENT = "Solr[" + Http2SolrClient.class.getName() + "] 2.0"; private static final Charset FALLBACK_CHARSET = StandardCharsets.UTF_8; private static final String DEFAULT_PATH = "/select"; - private static final List errPath = Arrays.asList("metadata", "error-class"); + private static final String METADATA = "metadata"; + private static final List errPath = Arrays.asList(METADATA, "error-class"); - private HttpClient httpClient; - private volatile Set queryParams = Collections.emptySet(); - private int idleTimeout; + private final HttpClient httpClient; + private final boolean directBuffers; + private Set queryParams = Collections.emptySet(); + private final int idleTimeout; private ResponseParser parser = new BinaryResponseParser(); - private volatile RequestWriter requestWriter = new BinaryRequestWriter(); - private List listenerFactory = new LinkedList<>(); - private AsyncTracker asyncTracker = new AsyncTracker(); - /** - * The URL of the Solr server. - */ + private RequestWriter requestWriter = new BinaryRequestWriter(); + private final Collection listenerFactory = new LinkedList<>(); + private final AsyncTracker asyncTracker = new AsyncTracker(100); + /** The URL of the Solr server. 
*/ private String serverBaseUrl; + private boolean closeClient; private ExecutorService executor; private boolean shutdownExecutor; private final String basicAuthAuthorizationStr; + private ByteBufferPool bufferPool; protected Http2SolrClient(String serverBaseUrl, Builder builder) { - if (serverBaseUrl != null) { + if (serverBaseUrl != null) { if (!serverBaseUrl.equals("/") && serverBaseUrl.endsWith("/")) { serverBaseUrl = serverBaseUrl.substring(0, serverBaseUrl.length() - 1); } @@ -162,10 +197,13 @@ protected Http2SolrClient(String serverBaseUrl, Builder builder) { httpClient = builder.http2SolrClient.httpClient; } if (builder.basicAuthUser != null && builder.basicAuthPassword != null) { - basicAuthAuthorizationStr = basicAuthCredentialsToAuthorizationString(builder.basicAuthUser, builder.basicAuthPassword); + basicAuthAuthorizationStr = + basicAuthCredentialsToAuthorizationString( + builder.basicAuthUser, builder.basicAuthPassword); } else { basicAuthAuthorizationStr = null; } + this.directBuffers = builder.directBuffers; assert ObjectReleaseTracker.track(this); } @@ -186,11 +224,14 @@ ProtocolHandlers getProtocolHandlers() { private HttpClient createHttpClient(Builder builder) { HttpClient httpClient; - BlockingArrayQueue queue = new BlockingArrayQueue<>(256, 256); executor = builder.executor; if (executor == null) { - this.executor = new ExecutorUtil.MDCAwareThreadPoolExecutor(32, - 256, 60, TimeUnit.SECONDS, queue, new SolrNamedThreadFactory("h2sc")); + int minThreads = 8; + int capacity = Math.max(minThreads, 8) * 128; + BlockingArrayQueue queue = new BlockingArrayQueue<>(capacity, capacity); + this.executor = + new ExecutorUtil.MDCAwareThreadPoolExecutor( + 8, 256, 60, TimeUnit.SECONDS, queue, new SolrNamedThreadFactory("h2sc")); shutdownExecutor = true; } else { shutdownExecutor = false; @@ -200,7 +241,9 @@ private HttpClient createHttpClient(Builder builder) { boolean ssl; if (builder.sslConfig == null) { sslContextFactory = getDefaultSslContextFactory(); - ssl = sslContextFactory.getTrustStore() != null || sslContextFactory.getTrustStorePath() != null; + ssl = + sslContextFactory.getTrustStore() != null + || sslContextFactory.getTrustStorePath() != null; } else { sslContextFactory = builder.sslConfig.createClientContextFactory(); ssl = true; @@ -210,13 +253,15 @@ private HttpClient createHttpClient(Builder builder) { HttpClientTransport transport; if (builder.useHttp1_1 || sslOnJava8OrLower) { if (sslOnJava8OrLower && !builder.useHttp1_1) { - log.warn("Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2"); + log.warn( + "Create Http2SolrClient with HTTP/1.1 transport since Java 8 or lower versions does not support SSL + HTTP/2"); } else { log.debug("Create Http2SolrClient with HTTP/1.1 transport"); } transport = new HttpClientTransportOverHTTP(2); httpClient = new HttpClient(transport, sslContextFactory); - if (builder.maxConnectionsPerHost != null) httpClient.setMaxConnectionsPerDestination(builder.maxConnectionsPerHost); + if (builder.maxConnectionsPerHost != null) + httpClient.setMaxConnectionsPerDestination(builder.maxConnectionsPerHost); } else { log.debug("Create Http2SolrClient with HTTP/2 transport"); HTTP2Client http2client = new HTTP2Client(); @@ -229,7 +274,9 @@ private HttpClient createHttpClient(Builder builder) { httpClient.setStrictEventOrdering(false); httpClient.setConnectBlocking(true); httpClient.setFollowRedirects(false); - 
httpClient.setMaxRequestsQueuedPerDestination(asyncTracker.getMaxRequestsQueuedPerDestination()); + httpClient.setMaxRequestsQueuedPerDestination(1024); + httpClient.setRequestBufferSize(16384); + httpClient.setResponseBufferSize(16384); httpClient.setUserAgentField(new HttpField(HttpHeader.USER_AGENT, AGENT)); httpClient.setIdleTimeout(idleTimeout); @@ -240,12 +287,12 @@ private HttpClient createHttpClient(Builder builder) { throw new RuntimeException(e); } + bufferPool = httpClient.getByteBufferPool(); + return httpClient; } public void close() { - // we wait for async requests, so far devs don't want to give sugar for this - asyncTracker.waitForComplete(); if (closeClient) { try { httpClient.setStopTimeout(1000); @@ -261,23 +308,27 @@ public void close() { assert ObjectReleaseTracker.release(this); } - public boolean isV2ApiRequest(final SolrRequest request) { - return request instanceof V2Request || request.getPath().contains("/____v2"); + public static boolean isV2ApiRequest(final SolrRequest request) { + return request instanceof V2Request || request.getPath().contains(V_2); } public long getIdleTimeout() { return idleTimeout; } - public static class OutStream implements Closeable{ + public static class OutStream implements Closeable { private final String origCollection; private final ModifiableSolrParams origParams; - private final OutputStreamContentProvider outProvider; + private final SolrOutputStreamContentProvider outProvider; private final InputStreamResponseListener responseListener; private final boolean isXml; - public OutStream(String origCollection, ModifiableSolrParams origParams, - OutputStreamContentProvider outProvider, InputStreamResponseListener responseListener, boolean isXml) { + public OutStream( + String origCollection, + ModifiableSolrParams origParams, + SolrOutputStreamContentProvider outProvider, + InputStreamResponseListener responseListener, + boolean isXml) { this.origCollection = origCollection; this.origParams = origParams; this.outProvider = outProvider; @@ -287,7 +338,8 @@ public OutStream(String origCollection, ModifiableSolrParams origParams, boolean belongToThisStream(SolrRequest solrRequest, String collection) { ModifiableSolrParams solrParams = new ModifiableSolrParams(solrRequest.getParams()); - if (!origParams.toNamedList().equals(solrParams.toNamedList()) || !StringUtils.equals(origCollection, collection)) { + if (!origParams.toNamedList().equals(solrParams.toNamedList()) + || !StringUtils.equals(origCollection, collection)) { return false; } return true; @@ -304,20 +356,19 @@ public void flush() throws IOException { @Override public void close() throws IOException { if (isXml) { - write("".getBytes(FALLBACK_CHARSET)); + write(CLOSE_STREAM_TAG.getBytes(FALLBACK_CHARSET)); } this.outProvider.getOutputStream().close(); } - //TODO this class should be hidden + // TODO this class should be hidden public InputStreamResponseListener getResponseListener() { return responseListener; } } - public OutStream initOutStream(String baseUrl, - UpdateRequest updateRequest, - String collection) throws IOException { + public OutStream initOutStream(String baseUrl, UpdateRequest updateRequest, String collection) + throws IOException { String contentType = requestWriter.getUpdateContentType(); final ModifiableSolrParams origParams = new ModifiableSolrParams(updateRequest.getParams()); @@ -327,28 +378,30 @@ public OutStream initOutStream(String baseUrl, requestParams.set(CommonParams.WT, parser.getWriterType()); requestParams.set(CommonParams.VERSION, 
parser.getVersion()); - String basePath = baseUrl; - if (collection != null) - basePath += "/" + collection; - if (!basePath.endsWith("/")) - basePath += "/"; - - OutputStreamContentProvider provider = new OutputStreamContentProvider(); - Request postRequest = httpClient - .newRequest(basePath + "update" - + requestParams.toQueryString()) - .method(HttpMethod.POST) - .header(HttpHeader.CONTENT_TYPE, contentType) - .content(provider); + StringBuilder basePath = new StringBuilder(baseUrl); + if (collection != null) basePath.append('/').append(collection); + if (!(basePath.length() > 0 && basePath.charAt(basePath.length() - 1) == '/')) + basePath.append('/'); + + ByteBuffer buffer = bufferPool.acquire(httpClient.getRequestBufferSize(), directBuffers); + buffer.clear(); + SolrOutputStreamContentProvider provider = new SolrOutputStreamContentProvider(buffer); + basePath.append(UPDATE).append(requestParams.toQueryString()); + Request postRequest = + httpClient + .newRequest(basePath.toString()) + .method(HttpMethod.POST) + .header(HttpHeader.CONTENT_TYPE, contentType) + .content(provider) + .onResponseBegin(result -> bufferPool.release(buffer)); decorateRequest(postRequest, updateRequest); InputStreamResponseListener responseListener = new InputStreamResponseListener(); postRequest.send(responseListener); boolean isXml = ClientUtils.TEXT_XML.equals(requestWriter.getUpdateContentType()); - OutStream outStream = new OutStream(collection, origParams, provider, responseListener, - isXml); + OutStream outStream = new OutStream(collection, origParams, provider, responseListener, isXml); if (isXml) { - outStream.write("".getBytes(FALLBACK_CHARSET)); + outStream.write(Http2SolrClient.STREAM_TAG.getBytes(FALLBACK_CHARSET)); } return outStream; } @@ -367,10 +420,9 @@ public void send(OutStream outStream, SolrRequest req, String collection) thr fmt = ""; } if (fmt != null) { - byte[] content = String.format(Locale.ROOT, - fmt, params.getBool(UpdateParams.WAIT_SEARCHER, false) - + "") - .getBytes(FALLBACK_CHARSET); + byte[] content = + String.format(Locale.ROOT, fmt, params.getBool(UpdateParams.WAIT_SEARCHER, false)) + .getBytes(FALLBACK_CHARSET); outStream.write(content); } } @@ -381,7 +433,10 @@ public void send(OutStream outStream, SolrRequest req, String collection) thr private static final Exception CANCELLED_EXCEPTION = new Exception(); private static final Cancellable FAILED_MAKING_REQUEST_CANCELLABLE = () -> {}; - public Cancellable asyncRequest(SolrRequest solrRequest, String collection, AsyncListener> asyncListener) { + public Cancellable asyncRequest( + SolrRequest solrRequest, + String collection, + AsyncListener> asyncListener) { Request req; try { req = makeRequest(solrRequest, collection); @@ -389,36 +444,45 @@ public Cancellable asyncRequest(SolrRequest solrRequest, String collection, A asyncListener.onFailure(e); return FAILED_MAKING_REQUEST_CANCELLABLE; } - final ResponseParser parser = solrRequest.getResponseParser() == null - ? this.parser: solrRequest.getResponseParser(); - req.onRequestQueued(asyncTracker.queuedListener) - .onComplete(asyncTracker.completeListener) - .send(new InputStreamResponseListener() { + final ResponseParser parser = + solrRequest.getResponseParser() == null ? 
this.parser : solrRequest.getResponseParser(); + asyncTracker.register(); + req.send( + new InputStreamResponseListener() { @Override public void onHeaders(Response response) { super.onHeaders(response); InputStreamResponseListener listener = this; - executor.execute(() -> { - InputStream is = listener.getInputStream(); - assert ObjectReleaseTracker.track(is); - try { - NamedList body = processErrorsAndResponse(solrRequest, parser, response, is); - asyncListener.onSuccess(body); - } catch (RemoteSolrException e) { - if (SolrException.getRootCause(e) != CANCELLED_EXCEPTION) { - asyncListener.onFailure(e); - } - } catch (SolrServerException e) { - asyncListener.onFailure(e); - } - }); + executor.execute( + () -> { + try { + InputStream is = listener.getInputStream(); + try { + NamedList body = + processErrorsAndResponse(solrRequest, parser, response, is); + asyncListener.onSuccess(body); + } catch (RemoteSolrException e) { + if (SolrException.getRootCause(e) != CANCELLED_EXCEPTION) { + asyncListener.onFailure(e); + } + } catch (SolrServerException e) { + asyncListener.onFailure(e); + } + } finally { + asyncTracker.arrive(); + } + }); } @Override public void onFailure(Response response, Throwable failure) { super.onFailure(response, failure); - if (failure != CANCELLED_EXCEPTION) { - asyncListener.onFailure(new SolrServerException(failure.getMessage(), failure)); + try { + if (failure != CANCELLED_EXCEPTION) { + asyncListener.onFailure(new SolrServerException(failure.getMessage(), failure)); + } + } finally { + asyncTracker.arrive(); } } }); @@ -426,17 +490,17 @@ public void onFailure(Response response, Throwable failure) { } @Override - public NamedList request(SolrRequest solrRequest, String collection) throws SolrServerException, IOException { + public NamedList request(SolrRequest solrRequest, String collection) + throws SolrServerException, IOException { Request req = makeRequest(solrRequest, collection); - final ResponseParser parser = solrRequest.getResponseParser() == null - ? this.parser: solrRequest.getResponseParser(); + final ResponseParser parser = + solrRequest.getResponseParser() == null ? this.parser : solrRequest.getResponseParser(); try { InputStreamResponseListener listener = new InputStreamResponseListener(); req.send(listener); Response response = listener.get(idleTimeout, TimeUnit.MILLISECONDS); InputStream is = listener.getInputStream(); - assert ObjectReleaseTracker.track(is); return processErrorsAndResponse(solrRequest, parser, response, is); } catch (InterruptedException e) { @@ -454,41 +518,45 @@ public NamedList request(SolrRequest solrRequest, String collection) throw (SolrServerException) cause; } else if (cause instanceof IOException) { throw new SolrServerException( - "IOException occured when talking to server at: " + getBaseURL(), cause); + "IOException occured when talking to server at: " + serverBaseUrl, cause); } throw new SolrServerException(cause.getMessage(), cause); } } - private NamedList processErrorsAndResponse(SolrRequest solrRequest, - ResponseParser parser, Response response, InputStream is) throws SolrServerException { + private NamedList processErrorsAndResponse( + SolrRequest solrRequest, ResponseParser parser, Response response, InputStream is) + throws SolrServerException { ContentType contentType = getContentType(response); String mimeType = null; String encoding = null; if (contentType != null) { mimeType = contentType.getMimeType(); - encoding = contentType.getCharset() != null? 
contentType.getCharset().name() : null; + encoding = contentType.getCharset() != null ? contentType.getCharset().name() : null; } - return processErrorsAndResponse(response, parser, is, mimeType, encoding, isV2ApiRequest(solrRequest)); + return processErrorsAndResponse( + response, parser, is, mimeType, encoding, isV2ApiRequest(solrRequest)); } - private ContentType getContentType(Response response) { + private static ContentType getContentType(Response response) { String contentType = response.getHeaders().get(HttpHeader.CONTENT_TYPE); - return StringUtils.isEmpty(contentType)? null : ContentType.parse(contentType); + return StringUtils.isEmpty(contentType) ? null : ContentType.parse(contentType); } private void setBasicAuthHeader(SolrRequest solrRequest, Request req) { if (solrRequest.getBasicAuthUser() != null && solrRequest.getBasicAuthPassword() != null) { - String encoded = basicAuthCredentialsToAuthorizationString(solrRequest.getBasicAuthUser(), solrRequest.getBasicAuthPassword()); - req.header("Authorization", encoded); + String encoded = + basicAuthCredentialsToAuthorizationString( + solrRequest.getBasicAuthUser(), solrRequest.getBasicAuthPassword()); + req.header(AUTHORIZATION, encoded); } else if (basicAuthAuthorizationStr != null) { - req.header("Authorization", basicAuthAuthorizationStr); + req.header(AUTHORIZATION, basicAuthAuthorizationStr); } } - private String basicAuthCredentialsToAuthorizationString(String user, String pass) { - String userPass = user + ":" + pass; - return "Basic " + Base64.getEncoder().encodeToString(userPass.getBytes(FALLBACK_CHARSET)); + private static String basicAuthCredentialsToAuthorizationString(String user, String pass) { + String userPass = user + ':' + pass; + return BASIC + Base64.getEncoder().encodeToString(userPass.getBytes(FALLBACK_CHARSET)); } private Request makeRequest(SolrRequest solrRequest, String collection) @@ -520,25 +588,29 @@ private void decorateRequest(Request req, SolrRequest solrRequest) { } } } - - private String changeV2RequestEndpoint(String basePath) throws MalformedURLException { + + private static String changeV2RequestEndpoint(String basePath) throws MalformedURLException { URL oldURL = new URL(basePath); - String newPath = oldURL.getPath().replaceFirst("/solr", "/api"); + String newPath = oldURL.getPath().replaceFirst(SOLR, API); return new URL(oldURL.getProtocol(), oldURL.getHost(), oldURL.getPort(), newPath).toString(); } + ; - private Request createRequest(SolrRequest solrRequest, String collection) throws IOException, SolrServerException { + @SuppressWarnings("deprecation") + private Request createRequest(SolrRequest solrRequest, String collection) + throws IOException, SolrServerException { if (solrRequest.getBasePath() == null && serverBaseUrl == null) - throw new IllegalArgumentException("Destination node is not provided!"); + throw new IllegalArgumentException(DESTINATION_NODE_IS_NOT_PROVIDED); if (solrRequest instanceof V2RequestSupport) { solrRequest = ((V2RequestSupport) solrRequest).getV2Request(); } SolrParams params = solrRequest.getParams(); RequestWriter.ContentWriter contentWriter = requestWriter.getContentWriter(solrRequest); - Collection streams = contentWriter == null ? requestWriter.getContentStreams(solrRequest) : null; + Collection streams = + contentWriter == null ? 
requestWriter.getContentStreams(solrRequest) : null; String path = requestWriter.getPath(solrRequest); - if (path == null || !path.startsWith("/")) { + if (path == null || !(!path.isEmpty() && path.charAt(0) == '/')) { path = DEFAULT_PATH; } @@ -555,33 +627,37 @@ private Request createRequest(SolrRequest solrRequest, String collection) thr wparams.set(CommonParams.VERSION, parser.getVersion()); } - //TODO add invariantParams support + // TODO add invariantParams support String basePath = solrRequest.getBasePath() == null ? serverBaseUrl : solrRequest.getBasePath(); - if (collection != null) - basePath += "/" + collection; + if (collection != null) basePath += '/' + collection; if (solrRequest instanceof V2Request) { - if (System.getProperty("solr.v2RealPath") == null) { + if (System.getProperty(SOLR_V_2_REAL_PATH) == null) { basePath = changeV2RequestEndpoint(basePath); } else { - basePath = serverBaseUrl + "/____v2"; + basePath = serverBaseUrl + V_2; } } if (SolrRequest.METHOD.GET == solrRequest.getMethod()) { if (streams != null || contentWriter != null) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "GET can't send streams!"); + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, GET_CAN_T_SEND_STREAMS); } - return httpClient.newRequest(basePath + path + wparams.toQueryString()).method(HttpMethod.GET); + return httpClient + .newRequest(basePath + path + wparams.toQueryString()) + .method(HttpMethod.GET); } if (SolrRequest.METHOD.DELETE == solrRequest.getMethod()) { - return httpClient.newRequest(basePath + path + wparams.toQueryString()).method(HttpMethod.DELETE); + return httpClient + .newRequest(basePath + path + wparams.toQueryString()) + .method(HttpMethod.DELETE); } - if (SolrRequest.METHOD.POST == solrRequest.getMethod() || SolrRequest.METHOD.PUT == solrRequest.getMethod()) { + if (SolrRequest.METHOD.POST == solrRequest.getMethod() + || SolrRequest.METHOD.PUT == solrRequest.getMethod()) { String url = basePath + path; boolean hasNullStreamName = false; @@ -591,24 +667,22 @@ private Request createRequest(SolrRequest solrRequest, String collection) thr boolean isMultipart = streams != null && streams.size() > 1 && !hasNullStreamName; - HttpMethod method = SolrRequest.METHOD.POST == solrRequest.getMethod() ? HttpMethod.POST : HttpMethod.PUT; + HttpMethod method = + SolrRequest.METHOD.POST == solrRequest.getMethod() ? 
HttpMethod.POST : HttpMethod.PUT; if (contentWriter != null) { - Request req = httpClient - .newRequest(url + wparams.toQueryString()) - .method(method); + Request req = httpClient.newRequest(url + wparams.toQueryString()).method(method); ByteArrayOutputStream baos = new ByteArrayOutputStream(); contentWriter.write(baos); - //TODO reduce memory usage - return req.content(new BytesContentProvider(contentWriter.getContentType(), baos.toByteArray())); + // TODO reduce memory usage + return req.content( + new BytesContentProvider(contentWriter.getContentType(), baos.toByteArray())); } else if (streams == null || isMultipart) { // send server list and request list as query string params ModifiableSolrParams queryParams = calculateQueryParams(this.queryParams, wparams); queryParams.add(calculateQueryParams(solrRequest.getQueryParams(), wparams)); - Request req = httpClient - .newRequest(url + queryParams.toQueryString()) - .method(method); + Request req = httpClient.newRequest(url + queryParams.toQueryString()).method(method); return fillContentStream(req, streams, wparams, isMultipart); } else { // It is has one stream, it is the post body, put the params in the URL @@ -616,16 +690,21 @@ private Request createRequest(SolrRequest solrRequest, String collection) thr return httpClient .newRequest(url + wparams.toQueryString()) .method(method) - .content(new InputStreamContentProvider(contentStream.getStream()), contentStream.getContentType()); + .content( + new InputStreamContentProvider(contentStream.getStream()), + contentStream.getContentType()); } } - throw new SolrServerException("Unsupported method: " + solrRequest.getMethod()); + throw new SolrServerException(UNSUPPORTED_METHOD + solrRequest.getMethod()); } - private Request fillContentStream(Request req, Collection streams, - ModifiableSolrParams wparams, - boolean isMultipart) throws IOException { + private static Request fillContentStream( + Request req, + Collection streams, + ModifiableSolrParams wparams, + boolean isMultipart) + throws IOException { if (isMultipart) { // multipart/form-data MultiPartContentProvider content = new MultiPartContentProvider(); @@ -651,7 +730,11 @@ private Request fillContentStream(Request req, Collection streams } HttpFields fields = new HttpFields(); fields.add(HttpHeader.CONTENT_TYPE, contentType); - content.addFilePart(name, contentStream.getName(), new InputStreamContentProvider(contentStream.getStream()), fields); + content.addFilePart( + name, + contentStream.getName(), + new InputStreamContentProvider(contentStream.getStream()), + fields); } } req.content(content); @@ -674,17 +757,18 @@ private Request fillContentStream(Request req, Collection streams return req; } - private boolean wantStream(final ResponseParser processor) { + private static boolean wantStream(final ResponseParser processor) { return processor == null || processor instanceof InputStreamResponseParser; } @SuppressWarnings({"unchecked", "rawtypes"}) - private NamedList processErrorsAndResponse(Response response, - final ResponseParser processor, - InputStream is, - String mimeType, - String encoding, - final boolean isV2Api) + private NamedList processErrorsAndResponse( + Response response, + final ResponseParser processor, + InputStream is, + String mimeType, + String encoding, + final boolean isV2Api) throws SolrServerException { boolean shouldClose = true; try { @@ -699,14 +783,16 @@ private NamedList processErrorsAndResponse(Response response, case HttpStatus.SC_MOVED_PERMANENTLY: case HttpStatus.SC_MOVED_TEMPORARILY: if 
(!httpClient.isFollowRedirects()) { - throw new SolrServerException("Server at " + getBaseURL() - + " sent back a redirect (" + httpStatus + ")."); + throw new SolrServerException( + "Server at " + serverBaseUrl + " sent back a redirect (" + httpStatus + ")."); } break; default: if (processor == null || mimeType == null) { - throw new RemoteSolrException(serverBaseUrl, httpStatus, "non ok status: " + httpStatus - + ", message:" + response.getReason(), + throw new RemoteSolrException( + serverBaseUrl, + httpStatus, + "non ok status: " + httpStatus + ", message:" + response.getReason(), null); } } @@ -714,7 +800,7 @@ private NamedList processErrorsAndResponse(Response response, if (wantStream(parser)) { // no processor specified, return raw stream NamedList rsp = new NamedList<>(); - rsp.add("stream", is); + rsp.add(STREAM, is); // Only case where stream should not be closed shouldClose = false; return rsp; @@ -722,15 +808,20 @@ private NamedList processErrorsAndResponse(Response response, String procCt = processor.getContentType(); if (procCt != null) { - String procMimeType = ContentType.parse(procCt).getMimeType().trim().toLowerCase(Locale.ROOT); + String procMimeType = + ContentType.parse(procCt).getMimeType().trim().toLowerCase(Locale.ROOT); if (!procMimeType.equals(mimeType)) { // unexpected mime type - String msg = "Expected mime type " + procMimeType + " but got " + mimeType + "."; - String exceptionEncoding = encoding != null? encoding : FALLBACK_CHARSET.name(); + String msg = EXPECTED_MIME_TYPE + procMimeType + BUT_GOT + mimeType + '.'; + String exceptionEncoding = encoding != null ? encoding : FALLBACK_CHARSET.name(); try { - msg = msg + " " + IOUtils.toString(is, exceptionEncoding); + msg = msg + ' ' + IOUtils.toString(is, exceptionEncoding); } catch (IOException e) { - throw new RemoteSolrException(serverBaseUrl, httpStatus, "Could not parse response with encoding " + exceptionEncoding, e); + throw new RemoteSolrException( + serverBaseUrl, + httpStatus, + COULD_NOT_PARSE_RESPONSE_WITH_ENCODING + exceptionEncoding, + e); } throw new RemoteSolrException(serverBaseUrl, httpStatus, msg, null); } @@ -743,41 +834,46 @@ private NamedList processErrorsAndResponse(Response response, throw new RemoteSolrException(serverBaseUrl, httpStatus, e.getMessage(), e); } - Object error = rsp == null ? null : rsp.get("error"); - if (error != null && (String.valueOf(getObjectByPath(error, true, errPath)).endsWith("ExceptionWithErrObject"))) { + Object error = rsp == null ? 
null : rsp.get(ERROR); + if (error != null + && (String.valueOf(getObjectByPath(error, true, errPath)) + .endsWith(EXCEPTION_WITH_ERR_OBJECT))) { throw RemoteExecutionException.create(serverBaseUrl, rsp); } if (httpStatus != HttpStatus.SC_OK && !isV2Api) { + NamedList metadata = null; String reason = null; try { if (error != null) { - reason = (String) Utils.getObjectByPath(error, false, Collections.singletonList("msg")); - if(reason == null) { - reason = (String) Utils.getObjectByPath(error, false, Collections.singletonList("trace")); + reason = (String) getObjectByPath(error, false, Collections.singletonList(MSG)); + if (reason == null) { + reason = (String) getObjectByPath(error, false, Collections.singletonList(TRACE)); } - Object metadataObj = Utils.getObjectByPath(error, false, Collections.singletonList("metadata")); - if (metadataObj instanceof NamedList) { + Object metadataObj = getObjectByPath(error, false, Collections.singletonList(METADATA)); + if (metadataObj instanceof NamedList) { metadata = (NamedList) metadataObj; } else if (metadataObj instanceof List) { // NamedList parsed as List convert to NamedList again List list = (List) metadataObj; - metadata = new NamedList<>(list.size()/2); - for (int i = 0; i < list.size(); i+=2) { - metadata.add((String)list.get(i), (String) list.get(i+1)); + final int size = list.size(); + metadata = new NamedList<>(size / 2); + for (int i = 0; i < size; i += 2) { + metadata.add((String) list.get(i), (String) list.get(i + 1)); } } else if (metadataObj instanceof Map) { metadata = new NamedList((Map) metadataObj); } } - } catch (Exception ex) {} + } catch (Exception ex) { + } if (reason == null) { - StringBuilder msg = new StringBuilder(); + StringBuilder msg = new StringBuilder(16); msg.append(response.getReason()) .append("\n\n") - .append("request: ") + .append(REQUEST) .append(response.getRequest().getMethod()); - reason = java.net.URLDecoder.decode(msg.toString(), FALLBACK_CHARSET); + reason = URLDecoder.decode(msg.toString(), FALLBACK_CHARSET); } RemoteSolrException rss = new RemoteSolrException(serverBaseUrl, httpStatus, reason, null); if (metadata != null) rss.setMetadata(metadata); @@ -786,11 +882,18 @@ private NamedList processErrorsAndResponse(Response response, return rsp; } finally { if (shouldClose) { - try { - is.close(); - assert ObjectReleaseTracker.release(is); - } catch (IOException e) { - // quitely + if (is != null) { + try { + // make sure the stream is full read + is.skip(is.available()); + while (is.read() != -1) {} + } catch (UnsupportedOperationException e) { + // nothing to do then + } catch (IOException e) { + // quiet + } finally { + org.apache.solr.common.util.IOUtils.closeQuietly(is); + } } } } @@ -808,47 +911,11 @@ public String getBaseURL() { return serverBaseUrl; } - private static class AsyncTracker { - private static final int MAX_OUTSTANDING_REQUESTS = 1000; - - // wait for async requests - private final Phaser phaser; - // maximum outstanding requests left - private final Semaphore available; - private final Request.QueuedListener queuedListener; - private final Response.CompleteListener completeListener; - - AsyncTracker() { - // TODO: what about shared instances? 
- phaser = new Phaser(1); - available = new Semaphore(MAX_OUTSTANDING_REQUESTS, false); - queuedListener = request -> { - phaser.register(); - try { - available.acquire(); - } catch (InterruptedException ignored) { - - } - }; - completeListener = result -> { - phaser.arriveAndDeregister(); - available.release(); - }; - } - - int getMaxRequestsQueuedPerDestination() { - // comfortably above max outstanding requests - return MAX_OUTSTANDING_REQUESTS * 3; - } - - public void waitForComplete() { - phaser.arriveAndAwaitAdvance(); - phaser.arriveAndDeregister(); - } - } - public static class Builder { + public static final String + INVALID_AUTHENTICATION_CREDENTIALS_EITHER_BOTH_USERNAME_AND_PASSWORD_OR_NONE_MUST_BE_PROVIDED = + "Invalid Authentication credentials. Either both username and password or none must be provided"; private Http2SolrClient http2SolrClient; private SSLConfig sslConfig = defaultSSLConfig; private Integer idleTimeout; @@ -859,10 +926,9 @@ public static class Builder { private boolean useHttp1_1 = Boolean.getBoolean("solr.http1"); protected String baseSolrUrl; private ExecutorService executor; + private boolean directBuffers = true; - public Builder() { - - } + public Builder() {} public Builder(String baseSolrUrl) { this.baseSolrUrl = baseSolrUrl; @@ -883,23 +949,28 @@ public Http2SolrClient build() { return client; } - private void httpClientBuilderSetup(Http2SolrClient client) { - String factoryClassName = System.getProperty(HttpClientUtil.SYS_PROP_HTTP_CLIENT_BUILDER_FACTORY); + private static void httpClientBuilderSetup(Http2SolrClient client) { + String factoryClassName = + System.getProperty(HttpClientUtil.SYS_PROP_HTTP_CLIENT_BUILDER_FACTORY); if (factoryClassName != null) { - log.debug ("Using Http Builder Factory: {}", factoryClassName); + log.debug("Using Http Builder Factory: {}", factoryClassName); HttpClientBuilderFactory factory; try { - factory = (HttpClientBuilderFactory)Class.forName(factoryClassName).getConstructor().newInstance(); - } catch (InstantiationException | IllegalAccessException | ClassNotFoundException | InvocationTargetException | NoSuchMethodException e) { + factory = + (HttpClientBuilderFactory) + Class.forName(factoryClassName).getConstructor().newInstance(); + } catch (InstantiationException + | IllegalAccessException + | ClassNotFoundException + | InvocationTargetException + | NoSuchMethodException e) { throw new RuntimeException("Unable to instantiate " + Http2SolrClient.class.getName(), e); } factory.setup(client); } } - /** - * Reuse {@code httpClient} connections pool - */ + /** Reuse {@code httpClient} connections pool */ public Builder withHttpClient(Http2SolrClient httpClient) { this.http2SolrClient = httpClient; return this; @@ -918,7 +989,8 @@ public Builder withSSLConfig(SSLConfig sslConfig) { public Builder withBasicAuthCredentials(String user, String pass) { if (user != null || pass != null) { if (user == null || pass == null) { - throw new IllegalStateException("Invalid Authentication credentials. 
Either both username and password or none must be provided"); + throw new IllegalStateException( + INVALID_AUTHENTICATION_CREDENTIALS_EITHER_BOTH_USERNAME_AND_PASSWORD_OR_NONE_MUST_BE_PROVIDED); } } this.basicAuthUser = user; @@ -927,7 +999,8 @@ public Builder withBasicAuthCredentials(String user, String pass) { } /** - * Set maxConnectionsPerHost for http1 connections, maximum number http2 connections is limited by 4 + * Set maxConnectionsPerHost for http1 connections, maximum number http2 connections is limited + * by 4 */ public Builder maxConnectionsPerHost(int max) { this.maxConnectionsPerHost = max; @@ -948,26 +1021,31 @@ public Builder connectionTimeout(int connectionTimeOut) { this.connectionTimeout = connectionTimeOut; return this; } + + public Builder directBuffers(boolean directBuffers) { + this.directBuffers = directBuffers; + return this; + } + } public Set getQueryParams() { - return queryParams; + return Collections.unmodifiableSet(queryParams); } /** * Expert Method * - * @param queryParams set of param keys to only send via the query string - * Note that the param will be sent as a query string if the key is part - * of this Set or the SolrRequest's query params. - * @see org.apache.solr.client.solrj.SolrRequest#getQueryParams + * @param queryParams set of param keys to only send via the query string Note that the param will + * be sent as a query string if the key is part of this Set or the SolrRequest's query params. + * @see SolrRequest#getQueryParams */ public void setQueryParams(Set queryParams) { this.queryParams = queryParams; } - private ModifiableSolrParams calculateQueryParams(Set queryParamNames, - ModifiableSolrParams wparams) { + private static ModifiableSolrParams calculateQueryParams( + Set queryParamNames, ModifiableSolrParams wparams) { ModifiableSolrParams queryModParams = new ModifiableSolrParams(); if (queryParamNames != null) { for (String param : queryParamNames) { @@ -1010,32 +1088,28 @@ static SslContextFactory.Client getDefaultSslContextFactory() { SslContextFactory.Client sslContextFactory = new SslContextFactory.Client(!sslCheckPeerName); - if (null != System.getProperty("javax.net.ssl.keyStore")) { - sslContextFactory.setKeyStorePath - (System.getProperty("javax.net.ssl.keyStore")); + if (null != System.getProperty(JAVAX_NET_SSL_KEY_STORE)) { + sslContextFactory.setKeyStorePath(System.getProperty(JAVAX_NET_SSL_KEY_STORE)); } - if (null != System.getProperty("javax.net.ssl.keyStorePassword")) { - sslContextFactory.setKeyStorePassword - (System.getProperty("javax.net.ssl.keyStorePassword")); + if (null != System.getProperty(JAVAX_NET_SSL_KEY_STORE_PASSWORD)) { + sslContextFactory.setKeyStorePassword(System.getProperty(JAVAX_NET_SSL_KEY_STORE_PASSWORD)); } - if (null != System.getProperty("javax.net.ssl.keyStoreType")) { - sslContextFactory.setKeyStoreType - (System.getProperty("javax.net.ssl.keyStoreType")); + if (null != System.getProperty(JAVAX_NET_SSL_KEY_STORE_TYPE)) { + sslContextFactory.setKeyStoreType(System.getProperty(JAVAX_NET_SSL_KEY_STORE_TYPE)); } - if (null != System.getProperty("javax.net.ssl.trustStore")) { - sslContextFactory.setTrustStorePath - (System.getProperty("javax.net.ssl.trustStore")); + if (null != System.getProperty(JAVAX_NET_SSL_TRUST_STORE)) { + sslContextFactory.setTrustStorePath(System.getProperty(JAVAX_NET_SSL_TRUST_STORE)); } - if (null != System.getProperty("javax.net.ssl.trustStorePassword")) { - sslContextFactory.setTrustStorePassword - (System.getProperty("javax.net.ssl.trustStorePassword")); + if (null != 
System.getProperty(JAVAX_NET_SSL_TRUST_STORE_PASSWORD)) { + sslContextFactory.setTrustStorePassword( + System.getProperty(JAVAX_NET_SSL_TRUST_STORE_PASSWORD)); } - if (null != System.getProperty("javax.net.ssl.trustStoreType")) { - sslContextFactory.setTrustStoreType - (System.getProperty("javax.net.ssl.trustStoreType")); + if (null != System.getProperty(JAVAX_NET_SSL_TRUST_STORE_TYPE)) { + sslContextFactory.setTrustStoreType(System.getProperty(JAVAX_NET_SSL_TRUST_STORE_TYPE)); } - sslContextFactory.setEndpointIdentificationAlgorithm(System.getProperty("solr.jetty.ssl.verifyClientHostName")); + sslContextFactory.setEndpointIdentificationAlgorithm( + System.getProperty(SOLR_JETTY_SSL_VERIFY_CLIENT_HOST_NAME)); return sslContextFactory; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrOutputStreamContentProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrOutputStreamContentProvider.java new file mode 100644 index 00000000000..2a26749a52d --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrOutputStreamContentProvider.java @@ -0,0 +1,124 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.solrj.impl; + +import java.io.Closeable; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.util.Iterator; +import org.apache.solr.common.util.BufferedBytesOutputStream; +import org.eclipse.jetty.client.AsyncContentProvider; +import org.eclipse.jetty.client.api.ContentProvider; +import org.eclipse.jetty.client.api.Request; +import org.eclipse.jetty.client.api.Response; +import org.eclipse.jetty.client.util.DeferredContentProvider; +import org.eclipse.jetty.util.Callback; + +/** + * A {@link ContentProvider} that provides content asynchronously through an {@link OutputStream} similar to {@link + * DeferredContentProvider}. + *
+ * <p>{@link SolrOutputStreamContentProvider} can only be used in conjunction with {@link
+ * Request#send(Response.CompleteListener)} (and not with its blocking counterpart {@link
+ * Request#send()}) because it provides content asynchronously.
+ *
+ * <p>The deferred content is provided once by writing to the {@link #getOutputStream() output
+ * stream} and then fully consumed. Invocations to the {@link #iterator()} method after the first
+ * will return an "empty" iterator because the stream has been consumed on the first invocation.
+ * However, it is possible for subclasses to support multiple invocations of {@link #iterator()}
+ * by overriding {@link #write(ByteBuffer)} and {@link #close()}, copying the bytes and making
+ * them available for subsequent invocations.
+ *
+ * <p>Content must be provided by writing to the {@link #getOutputStream() output stream}, which
+ * must be {@link OutputStream#close() closed} when all content has been provided.
+ *
+ * <p>Example usage:
+ *
+ * <pre>
+ * HttpClient httpClient = ...;
+ * ByteBuffer buffer = ...; // e.g. acquired from the client's ByteBufferPool
+ *
+ * // Use try-with-resources to autoclose the output stream
+ * SolrOutputStreamContentProvider content = new SolrOutputStreamContentProvider(buffer);
+ * try (OutputStream output = content.getOutputStream())
+ * {
+ *     httpClient.newRequest("localhost", 8080)
+ *             .content(content)
+ *             .send(new Response.CompleteListener()
+ *             {
+ *                 @Override
+ *                 public void onComplete(Result result)
+ *                 {
+ *                     // Your logic here
+ *                 }
+ *             });
+ *
+ *     // At a later time...
+ *     output.write("some content".getBytes());
+ * }
+ * </pre>
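+ *
+ * <p>Unlike Jetty's own {@code OutputStreamContentProvider}, this variant is constructed with a
+ * {@link ByteBuffer} that backs a {@link BufferedBytesOutputStream} feeding the underlying
+ * {@link DeferredContentProvider}, so the buffer can come from (and be returned to) a pool.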
+ */ +public class SolrOutputStreamContentProvider implements AsyncContentProvider, Callback, Closeable { + private final DeferredContentProvider deferred = new DeferredContentProvider(); + + private final BufferedBytesOutputStream out; + + public SolrOutputStreamContentProvider(ByteBuffer buffer) { + this.out = new BufferedBytesOutputStream(buffer, deferred); + } + + @Override + public InvocationType getInvocationType() { + return deferred.getInvocationType(); + } + + @Override + public long getLength() { + return deferred.getLength(); + } + + @Override + public Iterator iterator() { + return deferred.iterator(); + } + + @Override + public void setListener(Listener listener) { + deferred.setListener(listener); + } + + public OutputStream getOutputStream() { + return out; + } + + public void write(ByteBuffer buffer) { + deferred.offer(buffer); + } + + @Override + public void close() { + deferred.close(); + } + + @Override + public void succeeded() { + deferred.succeeded(); + } + + @Override + public void failed(Throwable failure) { + deferred.failed(failure); + } + +} diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java index 9e458da9467..ae0d17907f6 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.io.InputStream; import java.util.List; - import org.apache.solr.client.solrj.FastStreamingDocsCallback; import org.apache.solr.client.solrj.StreamingResponseCallback; import org.apache.solr.common.SolrDocument; @@ -27,7 +26,6 @@ import org.apache.solr.common.SolrException; import org.apache.solr.common.util.DataEntry; import org.apache.solr.common.util.DataEntry.EntryListener; -import org.apache.solr.common.util.DataInputInputStream; import org.apache.solr.common.util.FastJavaBinDecoder; import org.apache.solr.common.util.FastJavaBinDecoder.EntryImpl; import org.apache.solr.common.util.FastJavaBinDecoder.Tag; @@ -35,9 +33,7 @@ import org.apache.solr.common.util.NamedList; /** - * A BinaryResponseParser that sends callback events rather then build - * a large response - * + * A BinaryResponseParser that sends callback events rather then build a large response * * @since solr 4.0 */ @@ -53,9 +49,8 @@ public StreamingBinaryResponseParser(StreamingResponseCallback cb) { public StreamingBinaryResponseParser(FastStreamingDocsCallback cb) { this.fastCallback = cb; this.callback = null; - } - + @Override public NamedList processResponse(InputStream body, String encoding) { if (callback != null) { @@ -67,106 +62,105 @@ public NamedList processResponse(InputStream body, String encoding) { throw new RuntimeException("Unable to parse", e); } } - } - private NamedList fastStreamDocs(InputStream body, FastStreamingDocsCallback fastCallback) throws IOException { + private NamedList fastStreamDocs(InputStream body, FastStreamingDocsCallback fastCallback) + throws IOException { - fieldListener = new EntryListener() { - @Override - public void entry(DataEntry field) { - if (((EntryImpl) field).getTag() == Tag._SOLRDOC) { - field.listenContainer(fastCallback.startChildDoc(field.ctx()), fieldListener); - } else { - fastCallback.field(field, field.ctx()); - } - } - - @Override - public void end(DataEntry e) { - fastCallback.endDoc(((EntryImpl) e).ctx); - } - }; - docListener = 
e -> { - EntryImpl entry = (EntryImpl) e; - if (entry.getTag() == Tag._SOLRDOC) {//this is a doc - entry.listenContainer(fastCallback.startDoc(entry.ctx()), fieldListener); - } - }; - new FastJavaBinDecoder() - .withInputStream(body) - .decode(new EntryListener() { + fieldListener = + new EntryListener() { @Override - public void entry(DataEntry e) { - EntryImpl entry = (EntryImpl) e; - if( !entry.type().isContainer) return; - if (e.isKeyValEntry() && entry.getTag() == Tag._SOLRDOCLST) { - List l = (List) e.metadata(); - e.listenContainer(fastCallback.initDocList( - (Long) l.get(0), - (Long) l.get(1), - (Float) l.get(2)), - docListener); + public void entry(DataEntry field) { + if (((EntryImpl) field).getTag() == Tag._SOLRDOC) { + field.listenContainer(fastCallback.startChildDoc(field.ctx()), fieldListener); } else { - e.listenContainer(null, this); + fastCallback.field(field, field.ctx()); } } - }); + + @Override + public void end(DataEntry e) { + fastCallback.endDoc(((EntryImpl) e).ctx); + } + }; + docListener = + e -> { + EntryImpl entry = (EntryImpl) e; + if (entry.getTag() == Tag._SOLRDOC) { // this is a doc + entry.listenContainer(fastCallback.startDoc(entry.ctx()), fieldListener); + } + }; + new FastJavaBinDecoder() + .withInputStream(body) + .decode( + new EntryListener() { + @Override + public void entry(DataEntry e) { + EntryImpl entry = (EntryImpl) e; + if (!entry.type().isContainer) return; + if (e.isKeyValEntry() && entry.getTag() == Tag._SOLRDOCLST) { + List l = (List) e.metadata(); + e.listenContainer( + fastCallback.initDocList((Long) l.get(0), (Long) l.get(1), (Float) l.get(2)), + docListener); + } else { + e.listenContainer(null, this); + } + } + }); return null; } - private EntryListener fieldListener; private EntryListener docListener; - @SuppressWarnings({"unchecked"}) private NamedList streamDocs(InputStream body) { - try (JavaBinCodec codec = new JavaBinCodec() { - - private int nestedLevel; - - @Override - public SolrDocument readSolrDocument(DataInputInputStream dis) throws IOException { - nestedLevel++; - SolrDocument doc = super.readSolrDocument(dis); - nestedLevel--; - if (nestedLevel == 0) { - // parent document - callback.streamSolrDocument(doc); - return null; - } else { - // child document - return doc; - } - } + try (JavaBinCodec codec = + new JavaBinCodec() { - @Override - public SolrDocumentList readSolrDocumentList(DataInputInputStream dis) throws IOException { - SolrDocumentList solrDocs = new SolrDocumentList(); - List list = (List) readVal(dis); - solrDocs.setNumFound((Long) list.get(0)); - solrDocs.setStart((Long) list.get(1)); - solrDocs.setMaxScore((Float) list.get(2)); - - callback.streamDocListInfo( - solrDocs.getNumFound(), - solrDocs.getStart(), - solrDocs.getMaxScore()); - - // Read the Array - tagByte = dis.readByte(); - if ((tagByte >>> 5) != (ARR >>> 5)) { - throw new RuntimeException("doclist must have an array"); - } - int sz = readSize(dis); - for (int i = 0; i < sz; i++) { - // must be a SolrDocument - readVal(dis); - } - return solrDocs; - } - };) { + private int nestedLevel; + + @Override + public SolrDocument readSolrDocument(JavaBinCodec javaBinCodec) throws IOException { + nestedLevel++; + SolrDocument doc = super.readSolrDocument(javaBinCodec); + nestedLevel--; + if (nestedLevel == 0) { + // parent document + callback.streamSolrDocument(doc); + return null; + } else { + // child document + return doc; + } + } + + @Override + public SolrDocumentList readSolrDocumentList(JavaBinCodec javaBinCodec) + throws IOException { + 
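+                // Assumed wire layout (inferred from the reads below): the doc list arrives as
+                // a List header of [numFound, start, maxScore], followed by an ARR tag whose
+                // elements are consumed one SolrDocument at a time.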
SolrDocumentList solrDocs = new SolrDocumentList(); + List list = (List) readVal(this); + solrDocs.setNumFound((Long) list.get(0)); + solrDocs.setStart((Long) list.get(1)); + solrDocs.setMaxScore((Float) list.get(2)); + + callback.streamDocListInfo( + solrDocs.getNumFound(), solrDocs.getStart(), solrDocs.getMaxScore()); + + // Read the Array + tagByte = readByte(javaBinCodec); + if ((tagByte >>> 5) != (ARR >>> 5)) { + throw new RuntimeException("doclist must have an array"); + } + int sz = readSize(this); + for (int i = 0; i < sz; i++) { + // must be a SolrDocument + readVal(this); + } + return solrDocs; + } + }; ) { return (NamedList) codec.unmarshal(body); } catch (IOException e) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JavabinTupleStreamParser.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JavabinTupleStreamParser.java index 2cb307405c0..24d06782fba 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JavabinTupleStreamParser.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/JavabinTupleStreamParser.java @@ -17,7 +17,6 @@ package org.apache.solr.client.solrj.io.stream; - import java.io.IOException; import java.io.InputStream; import java.time.Instant; @@ -25,43 +24,38 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; - -import org.apache.solr.common.util.DataInputInputStream; -import org.apache.solr.common.util.FastInputStream; import org.apache.solr.common.util.JavaBinCodec; public class JavabinTupleStreamParser extends JavaBinCodec implements TupleStreamParser { private final InputStream is; - final FastInputStream fis; + private int arraySize = Integer.MAX_VALUE; private boolean onlyJsonTypes = false; int objectSize; - public JavabinTupleStreamParser(InputStream is, boolean onlyJsonTypes) throws IOException { this.onlyJsonTypes = onlyJsonTypes; this.is = is; - this.fis = initRead(is); + initRead(is); if (!readTillDocs()) arraySize = 0; } - private boolean readTillDocs() throws IOException { - if (isObjectType(fis)) { + if (isObjectType()) { if (tagByte == SOLRDOCLST) { - readVal(fis);// this is the metadata, throw it away - tagByte = fis.readByte(); - arraySize = readSize(fis); + readVal(this); // this is the metadata, throw it away + tagByte = readByte(this); + arraySize = readSize(this); return true; } for (int i = objectSize; i > 0; i--) { - Object k = readVal(fis); + Object k = readVal(this); if (k == END_OBJ) break; if ("docs".equals(k)) { - tagByte = fis.readByte(); - if (tagByte == ITERATOR) return true;//docs must be an iterator or - if (tagByte >>> 5 == ARR >>> 5) {// an array - arraySize = readSize(fis); + tagByte = readByte(this); + if (tagByte == ITERATOR) return true; // docs must be an iterator or + if (tagByte >>> 5 == ARR >>> 5) { // an array + arraySize = readSize(this); return true; } return false; @@ -70,23 +64,22 @@ private boolean readTillDocs() throws IOException { } } } else { - readObject(fis); + readObject(); return false; } return false; - //here after it will be a stream of maps + // here after it will be a stream of maps } - private boolean isObjectType(DataInputInputStream dis) throws IOException { - tagByte = dis.readByte(); - if (tagByte >>> 5 == ORDERED_MAP >>> 5 || - tagByte >>> 5 == NAMED_LST >>> 5) { - objectSize = readSize(dis); + private boolean isObjectType() throws IOException { + tagByte = readByte(this); + if (tagByte >>> 5 == ORDERED_MAP >>> 5 || tagByte >>> 5 == NAMED_LST >>> 5) { + objectSize = readSize(this); return 
true; } if (tagByte == MAP) { - objectSize = readVInt(dis); + objectSize = readVInt(this); return true; } if (tagByte == MAP_ENTRY_ITER) { @@ -96,24 +89,24 @@ private boolean isObjectType(DataInputInputStream dis) throws IOException { return tagByte == SOLRDOCLST; } - private Map readAsMap(DataInputInputStream dis) throws IOException { - int sz = readSize(dis); + private Map readAsMap() throws IOException { + int sz = readSize(this); Map m = new LinkedHashMap<>(); for (int i = 0; i < sz; i++) { - String name = (String) readVal(dis); - Object val = readVal(dis); + String name = (String) readVal(this); + Object val = readVal(this); m.put(name, val); } return m; } - private Map readSolrDocumentAsMap(DataInputInputStream dis) throws IOException { - tagByte = dis.readByte(); - int size = readSize(dis); + private Map readSolrDocumentAsMap() throws IOException { + tagByte = readByte(this); + int size = readSize(this); Map doc = new LinkedHashMap<>(); for (int i = 0; i < size; i++) { String fieldName; - Object obj = readVal(dis); // could be a field name, or a child document + Object obj = readVal(this); // could be a field name, or a child document if (obj instanceof Map) { @SuppressWarnings("unchecked") List l = (List) doc.get("_childDocuments_"); @@ -123,61 +116,65 @@ private Map readSolrDocumentAsMap(DataInputInputStream dis) throws IOExcept } else { fieldName = (String) obj; } - Object fieldVal = readVal(dis); + Object fieldVal = readVal(this); doc.put(fieldName, fieldVal); } return doc; } @Override - protected Object readObject(DataInputInputStream dis) throws IOException { + protected Object readObject() throws IOException { if (tagByte == SOLRDOC) { - return readSolrDocumentAsMap(dis); + return readSolrDocumentAsMap(); } if (onlyJsonTypes) { switch (tagByte >>> 5) { case SINT >>> 5: - int i = readSmallInt(dis); + int i = readSmallInt(this); return (long) i; case ORDERED_MAP >>> 5: case NAMED_LST >>> 5: - return readAsMap(dis); + return readAsMap(); } switch (tagByte) { - case INT: { - int i = dis.readInt(); - return (long) i; - } - case FLOAT: { - float v = dis.readFloat(); - return (double) v; - } - case BYTE: { - byte b = dis.readByte(); - return (long) b; - } - case SHORT: { - short s = dis.readShort(); - return (long) s; - } + case INT: + { + int i = readInt(this); + return (long) i; + } + case FLOAT: + { + float v = readFloat(); + return (double) v; + } + case BYTE: + { + byte b = readByte(this); + return (long) b; + } + case SHORT: + { + short s = readShort(); + return (long) s; + } - case DATE: { - return Instant.ofEpochMilli(dis.readLong()).toString(); - } + case DATE: + { + return Instant.ofEpochMilli(readLong(this)).toString(); + } default: - return super.readObject(dis); + return super.readObject(); } - } else return super.readObject(dis); + } else return super.readObject(); } - @Override @SuppressWarnings({"unchecked"}) public Map next() throws IOException { if (arraySize == 0) return null; - Object o = readVal(fis); + Object o = readVal(this); arraySize--; if (o == END_OBJ) return null; return (Map) o; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/ContentStreamUpdateRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/ContentStreamUpdateRequest.java index 6b387c03caa..02ed726dee1 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/ContentStreamUpdateRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/ContentStreamUpdateRequest.java @@ -22,12 +22,10 @@ import java.util.ArrayList; import 
java.util.Collection; import java.util.List; - import org.apache.commons.io.IOUtils; import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ContentStreamBase; - /** * Basic functionality to upload a File or {@link org.apache.solr.common.util.ContentStream} to a Solr Cell or some * other handler that takes ContentStreams (CSV) @@ -46,9 +44,10 @@ public class ContentStreamUpdateRequest extends AbstractUpdateRequest { */ public ContentStreamUpdateRequest(String url) { super(METHOD.POST, url); - contentStreams = new ArrayList<>(); + contentStreams = new ArrayList<>(2); } + @SuppressWarnings("deprecation") @Override public Collection getContentStreams() throws IOException { return contentStreams; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java index 8bcb1d78673..8ababcff6f7 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java @@ -16,37 +16,32 @@ */ package org.apache.solr.client.solrj.request; +import static org.apache.solr.common.params.CommonParams.CHILDDOC; +import static org.apache.solr.common.util.ByteArrayUtf8CharSequence.convertCharSeq; + import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.invoke.MethodHandles; -import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicBoolean; - import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputField; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.ShardParams; import org.apache.solr.common.params.SolrParams; -import org.apache.solr.common.util.DataInputInputStream; import org.apache.solr.common.util.JavaBinCodec; import org.apache.solr.common.util.NamedList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.params.CommonParams.CHILDDOC; -import static org.apache.solr.common.util.ByteArrayUtf8CharSequence.convertCharSeq; - /** - * Provides methods for marshalling an UpdateRequest to a NamedList which can be serialized in the javabin format and - * vice versa. - * + * Provides methods for marshalling an UpdateRequest to a NamedList which can be serialized in the + * javabin format and vice versa. 
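+ * <p>Illustrative round trip (a sketch, not part of this change; the stream handles and the
+ * index() callback are assumed):
+ *
+ * <pre>{@code
+ * JavaBinUpdateRequestCodec codec = new JavaBinUpdateRequestCodec();
+ * codec.marshal(updateRequest, out); // UpdateRequest -> javabin bytes
+ * codec.unmarshal(in, (doc, req, commitWithin, overwrite) -> index(doc)); // streamed back
+ * }</pre>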
* * @see org.apache.solr.common.util.JavaBinCodec * @since solr 1.4 @@ -55,20 +50,19 @@ public class JavaBinUpdateRequestCodec { private boolean readStringAsCharSeq = false; public JavaBinUpdateRequestCodec setReadStringAsCharSeq(boolean flag) { - this.readStringAsCharSeq = flag; + readStringAsCharSeq = flag; return this; - } private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean(); /** - * Converts an UpdateRequest to a NamedList which can be serialized to the given OutputStream in the javabin format + * Converts an UpdateRequest to a NamedList which can be serialized to the given OutputStream in + * the javabin format * * @param updateRequest the UpdateRequest to be written out - * @param os the OutputStream to which the request is to be written - * + * @param os the OutputStream to which the request is to be written * @throws IOException in case of an exception during marshalling or writing to the stream */ public void marshal(UpdateRequest updateRequest, OutputStream os) throws IOException { @@ -79,13 +73,13 @@ public void marshal(UpdateRequest updateRequest, OutputStream os) throws IOExcep } Iterator docIter = null; - if(updateRequest.getDocIterator() != null){ + if (updateRequest.getDocIterator() != null) { docIter = updateRequest.getDocIterator(); } - Map> docMap = updateRequest.getDocumentsMap(); + Map> docMap = updateRequest.getDocumentsMap(); - nl.add("params", params);// 0: params + nl.add("params", params); // 0: params if (updateRequest.getDeleteByIdMap() != null) { nl.add("delByIdMap", updateRequest.getDeleteByIdMap()); } @@ -105,23 +99,23 @@ public void marshal(UpdateRequest updateRequest, OutputStream os) throws IOExcep } /** - * Reads a NamedList from the given InputStream, converts it into a SolrInputDocument and passes it to the given - * StreamingUpdateHandler - * - * @param is the InputStream from which to read - * @param handler an instance of StreamingUpdateHandler to which SolrInputDocuments are streamed one by one + * Reads a NamedList from the given InputStream, converts it into a SolrInputDocument and passes + * it to the given StreamingUpdateHandler * + * @param is the InputStream from which to read + * @param handler an instance of StreamingUpdateHandler to which SolrInputDocuments are streamed + * one by one * @return the UpdateRequest - * - * @throws IOException in case of an exception while reading from the input stream or unmarshalling + * @throws IOException in case of an exception while reading from the input stream or + * unmarshalling */ @SuppressWarnings({"unchecked"}) - public UpdateRequest unmarshal(InputStream is, final StreamingUpdateHandler handler) throws IOException { + public UpdateRequest unmarshal(InputStream is, final StreamingUpdateHandler handler) + throws IOException { final UpdateRequest updateRequest = new UpdateRequest(); - List>> doclist; - List>> docMap; + List delById; - Map> delByIdMap; + Map> delByIdMap; List delByQ; final NamedList[] namedList = new NamedList[1]; try (JavaBinCodec codec = new StreamingCodec(namedList, updateRequest, handler)) { @@ -130,24 +124,15 @@ public UpdateRequest unmarshal(InputStream is, final StreamingUpdateHandler hand // NOTE: if the update request contains only delete commands the params // must be loaded now - if(updateRequest.getParams()==null) { + if (updateRequest.getParams() == null) { NamedList params = (NamedList) namedList[0].get("params"); - if(params!=null) { + if 
(params != null) { updateRequest.setParams(new ModifiableSolrParams(params.toSolrParams())); } } delById = (List) namedList[0].get("delById"); - delByIdMap = (Map>) namedList[0].get("delByIdMap"); + delByIdMap = (Map>) namedList[0].get("delByIdMap"); delByQ = (List) namedList[0].get("delByQ"); - doclist = (List) namedList[0].get("docs"); - Object docsMapObj = namedList[0].get("docsMap"); - - if (docsMapObj instanceof Map) {//SOLR-5762 - docMap = new ArrayList<>(((Map)docsMapObj).entrySet()); - } else { - docMap = (List>>) docsMapObj; - } - // we don't add any docs, because they were already processed // deletes are handled later, and must be passed back on the UpdateRequest @@ -158,19 +143,19 @@ public UpdateRequest unmarshal(InputStream is, final StreamingUpdateHandler hand } } if (delByIdMap != null) { - for (Map.Entry> entry : delByIdMap.entrySet()) { - Map params = entry.getValue(); + for (Map.Entry> entry : delByIdMap.entrySet()) { + Map params = entry.getValue(); if (params != null) { Long version = (Long) params.get(UpdateRequest.VER); if (params.containsKey(ShardParams._ROUTE_)) { - updateRequest.deleteById(entry.getKey(), (String) params.get(ShardParams._ROUTE_), version); + updateRequest.deleteById( + entry.getKey(), (String) params.get(ShardParams._ROUTE_), version); } else { updateRequest.deleteById(entry.getKey(), version); } } else { updateRequest.deleteById(entry.getKey()); } - } } if (delByQ != null) { @@ -182,14 +167,14 @@ public UpdateRequest unmarshal(InputStream is, final StreamingUpdateHandler hand return updateRequest; } - private NamedList solrParamsToNamedList(SolrParams params) { if (params == null) return new NamedList<>(); return params.toNamedList(); } public interface StreamingUpdateHandler { - void update(SolrInputDocument document, UpdateRequest req, Integer commitWithin, Boolean override); + void update( + SolrInputDocument document, UpdateRequest req, Integer commitWithin, Boolean override); } static class MaskCharSequenceSolrInputDoc extends SolrInputDocument { @@ -201,7 +186,6 @@ public MaskCharSequenceSolrInputDoc(Map fields) { public Object getFieldValue(String name) { return convertCharSeq(super.getFieldValue(name)); } - } class StreamingCodec extends JavaBinCodec { @@ -215,7 +199,8 @@ class StreamingCodec extends JavaBinCodec { // is ever refactored, this will not work. 
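    // Assumed semantics (mirroring readIterator below): the first ITERATOR tag encountered is
    // treated as the outermost document list and streamed via readOuterMostDocIterator; once
    // this flag is set, any nested iterators fall back to the default JavaBinCodec behavior.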
private boolean seenOuterMostDocIterator; - public StreamingCodec(NamedList[] namedList, UpdateRequest updateRequest, StreamingUpdateHandler handler) { + public StreamingCodec( + NamedList[] namedList, UpdateRequest updateRequest, StreamingUpdateHandler handler) { this.namedList = namedList; this.updateRequest = updateRequest; this.handler = handler; @@ -228,15 +213,15 @@ protected SolrInputDocument createSolrInputDocument(int sz) { } @Override - public NamedList readNamedList(DataInputInputStream dis) throws IOException { - int sz = readSize(dis); + public NamedList readNamedList() throws IOException { + int sz = readSize(this); NamedList nl = new NamedList<>(); if (namedList[0] == null) { namedList[0] = nl; } for (int i = 0; i < sz; i++) { - String name = (String) readVal(dis); - Object val = readVal(dis); + String name = (String) readVal(this); + Object val = readVal(this); nl.add(name, val); } return nl; @@ -248,57 +233,65 @@ private SolrInputDocument listToSolrInputDocument(List> namedList) NamedList nl = namedList.get(i); if (i == 0) { Float boost = (Float) nl.getVal(0); - if (boost != null && boost.floatValue() != 1f) { - String message = "Ignoring document boost: " + boost + " as index-time boosts are not supported anymore"; + if (boost != null && boost != 1f) { + String message = + "Ignoring document boost: " + + boost + + " as index-time boosts are not supported anymore"; if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) { log.warn(message); } else { - log.debug(message); + if (log.isDebugEnabled()) { + log.debug(message); + } } } } else { Float boost = (Float) nl.getVal(2); - if (boost != null && boost.floatValue() != 1f) { - String message = "Ignoring field boost: " + boost + " as index-time boosts are not supported anymore"; + if (boost != null && boost != 1f) { + String message = + "Ignoring field boost: " + + boost + + " as index-time boosts are not supported anymore"; if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) { log.warn(message); } else { - log.debug(message); + if (log.isDebugEnabled()) { + log.debug(message); + } } } - doc.addField((String) nl.getVal(0), - nl.getVal(1)); + doc.addField((String) nl.getVal(0), nl.getVal(1)); } } return doc; } @Override - public List readIterator(DataInputInputStream fis) throws IOException { + public List readIterator(JavaBinCodec javaBinCodec) throws IOException { // default behavior for reading any regular Iterator in the stream - if (seenOuterMostDocIterator) return super.readIterator(fis); + if (seenOuterMostDocIterator) return super.readIterator(javaBinCodec); // special treatment for first outermost Iterator // (the list of documents) seenOuterMostDocIterator = true; - return readOuterMostDocIterator(fis); + return readOuterMostDocIterator(); } - - private List readOuterMostDocIterator(DataInputInputStream fis) throws IOException { - if(namedList[0] == null) namedList[0] = new NamedList<>(); + private List readOuterMostDocIterator() throws IOException { + if (namedList[0] == null) namedList[0] = new NamedList<>(); NamedList params = (NamedList) namedList[0].get("params"); if (params == null) params = new NamedList<>(); updateRequest.setParams(new ModifiableSolrParams(params.toSolrParams())); - if (handler == null) return super.readIterator(fis); + if (handler == null) return super.readIterator(this); Integer commitWithin = null; Boolean overwrite = null; Object o = null; - super.readStringAsCharSeq = JavaBinUpdateRequestCodec.this.readStringAsCharSeq; + readStringAsCharSeq = true; try { while (true) { 
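          // Assumed element shapes (mirroring the branches below): a SolrInputDocument directly
          // or in List/Map form, a params-only entry passed to the handler with a null document,
          // a Map.Entry pairing a document with per-doc params ("cw"/"ow"), or END_OBJ to finish.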
if (o == null) { - o = readVal(fis); + o = readVal(this); } if (o == END_OBJ) { @@ -316,9 +309,10 @@ private List readOuterMostDocIterator(DataInputInputStream fis) throws I handler.update(null, req, null, null); } else if (o instanceof Map.Entry) { @SuppressWarnings("unchecked") - Map.Entry> entry = (Map.Entry>) o; + Map.Entry> entry = + (Map.Entry>) o; sdoc = entry.getKey(); - Map p = entry.getValue(); + Map p = entry.getValue(); if (p != null) { commitWithin = (Integer) p.get(UpdateRequest.COMMIT_WITHIN); overwrite = (Boolean) p.get(UpdateRequest.OVERWRITE); @@ -330,9 +324,10 @@ private List readOuterMostDocIterator(DataInputInputStream fis) throws I } // peek at the next object to see if we're at the end - o = readVal(fis); + o = readVal(this); if (o == END_OBJ) { - // indicate that we've hit the last doc in the batch, used to enable optimizations when doing replication + // indicate that we've hit the last doc in the batch, used to enable optimizations when + // doing replication updateRequest.lastDocInBatch(); } @@ -340,31 +335,30 @@ private List readOuterMostDocIterator(DataInputInputStream fis) throws I } return Collections.emptyList(); } finally { - super.readStringAsCharSeq = false; - + readStringAsCharSeq = false; } } - private SolrInputDocument convertMapToSolrInputDoc(Map m) { + private SolrInputDocument convertMapToSolrInputDoc(Map m) { SolrInputDocument result = createSolrInputDocument(m.size()); - m.forEach((k, v) -> { - if (CHILDDOC.equals(k.toString())) { - if (v instanceof List) { - List list = (List) v; - for (Object o : list) { - if (o instanceof Map) { - result.addChildDocument(convertMapToSolrInputDoc((Map) o)); + m.forEach( + (k, v) -> { + if (CHILDDOC.equals(k.toString())) { + if (v instanceof List) { + List list = (List) v; + for (Object o : list) { + if (o instanceof Map) { + result.addChildDocument(convertMapToSolrInputDoc((Map) o)); + } + } + } else if (v instanceof Map) { + result.addChildDocument(convertMapToSolrInputDoc((Map) v)); } + } else { + result.addField(k.toString(), v); } - } else if (v instanceof Map) { - result.addChildDocument(convertMapToSolrInputDoc((Map) v)); - } - } else { - result.addField(k.toString(), v); - } - }); + }); return result; } - } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java index e60bc39b6e5..4520117b490 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.client.solrj.request; +import static org.apache.solr.common.params.ShardParams._ROUTE_; + import java.io.IOException; import java.io.StringWriter; import java.io.Writer; @@ -29,7 +31,6 @@ import java.util.Map.Entry; import java.util.Objects; import java.util.Set; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.LBHttpSolrClient; @@ -45,8 +46,6 @@ import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.XML; -import static org.apache.solr.common.params.ShardParams._ROUTE_; - /** * * @@ -58,6 +57,10 @@ public class UpdateRequest extends AbstractUpdateRequest { public static final String VER = "ver"; public static final String OVERWRITE = "ow"; public static final String COMMIT_WITHIN = "cw"; + public static final String UPDATE = "/update"; + + public static final String 
CANNOT_ADD_A_NULL_SOLR_INPUT_DOCUMENT = "Cannot add a null SolrInputDocument"; + private Map> documents = null; private Iterator docIterator = null; private Map> deleteById = null; @@ -66,7 +69,7 @@ public class UpdateRequest extends AbstractUpdateRequest { private boolean isLastDocInBatch = false; public UpdateRequest() { - super(METHOD.POST, "/update"); + super(METHOD.POST, UPDATE); } public UpdateRequest(String url) { @@ -100,9 +103,9 @@ public void clear() { * @throws NullPointerException if the document is null */ public UpdateRequest add(final SolrInputDocument doc) { - Objects.requireNonNull(doc, "Cannot add a null SolrInputDocument"); + Objects.requireNonNull(doc, CANNOT_ADD_A_NULL_SOLR_INPUT_DOCUMENT); if (documents == null) { - documents = new LinkedHashMap<>(); + documents = new LinkedHashMap<>(8); } documents.put(doc, null); return this; @@ -140,9 +143,9 @@ public UpdateRequest add(final SolrInputDocument doc, Integer commitWithin) { * @throws NullPointerException if the document is null */ public UpdateRequest add(final SolrInputDocument doc, Integer commitWithin, Boolean overwrite) { - Objects.requireNonNull(doc, "Cannot add a null SolrInputDocument"); + Objects.requireNonNull(doc, CANNOT_ADD_A_NULL_SOLR_INPUT_DOCUMENT); if (documents == null) { - documents = new LinkedHashMap<>(); + documents = new LinkedHashMap<>(8); } Map params = new HashMap<>(2); if (commitWithin != null) params.put(COMMIT_WITHIN, commitWithin); @@ -160,10 +163,10 @@ public UpdateRequest add(final SolrInputDocument doc, Integer commitWithin, Bool */ public UpdateRequest add(final Collection docs) { if (documents == null) { - documents = new LinkedHashMap<>(); + documents = new LinkedHashMap<>(8); } for (SolrInputDocument doc : docs) { - Objects.requireNonNull(doc, "Cannot add a null SolrInputDocument"); + Objects.requireNonNull(doc, CANNOT_ADD_A_NULL_SOLR_INPUT_DOCUMENT); documents.put(doc, null); } return this; @@ -171,7 +174,7 @@ public UpdateRequest add(final Collection docs) { public UpdateRequest deleteById(String id) { if (deleteById == null) { - deleteById = new LinkedHashMap<>(); + deleteById = new LinkedHashMap<>(8); } deleteById.put(id, null); return this; @@ -183,7 +186,7 @@ public UpdateRequest deleteById(String id, String route) { public UpdateRequest deleteById(String id, String route, Long version) { if (deleteById == null) { - deleteById = new LinkedHashMap<>(); + deleteById = new LinkedHashMap<>(8); } Map params = (route == null && version == null) ? 
null : new HashMap<>(1); if (version != null) @@ -197,7 +200,7 @@ public UpdateRequest deleteById(String id, String route, Long version) { public UpdateRequest deleteById(List ids) { if (deleteById == null) { - deleteById = new LinkedHashMap<>(); + deleteById = new LinkedHashMap<>(8); } for (String id : ids) { @@ -213,7 +216,7 @@ public UpdateRequest deleteById(String id, Long version) { public UpdateRequest deleteByQuery(String q) { if (deleteQuery == null) { - deleteQuery = new ArrayList<>(); + deleteQuery = new ArrayList<>(2); } deleteQuery.add(q); return this; @@ -246,7 +249,7 @@ private Map getRoutes(DocRouter router, return null; } - Map routes = new HashMap<>(); + Map routes = new HashMap<>(4); if (documents != null) { Set>> entries = documents.entrySet(); for (Entry> entry : entries) { @@ -296,14 +299,10 @@ private Map getRoutes(DocRouter router, if (deleteById != null) { - Iterator>> entries = deleteById.entrySet() - .iterator(); - while (entries.hasNext()) { - - Map.Entry> entry = entries.next(); + for (Entry> entry : deleteById.entrySet()) { String deleteId = entry.getKey(); - Map map = entry.getValue(); + Map map = entry.getValue(); Long version = null; String route = null; if (map != null) { @@ -378,7 +377,8 @@ public void setDeleteQuery(List deleteQuery) { // -------------------------------------------------------------------------- // -------------------------------------------------------------------------- - + + @SuppressWarnings("deprecation") @Override public Collection getContentStreams() throws IOException { return ClientUtils.toContentStreams(getXML(), ClientUtils.TEXT_XML); @@ -395,8 +395,8 @@ public String getXML() throws IOException { return (xml.length() > 0) ? xml : null; } - private List>> getDocLists(Map> documents) { - List>> docLists = new ArrayList<>(); + private List>> getDocLists() { + List>> docLists = new ArrayList<>(2); Map> docList = null; if (this.documents != null) { @@ -416,7 +416,7 @@ private List>> getDocLists(Map(); + docList = new LinkedHashMap<>(2); docLists.add(docList); } docList.put(entry.getKey(), entry.getValue()); @@ -426,7 +426,7 @@ private List>> getDocLists(Map(); + docList = new LinkedHashMap<>(8); docLists.add(docList); while (docIterator.hasNext()) { SolrInputDocument doc = docIterator.next(); @@ -444,7 +444,7 @@ private List>> getDocLists(Map>> getDocLists = getDocLists(documents); + List>> getDocLists = getDocLists(); for (Map> docs : getDocLists) { @@ -544,8 +544,7 @@ public Iterator getDocIterator() { public List getDeleteById() { if (deleteById == null) return null; - List deletes = new ArrayList<>(deleteById.keySet()); - return deletes; + return new ArrayList<>(deleteById.keySet()); } public Map> getDeleteByIdMap() { diff --git a/solr/solrj/src/java/org/apache/solr/common/ConditionalKeyMapWriter.java b/solr/solrj/src/java/org/apache/solr/common/ConditionalKeyMapWriter.java index fa6c1c95463..33179d9186d 100644 --- a/solr/solrj/src/java/org/apache/solr/common/ConditionalKeyMapWriter.java +++ b/solr/solrj/src/java/org/apache/solr/common/ConditionalKeyMapWriter.java @@ -30,7 +30,7 @@ public ConditionalKeyMapWriter(MapWriter delegate, Predicate predi this.predicate = predicate; } - public static class EntryWriterWrapper implements EntryWriter { + public static class EntryWriterWrapper extends EntryWriter { private final EntryWriter delegate; private final Predicate predicate; @@ -84,5 +84,4 @@ public void writeMap(EntryWriter ew) throws IOException { public static Predicate dedupeKeyPredicate(Set keys) { return (k) -> keys.add(k); 
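  // Usage sketch (assumed, not part of this patch): suppress duplicate keys while writing,
  //   MapWriter deduped = new ConditionalKeyMapWriter(
  //       delegate, ConditionalKeyMapWriter.dedupeKeyPredicate(new HashSet<>()));
  // where 'delegate' is any existing MapWriter.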
} - } diff --git a/solr/solrj/src/java/org/apache/solr/common/ConditionalMapWriter.java b/solr/solrj/src/java/org/apache/solr/common/ConditionalMapWriter.java index 54785685cda..0083c3b621a 100644 --- a/solr/solrj/src/java/org/apache/solr/common/ConditionalMapWriter.java +++ b/solr/solrj/src/java/org/apache/solr/common/ConditionalMapWriter.java @@ -30,7 +30,7 @@ public ConditionalMapWriter(MapWriter delegate, BiPredicate predicate; diff --git a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java index b7cc4e29cbe..0e46fbd9250 100644 --- a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java +++ b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java @@ -17,7 +17,6 @@ package org.apache.solr.common; - import java.io.IOException; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -25,55 +24,57 @@ import java.util.Map; import java.util.function.BiConsumer; import java.util.function.BiPredicate; - import org.apache.solr.common.util.Utils; /** - * Use this class to push all entries of a Map into an output. - * This avoids creating map instances and is supposed to be memory efficient. - * If the entries are primitives, unnecessary boxing is also avoided. + * Use this class to push all entries of a Map into an output. This avoids creating map instances + * and is supposed to be memory efficient. If the entries are primitives, unnecessary boxing is also + * avoided. */ -public interface MapWriter extends MapSerializable , NavigableObject { +public interface MapWriter extends MapSerializable, NavigableObject { - default String jsonStr(){ + default String jsonStr() { return Utils.toJSONString(this); } @Override @SuppressWarnings({"unchecked", "rawtypes"}) - default Map toMap(Map map) { + default Map toMap(Map map) { try { - writeMap(new EntryWriter() { - @Override - public EntryWriter put(CharSequence k, Object v) { - if (v instanceof MapWriter) v = ((MapWriter) v).toMap(new LinkedHashMap<>()); - if (v instanceof IteratorWriter) v = ((IteratorWriter) v).toList(new ArrayList<>()); - if (v instanceof Iterable) { - List lst = new ArrayList(); - for (Object vv : (Iterable)v) { - if (vv instanceof MapWriter) vv = ((MapWriter) vv).toMap(new LinkedHashMap<>()); - if (vv instanceof IteratorWriter) vv = ((IteratorWriter) vv).toList(new ArrayList<>()); - lst.add(vv); - } - v = lst; - } - if (v instanceof Map) { - Map map = new LinkedHashMap(); - for (Map.Entry entry : ((Map)v).entrySet()) { - Object vv = entry.getValue(); - if (vv instanceof MapWriter) vv = ((MapWriter) vv).toMap(new LinkedHashMap<>()); - if (vv instanceof IteratorWriter) vv = ((IteratorWriter) vv).toList(new ArrayList<>()); - map.put(entry.getKey(), vv); + writeMap( + new EntryWriter() { + @Override + public EntryWriter put(CharSequence k, Object v) { + if (v instanceof MapWriter) v = ((MapWriter) v).toMap(new LinkedHashMap<>()); + if (v instanceof IteratorWriter) v = ((IteratorWriter) v).toList(new ArrayList<>()); + if (v instanceof Iterable) { + List lst = new ArrayList(); + for (Object vv : (Iterable) v) { + if (vv instanceof MapWriter) vv = ((MapWriter) vv).toMap(new LinkedHashMap<>()); + if (vv instanceof IteratorWriter) + vv = ((IteratorWriter) vv).toList(new ArrayList<>()); + lst.add(vv); + } + v = lst; + } + if (v instanceof Map) { + Map map = new LinkedHashMap(); + for (Map.Entry entry : ((Map) v).entrySet()) { + Object vv = entry.getValue(); + if (vv instanceof MapWriter) vv = ((MapWriter) vv).toMap(new LinkedHashMap<>()); + if (vv instanceof 
IteratorWriter) + vv = ((IteratorWriter) vv).toList(new ArrayList<>()); + map.put(entry.getKey(), vv); + } + v = map; + } + map.put(k == null ? null : k.toString(), v); + // note: It'd be nice to assert that there is no previous value at 'k' but it's + // possible the passed in + // map is already populated and the intention is to overwrite. + return this; } - v = map; - } - map.put(k==null? null : k.toString(), v); - // note: It'd be nice to assert that there is no previous value at 'k' but it's possible the passed in - // map is already populated and the intention is to overwrite. - return this; - } - - }); + }); } catch (IOException e) { throw new RuntimeException(e); } @@ -83,11 +84,10 @@ public EntryWriter put(CharSequence k, Object v) { void writeMap(EntryWriter ew) throws IOException; /** - * An interface to push one entry at a time to the output. - * The order of the keys is not defined, but we assume they are distinct -- don't call {@code put} more than once - * for the same key. + * An interface to push one entry at a time to the output. The order of the keys is not defined, + * but we assume they are distinct -- don't call {@code put} more than once for the same key. */ - interface EntryWriter { + abstract class EntryWriter { /** * Writes a key value into the map @@ -95,70 +95,66 @@ interface EntryWriter { * @param k The key * @param v The value can be any supported object */ - EntryWriter put(CharSequence k, Object v) throws IOException; - default EntryWriter putNoEx(CharSequence k, Object v) { + public abstract EntryWriter put(CharSequence k, Object v) throws IOException; + + public EntryWriter putNoEx(CharSequence k, Object v) { try { - put(k,v); + put(k, v); } catch (IOException e) { throw new RuntimeException(e); } return this; } - default EntryWriter put(CharSequence k, Object v, BiPredicate p) throws IOException { - if (p.test(k,v)) put(k, v); + public EntryWriter put(CharSequence k, Object v, BiPredicate p) + throws IOException { + if (p.test(k, v)) put(k, v); return this; } - default EntryWriter putIfNotNull(CharSequence k, Object v) throws IOException { - if(v != null) put(k,v); + public EntryWriter putIfNotNull(CharSequence k, Object v) throws IOException { + if (v != null) put(k, v); return this; } - default EntryWriter putStringIfNotNull(CharSequence k, Object v) throws IOException { - if(v != null) put(k,String.valueOf(v)); + public EntryWriter putStringIfNotNull(CharSequence k, Object v) throws IOException { + if (v != null) put(k, String.valueOf(v)); return this; } - - default EntryWriter put(CharSequence k, int v) throws IOException { + public EntryWriter put(CharSequence k, int v) throws IOException { put(k, (Integer) v); return this; } - - default EntryWriter put(CharSequence k, long v) throws IOException { + public EntryWriter put(CharSequence k, long v) throws IOException { put(k, (Long) v); return this; } - - default EntryWriter put(CharSequence k, float v) throws IOException { + public EntryWriter put(CharSequence k, float v) throws IOException { put(k, (Float) v); return this; } - default EntryWriter put(CharSequence k, double v) throws IOException { + public EntryWriter put(CharSequence k, double v) throws IOException { put(k, (Double) v); return this; } - default EntryWriter put(CharSequence k, boolean v) throws IOException { + public EntryWriter put(CharSequence k, boolean v) throws IOException { put(k, (Boolean) v); return this; } - /**This is an optimization to avoid the instanceof checks. 
- * - */ - default EntryWriter put(CharSequence k, CharSequence v) throws IOException { - put(k, (Object)v); + /** This is an optimization to avoid the instanceof checks. */ + public EntryWriter put(CharSequence k, CharSequence v) throws IOException { + put(k, (Object) v); return this; - } - default BiConsumer getBiConsumer(){ - return (k, v) -> putNoEx(k,v); + public final BiConsumer getBiConsumer() { + return this::putNoEx; } } } diff --git a/solr/solrj/src/java/org/apache/solr/common/SolrDocumentList.java b/solr/solrj/src/java/org/apache/solr/common/SolrDocumentList.java index ab13b2debc3..a0f45dcfb53 100644 --- a/solr/solrj/src/java/org/apache/solr/common/SolrDocumentList.java +++ b/solr/solrj/src/java/org/apache/solr/common/SolrDocumentList.java @@ -32,6 +32,14 @@ public class SolrDocumentList extends ArrayList private long start = 0; private Float maxScore = null; private Boolean numFoundExact = true; + + public SolrDocumentList() { + super(); + } + + public SolrDocumentList(int size) { + super(size); + } public Boolean getNumFoundExact() { return numFoundExact; diff --git a/solr/solrj/src/java/org/apache/solr/common/util/BufferedBytesOutputStream.java b/solr/solrj/src/java/org/apache/solr/common/util/BufferedBytesOutputStream.java new file mode 100644 index 00000000000..4aee298f893 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/common/util/BufferedBytesOutputStream.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.common.util; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.BufferOverflowException; +import java.nio.ByteBuffer; +import org.eclipse.jetty.client.util.DeferredContentProvider; + +public class BufferedBytesOutputStream extends OutputStream { + private final DeferredContentProvider stream; + + protected ByteBuffer buf; + + protected int sz; + + public BufferedBytesOutputStream(ByteBuffer buffer, DeferredContentProvider stream) { + buf = buffer; + this.stream = stream; + } + + /** + * Writes the specified byte to this {@code ByteArrayOutputStream}. + * + * @param b the byte to be written. 
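+   * <p>Assumed flow (mirroring the body below): when the buffer cannot take one more byte,
+   * {@code flush()} offers the filled buffer to the Jetty {@code DeferredContentProvider} and
+   * clears it before this byte is stored.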
+ */ + public void write(int b) throws IOException { + if (sz + 1 > buf.remaining()) { + flush(); + } + buf.put(sz++, (byte) b); + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + if (len > buf.limit() - sz) { + + if (len > buf.capacity()) { + flush(); + stream.offer(ByteBuffer.wrap(b, off, len)); + stream.flush(); + return; + } + flush(); + } + + buf.position(sz); + try { + buf.put(b, off, len); + } catch (BufferOverflowException e) { + throw new RuntimeException( + "len:" + + len + + " sz:" + + sz + + " cap:" + + buf.capacity() + + " pos:" + + buf.position() + + " buflimit:" + + buf.limit()); + } + sz += len; + } + + public void writeBytes(byte[] b) throws IOException { + write(b, 0, b.length); + } + + public void reset() { + sz = 0; + buf.clear(); + } + + public void flush() throws IOException { + if (sz > 0) { + buf.flip(); + stream.offer(buf); + stream.flush(); + buf.clear(); + sz = 0; + } + } + + @Override + public void close() throws IOException { + flush(); + stream.close(); + } + +} diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java b/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java index 2088e2b5828..03f4e9f1c39 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java @@ -17,6 +17,8 @@ package org.apache.solr.common.util; +import java.io.IOException; +import java.io.OutputStream; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Collection; @@ -136,8 +138,9 @@ public boolean equals(Object other) { } public static boolean utf8Equals(Utf8CharSequence utf8_1, Utf8CharSequence utf8_2) { - if (utf8_1.size() != utf8_2.size()) return false; - for (int i = 0; i < utf8_1.size(); i++) { + final int size = utf8_1.size(); + if (size != utf8_2.size()) return false; + for (int i = 0; i < size; i++) { if (utf8_1.byteAt(i) != utf8_2.byteAt(i)) return false; } return true; @@ -269,6 +272,10 @@ public ByteArrayUtf8CharSequence reset(byte[] bytes, int offset, int length, Str return this; } + public void write(OutputStream os) throws IOException { + os.write(buf, offset, length); + } + /** * Performs internal consistency checks. * Always returns true (or throws IllegalStateException) diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ByteUtils.java b/solr/solrj/src/java/org/apache/solr/common/util/ByteUtils.java index be64e6cac8e..fb92b59eae7 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ByteUtils.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ByteUtils.java @@ -16,42 +16,49 @@ */ package org.apache.solr.common.util; +import static java.lang.Character.MAX_SURROGATE; +import static java.lang.Character.MIN_SURROGATE; + import java.io.IOException; import java.io.OutputStream; - import org.noggit.CharArr; - public class ByteUtils { /** Maximum number of UTF8 bytes per UTF16 character. */ public static final int MAX_UTF8_BYTES_PER_CHAR = 3; - /** Converts utf8 to utf16 and returns the number of 16 bit Java chars written. - * Full characters are read, even if this reads past the length passed (and can result in - * an ArrayOutOfBoundsException if invalid UTF8 is passed). Explicit checks for valid UTF8 are not performed. - * The char[] out should probably have enough room to hold the worst case of each byte becoming a Java char. + /** + * Converts utf8 to utf16 and returns the number of 16 bit Java chars written. 
Full characters are + * read, even if this reads past the length passed (and can result in an ArrayOutOfBoundsException + * if invalid UTF8 is passed). Explicit checks for valid UTF8 are not performed. The char[] out + * should probably have enough room to hold the worst case of each byte becoming a Java char. */ public static int UTF8toUTF16(byte[] utf8, int offset, int len, char[] out, int out_offset) { int out_start = out_offset; final int limit = offset + len; while (offset < limit) { - int b = utf8[offset++]&0xff; + int b = utf8[offset++] & 0xff; if (b < 0xc0) { assert b < 0x80; - out[out_offset++] = (char)b; + out[out_offset++] = (char) b; } else if (b < 0xe0) { - out[out_offset++] = (char)(((b&0x1f)<<6) + (utf8[offset++]&0x3f)); + out[out_offset++] = (char) (((b & 0x1f) << 6) + (utf8[offset++] & 0x3f)); } else if (b < 0xf0) { - out[out_offset++] = (char)(((b&0xf)<<12) + ((utf8[offset]&0x3f)<<6) + (utf8[offset+1]&0x3f)); + out[out_offset++] = + (char) (((b & 0xf) << 12) + ((utf8[offset] & 0x3f) << 6) + (utf8[offset + 1] & 0x3f)); offset += 2; } else { assert b < 0xf8; - int ch = ((b&0x7)<<18) + ((utf8[offset]&0x3f)<<12) + ((utf8[offset+1]&0x3f)<<6) + (utf8[offset+2]&0x3f); + int ch = + ((b & 0x7) << 18) + + ((utf8[offset] & 0x3f) << 12) + + ((utf8[offset + 1] & 0x3f) << 6) + + (utf8[offset + 2] & 0x3f); offset += 3; if (ch < 0xffff) { - out[out_offset++] = (char)ch; + out[out_offset++] = (char) ch; } else { int chHalf = ch - 0x0010000; out[out_offset++] = (char) ((chHalf >> 10) + 0xD800); @@ -71,48 +78,41 @@ public static void UTF8toUTF16(byte[] utf8, int offset, int len, CharArr out) { out.setEnd(out.getEnd() + n); } - /** Convert UTF8 bytes into a String */ - public static String UTF8toUTF16(byte[] utf8, int offset, int len) { - char[] out = new char[len]; - int n = UTF8toUTF16(utf8, offset, len, out, 0); - return new String(out,0,n); - } - - - - /** Writes UTF8 into the byte array, starting at offset. The caller should ensure that - * there is enough space for the worst-case scenario. + /** + * Writes UTF8 into the byte array, starting at offset. The caller should ensure that there is + * enough space for the worst-case scenario. 
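+   * <p>Worst-case sizing sketch (assumed usage, using the constant above):
+   * {@code byte[] out = new byte[len * ByteUtils.MAX_UTF8_BYTES_PER_CHAR];} since each UTF-16
+   * char expands to at most 3 UTF-8 bytes (a surrogate pair emits 4 bytes for its 2 chars).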
+ * * @return the number of bytes written */ - public static int UTF16toUTF8(CharSequence s, int offset, int len, byte[] result, int resultOffset) { + public static int UTF16toUTF8( + CharSequence s, int offset, int len, byte[] result, int resultOffset) { final int end = offset + len; int upto = resultOffset; - for(int i=offset;i> 6)); - result[upto++] = (byte)(0x80 | (code & 0x3F)); + result[upto++] = (byte) (0x80 | (code & 0x3F)); } else if (code < 0xD800 || code > 0xDFFF) { - result[upto++] = (byte)(0xE0 | (code >> 12)); - result[upto++] = (byte)(0x80 | ((code >> 6) & 0x3F)); - result[upto++] = (byte)(0x80 | (code & 0x3F)); + result[upto++] = (byte) (0xE0 | (code >> 12)); + result[upto++] = (byte) (0x80 | ((code >> 6) & 0x3F)); + result[upto++] = (byte) (0x80 | (code & 0x3F)); } else { // surrogate pair // confirm valid high surrogate - if (code < 0xDC00 && (i < end-1)) { - int utf32 = (int) s.charAt(i+1); + if (code < 0xDC00 && (i < end - 1)) { + int utf32 = s.charAt(i + 1); // confirm valid low surrogate and write pair if (utf32 >= 0xDC00 && utf32 <= 0xDFFF) { utf32 = ((code - 0xD7C0) << 10) + (utf32 & 0x3FF); i++; - result[upto++] = (byte)(0xF0 | (utf32 >> 18)); - result[upto++] = (byte)(0x80 | ((utf32 >> 12) & 0x3F)); - result[upto++] = (byte)(0x80 | ((utf32 >> 6) & 0x3F)); - result[upto++] = (byte)(0x80 | (utf32 & 0x3F)); + result[upto++] = (byte) (0xF0 | (utf32 >> 18)); + result[upto++] = (byte) (0x80 | ((utf32 >> 12) & 0x3F)); + result[upto++] = (byte) (0x80 | ((utf32 >> 6) & 0x3F)); + result[upto++] = (byte) (0x80 | (utf32 & 0x3F)); continue; } } @@ -127,49 +127,50 @@ else if (code < 0x800) { return upto - resultOffset; } - /** Writes UTF8 into the given OutputStream by first writing to the given scratch array - * and then writing the contents of the scratch array to the OutputStream. The given scratch byte array - * is used to buffer intermediate data before it is written to the output stream. + /** + * Writes UTF8 into the given OutputStream by first writing to the given scratch array and then + * writing the contents of the scratch array to the OutputStream. The given scratch byte array is + * used to buffer intermediate data before it is written to the output stream. 
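+   * <p>Illustrative call (assumed): {@code writeUTF16toUTF8(s, 0, s.length(), out, new byte[512])};
+   * the scratch array is drained to the stream whenever fewer than 4 spare bytes remain, since a
+   * single code point may need up to 4 bytes.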
* * @return the number of bytes written */ - public static int writeUTF16toUTF8(CharSequence s, int offset, int len, OutputStream fos, byte[] scratch) throws IOException { + public static int writeUTF16toUTF8( + CharSequence s, int offset, int len, OutputStream fos, byte[] scratch) throws IOException { final int end = offset + len; int upto = 0, totalBytes = 0; - for(int i=offset;i scratch.length - 4) { + if (upto > scratch.length - 4) { // a code point may take upto 4 bytes and we don't have enough space, so reset totalBytes += upto; - if(fos == null) throw new IOException("buffer over flow"); + if (fos == null) throw new IOException("buffer over flow"); fos.write(scratch, 0, upto); upto = 0; } - if (code < 0x80) - scratch[upto++] = (byte) code; + if (code < 0x80) scratch[upto++] = (byte) code; else if (code < 0x800) { scratch[upto++] = (byte) (0xC0 | (code >> 6)); - scratch[upto++] = (byte)(0x80 | (code & 0x3F)); + scratch[upto++] = (byte) (0x80 | (code & 0x3F)); } else if (code < 0xD800 || code > 0xDFFF) { - scratch[upto++] = (byte)(0xE0 | (code >> 12)); - scratch[upto++] = (byte)(0x80 | ((code >> 6) & 0x3F)); - scratch[upto++] = (byte)(0x80 | (code & 0x3F)); + scratch[upto++] = (byte) (0xE0 | (code >> 12)); + scratch[upto++] = (byte) (0x80 | ((code >> 6) & 0x3F)); + scratch[upto++] = (byte) (0x80 | (code & 0x3F)); } else { // surrogate pair // confirm valid high surrogate - if (code < 0xDC00 && (i < end-1)) { - int utf32 = (int) s.charAt(i+1); + if (code < 0xDC00 && (i < end - 1)) { + int utf32 = s.charAt(i + 1); // confirm valid low surrogate and write pair if (utf32 >= 0xDC00 && utf32 <= 0xDFFF) { utf32 = ((code - 0xD7C0) << 10) + (utf32 & 0x3FF); i++; - scratch[upto++] = (byte)(0xF0 | (utf32 >> 18)); - scratch[upto++] = (byte)(0x80 | ((utf32 >> 12) & 0x3F)); - scratch[upto++] = (byte)(0x80 | ((utf32 >> 6) & 0x3F)); - scratch[upto++] = (byte)(0x80 | (utf32 & 0x3F)); + scratch[upto++] = (byte) (0xF0 | (utf32 >> 18)); + scratch[upto++] = (byte) (0x80 | ((utf32 >> 12) & 0x3F)); + scratch[upto++] = (byte) (0x80 | ((utf32 >> 6) & 0x3F)); + scratch[upto++] = (byte) (0x80 | (utf32 & 0x3F)); continue; } } @@ -182,7 +183,7 @@ else if (code < 0x800) { } totalBytes += upto; - if(fos != null) fos.write(scratch, 0, upto); + if (fos != null) fos.write(scratch, 0, upto); return totalBytes; } @@ -199,8 +200,7 @@ public static int calcUTF16toUTF8Length(CharSequence s, int offset, int len) { for (int i = offset; i < end; i++) { final int code = (int) s.charAt(i); - if (code < 0x80) - res++; + if (code < 0x80) res++; else if (code < 0x800) { res += 2; } else if (code < 0xD800 || code > 0xDFFF) { @@ -224,4 +224,65 @@ else if (code < 0x800) { return res; } + /** + * Returns the number of bytes in the UTF-8-encoded form of {@code sequence}. For a string, this + * method is equivalent to {@code string.getBytes(UTF_8).length}, but is more efficient in both + * time and space. + * + *
<p>
Note: This is from Guava vs re-adding the complicated dependency, (calling for shading), for + * one method. + * + * @throws IllegalArgumentException if {@code sequence} contains ill-formed UTF-16 (unpaired + * surrogates) + */ + public static int calcUTF16toUTF8LengthGuava(CharSequence sequence) { + // Warning to maintainers: this implementation is highly optimized. + int utf16Length = sequence.length(); + int utf8Length = utf16Length; + + int i = 0; + // This loop optimizes for pure ASCII. + while (i < utf16Length && sequence.charAt(i) < 0x80) { + i++; + } + + // This loop optimizes for chars less than 0x800. + for (; i < utf16Length; i++) { + char c = sequence.charAt(i); + if (c < 0x800) { + utf8Length += ((0x7f - c) >>> 31); // branch free! + } else { + utf8Length += encodedLengthGeneral(sequence, i); + break; + } + } + + if (utf8Length < utf16Length) { + // Necessary and sufficient condition for overflow because of maximum 3x expansion + throw new IllegalArgumentException( + "UTF-8 length does not fit in int: " + (utf8Length + (1L << 32))); + } + return utf8Length; + } + + private static int encodedLengthGeneral(CharSequence sequence, int start) { + int utf16Length = sequence.length(); + int utf8Length = 0; + for (int i = start; i < utf16Length; i++) { + char c = sequence.charAt(i); + if (c < 0x800) { + utf8Length += (0x7f - c) >>> 31; // branch free! + } else { + utf8Length += 2; + if (MIN_SURROGATE <= c && c <= MAX_SURROGATE) { + // Check that we have a well-formed surrogate pair. + if (Character.codePointAt(sequence, i) == c) { + throw new IllegalArgumentException("Unpaired surrogate at index " + i + 1); + } + i++; + } + } + } + return utf8Length; + } } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/BytesBlock.java b/solr/solrj/src/java/org/apache/solr/common/util/BytesBlock.java index 4ce4ecc278c..a081446323b 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/BytesBlock.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/BytesBlock.java @@ -26,7 +26,7 @@ public class BytesBlock { private int startPos = 0; public BytesBlock(int sz) { - this.bufSize = sz; + bufSize = sz; create(); } @@ -52,7 +52,7 @@ public BytesBlock expand(int sz) { } private BytesBlock markPositions(int sz) { - this.startPos = pos; + startPos = pos; pos += sz; return this; } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/BytesInputStream.java b/solr/solrj/src/java/org/apache/solr/common/util/BytesInputStream.java new file mode 100644 index 00000000000..34c96a000e6 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/common/util/BytesInputStream.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.common.util; + +import java.io.IOException; +import java.io.InputStream; + +/** An un-synchronized byte[] InputStream. */ +public final class BytesInputStream extends InputStream { + + private byte[] buf; + private int pos; + private int mark; + private final int count; + + + public BytesInputStream(byte[] buf) { + mark = 0; + this.buf = buf; + count = buf.length; + } + + public BytesInputStream(byte[] buf, int offset, int length) { + this.buf = buf; + pos = offset; + mark = offset; + count = offset + length > buf.length ? buf.length : offset + length; + } + + + @Override + public int available() { + return count - pos; + } + + + @Override + public void close() throws IOException { + // Do nothing on close, this matches JDK behaviour. + } + + + @Override + public void mark(int readlimit) { + mark = pos; + } + + @Override + public boolean markSupported() { + return true; + } + + @Override + public int read() { + return pos < count ? buf[pos++] & 0xFF : -1; + } + + @Override + public int read(byte[] b, int offset, int length) { + if (b == null) { + throw new NullPointerException(); + } + // avoid int overflow + if (offset < 0 || offset > b.length || length < 0 || length > b.length - offset) { + throw new IndexOutOfBoundsException(); + } + // Are there any bytes available? + if (pos >= count) { + return -1; + } + if (length == 0) { + return 0; + } + + int copylen = count - pos < length ? count - pos : length; + System.arraycopy(buf, pos, b, offset, copylen); + pos += copylen; + return copylen; + } + + @Override + public void reset() { + pos = mark; + } + + @Override + public long skip(long n) { + if (n <= 0) { + return 0; + } + int temp = pos; + pos = count - pos < n ? count : (int) (pos + n); + return pos - (long)temp; + } +} diff --git a/solr/solrj/src/java/org/apache/solr/common/util/BytesOutputStream.java b/solr/solrj/src/java/org/apache/solr/common/util/BytesOutputStream.java index 0666158e3e7..fc7ee802ffa 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/BytesOutputStream.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/BytesOutputStream.java @@ -22,15 +22,18 @@ import java.io.UnsupportedEncodingException; import java.util.Arrays; -public class BytesOutputStream extends OutputStream { +/** + * An un-synchronized byte[] OutputStream. + */ +public final class BytesOutputStream extends OutputStream { private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; - protected byte[] buf; + private byte[] buf; - protected int sz; + private int sz; public BytesOutputStream() { - this(64); + this(32); } public BytesOutputStream(int size) { @@ -44,43 +47,38 @@ public byte[] toBytes() { return Arrays.copyOf(buf, sz); } - public Bytes bytes() { - return new Bytes(buf, 0, sz); + /** + * Access to the internal byte array. Callers need to inspect {@link #size()} + * for its actual length. + * + * @return the byte [ ] + */ + public byte[] bytes() { + return buf; } public InputStream inputStream() { return new ByteArrayInputStream(buf); } - private void ensureCapacity(int minCapacity) { - if (minCapacity - buf.length > 0) - expandBuf(minCapacity); - } - - /** * Write a byte to the stream. */ - @Override + /** + * Writes the specified byte to this {@code ByteArrayOutputStream}. + * + * @param b the byte to be written. 
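+   * <p>Usage sketch (assumed): write bytes, then call {@code toBytes()} for an exact-length
+   * copy, or {@code bytes()} plus {@code size()} to read the internal array without copying.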
+ */ public void write(int b) { - - try { - buf[sz] = (byte) b; - sz += 1; - } catch (IndexOutOfBoundsException e) { - ensureCapacity(sz + 1); - buf[sz] = (byte) b; - sz += 1; - } + if (sz + 1 - buf.length > 0) + expandBuf(sz + 1); + buf[sz] = (byte) b; + sz += 1; } @Override public void write(byte[] b, int off, int len) { - try { - System.arraycopy(b, off, buf, sz, len); - sz += len; - } catch (IndexOutOfBoundsException e) { - ensureCapacity(sz + len); - System.arraycopy(b, off, buf, sz, len); - sz += len; - } + if (sz + len - buf.length > 0) + expandBuf(sz + len); + System.arraycopy(b, off, buf, sz, len); + sz += len; } public void writeBytes(byte[] b) { @@ -124,18 +122,4 @@ public void close() { // noop } - public static class Bytes { - - public final byte[] bytes; - public final int offset; - public final int length; - - public Bytes(byte[] bytes, int offset, int length) { - this.bytes = bytes; - this.offset = offset; - this.length = length; - } - - } - } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java b/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java index 4f77b5d4875..9da25eb8864 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java @@ -34,41 +34,41 @@ import java.util.Locale; import java.util.function.Predicate; import java.util.zip.GZIPInputStream; - import org.apache.http.entity.ContentType; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.request.RequestWriter; /** * Three concrete implementations for ContentStream - one for File/URL/String - * * * @since solr 1.2 */ -public abstract class ContentStreamBase implements ContentStream -{ +public abstract class ContentStreamBase implements ContentStream { public static final String DEFAULT_CHARSET = StandardCharsets.UTF_8.name(); private static final String TEXT_CSV = "text/csv"; - private static final List UNHELPFUL_TYPES = Arrays.asList(ContentType.APPLICATION_OCTET_STREAM.getMimeType(), "application/gzip", "content/unknown"); - private static final List XML_SUF = Arrays.asList(".xml", ".xml.gz", ".xml.gzip"); - private static final List JSON_SUF = Arrays.asList(".json", ".json.gz", ".json.gzip"); - private static final List CSV_SUF = Arrays.asList(".csv", ".csv.gz", ".csv.gzip"); + private static final List UNHELPFUL_TYPES = + Arrays.asList( + ContentType.APPLICATION_OCTET_STREAM.getMimeType(), + "application/gzip", + "content/unknown"); + private static final List XML_SUF = Arrays.asList(".xml", ".xml.gz", ".xml.gzip"); + private static final List JSON_SUF = Arrays.asList(".json", ".json.gz", ".json.gzip"); + private static final List CSV_SUF = Arrays.asList(".csv", ".csv.gz", ".csv.gzip"); protected String name; protected String sourceInfo; protected String contentType; protected Long size; - - //--------------------------------------------------------------------- - //--------------------------------------------------------------------- - - public static String getCharsetFromContentType( String contentType ) - { - if( contentType != null ) { - int idx = contentType.toLowerCase(Locale.ROOT).indexOf( "charset=" ); - if( idx > 0 ) { - return contentType.substring( idx + "charset=".length() ).trim(); + + // --------------------------------------------------------------------- + // --------------------------------------------------------------------- + + public static String getCharsetFromContentType(String contentType) { + if 
(contentType != null) { + int idx = contentType.toLowerCase(Locale.ROOT).indexOf("charset="); + if (idx > 0) { + return contentType.substring(idx + "charset=".length()).trim(); } } return null; @@ -77,7 +77,7 @@ public static String getCharsetFromContentType( String contentType ) protected String attemptToDetermineContentType() { String type = null; if (name != null) { - Predicate endsWith = suffix->name.toLowerCase(Locale.ROOT).endsWith(suffix); + Predicate endsWith = suffix -> name.toLowerCase(Locale.ROOT).endsWith(suffix); if (XML_SUF.stream().anyMatch(endsWith)) { type = ContentType.APPLICATION_XML.getMimeType(); @@ -98,12 +98,12 @@ private String attemptToDetermineTypeFromFirstCharacter() { // Last ditch effort to determine content, if the first non-white space // is a '<' or '{', assume xml or json. int data = stream.read(); - while (( data != -1 ) && ( ( (char)data ) == ' ' )) { + while ((data != -1) && (((char) data) == ' ')) { data = stream.read(); } - if ((char)data == '<') { + if ((char) data == '<') { type = ContentType.APPLICATION_XML.getMimeType(); - } else if ((char)data == '{') { + } else if ((char) data == '{') { type = ContentType.APPLICATION_JSON.getMimeType(); } } catch (Exception ex) { @@ -113,21 +113,20 @@ private String attemptToDetermineTypeFromFirstCharacter() { return type; } - //------------------------------------------------------------------------ - //------------------------------------------------------------------------ - + // ------------------------------------------------------------------------ + // ------------------------------------------------------------------------ + /** - * Construct a ContentStream from a URL - * - * This uses a URLConnection to get the content stream - * @see URLConnection + * Construct a ContentStream from a URL This uses a URLConnection + * to get the content stream + * + * @see URLConnection */ - public static class URLStream extends ContentStreamBase - { + public static class URLStream extends ContentStreamBase { private final URL url; - - public URLStream( URL url ) { - this.url = url; + + public URLStream(URL url) { + this.url = url; sourceInfo = "url"; } @@ -136,10 +135,10 @@ public String getContentType() { // for file:// streams that are octet-streams, try to determine the payload // type from payload rather than just using the mime type. if ("file".equals(url.getProtocol())) { - Predicate equals = mimeType->mimeType.equals(contentType); + Predicate equals = mimeType -> mimeType.equals(contentType); if (UNHELPFUL_TYPES.stream().anyMatch(equals)) { String type = attemptToDetermineContentType(); - contentType = ( type != null ) ? type : contentType; + contentType = (type != null) ? 
type : contentType; } } return contentType; @@ -148,29 +147,28 @@ public String getContentType() { @Override public InputStream getStream() throws IOException { URLConnection conn = this.url.openConnection(); - + contentType = conn.getContentType(); name = url.toExternalForm(); size = conn.getContentLengthLong(); InputStream is = conn.getInputStream(); String urlFile = url.getFile().toLowerCase(Locale.ROOT); - if( "gzip".equals(conn.getContentEncoding()) || urlFile.endsWith( ".gz" ) || urlFile.endsWith( ".gzip" )){ + if ("gzip".equals(conn.getContentEncoding()) + || urlFile.endsWith(".gz") + || urlFile.endsWith(".gzip")) { is = new GZIPInputStream(is); } return is; } } - - /** - * Construct a ContentStream from a File - */ - public static class FileStream extends ContentStreamBase - { + + /** Construct a ContentStream from a File */ + public static class FileStream extends ContentStreamBase { private final File file; - - public FileStream( File f ) { - file = f; - + + public FileStream(File f) { + file = f; + contentType = null; // ?? name = file.getName(); size = file.length(); @@ -179,7 +177,7 @@ public FileStream( File f ) { @Override public String getContentType() { - if(contentType==null) { + if (contentType == null) { contentType = attemptToDetermineContentType(); } return contentType; @@ -187,28 +185,24 @@ public String getContentType() { @Override public InputStream getStream() throws IOException { - InputStream is = new FileInputStream( file ); + InputStream is = new FileInputStream(file); String lowerName = name.toLowerCase(Locale.ROOT); - if(lowerName.endsWith(".gz") || lowerName.endsWith(".gzip")) { + if (lowerName.endsWith(".gz") || lowerName.endsWith(".gzip")) { is = new GZIPInputStream(is); } return is; } } - - /** - * Construct a ContentStream from a String - */ - public static class StringStream extends ContentStreamBase - { + /** Construct a ContentStream from a String */ + public static class StringStream extends ContentStreamBase { private final String str; - public StringStream( String str ) { + public StringStream(String str) { this(str, detect(str)); } - public StringStream( String str, String contentType ) { + public StringStream(String str, String contentType) { this.str = str; this.contentType = contentType; name = null; @@ -224,17 +218,18 @@ public StringStream( String str, String contentType ) { public static String detect(String str) { String detectedContentType = null; int lim = str.length() - 1; - for (int i=0; iContentStream from a File - */ - public static class ByteArrayStream extends ContentStreamBase - { + + /** Construct a ContentStream from a File */ + public static class ByteArrayStream extends ContentStreamBase { private final byte[] bytes; - public ByteArrayStream( byte[] bytes, String source ) { - this(bytes,source, null); + + public ByteArrayStream(byte[] bytes, String source) { + this(bytes, source, null); } - - public ByteArrayStream( byte[] bytes, String source, String contentType ) { + + public ByteArrayStream(byte[] bytes, String source, String contentType) { this.bytes = bytes; - + this.contentType = contentType; name = source; size = (long) bytes.length; sourceInfo = source; } - @Override public InputStream getStream() throws IOException { - return new ByteArrayInputStream( bytes ); + return new BytesInputStream(bytes); } - } + } } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/FastInputStream.java b/solr/solrj/src/java/org/apache/solr/common/util/FastInputStream.java index f7d633db1cf..828f22f6902 100644 --- 
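As a quick illustration of the ContentStreamBase helper retained above (a sketch, not part of the patch):

  String cs = ContentStreamBase.getCharsetFromContentType("text/xml; charset=UTF-8");
  assert "UTF-8".equals(cs);
  assert ContentStreamBase.getCharsetFromContentType("text/xml") == null; // no charset parameter
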
a/solr/solrj/src/java/org/apache/solr/common/util/FastInputStream.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/FastInputStream.java @@ -59,7 +59,10 @@ public static FastInputStream wrap(InputStream in) { @Override public int read() throws IOException { if (pos >= end) { - refill(); + // this will set end to -1 at EOF + end = readWrappedStream(buf, 0, buf.length); + if (end > 0) readFromStream += end; + pos = 0; if (pos >= end) return -1; } return buf[pos++] & 0xff; @@ -67,7 +70,10 @@ public int read() throws IOException { public int peek() throws IOException { if (pos >= end) { - refill(); + // this will set end to -1 at EOF + end = readWrappedStream(buf, 0, buf.length); + if (end > 0) readFromStream += end; + pos = 0; if (pos >= end) return -1; } return buf[pos] & 0xff; @@ -77,7 +83,10 @@ public int peek() throws IOException { @Override public int readUnsignedByte() throws IOException { if (pos >= end) { - refill(); + // this will set end to -1 at EOF + end = readWrappedStream(buf, 0, buf.length); + if (end > 0) readFromStream += end; + pos = 0; if (pos >= end) { throw new EOFException(); } @@ -94,7 +103,7 @@ public long position() { return readFromStream - (end - pos); } - public void refill() throws IOException { + protected void refill() throws IOException { // this will set end to -1 at EOF end = readWrappedStream(buf, 0, buf.length); if (end > 0) readFromStream += end; @@ -147,7 +156,10 @@ public int read(byte b[], int off, int len) throws IOException { } } - refill(); + // this will set end to -1 at EOF + end = readWrappedStream(buf, 0, buf.length); + if (end > 0) readFromStream += end; + pos = 0; // read rest from our buffer if (end-pos > 0) { @@ -196,7 +208,10 @@ public int skipBytes(int n) throws IOException { pos = end; while (r < n) { - refill(); + // this will set end to -1 at EOF + end = readWrappedStream(buf, 0, buf.length); + if (end > 0) readFromStream += end; + pos = 0; if (end-pos <= 0) return r; int toRead = Math.min(end-pos, n-r); r += toRead; @@ -214,7 +229,10 @@ public boolean readBoolean() throws IOException { @Override public byte readByte() throws IOException { if (pos >= end) { - refill(); + // this will set end to -1 at EOF + end = readWrappedStream(buf, 0, buf.length); + if (end > 0) readFromStream += end; + pos = 0; if (pos >= end) throw new EOFException(); } return buf[pos++]; diff --git a/solr/solrj/src/java/org/apache/solr/common/util/FastJavaBinDecoder.java b/solr/solrj/src/java/org/apache/solr/common/util/FastJavaBinDecoder.java index c5ebd06f664..13e31fe0717 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/FastJavaBinDecoder.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/FastJavaBinDecoder.java @@ -17,6 +17,37 @@ package org.apache.solr.common.util; +import static org.apache.solr.common.util.FastJavaBinDecoder.Tag._EXTERN_STRING; +import static org.apache.solr.common.util.JavaBinCodec.ARR; +import static org.apache.solr.common.util.JavaBinCodec.BOOL_FALSE; +import static org.apache.solr.common.util.JavaBinCodec.BOOL_TRUE; +import static org.apache.solr.common.util.JavaBinCodec.BYTE; +import static org.apache.solr.common.util.JavaBinCodec.BYTEARR; +import static org.apache.solr.common.util.JavaBinCodec.DATE; +import static org.apache.solr.common.util.JavaBinCodec.DOUBLE; +import static org.apache.solr.common.util.JavaBinCodec.END; +import static org.apache.solr.common.util.JavaBinCodec.ENUM_FIELD_VALUE; +import static org.apache.solr.common.util.JavaBinCodec.EXTERN_STRING; +import static 
org.apache.solr.common.util.JavaBinCodec.FLOAT; +import static org.apache.solr.common.util.JavaBinCodec.INT; +import static org.apache.solr.common.util.JavaBinCodec.ITERATOR; +import static org.apache.solr.common.util.JavaBinCodec.LONG; +import static org.apache.solr.common.util.JavaBinCodec.MAP; +import static org.apache.solr.common.util.JavaBinCodec.MAP_ENTRY; +import static org.apache.solr.common.util.JavaBinCodec.MAP_ENTRY_ITER; +import static org.apache.solr.common.util.JavaBinCodec.NAMED_LST; +import static org.apache.solr.common.util.JavaBinCodec.NULL; +import static org.apache.solr.common.util.JavaBinCodec.ORDERED_MAP; +import static org.apache.solr.common.util.JavaBinCodec.SHORT; +import static org.apache.solr.common.util.JavaBinCodec.SINT; +import static org.apache.solr.common.util.JavaBinCodec.SLONG; +import static org.apache.solr.common.util.JavaBinCodec.SOLRDOC; +import static org.apache.solr.common.util.JavaBinCodec.SOLRDOCLST; +import static org.apache.solr.common.util.JavaBinCodec.SOLRINPUTDOC; +import static org.apache.solr.common.util.JavaBinCodec.STR; +import static org.apache.solr.common.util.JavaBinCodec.TAG_AND_LEN; +import static org.apache.solr.common.util.JavaBinCodec.readSize; +import static org.apache.solr.common.util.JavaBinCodec.readVInt; import java.io.IOException; import java.io.InputStream; @@ -27,22 +58,16 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; - import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.util.DataEntry.EntryListener; -import static org.apache.solr.common.util.FastJavaBinDecoder.Tag._EXTERN_STRING; -import static org.apache.solr.common.util.JavaBinCodec.*; - public class FastJavaBinDecoder implements DataEntry.FastDecoder { private StreamCodec codec; private EntryImpl rootEntry = new EntryImpl(); private InputStream stream; - private static final DataEntry.EntryListener emptylistener = e -> { - }; - + private static final DataEntry.EntryListener emptylistener = e -> {}; @Override public FastJavaBinDecoder withInputStream(InputStream is) { @@ -63,32 +88,23 @@ public Object decode(EntryListener listener) throws IOException { return entry.ctx; } - static class StreamCodec extends JavaBinCodec { - final FastInputStream dis; - - StreamCodec(InputStream is) { - this.dis = FastInputStream.wrap(is); + StreamCodec(InputStream is) throws IOException { + initRead(is); } - public void skip(int sz) throws IOException { while (sz > 0) { - int read = dis.read(bytes, 0, Math.min(bytes.length, sz)); + int read = read(this, bytes, 0, Math.min(bytes.length, sz)); sz -= read; } - - } - - - void start() throws IOException { - _init(dis); } + void start() throws IOException {} Tag getTag() throws IOException { - tagByte = dis.readByte(); + tagByte = readByte(this); switch (tagByte >>> 5) { case STR >>> 5: return Tag._STR; @@ -111,21 +127,21 @@ Tag getTag() throws IOException { return t; } - public ByteBuffer readByteBuffer(DataInputInputStream dis, int sz) throws IOException { - ByteBuffer result = dis.readDirectByteBuffer(sz); - if(result != null) return result; - byte[] arr = new byte[readVInt(dis)]; - dis.readFully(arr); + public ByteBuffer readByteBuffer(int sz) throws IOException { + ByteBuffer result = JavaBinCodec.readDirectByteBuffer(sz); + if (result != null) return result; + byte[] arr = new byte[readVInt(this)]; + readFully(this, arr, 0, arr.length); return ByteBuffer.wrap(arr); } public CharSequence readObjKey(Tag ktag) throws IOException { 
CharSequence key = null; if (ktag.type == DataEntry.Type.STR) { - if (ktag == _EXTERN_STRING) key = readExternString(dis); - else key = readStr(dis); + if (ktag == _EXTERN_STRING) key = readExternString(this); + else key = readStr(this); } else if (ktag.type == DataEntry.Type.NULL) { - //no need to do anything + // no need to do anything } else { throw new RuntimeException("Key must be String"); } @@ -141,9 +157,8 @@ public EntryImpl beginRead(EntryImpl parent) throws IOException { } } - public class EntryImpl implements DataEntry { - //size + // size int size = -1; Tag tag; Object metadata; @@ -162,7 +177,6 @@ public class EntryImpl implements DataEntry { int depth = 0; CharSequence name; - EntryImpl getChildAndReset() { if (child == null) { child = new EntryImpl(); @@ -171,7 +185,6 @@ EntryImpl getChildAndReset() { } child.reset(); return child; - } @Override @@ -240,7 +253,7 @@ public long longVal() { @Override public float floatVal() { - if(tag.type == Type.FLOAT) return (float) doubleVal; + if (tag.type == Type.FLOAT) return (float) doubleVal; else { return ((Number) val()).floatValue(); } @@ -285,7 +298,6 @@ void reset() { public void callEnd() { if (entryListener != null) entryListener.end(this); - } } @@ -319,7 +331,7 @@ public Object readObject(StreamCodec codec, EntryImpl entry) { _BYTE(BYTE, LOWER_5_BITS, DataEntry.Type.INT) { @Override public void lazyRead(EntryImpl entry, StreamCodec streamCodec) throws IOException { - entry.numericVal = streamCodec.dis.readByte(); + entry.numericVal = JavaBinCodec.readByte(streamCodec); entry.consumedFully = true; } @@ -331,7 +343,7 @@ public Object readObject(StreamCodec codec, EntryImpl entry) { _SHORT(SHORT, LOWER_5_BITS, DataEntry.Type.INT) { @Override public void lazyRead(EntryImpl entry, StreamCodec streamCodec) throws IOException { - entry.numericVal = streamCodec.dis.readShort(); + entry.numericVal = streamCodec.readShort(); entry.consumedFully = true; } @@ -343,7 +355,7 @@ public Object readObject(StreamCodec codec, EntryImpl entry) { _DOUBLE(DOUBLE, LOWER_5_BITS, DataEntry.Type.DOUBLE) { @Override public void lazyRead(EntryImpl entry, StreamCodec streamCodec) throws IOException { - entry.doubleVal = streamCodec.dis.readDouble(); + entry.doubleVal = streamCodec.readDouble(); entry.consumedFully = true; } @@ -355,19 +367,18 @@ public Object readObject(StreamCodec codec, EntryImpl entry) { _INT(INT, LOWER_5_BITS, DataEntry.Type.INT) { @Override public void lazyRead(EntryImpl entry, StreamCodec streamCodec) throws IOException { - entry.numericVal = streamCodec.dis.readInt(); + entry.numericVal = JavaBinCodec.readInt(streamCodec); } @Override public Object readObject(StreamCodec codec, EntryImpl entry) { return Integer.valueOf((int) entry.numericVal); } - - },//signed integer + }, // signed integer _LONG(LONG, LOWER_5_BITS, DataEntry.Type.LONG) { @Override public void lazyRead(EntryImpl entry, StreamCodec streamCodec) throws IOException { - entry.numericVal = streamCodec.dis.readLong(); + entry.numericVal = JavaBinCodec.readLong(streamCodec); } @Override @@ -378,7 +389,7 @@ public Object readObject(StreamCodec codec, EntryImpl entry) { _FLOAT(FLOAT, LOWER_5_BITS, DataEntry.Type.FLOAT) { @Override public void lazyRead(EntryImpl entry, StreamCodec streamCodec) throws IOException { - entry.doubleVal = streamCodec.dis.readFloat(); + entry.doubleVal = streamCodec.readFloat(); } @Override @@ -389,7 +400,7 @@ public Object readObject(StreamCodec codec, EntryImpl entry) { _DATE(DATE, LOWER_5_BITS, DataEntry.Type.DATE) { @Override public void 
lazyRead(EntryImpl entry, StreamCodec streamCodec) throws IOException { - entry.numericVal = streamCodec.dis.readLong(); + entry.numericVal = JavaBinCodec.readLong(streamCodec); } @Override @@ -417,7 +428,7 @@ public void stream(EntryImpl entry, StreamCodec codec) throws IOException { @Override public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { - return codec.readMap(codec.dis,entry.size); + return JavaBinCodec.readMap(codec, entry.size); } }, _SOLRDOC(SOLRDOC, LOWER_5_BITS, DataEntry.Type.KEYVAL_ITER) { @@ -425,7 +436,7 @@ public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException public void stream(EntryImpl entry, StreamCodec codec) throws IOException { try { codec.getTag(); - entry.size = codec.readSize(codec.dis);// readObjSz(codec, entry.tag); + entry.size = readSize(codec); // readObjSz(codec, entry.tag); for (int i = 0; i < entry.size; i++) { Tag tag = codec.getTag(); if (tag == _SOLRDOC) { @@ -437,25 +448,23 @@ public void stream(EntryImpl entry, StreamCodec codec) throws IOException { CharSequence key = codec.readObjKey(tag); callbackMapEntryListener(entry, key, codec, i); } - } } finally { entry.callEnd(); - } } @Override public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { - return codec.readSolrDocument(codec.dis); + return codec.readSolrDocument(codec); } }, _SOLRDOCLST(SOLRDOCLST, LOWER_5_BITS, DataEntry.Type.ENTRY_ITER) { @Override public void lazyRead(EntryImpl entry, StreamCodec codec) throws IOException { - entry.metadata = codec.readVal(codec.dis); - codec.getTag();//ignore this - entry.size = codec.readSize(codec.dis); + entry.metadata = JavaBinCodec.readVal(codec); + codec.getTag(); // ignore this + entry.size = JavaBinCodec.readSize(codec); } @Override @@ -475,17 +484,17 @@ public void stream(EntryImpl entry, StreamCodec codec) throws IOException { @SuppressWarnings({"unchecked"}) public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { SolrDocumentList solrDocs = new SolrDocumentList(); - if(entry.metadata != null){ + if (entry.metadata != null) { @SuppressWarnings({"rawtypes"}) List list = (List) entry.metadata; solrDocs.setNumFound((Long) list.get(0)); solrDocs.setStart((Long) list.get(1)); solrDocs.setMaxScore((Float) list.get(2)); - if (list.size() > 3) { //needed for back compatibility - solrDocs.setNumFoundExact((Boolean)list.get(3)); + if (list.size() > 3) { // needed for back compatibility + solrDocs.setNumFoundExact((Boolean) list.get(3)); } } - List l = codec.readArray(codec.dis, entry.size); + List l = JavaBinCodec.readArray(codec, entry.size); solrDocs.addAll(l); return solrDocs; } @@ -493,12 +502,12 @@ public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException _BYTEARR(BYTEARR, LOWER_5_BITS, DataEntry.Type.BYTEARR) { @Override public void lazyRead(EntryImpl entry, StreamCodec codec) throws IOException { - entry.size = readVInt(codec.dis); + entry.size = readVInt(codec); } @Override public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { - ByteBuffer buf = codec.readByteBuffer(codec.dis, entry.size); + ByteBuffer buf = codec.readByteBuffer(entry.size); entry.size = buf.limit() - buf.position(); return buf; } @@ -527,7 +536,7 @@ public void stream(EntryImpl entry, StreamCodec codec) throws IOException { @Override public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { - return codec.readIterator(codec.dis); + return codec.readIterator(codec); } }, @@ -554,22 +563,22 @@ public 
void stream(EntryImpl entry, StreamCodec codec) throws IOException { @Override public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { - return codec.readMapIter(codec.dis); + return JavaBinCodec.readMapIter(codec); } }, _ENUM_FIELD_VALUE(ENUM_FIELD_VALUE, LOWER_5_BITS, DataEntry.Type.JAVA_OBJ) { @Override public void lazyRead(EntryImpl entry, StreamCodec codec) throws IOException { - entry.objVal =codec.readEnumFieldValue(codec.dis); + entry.objVal = JavaBinCodec.readEnumFieldValue(codec); entry.consumedFully = true; } }, _MAP_ENTRY(MAP_ENTRY, LOWER_5_BITS, DataEntry.Type.JAVA_OBJ) { - //doesn't support streaming + // doesn't support streaming @Override public void lazyRead(EntryImpl entry, StreamCodec codec) throws IOException { - entry.objVal = codec.readMapEntry(codec.dis); + entry.objVal = JavaBinCodec.readMapEntry(codec); entry.consumedFully = true; } }, @@ -579,12 +588,11 @@ public void lazyRead(EntryImpl entry, StreamCodec codec) throws IOException { @Override public void lazyRead(EntryImpl entry, StreamCodec codec) throws IOException { entry.size = readObjSz(codec, this); - } @Override public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { - return codec.readUtf8(codec.dis); + return JavaBinCodec.readUtf8(codec); } @Override @@ -592,30 +600,27 @@ public void skip(EntryImpl entry, StreamCodec codec) throws IOException { codec.skip(entry.size); } }, - _SINT(SINT, UPPER_3_BITS, DataEntry.Type.INT) {//unsigned integer + _SINT(SINT, UPPER_3_BITS, DataEntry.Type.INT) { // unsigned integer @Override public void lazyRead(EntryImpl entry, StreamCodec codec) throws IOException { - entry.numericVal = codec.readSmallInt(codec.dis); + entry.numericVal = JavaBinCodec.readSmallInt(codec); } @Override public Object readObject(StreamCodec codec, EntryImpl entry) { return Integer.valueOf((int) entry.numericVal); } - }, _SLONG(SLONG, UPPER_3_BITS, DataEntry.Type.LONG) { @Override public void lazyRead(EntryImpl entry, StreamCodec codec) throws IOException { - entry.numericVal = codec.readSmallLong(codec.dis); + entry.numericVal = JavaBinCodec.readSmallLong(codec); } @Override public Object readObject(StreamCodec codec, EntryImpl entry) { return Long.valueOf((int) entry.numericVal); } - - }, _ARR(ARR, UPPER_3_BITS, DataEntry.Type.ENTRY_ITER) { @Override @@ -632,10 +637,9 @@ public void stream(EntryImpl entry, StreamCodec codec) throws IOException { } } - @Override public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { - return codec.readArray(codec.dis); + return JavaBinCodec.readArray(codec); } }, // _ORDERED_MAP(ORDERED_MAP, UPPER_3_BITS, DataEntry.Type.KEYVAL_ITER) { @@ -651,9 +655,8 @@ public void stream(EntryImpl entry, StreamCodec codec) throws IOException { @Override public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { - return codec.readOrderedMap(codec.dis); + return JavaBinCodec.readOrderedMap(codec); } - }, // SimpleOrderedMap (a NamedList subclass, and more common) _NAMED_LST(NAMED_LST, UPPER_3_BITS, DataEntry.Type.KEYVAL_ITER) { @Override @@ -668,25 +671,23 @@ public void stream(EntryImpl entry, StreamCodec codec) throws IOException { @Override public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { - return codec.readNamedList(codec.dis); + return codec.readNamedList(); } }, // NamedList _EXTERN_STRING(EXTERN_STRING, UPPER_3_BITS, DataEntry.Type.STR) { @Override public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { - return 
codec.readExternString(codec.dis); + return codec.readExternString(codec); } }; private static int readObjSz(StreamCodec codec, Tag tag) throws IOException { - return tag.isLower5Bits ? - StreamCodec.readVInt(codec.dis) : - codec.readSize(codec.dis); + return tag.isLower5Bits ? StreamCodec.readVInt(codec) : JavaBinCodec.readSize(codec); } - private static void callbackMapEntryListener(EntryImpl entry, CharSequence key, StreamCodec codec, long idx) - throws IOException { + private static void callbackMapEntryListener( + EntryImpl entry, CharSequence key, StreamCodec codec, long idx) throws IOException { EntryImpl newEntry = codec.beginRead(entry); newEntry.name = key; newEntry.mapEntry = true; @@ -699,11 +700,11 @@ private static void callbackMapEntryListener(EntryImpl entry, CharSequence key, } } - private static void callbackIterListener(EntryImpl parent, EntryImpl newEntry, StreamCodec codec) - throws IOException { + private static void callbackIterListener( + EntryImpl parent, EntryImpl newEntry, StreamCodec codec) throws IOException { try { newEntry.mapEntry = false; - if(parent.entryListener != null) parent.entryListener.entry(newEntry); + if (parent.entryListener != null) parent.entryListener.entry(newEntry); } finally { // the listener did not consume the entry postCallback(codec, newEntry); @@ -713,7 +714,7 @@ private static void callbackIterListener(EntryImpl parent, EntryImpl newEntry, S private static void postCallback(StreamCodec codec, EntryImpl newEntry) throws IOException { if (!newEntry.consumedFully) { if (newEntry.tag.type.isContainer) { - //this is a map like container object and there is a listener + // this is a map like container object and there is a listener if (newEntry.entryListener == null) newEntry.entryListener = emptylistener; newEntry.tag.stream(newEntry, codec); } else { @@ -722,7 +723,6 @@ private static void postCallback(StreamCodec codec, EntryImpl newEntry) throws I } } - final int code; final boolean isLower5Bits; final DataEntry.Type type; @@ -734,44 +734,38 @@ private static void postCallback(StreamCodec codec, EntryImpl newEntry) throws I } /** - * This applies to only container Objects. This is invoked only if there is a corresponding listener. - * + * This applies to only container Objects. This is invoked only if there is a corresponding + * listener. */ - public void stream(EntryImpl currentEntry, StreamCodec codec) throws IOException { - - - } + public void stream(EntryImpl currentEntry, StreamCodec codec) throws IOException {} /** - * This should read the minimal data about the entry . if the data is a primitive type , - * read the whole thing + * This should read the minimal data about the entry . if the data is a primitive type , read + * the whole thing */ - public void lazyRead(EntryImpl entry, StreamCodec codec) throws IOException { - - } + public void lazyRead(EntryImpl entry, StreamCodec codec) throws IOException {} /** - * Read the entry as an Object. The behavior should be similar to that of {@link JavaBinCodec#readObject(DataInputInputStream)} + * Read the entry as an Object. The behavior should be similar to that of {@link + * JavaBinCodec#readObject()} */ public Object readObject(StreamCodec codec, EntryImpl entry) throws IOException { throw new RuntimeException("Unsupported object : " + this.name()); } - /** - * Read the entry from and discard the data. Do not create any objects - */ + /** Read the entry from and discard the data. 
Do not create any objects */ public void skip(EntryImpl entry, StreamCodec codec) throws IOException { - if (entry.tag.type == DataEntry.Type.KEYVAL_ITER || entry.tag.type == DataEntry.Type.ENTRY_ITER) { + if (entry.tag.type == DataEntry.Type.KEYVAL_ITER + || entry.tag.type == DataEntry.Type.ENTRY_ITER) { entry.entryListener = null; stream(entry, codec); } else if (!entry.tag.type.isPrimitive) { readObject(codec, entry); } - } } - static final private Tag[] lower5BitTags = new Tag[32]; + private static final Tag[] lower5BitTags = new Tag[32]; static { for (Tag tag : Tag.values()) { @@ -784,9 +778,10 @@ public void skip(EntryImpl entry, StreamCodec codec) throws IOException { @SuppressWarnings({"unchecked", "rawtypes"}) private static void addObj(DataEntry e) { if (e.type().isContainer) { - Object ctx = e.type() == DataEntry.Type.KEYVAL_ITER ? - new LinkedHashMap(getSize(e)) : - new ArrayList(getSize(e)); + Object ctx = + e.type() == DataEntry.Type.KEYVAL_ITER + ? new LinkedHashMap(getSize(e)) + : new ArrayList(getSize(e)); if (e.ctx() != null) { if (e.isKeyValEntry()) { ((Map) e.ctx()).put(e.name(), ctx); @@ -814,13 +809,9 @@ private static int getSize(DataEntry e) { return sz; } - public static EntryListener getEntryListener() { return ENTRY_LISTENER; } - static final EntryListener ENTRY_LISTENER = FastJavaBinDecoder::addObj; - - } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java index ac49a684e72..bb85538947f 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java @@ -16,11 +16,21 @@ */ package org.apache.solr.common.util; +import static org.apache.solr.common.util.ByteArrayUtf8CharSequence.convertCharSeq; + +import com.google.errorprone.annotations.DoNotCall; +import it.unimi.dsi.fastutil.objects.Object2ObjectLinkedOpenHashMap; +import it.unimi.dsi.fastutil.objects.ObjectArrayList; +import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.io.UnsupportedEncodingException; import java.lang.invoke.MethodHandles; +import java.lang.invoke.VarHandle; import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; @@ -35,10 +45,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import java.util.function.BiConsumer; import java.util.function.Function; -import java.util.function.Predicate; - import org.apache.solr.common.ConditionalKeyMapWriter; import org.apache.solr.common.EnumFieldValue; import org.apache.solr.common.IteratorWriter; @@ -51,83 +58,130 @@ import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputField; import org.apache.solr.common.params.CommonParams; +import org.eclipse.jetty.io.RuntimeIOException; import org.noggit.CharArr; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.apache.solr.common.util.ByteArrayUtf8CharSequence.convertCharSeq; /** * Defines a space-efficient serialization/deserialization format for transferring data. - *
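For a sense of how the FastJavaBinDecoder above is driven, a hypothetical end-to-end sketch (untested, using only the API visible in this patch; getEntryListener() supplies the built-in listener that eagerly builds Maps/Lists via addObj):

  NamedList<Object> nl = new NamedList<>();
  nl.add("id", "doc-1");
  nl.add("count", 42);
  BytesOutputStream bos = new BytesOutputStream();
  new JavaBinCodec().marshal(nl, bos); // javabin-encode a small payload

  Object decoded =
      new FastJavaBinDecoder()
          .withInputStream(new BytesInputStream(bos.toBytes()))
          .decode(FastJavaBinDecoder.getEntryListener()); // a Map-like view of the payload
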

- * JavaBinCodec has built in support many commonly used types. This includes primitive types (boolean, byte,
- * short, double, int, long, float), common Java containers/utilities (Date, Map, Collection, Iterator, String,
- * Object[], byte[]), and frequently used Solr types ({@link NamedList}, {@link SolrDocument},
- * {@link SolrDocumentList}). Each of the above types has a pair of associated methods which read and write
- * that type to a stream.
- *
- * Classes that aren't supported natively can still be serialized/deserialized by providing
- * an {@link JavaBinCodec.ObjectResolver} object that knows how to work with the unsupported class.
+ *
+ * <p>JavaBinCodec has built-in support for many commonly used types. This includes primitive types
+ * (boolean, byte, short, double, int, long, float), common Java containers/utilities (Date, Map,
+ * Collection, Iterator, String, Object[], byte[]), and frequently used Solr types ({@link
+ * NamedList}, {@link SolrDocument}, {@link SolrDocumentList}). Each of the above types has a pair
+ * of associated methods which read and write that type to a stream.
+ *
+ * <p>Classes that aren't supported natively can still be serialized/deserialized by providing an
+ * {@link JavaBinCodec.ObjectResolver} object that knows how to work with the unsupported class.
  * This allows {@link JavaBinCodec} to be used to marshall/unmarshall arbitrary content.
- *
- * NOTE -- {@link JavaBinCodec} instances cannot be reused for more than one marshall or unmarshall operation.
+ *
+ * <p>
NOTE -- {@link JavaBinCodec} instances cannot be reused for more than one marshall or + * unmarshall operation. */ public class JavaBinCodec implements PushWriter { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean(); - - public static final byte - NULL = 0, - BOOL_TRUE = 1, - BOOL_FALSE = 2, - BYTE = 3, - SHORT = 4, - DOUBLE = 5, - INT = 6, - LONG = 7, - FLOAT = 8, - DATE = 9, - MAP = 10, - SOLRDOC = 11, - SOLRDOCLST = 12, - BYTEARR = 13, - ITERATOR = 14, - /** - * this is a special tag signals an end. No value is associated with it - */ - END = 15, - - SOLRINPUTDOC = 16, - MAP_ENTRY_ITER = 17, - ENUM_FIELD_VALUE = 18, - MAP_ENTRY = 19, - UUID = 20, // This is reserved to be used only in LogCodec - // types that combine tag + length (or other info) in a single byte - TAG_AND_LEN = (byte) (1 << 5), - STR = (byte) (1 << 5), - SINT = (byte) (2 << 5), - SLONG = (byte) (3 << 5), - ARR = (byte) (4 << 5), // - ORDERED_MAP = (byte) (5 << 5), // SimpleOrderedMap (a NamedList subclass, and more common) - NAMED_LST = (byte) (6 << 5), // NamedList - EXTERN_STRING = (byte) (7 << 5); - - private static final int MAX_UTF8_SIZE_FOR_ARRAY_GROW_STRATEGY = 65536; - - - private static byte VERSION = 2; + // WARNING! this class is heavily optimized and balancing a wide variety of use cases, data types + // and sizes, and tradeoffs - please be thorough and careful with changes - not only is + // performance considered across a large number of cases, but also the resource cost for that + // performance + + // TODO: this should be two classes - an encoder and decoder + + private static final int BUFFER_SZ = 8192; + + // Solr encode / decode is only a win at fairly small values due to gc overhead + // of copying byte arrays, until intrinsics/simd wins overcome + private static final int MAX_SZ_BEFORE_STRING_UTF8_ENCODE_OVER_SOLR_DEFAULT = 72; + private static final int MAX_SZ_BEFORE_STRING_UTF8_DECODE_OVER_SOLR_DEFAULT = 192; + private static final int MAX_SZ_BEFORE_SLOWER_SOLR_UTF8_ENCODE_DECODE = 262144; + private static final int MAX_SZ_BEFORE_STRING_UTF8_ENCODE_OVER_SOLR = + Integer.getInteger( + "maxSzBeforeStringUTF8EncodeOverSolr", + MAX_SZ_BEFORE_STRING_UTF8_ENCODE_OVER_SOLR_DEFAULT); + private static final int MAX_SZ_BEFORE_STRING_UTF8_DECODE_OVER_SOLR = + Integer.getInteger( + "maxSzBeforeStringUTF8DecodeOverSolr", + MAX_SZ_BEFORE_STRING_UTF8_DECODE_OVER_SOLR_DEFAULT); + + private static final int MAX_STRING_SZ_TO_TRY_KEEPING_AS_UTF8_WO_CONVERT_BYTES = + 1024 << 4; // can cause too much memory allocation if too large + + private static final byte VERSION = 2; + + private static final Float FLOAT_1 = 1f; + protected static final Object END_OBJ = new Object(); + + public static final byte NULL = 0, + BOOL_TRUE = 1, + BOOL_FALSE = 2, + BYTE = 3, + SHORT = 4, + DOUBLE = 5, + INT = 6, + LONG = 7, + FLOAT = 8, + DATE = 9, + MAP = 10, + SOLRDOC = 11, + SOLRDOCLST = 12, + BYTEARR = 13, + ITERATOR = 14, + /** this is a special tag signals an end. 
No value is associated with it */ + END = 15, + SOLRINPUTDOC = 16, + MAP_ENTRY_ITER = 17, + ENUM_FIELD_VALUE = 18, + MAP_ENTRY = 19, + UUID = 20, // This is reserved to be used only in LogCodec + // types that combine tag + length (or other info) in a single byte + TAG_AND_LEN = (byte) (1 << 5), + STR = (byte) (1 << 5), + SINT = (byte) (2 << 5), + SLONG = (byte) (3 << 5), + ARR = (byte) (4 << 5), // + ORDERED_MAP = (byte) (5 << 5), // SimpleOrderedMap (a NamedList subclass, and more common) + NAMED_LST = (byte) (6 << 5), // NamedList + EXTERN_STRING = (byte) (7 << 5); + public static final int OxOF = 0x0f; + private final ObjectResolver resolver; - protected FastOutputStream daos; - private StringCache stringCache; + private final StringCache stringCache; private WritableDocFields writableDocFields; private boolean alreadyMarshalled; private boolean alreadyUnmarshalled; protected boolean readStringAsCharSeq = false; + protected byte tagByte; + + // extern string structures + private int stringsCount; + private Map stringsMap; + private ObjectArrayList stringsList; + + public final BinEntryWriter ew = new BinEntryWriter(); + + // caching objects + protected byte[] bytes; + private CharArr arr; + private final StringBytes bytesRef = new StringBytes(null, 0, 0); + + // caching UTF-8 bytes and lazy conversion to UTF-16 + private Function stringProvider; + private BytesBlock bytesBlock; + + // internal stream wrapper classes + private OutputStream out; + private boolean isFastOutputStream; + protected byte[] buf; + protected int pos; + + protected InputStream in; + protected int end; + + // protected long readFromStream; // number of bytes read from the underlying inputstream public JavaBinCodec() { - resolver =null; - writableDocFields =null; + resolver = null; + writableDocFields = null; + stringCache = null; } public JavaBinCodec setReadStringAsCharSeq(boolean flag) { @@ -136,22 +190,42 @@ public JavaBinCodec setReadStringAsCharSeq(boolean flag) { } /** - * Use this to use this as a PushWriter. ensure that close() is called explicitly after use + * Instantiates a new Java bin codec to be used as a PushWriter. + * + *
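The "tag + length" constants above pack a type into the upper 3 bits of a byte and a small size into the lower 5; an illustrative sketch of the arithmetic (not part of the patch):

  byte tag = (byte) (STR | 5);            // 0b001_00101: a 5-byte string
  assert (tag & 0xff) >>> 5 == STR >>> 5; // upper 3 bits select the type
  assert (tag & 0x1f) == 5;               // lower 5 bits carry the inline size
  // sizes >= 0x1f set all five low bits and spill the remainder into a
  // variable-length int, as writeTag further below shows
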

+   * <p>Ensure that close() is called explicitly after use.
+   *
+   * @param os the OutputStream to marshal to
+   * @param resolver a resolver to be used for resolving Objects
+   */
+  public JavaBinCodec(OutputStream os, ObjectResolver resolver) {
+    this.resolver = resolver;
+    stringCache = null;
+    initWrite(os, false);
+  }
+
+  /**
+   * Instantiates a new Java bin codec to be used as a PushWriter.
+   *
+   * <p>Ensure that close() is called explicitly after use.
    *
-   * @param os The output stream
+   * @param os the OutputStream to marshal to
+   * @param resolver a resolver to be used for resolving Objects
+   * @param streamIsBuffered if true, no additional buffering for the OutputStream will be
+   *     considered necessary.
    */
-  public JavaBinCodec(OutputStream os, ObjectResolver resolver) throws IOException {
+  public JavaBinCodec(OutputStream os, ObjectResolver resolver, boolean streamIsBuffered) {
     this.resolver = resolver;
-    initWrite(os);
+    stringCache = null;
+    initWrite(os, streamIsBuffered);
   }
 
   public JavaBinCodec(ObjectResolver resolver) {
     this(resolver, null);
   }
 
-  public JavaBinCodec setWritableDocFields(WritableDocFields writableDocFields){
+
+  public JavaBinCodec setWritableDocFields(WritableDocFields writableDocFields) {
     this.writableDocFields = writableDocFields;
     return this;
-
   }
 
   public JavaBinCodec(ObjectResolver resolver, StringCache stringCache) {
@@ -162,509 +236,673 @@ public JavaBinCodec(ObjectResolver resolver, StringCache stringCache) {
   public ObjectResolver getResolver() {
     return resolver;
   }
-
-  public void marshal(Object nl, OutputStream os) throws IOException {
+
+  /**
+   * Marshals a given primitive or collection to an OutputStream.
+   *
+   * <p>Collections may be nested and {@link NamedList} is a supported collection.
+   *
+   * @param object the primitive or Collection to marshal
+   * @param outputStream the OutputStream to marshal to
+   * @throws IOException on IO failure
+   */
+  public void marshal(Object object, OutputStream outputStream) throws IOException {
+    marshal(object, outputStream, false);
+  }
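Taken together with unmarshal just below, a round-trip sketch (illustrative only; per the class javadoc, a fresh instance is needed for each operation):

  NamedList<Object> original = new NamedList<>();
  original.add("q", "*:*");
  BytesOutputStream bos = new BytesOutputStream();
  new JavaBinCodec().marshal(original, bos);

  @SuppressWarnings("unchecked")
  NamedList<Object> copy = (NamedList<Object>) new JavaBinCodec().unmarshal(bos.toBytes());
  assert "*:*".equals(copy.get("q"));
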

+
+  /**
+   * Marshals a given primitive or collection to an OutputStream.
+   *
+   * <p>Collections may be nested and {@link NamedList} is a supported collection.
+   *
+   * @param object the primitive or Collection to marshal
+   * @param outputStream the OutputStream to marshal to
+   * @param streamIsBuffered a hint indicating whether the OutputStream is already buffered or not
+   * @throws IOException on IO failure
+   */
+  public void marshal(Object object, OutputStream outputStream, boolean streamIsBuffered)
+      throws IOException {
     try {
-      initWrite(os);
-      writeVal(nl);
+      initWrite(outputStream, streamIsBuffered);
+      writeVal(this, object);
     } finally {
       alreadyMarshalled = true;
-      daos.flushBuffer();
+      flushBufferOS(this);
     }
   }
 
-  protected void initWrite(OutputStream os) throws IOException {
+  private void initWrite(OutputStream os, boolean streamIsBuffered) {
     assert !alreadyMarshalled;
-    init(FastOutputStream.wrap(os));
-    daos.writeByte(VERSION);
-  }
-
-  /** expert: sets a new output stream */
-  public void init(FastOutputStream os) {
-    daos = os;
+    if (streamIsBuffered) {
+      initOutStream(os, null);
+    } else {
+      initOutStream(os);
+    }
+    try {
+      writeByteToOS(this, VERSION);
+    } catch (IOException e) {
+      throw new RuntimeIOException(e);
+    }
   }
 
-  byte version;
-
-  public Object unmarshal(byte[] buf) throws IOException {
-    FastInputStream dis = initRead(buf);
-    return readVal(dis);
+  public void init(OutputStream outputStream) {
+    if (outputStream instanceof FastOutputStream) {
+      initOutStream(outputStream, null);
+    } else {
+      initOutStream(outputStream);
+    }
   }
 
-  public Object unmarshal(InputStream is) throws IOException {
-    FastInputStream dis = initRead(is);
-    return readVal(dis);
+
+  /**
+   * Unmarshalls a primitive or collection from a byte array to an Object.
+   *
+   * @param buffer a byte buffer containing the marshaled Object
+   * @return the unmarshalled Object
+   * @throws IOException on IO failure
+   */
+  public Object unmarshal(byte[] buffer) throws IOException {
+    initRead(buffer);
+    return readVal(this);
   }
 
-  protected FastInputStream initRead(InputStream is) throws IOException {
-    assert !alreadyUnmarshalled;
-    FastInputStream dis = FastInputStream.wrap(is);
-    return _init(dis);
+  /**
+   * Unmarshalls a primitive or collection from an InputStream to an Object.
+ * + * @param inputStream an InputStream containing the marshaled Object + * @return the unmarshalled Object + * @throws IOException on IO failure + */ + public Object unmarshal(InputStream inputStream) throws IOException { + initRead(inputStream); + return readVal(this); } - protected FastInputStream initRead(byte[] buf) throws IOException { + + public void initRead(InputStream is) throws IOException { assert !alreadyUnmarshalled; - FastInputStream dis = new FastInputStream(null, buf, 0, buf.length); - return _init(dis); + + init(is); + byte version = readByte(this); + if (version != VERSION) { + throw new InvalidEncodingException( + "Invalid version (expected " + + VERSION + + ", but " + + version + + ") or the data in not in 'javabin' format"); + } + + alreadyUnmarshalled = true; } - protected FastInputStream _init(FastInputStream dis) throws IOException { - version = dis.readByte(); + protected void initRead(byte[] buf) throws IOException { + assert !alreadyUnmarshalled; + InputStream dis = new BytesInputStream(buf); + init(dis); + byte version = readByte(this); if (version != VERSION) { - throw new RuntimeException("Invalid version (expected " + VERSION + - ", but " + version + ") or the data in not in 'javabin' format"); + throw new InvalidEncodingException( + "Invalid version (expected " + + VERSION + + ", but " + + version + + ") or the data in not in 'javabin' format"); } alreadyUnmarshalled = true; - return dis; } + public void init(InputStream dis) throws IOException { + if (dis instanceof FastInputStream) { + buf = null; + pos = ((FastInputStream) dis).pos; + end = ((FastInputStream) dis).end; + } else { + buf = new byte[BUFFER_SZ]; + } + in = dis; + } - public SimpleOrderedMap readOrderedMap(DataInputInputStream dis) throws IOException { - int sz = readSize(dis); + static SimpleOrderedMap readOrderedMap(JavaBinCodec javaBinCodec) throws IOException { + int sz = readSize(javaBinCodec); SimpleOrderedMap nl = new SimpleOrderedMap<>(sz); for (int i = 0; i < sz; i++) { - String name = (String) readVal(dis); - Object val = readVal(dis); + String name = (String) readVal(javaBinCodec); + Object val = readVal(javaBinCodec); nl.add(name, val); } return nl; } - public NamedList readNamedList(DataInputInputStream dis) throws IOException { - int sz = readSize(dis); + public NamedList readNamedList() throws IOException { + int sz = readSize(this); NamedList nl = new NamedList<>(sz); for (int i = 0; i < sz; i++) { - String name = (String) readVal(dis); - Object val = readVal(dis); + String name = (String) readVal(this); + Object val = readVal(this); nl.add(name, val); } return nl; } - public void writeNamedList(NamedList nl) throws IOException { - writeTag(nl instanceof SimpleOrderedMap ? ORDERED_MAP : NAMED_LST, nl.size()); - for (int i = 0; i < nl.size(); i++) { - String name = nl.getName(i); - writeExternString(name); - Object val = nl.getVal(i); - writeVal(val); - } + public static void writeNamedList(JavaBinCodec javaBinCodec, NamedList nl) throws IOException { + int size = nl.size(); + writeTag(javaBinCodec, nl instanceof SimpleOrderedMap ? 
ORDERED_MAP : NAMED_LST, size); + nl.forEachIO(new NLWriterIOBiConsumer(javaBinCodec)); } - public void writeVal(Object val) throws IOException { - if (writeKnownType(val)) { + public static void writeVal(JavaBinCodec javaBinCodec, Object val) throws IOException { + if (javaBinCodec.writeKnownType(val)) { return; } else { - ObjectResolver resolver = null; - if(val instanceof ObjectResolver) { - resolver = (ObjectResolver)val; - } - else { - resolver = this.resolver; + ObjectResolver resolver; + if (val instanceof ObjectResolver) { + resolver = (ObjectResolver) val; + } else { + resolver = javaBinCodec.resolver; } if (resolver != null) { - Object tmpVal = resolver.resolve(val, this); + Object tmpVal = resolver.resolve(val, javaBinCodec); if (tmpVal == null) return; // null means the resolver took care of it fully - if (writeKnownType(tmpVal)) return; + if (javaBinCodec.writeKnownType(tmpVal)) return; } } - // Fallback to do *something*. - // note: if the user of this codec doesn't want this (e.g. UpdateLog) it can supply an ObjectResolver that does - // something else like throw an exception. - writeVal(val.getClass().getName() + ':' + val.toString()); + + /* NOTE: if the user of this codec doesn't want this (e.g. UpdateLog) they can supply an + ObjectResolver that does something else, like throw an exception.*/ + writeVal(javaBinCodec, val.getClass().getName() + ':' + val); } - protected static final Object END_OBJ = new Object(); + public static Object readVal(JavaBinCodec javaBinCodec) throws IOException { + javaBinCodec.tagByte = readByte(javaBinCodec); + return javaBinCodec.readObject(); + } - protected byte tagByte; + protected Object readObject() throws IOException { - public Object readVal(DataInputInputStream dis) throws IOException { - tagByte = dis.readByte(); - return readObject(dis); - } + /* + NOTE: this method is broken up just a bit (ie checkLessCommonTypes) to get + the method size under the limit for inlining by the C2 compiler - protected Object readObject(DataInputInputStream dis) throws IOException { - // if ((tagByte & 0xe0) == 0) { - // if top 3 bits are clear, this is a normal tag + FYI NOTE: if top 3 bits are clear, this is a normal tag + i.e. 
if ((tagByte & 0xe0) == 0) + */ - // OK, try type + size in single byte + // try type + size in single byte switch (tagByte >>> 5) { case STR >>> 5: - return readStr(dis, stringCache, readStringAsCharSeq); + return readStr(this, stringCache, readStringAsCharSeq); case SINT >>> 5: - return readSmallInt(dis); + return readSmallInt(this); case SLONG >>> 5: - return readSmallLong(dis); + return readSmallLong(this); case ARR >>> 5: - return readArray(dis); + return readArray(this); case ORDERED_MAP >>> 5: - return readOrderedMap(dis); + return readOrderedMap(this); case NAMED_LST >>> 5: - return readNamedList(dis); + return readNamedList(); case EXTERN_STRING >>> 5: - return readExternString(dis); + return readExternString(this); } switch (tagByte) { - case NULL: - return null; - case DATE: - return new Date(dis.readLong()); case INT: - return dis.readInt(); + return readIntFromIS(this); + case LONG: + return readLongFromIS(this); + case DATE: + return new Date(readLongFromIS(this)); + case SOLRDOC: + return readSolrDocument(this); + case SOLRDOCLST: + return readSolrDocumentList(this); + case SOLRINPUTDOC: + return readSolrInputDocument(this); + case MAP: + return readMap(this); + case MAP_ENTRY: + return readMapEntry(this); + case MAP_ENTRY_ITER: + return readMapIter(this); + } + + return readLessCommonTypes(this); + } + + private static long readLongFromIS(JavaBinCodec javaBinCodec) throws IOException { + return (long) readUnsignedByte(javaBinCodec) << 56 + | (long) readUnsignedByte(javaBinCodec) << 48 + | (long) readUnsignedByte(javaBinCodec) << 40 + | (long) readUnsignedByte(javaBinCodec) << 32 + | (long) readUnsignedByte(javaBinCodec) << 24 + | (long) readUnsignedByte(javaBinCodec) << 16 + | (long) readUnsignedByte(javaBinCodec) << 8 + | readUnsignedByte(javaBinCodec); + } + + private static int readIntFromIS(JavaBinCodec javaBinCodec) throws IOException { + return readUnsignedByte(javaBinCodec) << 24 + | readUnsignedByte(javaBinCodec) << 16 + | readUnsignedByte(javaBinCodec) << 8 + | readUnsignedByte(javaBinCodec); + } + + private static Object readTagThenStringOrSolrDocument(JavaBinCodec javaBinCodec) + throws IOException { + javaBinCodec.tagByte = readByte(javaBinCodec); + return readStringOrSolrDocument(javaBinCodec); + } + + private static Object readStringOrSolrDocument(JavaBinCodec javaBinCodec) throws IOException { + if (javaBinCodec.tagByte >>> 5 == STR >>> 5) { + return readStr(javaBinCodec, javaBinCodec.stringCache, javaBinCodec.readStringAsCharSeq); + } else if (javaBinCodec.tagByte >>> 5 == EXTERN_STRING >>> 5) { + return javaBinCodec.readExternString(javaBinCodec); + } + + switch (javaBinCodec.tagByte) { + case SOLRDOC: + return javaBinCodec.readSolrDocument(javaBinCodec); + case SOLRINPUTDOC: + return javaBinCodec.readSolrInputDocument(javaBinCodec); + case FLOAT: + return javaBinCodec.readFloat(); + } + + throw new UnsupportedEncodingException("Unknown or unexpected type " + javaBinCodec.tagByte); + } + + private Object readLessCommonTypes(JavaBinCodec javaBinCodec) throws IOException { + switch (javaBinCodec.tagByte) { case BOOL_TRUE: return Boolean.TRUE; case BOOL_FALSE: return Boolean.FALSE; - case FLOAT: - return dis.readFloat(); + case NULL: + return null; case DOUBLE: - return dis.readDouble(); - case LONG: - return dis.readLong(); + return readDouble(); + case FLOAT: + return readFloat(); case BYTE: - return dis.readByte(); + return readByte(javaBinCodec); case SHORT: - return dis.readShort(); - case MAP: - return readMap(dis); - case SOLRDOC: - return 
readSolrDocument(dis); - case SOLRDOCLST: - return readSolrDocumentList(dis); + return readShort(); case BYTEARR: - return readByteArray(dis); + return readByteArray(javaBinCodec); case ITERATOR: - return readIterator(dis); + return javaBinCodec.readIterator(javaBinCodec); case END: return END_OBJ; - case SOLRINPUTDOC: - return readSolrInputDocument(dis); case ENUM_FIELD_VALUE: - return readEnumFieldValue(dis); - case MAP_ENTRY: - return readMapEntry(dis); - case MAP_ENTRY_ITER: - return readMapIter(dis); + return readEnumFieldValue(javaBinCodec); } - throw new RuntimeException("Unknown type " + tagByte); + throw new UnsupportedEncodingException("Unknown type " + javaBinCodec.tagByte); + } + + @SuppressWarnings("rawtypes") + private boolean writeKnownType(Object val) throws IOException { + while (true) { + if (writePrimitive(val)) { + return true; + } else if (val instanceof NamedList) { + writeNamedList(this, (NamedList) val); + return true; + } else if (val instanceof SolrInputField) { + val = ((SolrInputField) val).getValue(); + continue; + } else if (val + instanceof + SolrDocumentList) { // SolrDocumentList is a List, so must come before List check + writeSolrDocumentList((SolrDocumentList) val); + return true; + } else if (val instanceof SolrDocument) { + // this needs special treatment to know which fields are to be written + writeSolrDocument((SolrDocument) val); + return true; + } else if (val instanceof SolrInputDocument) { + writeSolrInputDocument((SolrInputDocument) val); + return true; + } else if (val instanceof Iterator) { + writeIterator(this, (Iterator) val); + return true; + } else if (val instanceof Map.Entry) { + writeMapEntry((Map.Entry) val); + return true; + } else if (val instanceof MapWriter) { + writeMap((MapWriter) val); + return true; + } else if (val instanceof Map) { + writeMap(this, (Map) val); + return true; + } else if (writeLessCommonPrimitive(this, val)) return true; + + return writeLessCommonKnownType(this, val); + } } @SuppressWarnings({"unchecked", "rawtypes"}) - public boolean writeKnownType(Object val) throws IOException { - if (writePrimitive(val)) return true; - if (val instanceof NamedList) { - writeNamedList((NamedList) val); + private static boolean writeLessCommonKnownType(JavaBinCodec javaBinCodec, Object val) + throws IOException { + if (val instanceof Collection) { + writeArray(javaBinCodec, (Collection) val); return true; - } - if (val instanceof SolrDocumentList) { // SolrDocumentList is a List, so must come before List check - writeSolrDocumentList((SolrDocumentList) val); + } else if (val instanceof IteratorWriter) { + javaBinCodec.writeIterator((IteratorWriter) val); return true; - } - if (val instanceof SolrInputField) { - return writeKnownType(((SolrInputField) val).getValue()); - } - if (val instanceof IteratorWriter) { - writeIterator((IteratorWriter) val); + } else if (val instanceof Object[]) { + writeArray(javaBinCodec, (Object[]) val); return true; - } - if (val instanceof Collection) { - writeArray((Collection) val); + } else if (val instanceof Path) { + writeStr(javaBinCodec, ((Path) val).toAbsolutePath().toString()); return true; - } - if (val instanceof Object[]) { - writeArray((Object[]) val); + } else if (val instanceof Iterable) { + writeIterator(javaBinCodec, ((Iterable) val).iterator()); return true; - } - if (val instanceof SolrDocument) { - //this needs special treatment to know which fields are to be written - writeSolrDocument((SolrDocument) val); + } else if (val instanceof EnumFieldValue) { + 
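+        // NOTE: dispatch order matters throughout writeKnownType above:
+        // SolrDocumentList implements List, so it must be matched before the
+        // generic Collection/Iterable branches in this method ever see it.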
javaBinCodec.writeEnumFieldValue((EnumFieldValue) val); return true; - } - if (val instanceof SolrInputDocument) { - writeSolrInputDocument((SolrInputDocument)val); + } else if (val instanceof MapSerializable) { + // todo find a better way to reuse the map more efficiently + writeMap(javaBinCodec, ((MapSerializable) val).toMap(new NamedList().asShallowMap())); return true; - } - if (val instanceof MapWriter) { - writeMap((MapWriter) val); + } else if (val instanceof AtomicInteger) { + writeInt(javaBinCodec, ((AtomicInteger) val).get()); return true; - } - if (val instanceof Map) { - writeMap((Map) val); + } else if (val instanceof AtomicLong) { + writeLong(javaBinCodec, ((AtomicLong) val).get()); return true; - } - if (val instanceof Iterator) { - writeIterator((Iterator) val); + } else if (val instanceof AtomicBoolean) { + writeBoolean(javaBinCodec, ((AtomicBoolean) val).get()); return true; } - if (val instanceof Path) { - writeStr(((Path) val).toAbsolutePath().toString()); - return true; + return false; + } + + private static class MapEntry implements Entry { + + private final Object key; + private final Object value; + + MapEntry(Object key, Object value) { + this.key = key; + this.value = value; } - if (val instanceof Iterable) { - writeIterator(((Iterable) val).iterator()); - return true; + + @Override + public Object getKey() { + return key; } - if (val instanceof EnumFieldValue) { - writeEnumFieldValue((EnumFieldValue) val); - return true; + + @Override + public Object getValue() { + return value; } - if (val instanceof Map.Entry) { - writeMapEntry((Map.Entry)val); - return true; + + @Override + public String toString() { + return "MapEntry[" + key + ':' + value + ']'; } - if (val instanceof MapSerializable) { - //todo find a better way to reuse the map more efficiently - writeMap(((MapSerializable) val).toMap(new NamedList().asShallowMap())); - return true; + + @DoNotCall + @Deprecated + @Override + public final Object setValue(Object value) { + throw new UnsupportedOperationException(); } - if (val instanceof AtomicInteger) { - writeInt(((AtomicInteger) val).get()); - return true; + + @Override + public int hashCode() { + int result = 31; + result *= 31 + key.hashCode(); + result *= 31 + value.hashCode(); + return result; } - if (val instanceof AtomicLong) { - writeLong(((AtomicLong) val).get()); - return true; + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof Map.Entry) { + Entry entry = (Entry) obj; + return (key.equals(entry.getKey()) && value.equals(entry.getValue())); + } + return false; } - if (val instanceof AtomicBoolean) { - writeBoolean(((AtomicBoolean) val).get()); - return true; + } + + private static class NLWriterIOBiConsumer extends NamedList.IOBiConsumer { + + private final JavaBinCodec javaBinCodec; + + public NLWriterIOBiConsumer(JavaBinCodec javaBinCodec) { + this.javaBinCodec = javaBinCodec; + } + + @Override + void accept(String n, Object v) throws IOException { + javaBinCodec.writeExternString(n); + writeVal(javaBinCodec, v); } - return false; } - public class BinEntryWriter implements MapWriter.EntryWriter { + public final class BinEntryWriter extends MapWriter.EntryWriter { @Override public MapWriter.EntryWriter put(CharSequence k, Object v) throws IOException { writeExternString(k); - JavaBinCodec.this.writeVal(v); + writeVal(JavaBinCodec.this, v); return this; } @Override public MapWriter.EntryWriter put(CharSequence k, int v) throws IOException { writeExternString(k); - 
JavaBinCodec.this.writeInt(v); + writeInt(JavaBinCodec.this, v); return this; } @Override public MapWriter.EntryWriter put(CharSequence k, long v) throws IOException { writeExternString(k); - JavaBinCodec.this.writeLong(v); + writeLong(JavaBinCodec.this, v); return this; } @Override public MapWriter.EntryWriter put(CharSequence k, float v) throws IOException { writeExternString(k); - JavaBinCodec.this.writeFloat(v); + writeFloat(v); return this; } @Override public MapWriter.EntryWriter put(CharSequence k, double v) throws IOException { writeExternString(k); - JavaBinCodec.this.writeDouble(v); + writeDouble(JavaBinCodec.this, v); return this; } @Override public MapWriter.EntryWriter put(CharSequence k, boolean v) throws IOException { writeExternString(k); - writeBoolean(v); + writeBoolean(JavaBinCodec.this, v); return this; } @Override public MapWriter.EntryWriter put(CharSequence k, CharSequence v) throws IOException { writeExternString(k); - writeStr(v); + writeStr(JavaBinCodec.this, v); return this; } - - private BiConsumer biConsumer; - - @Override - public BiConsumer getBiConsumer() { - if (biConsumer == null) biConsumer = MapWriter.EntryWriter.super.getBiConsumer(); - return biConsumer; - } } - public final BinEntryWriter ew = new BinEntryWriter(); - - public void writeMap(MapWriter val) throws IOException { - writeTag(MAP_ENTRY_ITER); + writeTag(this, MAP_ENTRY_ITER); val.writeMap(ew); - writeTag(END); + writeTag(this, END); } - - public void writeTag(byte tag) throws IOException { - daos.writeByte(tag); + public static void writeTag(JavaBinCodec javaBinCodec, byte tag) throws IOException { + writeByteToOS(javaBinCodec, tag); } - public void writeTag(byte tag, int size) throws IOException { + public static void writeTag(JavaBinCodec javaBinCodec, byte tag, int size) throws IOException { if ((tag & 0xe0) != 0) { if (size < 0x1f) { - daos.writeByte(tag | size); + writeByteToOS(javaBinCodec, tag | size); } else { - daos.writeByte(tag | 0x1f); - writeVInt(size - 0x1f, daos); + writeByteToOS(javaBinCodec, tag | 0x1f); + writeVInt(javaBinCodec, size - 0x1f); } } else { - daos.writeByte(tag); - writeVInt(size, daos); + writeByteToOS(javaBinCodec, tag); + writeVInt(javaBinCodec, size); } } - public void writeByteArray(byte[] arr, int offset, int len) throws IOException { - writeTag(BYTEARR, len); - daos.write(arr, offset, len); + public static void writeByteArray(JavaBinCodec javaBinCodec, byte[] arr, int offset, int len) + throws IOException { + writeTag(javaBinCodec, BYTEARR, len); + writeToOS(javaBinCodec, arr, offset, len); } - public byte[] readByteArray(DataInputInputStream dis) throws IOException { - byte[] arr = new byte[readVInt(dis)]; - dis.readFully(arr); + private byte[] readByteArray(JavaBinCodec javaBinCodec) throws IOException { + byte[] arr = new byte[readVInt(javaBinCodec)]; + readFully(this, arr, 0, arr.length); return arr; } - //use this to ignore the writable interface because , child docs will ignore the fl flag - // is it a good design? - private boolean ignoreWritable =false; + + // children will return false in the predicate passed to EntryWriterWrapper e.g. 
NOOP + private boolean isChildDoc = false; private MapWriter.EntryWriter cew; public void writeSolrDocument(SolrDocument doc) throws IOException { List children = doc.getChildDocuments(); int fieldsCount = 0; - if(writableDocFields == null || writableDocFields.wantsAllFields() || ignoreWritable){ + if (writableDocFields == null || writableDocFields.wantsAllFields() || isChildDoc) { fieldsCount = doc.size(); } else { for (Entry e : doc) { - if(toWrite(e.getKey())) fieldsCount++; + if (toWrite(e.getKey())) fieldsCount++; } } - int sz = fieldsCount + (children==null ? 0 : children.size()); - writeTag(SOLRDOC); - writeTag(ORDERED_MAP, sz); - if (cew == null) cew = new ConditionalKeyMapWriter.EntryWriterWrapper(ew, (k) -> toWrite(k.toString())); + int sz = fieldsCount + (children == null ? 0 : children.size()); + writeTag(this, SOLRDOC); + writeTag(this, ORDERED_MAP, sz); + if (cew == null) + cew = new ConditionalKeyMapWriter.EntryWriterWrapper(ew, k -> toWrite(k.toString())); doc.writeMap(cew); if (children != null) { try { - ignoreWritable = true; + isChildDoc = true; for (SolrDocument child : children) { writeSolrDocument(child); } } finally { - ignoreWritable = false; + isChildDoc = false; } } - } - protected boolean toWrite(String key) { - return writableDocFields == null || ignoreWritable || writableDocFields.isWritable(key); + private boolean toWrite(String key) { + return writableDocFields == null || isChildDoc || writableDocFields.isWritable(key); } - public SolrDocument readSolrDocument(DataInputInputStream dis) throws IOException { - tagByte = dis.readByte(); - int size = readSize(dis); + public SolrDocument readSolrDocument(JavaBinCodec javaBinCodec) throws IOException { + tagByte = readByte(javaBinCodec); + int size = readSize(javaBinCodec); SolrDocument doc = new SolrDocument(new LinkedHashMap<>(size)); for (int i = 0; i < size; i++) { String fieldName; - Object obj = readVal(dis); // could be a field name, or a child document + Object obj = + readTagThenStringOrSolrDocument( + javaBinCodec); // could be a field name, or a child document if (obj instanceof SolrDocument) { - doc.addChildDocument((SolrDocument)obj); + doc.addChildDocument((SolrDocument) obj); continue; } else { - fieldName = (String)obj; + fieldName = (String) obj; } - Object fieldVal = readVal(dis); + Object fieldVal = readVal(javaBinCodec); doc.setField(fieldName, fieldVal); } return doc; } - public SolrDocumentList readSolrDocumentList(DataInputInputStream dis) throws IOException { - SolrDocumentList solrDocs = new SolrDocumentList(); + public SolrDocumentList readSolrDocumentList(JavaBinCodec javaBinCodec) throws IOException { + + tagByte = readByte(javaBinCodec); + @SuppressWarnings("unchecked") + List list = readArray(javaBinCodec, readSize(javaBinCodec)); + + tagByte = readByte(javaBinCodec); @SuppressWarnings("unchecked") - List list = (List) readVal(dis); + List l = readArray(this, readSize(javaBinCodec)); + int sz = l.size(); + SolrDocumentList solrDocs = new SolrDocumentList(sz); solrDocs.setNumFound((Long) list.get(0)); solrDocs.setStart((Long) list.get(1)); solrDocs.setMaxScore((Float) list.get(2)); - if (list.size() > 3) { //needed for back compatibility - solrDocs.setNumFoundExact((Boolean)list.get(3)); + if (list.size() > 3) { // needed for back compatibility + solrDocs.setNumFoundExact((Boolean) list.get(3)); } - @SuppressWarnings("unchecked") - List l = (List) readVal(dis); - solrDocs.addAll(l); + l.forEach(doc -> solrDocs.add((SolrDocument) doc)); + return solrDocs; } - public void 
writeSolrDocumentList(SolrDocumentList docs) - throws IOException { - writeTag(SOLRDOCLST); + public void writeSolrDocumentList(SolrDocumentList docs) throws IOException { + writeTag(this, SOLRDOCLST); List l = new ArrayList<>(4); l.add(docs.getNumFound()); l.add(docs.getStart()); l.add(docs.getMaxScore()); l.add(docs.getNumFoundExact()); - writeArray(l); - writeArray(docs); + writeArray(this, l); + writeArray(this, docs); } - public SolrInputDocument readSolrInputDocument(DataInputInputStream dis) throws IOException { - int sz = readVInt(dis); - float docBoost = (Float)readVal(dis); - if (docBoost != 1f) { - String message = "Ignoring document boost: " + docBoost + " as index-time boosts are not supported anymore"; - if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) { - log.warn(message); - } else { - log.debug(message); - } - } - SolrInputDocument sdoc = createSolrInputDocument(sz); + public SolrInputDocument readSolrInputDocument(JavaBinCodec javaBinCodec) throws IOException { + int sz = readVInt(javaBinCodec); + readVal(javaBinCodec); // unused boost + SolrInputDocument solrDoc = createSolrInputDocument(sz); for (int i = 0; i < sz; i++) { String fieldName; - Object obj = readVal(dis); // could be a boost, a field name, or a child document + // we know we are expecting to read a String key, a child document (or a back compat boost) + Object obj = readTagThenStringOrSolrDocument(javaBinCodec); if (obj instanceof Float) { - float boost = (Float)obj; - if (boost != 1f) { - String message = "Ignoring field boost: " + boost + " as index-time boosts are not supported anymore"; - if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) { - log.warn(message); - } else { - log.debug(message); - } - } - fieldName = (String)readVal(dis); + // same as above, key, child doc, or back compat boost + fieldName = (String) readTagThenStringOrSolrDocument(javaBinCodec); } else if (obj instanceof SolrInputDocument) { - sdoc.addChildDocument((SolrInputDocument)obj); + solrDoc.addChildDocument((SolrInputDocument) obj); continue; } else { - fieldName = (String)obj; + fieldName = (String) obj; } - Object fieldVal = readVal(dis); - sdoc.setField(fieldName, fieldVal); + Object fieldVal = readVal(javaBinCodec); + solrDoc.setField(fieldName, fieldVal); } - return sdoc; + return solrDoc; } protected SolrInputDocument createSolrInputDocument(int sz) { return new SolrInputDocument(new LinkedHashMap<>(sz)); } - static final Predicate IGNORECHILDDOCS = it -> !CommonParams.CHILDDOC.equals(it.toString()); + /** Writes a {@link SolrInputDocument}. */ public void writeSolrInputDocument(SolrInputDocument sdoc) throws IOException { List children = sdoc.getChildDocuments(); - int sz = sdoc.size() + (children==null ? 0 : children.size()); - writeTag(SOLRINPUTDOC, sz); - writeFloat(1f); // document boost - sdoc.writeMap(new ConditionalKeyMapWriter.EntryWriterWrapper(ew,IGNORECHILDDOCS)); + int sz = sdoc.size() + (children == null ? 
0 : children.size()); + writeTag(this, SOLRINPUTDOC, sz); + writeFloat(1f); // placeholder document boost for back compat + sdoc.writeMap( + new ConditionalKeyMapWriter.EntryWriterWrapper( + ew, it -> !CommonParams.CHILDDOC.equals(it.toString()))); if (children != null) { for (SolrInputDocument child : children) { writeSolrInputDocument(child); @@ -672,573 +910,556 @@ public void writeSolrInputDocument(SolrInputDocument sdoc) throws IOException { } } - - public Map readMapIter(DataInputInputStream dis) throws IOException { - Map m = newMap(-1); + static Map readMapIter(JavaBinCodec javaBinCodec) throws IOException { + Map m = javaBinCodec.newMap(-1); for (; ; ) { - Object key = readVal(dis); + Object key = readVal(javaBinCodec); if (key == END_OBJ) break; - Object val = readVal(dis); + Object val = readVal(javaBinCodec); m.put(key, val); } return m; } /** - * create a new Map object - * @param size expected size, -1 means unknown size + * Creates new Map implementations for unmarshalled Maps. + * + * @param size the expected size or -1 for unknown size */ + @SuppressWarnings({"unchecked", "rawtypes"}) protected Map newMap(int size) { - return size < 0 ? new LinkedHashMap<>() : new LinkedHashMap<>(size); - } - - public Map readMap(DataInputInputStream dis) - throws IOException { - int sz = readVInt(dis); - return readMap(dis, sz); - } - - protected Map readMap(DataInputInputStream dis, int sz) throws IOException { - Map m = newMap(sz); + // open addressing with linear probing + return size < 0 + ? new Object2ObjectLinkedOpenHashMap(16, 0.75f) + : new Object2ObjectLinkedOpenHashMap(size, 0.5f); + } +// protected Map newMap(int size) { +// return size < 0 +// ? new HashMap<>(16, 0.75f) +// : new HashMap<>((int) Math.max(2, nextPowerOfTwo((long)Math.ceil(size / .5f))), 0.5f); +// } +// +// public static long nextPowerOfTwo(long x) { +// if (x == 0) return 1; +// x--; +// x |= x >> 1; +// x |= x >> 2; +// x |= x >> 4; +// x |= x >> 8; +// x |= x >> 16; +// return (x | x >> 32) + 1; +// } + + private static Map readMap(JavaBinCodec javaBinCodec) throws IOException { + int sz = readVInt(javaBinCodec); + return readMap(javaBinCodec, sz); + } + + static Map readMap(JavaBinCodec javaBinCodec, int sz) throws IOException { + Map m = javaBinCodec.newMap(sz); for (int i = 0; i < sz; i++) { - Object key = readVal(dis); - Object val = readVal(dis); + Object key = readVal(javaBinCodec); + Object val = readVal(javaBinCodec); m.put(key, val); - } return m; } - public final ItemWriter itemWriter = new ItemWriter() { - @Override - public ItemWriter add(Object o) throws IOException { - writeVal(o); - return this; - } - - @Override - public ItemWriter add(int v) throws IOException { - writeInt(v); - return this; - } - - @Override - public ItemWriter add(long v) throws IOException { - writeLong(v); - return this; - } - - @Override - public ItemWriter add(float v) throws IOException { - writeFloat(v); - return this; - } - - @Override - public ItemWriter add(double v) throws IOException { - writeDouble(v); - return this; - } - - @Override - public ItemWriter add(boolean v) throws IOException { - writeBoolean(v); - return this; - } - }; + private final ItemWriter itemWriter = new ObjectItemWriter(); - @Override public void writeIterator(IteratorWriter val) throws IOException { - writeTag(ITERATOR); + writeTag(this, ITERATOR); val.writeIter(itemWriter); - writeTag(END); + writeTag(this, END); } - public void writeIterator(Iterator iter) throws IOException { - writeTag(ITERATOR); + + public static void 
writeIterator(JavaBinCodec javaBinCodec, Iterator iter) throws IOException { + writeTag(javaBinCodec, ITERATOR); while (iter.hasNext()) { - writeVal(iter.next()); + writeVal(javaBinCodec, iter.next()); } - writeTag(END); + writeTag(javaBinCodec, END); } - public List readIterator(DataInputInputStream fis) throws IOException { - ArrayList l = new ArrayList<>(); + /** + * Unmarshalls an Iterator from the DataInputInputStream into a List. + * + * @return a list containing the Objects from the unmarshalled Iterator + * @throws IOException on IO failure + */ + public List readIterator(JavaBinCodec javaBinCodec) throws IOException { + List l = new ArrayList<>(8); while (true) { - Object o = readVal(fis); + Object o = readVal(javaBinCodec); if (o == END_OBJ) break; l.add(o); } return l; } - public void writeArray(List l) throws IOException { - writeTag(ARR, l.size()); - for (int i = 0; i < l.size(); i++) { - writeVal(l.get(i)); + public static void writeArray(JavaBinCodec javaBinCodec, List l) throws IOException { + writeTag(javaBinCodec, ARR, l.size()); + for (Object o : l) { + writeVal(javaBinCodec, o); } } - public void writeArray(Collection coll) throws IOException { - writeTag(ARR, coll.size()); + public static void writeArray(JavaBinCodec javaBinCodec, Collection coll) throws IOException { + writeTag(javaBinCodec, ARR, coll.size()); for (Object o : coll) { - writeVal(o); + writeVal(javaBinCodec, o); } - } - public void writeArray(Object[] arr) throws IOException { - writeTag(ARR, arr.length); - for (int i = 0; i < arr.length; i++) { - Object o = arr[i]; - writeVal(o); + public static void writeArray(JavaBinCodec javaBinCodec, Object[] arr) throws IOException { + writeTag(javaBinCodec, ARR, arr.length); + for (Object o : arr) { + writeVal(javaBinCodec, o); } } - @SuppressWarnings({"unchecked"}) - public List readArray(DataInputInputStream dis) throws IOException { - int sz = readSize(dis); - return readArray(dis, sz); + @SuppressWarnings("unchecked") + public static List readArray(JavaBinCodec javaBinCodec) throws IOException { + int sz = readSize(javaBinCodec); + return readArray(javaBinCodec, sz); } - @SuppressWarnings({"rawtypes"}) - protected List readArray(DataInputInputStream dis, int sz) throws IOException { - ArrayList l = new ArrayList<>(sz); + @SuppressWarnings("rawtypes") + protected static List readArray(JavaBinCodec javaBinCodec, int sz) throws IOException { + List l = new ArrayList<>(sz); for (int i = 0; i < sz; i++) { - l.add(readVal(dis)); + l.add(readVal(javaBinCodec)); } return l; } /** * write {@link EnumFieldValue} as tag+int value+string value + * * @param enumFieldValue to write */ - public void writeEnumFieldValue(EnumFieldValue enumFieldValue) throws IOException { - writeTag(ENUM_FIELD_VALUE); - writeInt(enumFieldValue.toInt()); - writeStr(enumFieldValue.toString()); + private void writeEnumFieldValue(EnumFieldValue enumFieldValue) throws IOException { + writeTag(this, ENUM_FIELD_VALUE); + writeInt(this, enumFieldValue.toInt()); + writeStr(this, enumFieldValue.toString()); } - public void writeMapEntry(Map.Entry val) throws IOException { - writeTag(MAP_ENTRY); - writeVal(val.getKey()); - writeVal(val.getValue()); + private void writeMapEntry(Map.Entry val) throws IOException { + writeTag(this, MAP_ENTRY); + writeVal(this, val.getKey()); + writeVal(this, val.getValue()); } - /** - * read {@link EnumFieldValue} (int+string) from input stream - * @param dis data input stream - * @return {@link EnumFieldValue} - */ - public EnumFieldValue 
readEnumFieldValue(DataInputInputStream dis) throws IOException {
-    Integer intValue = (Integer) readVal(dis);
-    String stringValue = (String) convertCharSeq (readVal(dis));
+  static EnumFieldValue readEnumFieldValue(JavaBinCodec javaBinCodec) throws IOException {
+    Integer intValue = (Integer) readVal(javaBinCodec);
+    String stringValue = (String) convertCharSeq(readVal(javaBinCodec));
     return new EnumFieldValue(intValue, stringValue);
   }
 
-
-
-  public Map.Entry<Object, Object> readMapEntry(DataInputInputStream dis) throws IOException {
-    final Object key = readVal(dis);
-    final Object value = readVal(dis);
-    return new Map.Entry<Object, Object>() {
-
-      @Override
-      public Object getKey() {
-        return key;
-      }
-
-      @Override
-      public Object getValue() {
-        return value;
-      }
-
-      @Override
-      public String toString() {
-        return "MapEntry[" + key + ":" + value + "]";
-      }
-
-      @Override
-      public Object setValue(Object value) {
-        throw new UnsupportedOperationException();
-      }
-
-      @Override
-      public int hashCode() {
-        int result = 31;
-        result *=31 + getKey().hashCode();
-        result *=31 + getValue().hashCode();
-        return result;
-      }
-
-      @Override
-      public boolean equals(Object obj) {
-        if(this == obj) {
-          return true;
-        }
-        if (obj instanceof Map.Entry) {
-          Entry entry = (Entry) obj;
-          return (this.getKey().equals(entry.getKey()) && this.getValue().equals(entry.getValue()));
-        }
-        return false;
-      }
-    };
-  }
+  public static Map.Entry<Object, Object> readMapEntry(JavaBinCodec javaBinCodec)
+      throws IOException {
+    Object key = readVal(javaBinCodec);
+    Object value = readVal(javaBinCodec);
+    return new MapEntry(key, value);
+  }
 
-  /**
-   * write the string as tag+length, with length being the number of UTF-8 bytes
-   */
-  public void writeStr(CharSequence s) throws IOException {
+  public static void writeStr(JavaBinCodec javaBinCodec, CharSequence s) throws IOException {
+
+    // writes the string as tag+length, with length being the number of UTF-8 bytes
+
     if (s == null) {
-      writeTag(NULL);
+      writeTag(javaBinCodec, NULL);
       return;
     }
     if (s instanceof Utf8CharSequence) {
-      writeUTF8Str((Utf8CharSequence) s);
+      writeUTF8Str(javaBinCodec, (Utf8CharSequence) s);
       return;
     }
     int end = s.length();
-    int maxSize = end * ByteUtils.MAX_UTF8_BYTES_PER_CHAR;
-    if (maxSize <= MAX_UTF8_SIZE_FOR_ARRAY_GROW_STRATEGY) {
-      if (bytes == null || bytes.length < maxSize) bytes = new byte[maxSize];
-      int sz = ByteUtils.UTF16toUTF8(s, 0, end, bytes, 0);
-      writeTag(STR, sz);
-      daos.write(bytes, 0, sz);
+    if (end > MAX_SZ_BEFORE_STRING_UTF8_ENCODE_OVER_SOLR) {
+
+      // however, when going too large, breaking up the conversion can allow
+      // for much better scale and behavior regardless of its performance
+      if (end > MAX_SZ_BEFORE_SLOWER_SOLR_UTF8_ENCODE_DECODE) {
+
+        // the previous internal length calc method we used was very costly - often
+        // approaching what the actual conversion costs - this one is from Guava
+        int sz = ByteUtils.calcUTF16toUTF8LengthGuava(s);
+
+        int readSize = Math.min(sz, MAX_SZ_BEFORE_SLOWER_SOLR_UTF8_ENCODE_DECODE);
+        if (javaBinCodec.bytes == null || javaBinCodec.bytes.length < readSize)
+          javaBinCodec.bytes = new byte[readSize];
+
+        writeTag(javaBinCodec, STR, sz);
+        flushBufferOS(javaBinCodec);
+        ByteUtils.writeUTF16toUTF8(s, 0, end, javaBinCodec.out, javaBinCodec.bytes);
+        return;
+      }
+
+      byte[] stringBytes = s.toString().getBytes(StandardCharsets.UTF_8);
+
+      writeTag(javaBinCodec, STR, stringBytes.length);
+      writeToOS(javaBinCodec, stringBytes);
     } else {
-      // double pass logic for large strings, see SOLR-7971
-      int sz = ByteUtils.calcUTF16toUTF8Length(s, 0, end);
-      writeTag(STR, sz);
-      if (bytes == null || bytes.length < 8192) bytes = new byte[8192];
-      ByteUtils.writeUTF16toUTF8(s, 0, end, daos, bytes);
+      int maxSize = end * ByteUtils.MAX_UTF8_BYTES_PER_CHAR;
+      if (javaBinCodec.bytes == null || javaBinCodec.bytes.length < maxSize)
+        javaBinCodec.bytes = new byte[maxSize];
+      int sz = ByteUtils.UTF16toUTF8(s, 0, end, javaBinCodec.bytes, 0);
+
+      writeTag(javaBinCodec, STR, sz);
+
+      writeToOS(javaBinCodec, javaBinCodec.bytes, 0, sz);
     }
   }
 
-  byte[] bytes;
-  CharArr arr = new CharArr();
-  private StringBytes bytesRef = new StringBytes(bytes,0,0);
-
-  public CharSequence readStr(DataInputInputStream dis) throws IOException {
-    return readStr(dis, null, readStringAsCharSeq);
+  public static CharSequence readStr(JavaBinCodec javaBinCodec) throws IOException {
+    return readStr(javaBinCodec, null, javaBinCodec.readStringAsCharSeq);
   }
 
-  public CharSequence readStr(DataInputInputStream dis, StringCache stringCache, boolean readStringAsCharSeq) throws IOException {
+  public static CharSequence readStr(
+      JavaBinCodec javaBinCodec, StringCache stringCache, boolean readStringAsCharSeq)
+      throws IOException {
     if (readStringAsCharSeq) {
-      return readUtf8(dis);
+      return readUtf8(javaBinCodec);
     }
-    int sz = readSize(dis);
-    return _readStr(dis, stringCache, sz);
+    int sz = readSize(javaBinCodec);
+    return readStr(javaBinCodec, stringCache, sz);
   }
 
-  private CharSequence _readStr(DataInputInputStream dis, StringCache stringCache, int sz) throws IOException {
-    if (bytes == null || bytes.length < sz) bytes = new byte[sz];
-    dis.readFully(bytes, 0, sz);
+  private static CharSequence readStr(JavaBinCodec javaBinCodec, StringCache stringCache, int sz)
+      throws IOException {
+    if (javaBinCodec.bytes == null || javaBinCodec.bytes.length < sz)
+      javaBinCodec.bytes = new byte[sz];
+    readFully(javaBinCodec, javaBinCodec.bytes, 0, sz);
     if (stringCache != null) {
-      return stringCache.get(bytesRef.reset(bytes, 0, sz));
+      return stringCache.get(javaBinCodec.bytesRef.reset(javaBinCodec.bytes, 0, sz));
     } else {
-      arr.reset();
-      ByteUtils.UTF8toUTF16(bytes, 0, sz, arr);
-      return arr.toString();
+      if (sz < MAX_SZ_BEFORE_STRING_UTF8_DECODE_OVER_SOLR
+          || sz > MAX_SZ_BEFORE_SLOWER_SOLR_UTF8_ENCODE_DECODE) {
+        if (javaBinCodec.arr == null) {
+          javaBinCodec.arr = new CharArr(sz);
+        } else {
+          javaBinCodec.arr.reset();
+        }
+        ByteUtils.UTF8toUTF16(javaBinCodec.bytes, 0, sz, javaBinCodec.arr);
+
+        return javaBinCodec.arr.toString();
+      }
+      return new String(javaBinCodec.bytes, 0, sz, StandardCharsets.UTF_8);
+      /*
+      NOTE: Until Java 9, you had to use "UTF-8" vs passing an Encoder or you would not
+      get Encoder caching. However, as part of 'compact strings', UTF-8 was
+      special cased, and now passing an Encoder is okay. Additionally, this path
+      also now hits intrinsics for SIMD. It has been juiced beyond a developer's
+      reach even though it still almost always requires returning a defensive array copy.
+      In Java 17+, this path is supposed to be even more efficient.
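+
+      A rough sketch of the decode dispatch above (charArrDecode is shorthand for the
+      ByteUtils.UTF8toUTF16 + CharArr.toString() sequence; the real cutoffs are the
+      MAX_SZ_* constants on this class, and smallCutoff/hugeCutoff are illustrative names):
+
+        if (sz < smallCutoff || sz > hugeCutoff) return charArrDecode(bytes, sz);
+        return new String(bytes, 0, sz, StandardCharsets.UTF_8); // intrinsified JDK path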
+ */ } } - /////////// code to optimize reading UTF8 - static final int MAX_UTF8_SZ = 1024 * 64;//too big strings can cause too much memory allocation - private Function stringProvider; - private BytesBlock bytesBlock; - - protected CharSequence readUtf8(DataInputInputStream dis) throws IOException { - int sz = readSize(dis); - return readUtf8(dis, sz); + static CharSequence readUtf8(JavaBinCodec javaBinCodec) throws IOException { + int sz = readSize(javaBinCodec); + return readUtf8(javaBinCodec, sz); } - protected CharSequence readUtf8(DataInputInputStream dis, int sz) throws IOException { - ByteArrayUtf8CharSequence result = new ByteArrayUtf8CharSequence(null,0,0); - if(dis.readDirectUtf8(result, sz)){ - result.stringProvider= getStringProvider(); - return result; + private static CharSequence readUtf8(JavaBinCodec javaBinCodec, int sz) throws IOException { + ByteArrayUtf8CharSequence result = new ByteArrayUtf8CharSequence(null, 0, 0); + if (readDirectUtf8(javaBinCodec, result, sz)) { + result.stringProvider = javaBinCodec.getStringProvider(); + return result; } - if (sz > MAX_UTF8_SZ) return _readStr(dis, null, sz); - if (bytesBlock == null) bytesBlock = new BytesBlock(1024 * 4); - BytesBlock block = this.bytesBlock.expand(sz); - dis.readFully(block.getBuf(), block.getStartPos(), sz); + if (sz > MAX_STRING_SZ_TO_TRY_KEEPING_AS_UTF8_WO_CONVERT_BYTES) + return readStr(javaBinCodec, null, sz); + + if (javaBinCodec.bytesBlock == null) + javaBinCodec.bytesBlock = new BytesBlock(Math.max(sz, 1024 << 2)); - result.reset(block.getBuf(), block.getStartPos(), sz,null); - result.stringProvider = getStringProvider(); + BytesBlock block = javaBinCodec.bytesBlock.expand(sz); + readFully(javaBinCodec, block.getBuf(), block.getStartPos(), sz); + result.reset(block.getBuf(), block.getStartPos(), sz, null); + result.stringProvider = javaBinCodec.getStringProvider(); return result; } private Function getStringProvider() { if (stringProvider == null) { - stringProvider = new Function<>() { - final CharArr charArr = new CharArr(8); - @Override - public String apply(ByteArrayUtf8CharSequence butf8cs) { - synchronized (charArr) { - charArr.reset(); - ByteUtils.UTF8toUTF16(butf8cs.buf, butf8cs.offset(), butf8cs.size(), charArr); - return charArr.toString(); - } - } - }; + stringProvider = new ByteArrayUtf8CharSequenceStringFunction(); } - return this.stringProvider; + return stringProvider; } - public void writeInt(int val) throws IOException { + public static void writeInt(JavaBinCodec javaBinCodec, int val) throws IOException { if (val > 0) { - int b = SINT | (val & 0x0f); - - if (val >= 0x0f) { + int b = SINT | (val & OxOF); + if (val >= OxOF) { b |= 0x10; - daos.writeByte(b); - writeVInt(val >>> 4, daos); + writeByteToOS(javaBinCodec, b); + writeVInt(javaBinCodec, val >>> 4); } else { - daos.writeByte(b); + writeByteToOS(javaBinCodec, b); } } else { - daos.writeByte(INT); - daos.writeInt(val); + writeByteToOS(javaBinCodec, INT); + writeIntToOS(javaBinCodec, val); } } - public int readSmallInt(DataInputInputStream dis) throws IOException { - int v = tagByte & 0x0F; - if ((tagByte & 0x10) != 0) - v = (readVInt(dis) << 4) | v; + public static int readSmallInt(JavaBinCodec javaBinCodec) throws IOException { + int v = javaBinCodec.tagByte & OxOF; + if ((javaBinCodec.tagByte & 0x10) != 0) v = (readVInt(javaBinCodec) << 4) | v; return v; } - - public void writeLong(long val) throws IOException { + public static void writeLong(JavaBinCodec javaBinCodec, long val) throws IOException { if ((val & 
0xff00000000000000L) == 0) {
-      int b = SLONG | ((int) val & 0x0f);
-      if (val >= 0x0f) {
+      int b = SLONG | ((int) val & OxOF);
+      if (val >= OxOF) {
         b |= 0x10;
-        daos.writeByte(b);
-        writeVLong(val >>> 4, daos);
+        writeByteToOS(javaBinCodec, b);
+        writeVLong(javaBinCodec, val >>> 4);
       } else {
-        daos.writeByte(b);
+        writeByteToOS(javaBinCodec, b);
       }
     } else {
-      daos.writeByte(LONG);
-      daos.writeLong(val);
+      writeByteToOS(javaBinCodec, LONG);
+      writeLongToOS(javaBinCodec, val);
     }
   }
 
-  public long readSmallLong(DataInputInputStream dis) throws IOException {
-    long v = tagByte & 0x0F;
-    if ((tagByte & 0x10) != 0)
-      v = (readVLong(dis) << 4) | v;
+  static long readSmallLong(JavaBinCodec javaBinCodec) throws IOException {
+    long v = javaBinCodec.tagByte & OxOF;
+    if ((javaBinCodec.tagByte & 0x10) != 0) v = (readVLong(javaBinCodec) << 4) | v;
     return v;
   }
 
   public void writeFloat(float val) throws IOException {
-    daos.writeByte(FLOAT);
-    daos.writeFloat(val);
+    writeByteToOS(this, FLOAT);
+    writeFloatToOS(this, val);
   }
 
   public boolean writePrimitive(Object val) throws IOException {
-    if (val == null) {
-      daos.writeByte(NULL);
+    if (val instanceof CharSequence) {
+      writeStr(this, (CharSequence) val);
       return true;
-    } else if (val instanceof Utf8CharSequence) {
-      writeUTF8Str((Utf8CharSequence) val);
+    } else if (val instanceof Integer) {
+      writeInt(this, (Integer) val);
       return true;
-    } else if (val instanceof CharSequence) {
-      writeStr((CharSequence) val);
+    } else if (val instanceof Long) {
+      writeLong(this, (Long) val);
+      return true;
+    } else if (val instanceof Float) {
+      writeFloat((Float) val);
       return true;
-    } else if (val instanceof Number) {
-
-      if (val instanceof Integer) {
-        writeInt(((Integer) val).intValue());
-        return true;
-      } else if (val instanceof Long) {
-        writeLong(((Long) val).longValue());
-        return true;
-      } else if (val instanceof Float) {
-        writeFloat(((Float) val).floatValue());
-        return true;
-      } else if (val instanceof Double) {
-        writeDouble(((Double) val).doubleValue());
-        return true;
-      } else if (val instanceof Byte) {
-        daos.writeByte(BYTE);
-        daos.writeByte(((Byte) val).intValue());
-        return true;
-      } else if (val instanceof Short) {
-        daos.writeByte(SHORT);
-        daos.writeShort(((Short) val).intValue());
-        return true;
-      }
-      return false;
-    } else if (val instanceof Date) {
-      daos.writeByte(DATE);
-      daos.writeLong(((Date) val).getTime());
+    } else if (val instanceof Date) {
+      writeByteToOS(this, DATE);
+      writeLongToOS(this, ((Date) val).getTime());
       return true;
     } else if (val instanceof Boolean) {
-      writeBoolean((Boolean) val);
+      writeBoolean(this, (Boolean) val);
+      return true;
+    }
+    return false;
+  }
+
+  private static boolean writeLessCommonPrimitive(JavaBinCodec javaBinCodec, Object val)
+      throws IOException {
+    if (val == null) {
+      writeByteToOS(javaBinCodec, NULL);
+      return true;
+    } else if (val instanceof Double) {
+      writeDouble(javaBinCodec, (Double) val);
+      return true;
+    } else if (val instanceof Short) {
+      writeByteToOS(javaBinCodec, SHORT);
+      writeShortToOS(javaBinCodec, ((Short) val).intValue());
+      return true;
+    } else if (val instanceof Byte) {
+      writeByteToOS(javaBinCodec, BYTE);
+      writeByteToOS(javaBinCodec, ((Byte) val).intValue());
+      return true;
+    } else if (val instanceof byte[]) {
-      writeByteArray((byte[]) val, 0, ((byte[]) val).length);
+      writeByteArray(javaBinCodec, (byte[]) val, 0, ((byte[]) val).length);
       return true;
     } else if (val instanceof ByteBuffer) {
-      ByteBuffer buf = (ByteBuffer) val;
-      writeByteArray(buf.array(), buf.arrayOffset() + buf.position(),buf.limit() - buf.position());
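+      // assumes an array-backed (heap) buffer, as the previous implementation did;
+      // arrayOffset() + position() selects just the buffer's live window rather than
+      // the whole backing array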
+ ByteBuffer buffer = (ByteBuffer) val; + writeByteArray( + javaBinCodec, + buffer.array(), + buffer.arrayOffset() + buffer.position(), + buffer.limit() - buffer.position()); return true; } else if (val == END_OBJ) { - writeTag(END); + writeTag(javaBinCodec, END); return true; } return false; } - protected void writeBoolean(boolean val) throws IOException { - if (val) daos.writeByte(BOOL_TRUE); - else daos.writeByte(BOOL_FALSE); + protected static void writeBoolean(JavaBinCodec javaBinCodec, boolean val) throws IOException { + if (val) writeByteToOS(javaBinCodec, BOOL_TRUE); + else writeByteToOS(javaBinCodec, BOOL_FALSE); } - protected void writeDouble(double val) throws IOException { - daos.writeByte(DOUBLE); - daos.writeDouble(val); + protected static void writeDouble(JavaBinCodec javaBinCodec, double val) throws IOException { + writeByteToOS(javaBinCodec, DOUBLE); + writeDoubleToOS(javaBinCodec, val); } - - public void writeMap(Map val) throws IOException { - writeTag(MAP, val.size()); + public static void writeMap(JavaBinCodec javaBinCodec, Map val) throws IOException { + writeTag(javaBinCodec, MAP, val.size()); if (val instanceof MapWriter) { - ((MapWriter) val).writeMap(ew); + ((MapWriter) val).writeMap(javaBinCodec.ew); return; } - for (Map.Entry entry : val.entrySet()) { + for (Map.Entry entry : val.entrySet()) { Object key = entry.getKey(); if (key instanceof String) { - writeExternString((String) key); + javaBinCodec.writeExternString((CharSequence) key); } else { - writeVal(key); + writeVal(javaBinCodec, key); } - writeVal(entry.getValue()); + writeVal(javaBinCodec, entry.getValue()); } } - - public int readSize(DataInputInputStream in) throws IOException { - int sz = tagByte & 0x1f; - if (sz == 0x1f) sz += readVInt(in); + public static int readSize(JavaBinCodec javaBinCodec) throws IOException { + int sz = javaBinCodec.tagByte & 0x1f; + if (sz == 0x1f) sz += readVInt(javaBinCodec); return sz; } - /** - * Special method for variable length int (copied from lucene). Usually used for writing the length of a - * collection/array/map In most of the cases the length can be represented in one byte (length < 127) so it saves 3 - * bytes/object + * Special method for variable length int (copied from lucene). Usually used for writing the + * length of a collection/array/map In most of the cases the length can be represented in one byte + * (length < 127) so it saves 3 bytes/object * * @throws IOException If there is a low-level I/O error. */ - public static void writeVInt(int i, FastOutputStream out) throws IOException { + private static void writeVInt(JavaBinCodec javaBinCodec, int i) throws IOException { + // i = encodeZigZag32(i); while ((i & ~0x7F) != 0) { - out.writeByte((byte) ((i & 0x7f) | 0x80)); + writeByteToOS(javaBinCodec, (byte) ((i & 0x7f) | 0x80)); i >>>= 7; } - out.writeByte((byte) i); + writeByteToOS(javaBinCodec, (byte) i); } /** - * The counterpart for {@link #writeVInt(int, FastOutputStream)} + * The counterpart for {@link JavaBinCodec#writeVInt(JavaBinCodec, int)} * * @throws IOException If there is a low-level I/O error. 
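+   *
+   * <p>Worked example: {@code writeVInt} encodes 300 as the two bytes {@code 0xAC 0x02}
+   * ((300 & 0x7F) | 0x80 = 0xAC, then 300 >>> 7 = 2); decoding reads 0xAC & 0x7F = 44, sees
+   * the continuation bit set, then ORs in 0x02 << 7 = 256, giving back 300.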
   */
-  public static int readVInt(DataInputInputStream in) throws IOException {
-    byte b = in.readByte();
+  protected static int readVInt(JavaBinCodec javaBinCodec) throws IOException {
+    byte b = readByte(javaBinCodec);
     int i = b & 0x7F;
     for (int shift = 7; (b & 0x80) != 0; shift += 7) {
-      b = in.readByte();
+      b = readByte(javaBinCodec);
       i |= (b & 0x7F) << shift;
     }
+    // return decodeZigZag32(i);
     return i;
   }
 
-
-  public static void writeVLong(long i, FastOutputStream out) throws IOException {
+  private static void writeVLong(JavaBinCodec javaBinCodec, long i) throws IOException {
     while ((i & ~0x7F) != 0) {
-      out.writeByte((byte) ((i & 0x7f) | 0x80));
+      writeByteToOS(javaBinCodec, (byte) ((i & 0x7f) | 0x80));
       i >>>= 7;
     }
-    out.writeByte((byte) i);
+    writeByteToOS(javaBinCodec, (byte) i);
   }
 
-  public static long readVLong(DataInputInputStream in) throws IOException {
-    byte b = in.readByte();
+  private static long readVLong(JavaBinCodec javaBinCodec) throws IOException {
+    byte b = readByte(javaBinCodec);
     long i = b & 0x7F;
     for (int shift = 7; (b & 0x80) != 0; shift += 7) {
-      b = in.readByte();
+      b = readByte(javaBinCodec);
       i |= (long) (b & 0x7F) << shift;
     }
     return i;
   }
 
-  private int stringsCount = 0;
-  private Map<String, Integer> stringsMap;
-  private List<CharSequence> stringsList;
+  // zigzag avoids the always-worst-case penalty of negatives - maybe for version 3?
+  private static int encodeZigZag32(int n) {
+    // Note: the right-shift must be arithmetic
+    return (n << 1) ^ (n >> 31);
+  }
+
+  private static int decodeZigZag32(int n) {
+    return (n >>> 1) ^ -(n & 1);
+  }
+
+  // the cost of zigzag encode/decode for long is a measurably larger hit than for int
+
+  //  private static long encodeZigZag64(final long n) {
+  //    // Note: the right-shift must be arithmetic
+  //    return (n << 1) ^ (n >> 63);
+  //  }
+  //
+  //  private static long decodeZigZag64(final long n) {
+  //    return (n >>> 1) ^ -(n & 1);
+  //  }
 
-  public void writeExternString(CharSequence s) throws IOException {
-    if (s == null) {
-      writeTag(NULL);
+  public void writeExternString(CharSequence str) throws IOException {
+    if (str == null) {
+      writeTag(this, NULL);
       return;
     }
-    Integer idx = stringsMap == null ?
null : stringsMap.get(s); - if (idx == null) idx = 0; - writeTag(EXTERN_STRING, idx); - if (idx == 0) { - writeStr(s); - if (stringsMap == null) stringsMap = new HashMap<>(); - stringsMap.put(s.toString(), ++stringsCount); + int idx = 0; + if (stringsMap != null) { + + int[] idxArr = stringsMap.get(str); + if (idxArr != null) { + idx = idxArr[0]; + } } + writeTag(this, EXTERN_STRING, idx); + if (idx == 0) { + writeStr(this, str); + if (stringsMap == null) { + stringsMap = new HashMap<>(32, 0.75f); + } + stringsMap.put(str, new int[] {++stringsCount}); + } } - public CharSequence readExternString(DataInputInputStream fis) throws IOException { - int idx = readSize(fis); - if (idx != 0) {// idx != 0 is the index of the extern string + public CharSequence readExternString(JavaBinCodec javaBinCodec) throws IOException { + int idx = readSize(javaBinCodec); + if (idx != 0) { // idx > 0 is the index of the extern string return stringsList.get(idx - 1); - } else {// idx == 0 means it has a string value - tagByte = fis.readByte(); - CharSequence s = readStr(fis, stringCache, false); - if (s != null) s = s.toString(); - if (stringsList == null) stringsList = new ArrayList<>(); - stringsList.add(s); - return s; + } else { // idx == 0 means it has a string value + tagByte = readByte(javaBinCodec); + CharSequence str = readStr(this, stringCache, false).toString(); + if (stringsList == null) stringsList = new ObjectArrayList<>(32); + stringsList.add(str); + return str; } } - - public void writeUTF8Str(Utf8CharSequence utf8) throws IOException { - writeTag(STR, utf8.size()); - daos.writeUtf8CharSeq(utf8); - } - - public long getTotalBytesWritten() { - if (daos != null) { - return daos.written; - } - return 0; + public static void writeUTF8Str(JavaBinCodec javaBinCodec, Utf8CharSequence utf8) + throws IOException { + writeTag(javaBinCodec, STR, utf8.size()); + writeUtf8CharSeqToOS(javaBinCodec, utf8); } /** * Allows extension of {@link JavaBinCodec} to support serialization of arbitrary data types. - *

- * Implementors of this interface write a method to serialize a given object using an existing {@link JavaBinCodec} + * + *

Implementors of this interface write a method to serialize a given object using an existing
+ * {@link JavaBinCodec}
  */
 public interface ObjectResolver {
   /**
-   * Examine and attempt to serialize the given object, using a {@link JavaBinCodec} to write it to a stream.
+   * Examine and attempt to serialize the given object, using a {@link JavaBinCodec} to write it
+   * to a stream.
    *
-   * @param o the object that the caller wants serialized.
+   * @param o the object to serialize.
    * @param codec used to actually serialize {@code o}.
-   * @return the object {@code o} itself if it could not be serialized, or {@code null} if the whole object was successfully serialized.
+   * @return the object {@code o} itself if it could not be serialized, or {@code null} if the
+   *     whole object was successfully serialized.
    * @see JavaBinCodec
    */
   Object resolve(Object o, JavaBinCodec codec) throws IOException;
@@ -1246,10 +1467,10 @@ public interface ObjectResolver {
 
 public interface WritableDocFields {
   boolean isWritable(String name);
+
   boolean wantsAllFields();
 }
 
-
 public static class StringCache {
   private final Cache<StringBytes, String> cache;
 
@@ -1260,11 +1481,19 @@ public StringCache(Cache<StringBytes, String> cache) {
   public String get(StringBytes b) {
     String result = cache.get(b);
     if (result == null) {
-      //make a copy because the buffer received may be changed later by the caller
-      StringBytes copy = new StringBytes(Arrays.copyOfRange(b.bytes, b.offset, b.offset + b.length), 0, b.length);
-      CharArr arr = new CharArr();
-      ByteUtils.UTF8toUTF16(b.bytes, b.offset, b.length, arr);
-      result = arr.toString();
+      // make a copy because the buffer received may be changed later by the caller
+      StringBytes copy =
+          new StringBytes(
+              Arrays.copyOfRange(b.bytes, b.offset, b.offset + b.length), 0, b.length);
+
+      if (b.length < MAX_SZ_BEFORE_STRING_UTF8_DECODE_OVER_SOLR) {
+        CharArr arr = new CharArr(64);
+        ByteUtils.UTF8toUTF16(b.bytes, b.offset, b.length, arr);
+        result = arr.toString();
+      } else {
+        result = new String(b.bytes, b.offset, b.length, StandardCharsets.UTF_8);
+      }
+
       cache.put(copy, result);
     }
     return result;
@@ -1273,8 +1502,470 @@ public String get(StringBytes b) {
 
   @Override
   public void close() throws IOException {
-    if (daos != null) {
-      daos.flushBuffer();
+    if (out != null) {
+      flushBufferOS(this);
+    }
+  }
+
+  /*
+  We do a low level optimization here. Low level code that does IO, especially code that writes
+  bytes, is very impacted by any additional overhead. In the best case, things get inlined.
+  However, the best case only goes so far. Inlining has cut-off caps, limitations, and reversals when assumptions get invalidated, etc.
+
+  Via measurement and inspection, we find this and related existing classes have not been entirely compiler friendly.
+
+  To further help the situation, we pull the OutputStream wrapper layer class into JavaBinCodec itself - allowing no additional layers
+  of inheritance, nor the opportunity for the class to be co-opted for other tasks or uses, thus ensuring efficient code and monomorphic call sites.
+
+  Looking at how the output methods should be dispatched to, the rule of thumb is that interfaces are slowest to dispatch on, abstract classes are faster,
+  isolated classes are obviously a decent case, but at the top is the simple jump of a static method call.
+  The single-byte methods benefit the most.
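+
+  As a concrete illustration of the shape this gives call sites (both forms appear in this patch):
+
+    daos.writeByte(tag);               // before: virtual dispatch through the wrapper stream
+    writeByteToOS(javaBinCodec, tag);  // after: a static, monomorphic call within this class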
+  */
+
+  // passing null for the buffer writes straight through to the stream - if writing to something
+  // like a ByteArrayOutputStream, intermediate buffering should not be used
+  private void initOutStream(OutputStream sink, byte[] tempBuffer) {
+    out = sink;
+    buf = tempBuffer;
+    pos = 0;
+    if (sink instanceof FastOutputStream) {
+      isFastOutputStream = true;
+      if (tempBuffer != null) {
+        throw new IllegalArgumentException(
+            "FastOutputStream cannot be combined with a JavaBinCodec buffer as it will already buffer - pass null to write to the stream directly");
+      }
+    } else {
+      isFastOutputStream = false;
+    }
+  }
+
+  private void initOutStream(OutputStream w) {
+    // match jetty output buffer
+    initOutStream(w, new byte[BUFFER_SZ]);
+  }
+
+  private static void writeToOS(JavaBinCodec javaBinCodec, byte[] b) throws IOException {
+    if (javaBinCodec.buf == null) {
+      javaBinCodec.out.write(b, 0, b.length);
+      return;
     }
-  }
+    writeToOS(javaBinCodec, b, 0, b.length);
   }
+
+  private static void writeToOS(JavaBinCodec javaBinCodec, byte b) throws IOException {
+    if (javaBinCodec.buf == null) {
+      javaBinCodec.out.write(b);
+      return;
+    }
+
+    if (javaBinCodec.pos >= javaBinCodec.buf.length) {
+      flushOS(javaBinCodec, javaBinCodec.buf, 0, javaBinCodec.buf.length);
+      javaBinCodec.pos = 0;
+    }
+    javaBinCodec.buf[javaBinCodec.pos++] = b;
+  }
+
+  private static void writeToOS(JavaBinCodec javaBinCodec, byte[] arr, int off, int len)
+      throws IOException {
+    if (javaBinCodec.buf == null) {
+      javaBinCodec.out.write(arr, off, len);
+      return;
+    }
+
+    for (; ; ) {
+      int space = javaBinCodec.buf.length - javaBinCodec.pos;
+
+      if (len <= space) {
+        System.arraycopy(arr, off, javaBinCodec.buf, javaBinCodec.pos, len);
+        javaBinCodec.pos += len;
+        return;
+      } else if (len > javaBinCodec.buf.length) {
+        if (javaBinCodec.pos > 0) {
+          flushOS(javaBinCodec, javaBinCodec.buf, 0, javaBinCodec.pos); // flush
+          javaBinCodec.pos = 0;
+        }
+        // don't buffer, just write to sink
+        flushOS(javaBinCodec, arr, off, len);
+        return;
+      }
+
+      // the data is too big to fit in the free space, but
+      // not big enough to warrant writing on its own.
+      // write whatever we can fit, then flush and iterate.
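+      // e.g. an 8k buffer already holding 6k (2k free) receiving a 5k write: 2k is copied
+      // here, the full 8k buffer is flushed below, and the remaining 3k fits on the next pass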
+ + System.arraycopy(arr, off, javaBinCodec.buf, javaBinCodec.pos, space); + flushOS(javaBinCodec, javaBinCodec.buf, 0, javaBinCodec.buf.length); + javaBinCodec.pos = 0; + off += space; + len -= space; + } + } + + protected static void writeByteToOS(JavaBinCodec javaBinCodec, int b) throws IOException { + if (javaBinCodec.buf == null) { + javaBinCodec.out.write((byte) b); + return; + } + + if (javaBinCodec.pos >= javaBinCodec.buf.length) { + flushOS(javaBinCodec, javaBinCodec.buf, 0, javaBinCodec.buf.length); + javaBinCodec.pos = 0; + } + javaBinCodec.buf[javaBinCodec.pos++] = (byte) b; + } + + private static void writeShortToOS(JavaBinCodec javaBinCodec, int v) throws IOException { + writeToOS(javaBinCodec, (byte) (v >>> 8)); + writeToOS(javaBinCodec, (byte) v); + } + + private static void writeIntToOS(JavaBinCodec javaBinCodec, int v) throws IOException { + if (javaBinCodec.buf == null) { + javaBinCodec.out.write((byte) (v >>> 24)); + javaBinCodec.out.write((byte) (v >>> 16)); + javaBinCodec.out.write((byte) (v >>> 8)); + javaBinCodec.out.write((byte) (v)); + javaBinCodec.pos += 4; + return; + } + + if (4 > javaBinCodec.buf.length - javaBinCodec.pos && javaBinCodec.pos > 0) { + flushOS(javaBinCodec, javaBinCodec.buf, 0, javaBinCodec.pos); + javaBinCodec.pos = 0; + } + + //intFromByteArrayVarHandle.set(javaBinCodec.buf, javaBinCodec.pos, v); // can't - odd + + javaBinCodec.buf[javaBinCodec.pos] = (byte) (v >>> 24); + javaBinCodec.buf[javaBinCodec.pos + 1] = (byte) (v >>> 16); + javaBinCodec.buf[javaBinCodec.pos + 2] = (byte) (v >>> 8); + javaBinCodec.buf[javaBinCodec.pos + 3] = (byte) (v); + javaBinCodec.pos += 4; + } + + protected static void writeLongToOS(JavaBinCodec javaBinCodec, long v) throws IOException { + if (javaBinCodec.buf == null) { + javaBinCodec.out.write((byte) (v >>> 56)); + javaBinCodec.out.write((byte) (v >>> 48)); + javaBinCodec.out.write((byte) (v >>> 40)); + javaBinCodec.out.write((byte) (v >>> 32)); + javaBinCodec.out.write((byte) (v >>> 24)); + javaBinCodec.out.write((byte) (v >>> 16)); + javaBinCodec.out.write((byte) (v >>> 8)); + javaBinCodec.out.write((byte) (v)); + javaBinCodec.pos += 8; + return; + } + + if (8 > (javaBinCodec.buf.length - javaBinCodec.pos) && javaBinCodec.pos > 0) { + flushOS(javaBinCodec, javaBinCodec.buf, 0, javaBinCodec.pos); + javaBinCodec.pos = 0; + } + // longFromByteArrayVarHandle.set(javaBinCodec.buf, javaBinCodec.pos, v); // can't - odd + javaBinCodec.buf[javaBinCodec.pos] = (byte) (v >>> 56); + javaBinCodec.buf[javaBinCodec.pos + 1] = (byte) (v >>> 48); + javaBinCodec.buf[javaBinCodec.pos + 2] = (byte) (v >>> 40); + javaBinCodec.buf[javaBinCodec.pos + 3] = (byte) (v >>> 32); + javaBinCodec.buf[javaBinCodec.pos + 4] = (byte) (v >>> 24); + javaBinCodec.buf[javaBinCodec.pos + 5] = (byte) (v >>> 16); + javaBinCodec.buf[javaBinCodec.pos + 6] = (byte) (v >>> 8); + javaBinCodec.buf[javaBinCodec.pos + 7] = (byte) (v); + javaBinCodec.pos += 8; + } + + private static void writeFloatToOS(JavaBinCodec javaBinCodec, float v) throws IOException { + writeIntToOS(javaBinCodec, Float.floatToRawIntBits(v)); + } + + private static void writeDoubleToOS(JavaBinCodec javaBinCodec, double v) throws IOException { + writeLongToOS(javaBinCodec, Double.doubleToRawLongBits(v)); + } + + /** Only flushes the buffer of the FastOutputStream, not that of the underlying stream. 
*/ + private static void flushBufferOS(JavaBinCodec javaBinCodec) throws IOException { + if (javaBinCodec.buf == null) { + if (javaBinCodec.isFastOutputStream) { + ((FastOutputStream) javaBinCodec.out).flushBuffer(); + } + return; + } + + if (javaBinCodec.pos > 0) { + flushOS(javaBinCodec, javaBinCodec.buf, 0, javaBinCodec.pos); + javaBinCodec.pos = 0; + } + } + + /** All writes to the sink will go through this method */ + private static void flushOS(JavaBinCodec javaBinCodec, byte[] buf, int offset, int len) + throws IOException { + javaBinCodec.out.write(buf, offset, len); + } + + /** Copies a {@link Utf8CharSequence} without making extra copies */ + private static void writeUtf8CharSeqToOS(JavaBinCodec javaBinCodec, Utf8CharSequence utf8) + throws IOException { + if (javaBinCodec.buf == null) { + utf8.write(javaBinCodec.out); + return; + } + + int start = 0; + int totalWritten = 0; + while (true) { + int size = utf8.size(); + if (totalWritten >= size) break; + if (javaBinCodec.pos >= javaBinCodec.buf.length) flushBufferOS(javaBinCodec); + int sz = utf8.write(start, javaBinCodec.buf, javaBinCodec.pos); + javaBinCodec.pos += sz; + totalWritten += sz; + start += sz; + } + } + + static boolean readDirectUtf8( + JavaBinCodec javaBinCodec, ByteArrayUtf8CharSequence utf8, int len) { + if (javaBinCodec.buf == null) { + return ((FastInputStream) javaBinCodec.in).readDirectUtf8(utf8, len); + } + if (javaBinCodec.in != null || javaBinCodec.end < javaBinCodec.pos + len) return false; + utf8.reset(javaBinCodec.buf, javaBinCodec.pos, len, null); + javaBinCodec.pos = javaBinCodec.pos + len; + return true; + } + + static ByteBuffer readDirectByteBuffer(int sz) { + return null; + } + + public static int read(JavaBinCodec javaBinCodec) throws IOException { + if (javaBinCodec.buf == null) { + return javaBinCodec.in.read(); + } + if (javaBinCodec.pos >= javaBinCodec.end) { + // this will set end to -1 at EOF + int result; + if (javaBinCodec.in == null) { + result = -1; + } else { + result = javaBinCodec.in.read(javaBinCodec.buf, 0, javaBinCodec.buf.length); + } + javaBinCodec.end = result; + javaBinCodec.pos = 0; + if (javaBinCodec.pos >= javaBinCodec.end) return -1; + } + return javaBinCodec.buf[javaBinCodec.pos++] & 0xff; + } + + private static int readUnsignedByte(JavaBinCodec javaBinCodec) throws IOException { + if (javaBinCodec.buf == null) { + return ((FastInputStream) javaBinCodec.in).readUnsignedByte(); + } + if (javaBinCodec.pos >= javaBinCodec.end) { + // this will set end to -1 at EOF + int result; + if (javaBinCodec.in == null) { + result = -1; + } else { + result = javaBinCodec.in.read(javaBinCodec.buf, 0, javaBinCodec.buf.length); + } + javaBinCodec.end = result; + javaBinCodec.pos = 0; + if (javaBinCodec.pos >= javaBinCodec.end) { + throw new EOFException(); + } + } + return javaBinCodec.buf[javaBinCodec.pos++] & 0xff; + } + + public static int read(JavaBinCodec javaBinCodec, byte[] b, int off, int len) throws IOException { + if (javaBinCodec.buf == null) { + return javaBinCodec.in.read(b, off, len); + } + int r = 0; // number of bytes we have read + + // first read from our buffer; + if (javaBinCodec.end - javaBinCodec.pos > 0) { + r = Math.min(javaBinCodec.end - javaBinCodec.pos, len); + System.arraycopy(javaBinCodec.buf, javaBinCodec.pos, b, off, r); + javaBinCodec.pos += r; + } + + if (r == len) return r; + + // refill + // amount left to read is >= buffer size + if (len - r >= javaBinCodec.buf.length) { + int ret; + if (javaBinCodec.in == null) { + ret = -1; + } else { + ret = 
javaBinCodec.in.read(b, off + r, len - r);
+      }
+      if (ret >= 0) {
+        r += ret;
+        return r;
+      } else {
+        // negative return code
+        return r > 0 ? r : -1;
+      }
+    }
+
+    // this will set end to -1 at EOF
+    int result;
+    if (javaBinCodec.in == null) {
+      result = -1;
+    } else {
+      result = javaBinCodec.in.read(javaBinCodec.buf, 0, javaBinCodec.buf.length);
+    }
+    javaBinCodec.end = result;
+    javaBinCodec.pos = 0;
+
+    // read rest from our buffer
+    if (javaBinCodec.end - javaBinCodec.pos > 0) {
+      int toRead = Math.min(javaBinCodec.end - javaBinCodec.pos, len - r);
+      System.arraycopy(javaBinCodec.buf, javaBinCodec.pos, b, off + r, toRead);
+      javaBinCodec.pos += toRead;
+      r += toRead;
+      return r;
+    }
+
+    return r > 0 ? r : -1;
+  }
+
+  public void closeInputStream() throws IOException {
+    in.close();
+  }
+
+  public static void readFully(JavaBinCodec javaBinCodec, byte[] b, int off, int len)
+      throws IOException {
+    if (javaBinCodec.buf == null) {
+      ((FastInputStream) javaBinCodec.in).readFully(b, off, len);
+      return;
+    }
+    while (len > 0) {
+      int ret = read(javaBinCodec, b, off, len);
+      if (ret == -1) {
+        // guard against hitting EOF mid-read; without this the loop would never terminate
+        throw new EOFException();
+      }
+      off += ret;
+      len -= ret;
+    }
+  }
+
+  public static byte readByte(JavaBinCodec javaBinCodec) throws IOException {
+    if (javaBinCodec.buf == null) {
+      return ((FastInputStream) javaBinCodec.in).readByte();
+    }
+    if (javaBinCodec.pos >= javaBinCodec.end) {
+      // this will set end to -1 at EOF
+      int result;
+      if (javaBinCodec.in == null) {
+        result = -1;
+      } else {
+        result = javaBinCodec.in.read(javaBinCodec.buf, 0, javaBinCodec.buf.length);
+      }
+      javaBinCodec.end = result;
+      javaBinCodec.pos = 0;
+      if (javaBinCodec.pos >= javaBinCodec.end) throw new EOFException();
+    }
+    return javaBinCodec.buf[javaBinCodec.pos++];
+  }
+
+  protected short readShort() throws IOException {
+    return (short) ((readUnsignedByte(this) << 8) | readUnsignedByte(this));
+  }
+
+  private static final VarHandle intFromByteArrayVarHandle =
+      MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.BIG_ENDIAN);
+  private static final VarHandle longFromByteArrayVarHandle =
+      MethodHandles.byteArrayViewVarHandle(long[].class, ByteOrder.BIG_ENDIAN);
+
+  public static int readInt(JavaBinCodec javaBinCodec) throws IOException {
+    if (javaBinCodec.end - javaBinCodec.pos >= 4) {
+      int val = (int) intFromByteArrayVarHandle.get(javaBinCodec.buf, javaBinCodec.pos);
+      javaBinCodec.pos += 4;
+      return val;
+    }
+    return ((readUnsignedByte(javaBinCodec) << 24)
+        | (readUnsignedByte(javaBinCodec) << 16)
+        | (readUnsignedByte(javaBinCodec) << 8)
+        | readUnsignedByte(javaBinCodec));
+  }
+
+  public static long readLong(JavaBinCodec javaBinCodec) throws IOException {
+    if (javaBinCodec.end - javaBinCodec.pos >= 8) {
+      long val = (long) longFromByteArrayVarHandle.get(javaBinCodec.buf, javaBinCodec.pos);
+      javaBinCodec.pos += 8;
+      return val;
+    }
+    return (((long) readUnsignedByte(javaBinCodec)) << 56)
+        | (((long) readUnsignedByte(javaBinCodec)) << 48)
+        | (((long) readUnsignedByte(javaBinCodec)) << 40)
+        | (((long) readUnsignedByte(javaBinCodec)) << 32)
+        | (((long) readUnsignedByte(javaBinCodec)) << 24)
+        | (readUnsignedByte(javaBinCodec) << 16)
+        | (readUnsignedByte(javaBinCodec) << 8)
+        | (readUnsignedByte(javaBinCodec));
+  }
+
+  public float readFloat() throws IOException {
+    return Float.intBitsToFloat(readInt(this));
+  }
+
+  public double readDouble() throws IOException {
+    return Double.longBitsToDouble(readLong(this));
+  }
+
+  private static class ByteArrayUtf8CharSequenceStringFunction
+      implements Function<ByteArrayUtf8CharSequence, String> {
+
+    @Override
+    public String
apply(ByteArrayUtf8CharSequence butf8cs) { + return new String(butf8cs.buf, butf8cs.offset, butf8cs.length, StandardCharsets.UTF_8); + } + } + + public static class InvalidEncodingException extends IOException { + public InvalidEncodingException(String s) { + super(s); + } + } + + private class ObjectItemWriter implements ItemWriter { + + @Override + public ItemWriter add(Object o) throws IOException { + writeVal(JavaBinCodec.this, o); + return this; + } + + @Override + public ItemWriter add(int v) throws IOException { + writeInt(JavaBinCodec.this, v); + return this; + } + + @Override + public ItemWriter add(long v) throws IOException { + writeLong(JavaBinCodec.this, v); + return this; + } + + @Override + public ItemWriter add(float v) throws IOException { + writeFloat(v); + return this; + } + + @Override + public ItemWriter add(double v) throws IOException { + writeDouble(JavaBinCodec.this, v); + return this; + } + + @Override + public ItemWriter add(boolean v) throws IOException { + writeBoolean(JavaBinCodec.this, v); + return this; + } + } + } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodecOld.java b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodecOld.java new file mode 100644 index 00000000000..e3717170015 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodecOld.java @@ -0,0 +1,1258 @@ +///* +// * Licensed to the Apache Software Foundation (ASF) under one or more +// * contributor license agreements. See the NOTICE file distributed with +// * this work for additional information regarding copyright ownership. +// * The ASF licenses this file to You under the Apache License, Version 2.0 +// * (the "License"); you may not use this file except in compliance with +// * the License. You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. 
+// */ +//package org.apache.solr.common.util; +// +//import java.io.IOException; +//import java.io.InputStream; +//import java.io.OutputStream; +//import java.lang.invoke.MethodHandles; +//import java.nio.ByteBuffer; +//import java.nio.file.Path; +//import java.util.ArrayList; +//import java.util.Arrays; +//import java.util.Collection; +//import java.util.Date; +//import java.util.HashMap; +//import java.util.Iterator; +//import java.util.LinkedHashMap; +//import java.util.List; +//import java.util.Map; +//import java.util.Map.Entry; +//import java.util.concurrent.atomic.AtomicBoolean; +//import java.util.concurrent.atomic.AtomicInteger; +//import java.util.concurrent.atomic.AtomicLong; +//import java.util.function.BiConsumer; +//import java.util.function.Function; +//import java.util.function.Predicate; +// +//import org.apache.solr.common.ConditionalKeyMapWriter; +//import org.apache.solr.common.EnumFieldValue; +//import org.apache.solr.common.IteratorWriter; +//import org.apache.solr.common.IteratorWriter.ItemWriter; +//import org.apache.solr.common.MapSerializable; +//import org.apache.solr.common.MapWriter; +//import org.apache.solr.common.SolrDocument; +//import org.apache.solr.common.SolrDocumentList; +//import org.apache.solr.common.SolrInputDocument; +//import org.apache.solr.common.SolrInputField; +//import org.apache.solr.common.params.CommonParams; +//import org.noggit.CharArr; +//import org.slf4j.Logger; +//import org.slf4j.LoggerFactory; +// +//import static org.apache.solr.common.util.ByteArrayUtf8CharSequence.convertCharSeq; +// +///** +// * Defines a space-efficient serialization/deserialization format for transferring data. +// *

+// * JavaBinCodec has built-in support for many commonly used types. This includes primitive types (boolean, byte, +// * short, double, int, long, float), common Java containers/utilities (Date, Map, Collection, Iterator, String, +// * Object[], byte[]), and frequently used Solr types ({@link NamedList}, {@link SolrDocument}, +// * {@link SolrDocumentList}). Each of the above types has a pair of associated methods which read and write +// * that type to a stream. +// *

+// * Classes that aren't supported natively can still be serialized/deserialized by providing +// * an {@link JavaBinCodec.ObjectResolver} object that knows how to work with the unsupported class. +// * This allows {@link JavaBinCodec} to be used to marshall/unmarshall arbitrary content. +// *

+// * NOTE -- {@link JavaBinCodec} instances cannot be reused for more than one marshall or unmarshall operation. +// */ +//public class JavaBinCodecOld extends JavaBinCodec { +// +// private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); +// private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean(); +// +// public static final byte +// NULL = 0, +// BOOL_TRUE = 1, +// BOOL_FALSE = 2, +// BYTE = 3, +// SHORT = 4, +// DOUBLE = 5, +// INT = 6, +// LONG = 7, +// FLOAT = 8, +// DATE = 9, +// MAP = 10, +// SOLRDOC = 11, +// SOLRDOCLST = 12, +// BYTEARR = 13, +// ITERATOR = 14, +// /** +// * this is a special tag signals an end. No value is associated with it +// */ +// END = 15, +// +// SOLRINPUTDOC = 16, +// MAP_ENTRY_ITER = 17, +// ENUM_FIELD_VALUE = 18, +// MAP_ENTRY = 19, +// UUID = 20, // This is reserved to be used only in LogCodec +// // types that combine tag + length (or other info) in a single byte +// TAG_AND_LEN = (byte) (1 << 5), +// STR = (byte) (1 << 5), +// SINT = (byte) (2 << 5), +// SLONG = (byte) (3 << 5), +// ARR = (byte) (4 << 5), // +// ORDERED_MAP = (byte) (5 << 5), // SimpleOrderedMap (a NamedList subclass, and more common) +// NAMED_LST = (byte) (6 << 5), // NamedList +// EXTERN_STRING = (byte) (7 << 5); +// +// private static final int MAX_UTF8_SIZE_FOR_ARRAY_GROW_STRATEGY = 65536; +// +// +// private static byte VERSION = 2; +// private final ObjectResolver resolver; +// protected FastOutputStream daos; +// private StringCache stringCache; +// private WritableDocFields writableDocFields; +// private boolean alreadyMarshalled; +// private boolean alreadyUnmarshalled; +// protected boolean readStringAsCharSeq = false; +// +// public JavaBinCodecOld() { +// resolver =null; +// writableDocFields =null; +// } +// +// public JavaBinCodecOld setReadStringAsCharSeq(boolean flag) { +// readStringAsCharSeq = flag; +// return this; +// } +// +// /** +// * Use this to use this as a PushWriter. 
ensure that close() is called explicitly after use +// * +// * @param os The output stream +// */ +// public JavaBinCodecOld(OutputStream os, ObjectResolver resolver) throws IOException { +// this.resolver = resolver; +// initWrite(os); +// } +// +// public JavaBinCodecOld(ObjectResolver resolver) { +// this(resolver, null); +// } +// public JavaBinCodecOld setWritableDocFields(WritableDocFields writableDocFields){ +// this.writableDocFields = writableDocFields; +// return this; +// +// } +// +// public JavaBinCodecOld(ObjectResolver resolver, StringCache stringCache) { +// this.resolver = resolver; +// this.stringCache = stringCache; +// } +// +// public ObjectResolver getResolver() { +// return resolver; +// } +// +// public void marshal(Object nl, OutputStream os) throws IOException { +// try { +// initWrite(os); +// writeVal(nl); +// } finally { +// alreadyMarshalled = true; +// daos.flushBuffer(); +// } +// } +// +// protected void initWrite(OutputStream os) throws IOException { +// assert !alreadyMarshalled; +// init(FastOutputStream.wrap(os)); +// daos.writeByte(VERSION); +// } +// +// +// /** expert: sets a new output stream */ +// public void init(FastOutputStream os) { +// daos = os; +// } +// +// byte version; +// +// public Object unmarshal(byte[] buf) throws IOException { +// FastInputStream dis = initRead(buf); +// return readVal(dis); +// } +// public Object unmarshal(InputStream is) throws IOException { +// FastInputStream dis = initRead(is); +// return readVal(dis); +// } +// +// protected FastInputStream initRead(InputStream is) throws IOException { +// assert !alreadyUnmarshalled; +// FastInputStream dis = FastInputStream.wrap(is); +// return _init(dis); +// } +// protected FastInputStream initRead(byte[] buf) throws IOException { +// assert !alreadyUnmarshalled; +// FastInputStream dis = new FastInputStream(null, buf, 0, buf.length); +// return _init(dis); +// } +// +// protected FastInputStream _init(FastInputStream dis) throws IOException { +// version = dis.readByte(); +// if (version != VERSION) { +// throw new RuntimeException("Invalid version (expected " + VERSION + +// ", but " + version + ") or the data in not in 'javabin' format"); +// } +// +// alreadyUnmarshalled = true; +// return dis; +// } +// +// +// public SimpleOrderedMap readOrderedMap(DataInputInputStream dis) throws IOException { +// int sz = readSize(dis); +// SimpleOrderedMap nl = new SimpleOrderedMap<>(sz); +// for (int i = 0; i < sz; i++) { +// String name = (String) readVal(dis); +// Object val = readVal(dis); +// nl.add(name, val); +// } +// return nl; +// } +// +// public NamedList readNamedList(DataInputInputStream dis) throws IOException { +// int sz = readSize(dis); +// NamedList nl = new NamedList<>(sz); +// for (int i = 0; i < sz; i++) { +// String name = (String) readVal(dis); +// Object val = readVal(dis); +// nl.add(name, val); +// } +// return nl; +// } +// +// public void writeNamedList(NamedList nl) throws IOException { +// writeTag(nl instanceof SimpleOrderedMap ? 
ORDERED_MAP : NAMED_LST, nl.size()); +// for (int i = 0; i < nl.size(); i++) { +// String name = nl.getName(i); +// writeExternString(name); +// Object val = nl.getVal(i); +// writeVal(val); +// } +// } +// +// public void writeVal(Object val) throws IOException { +// if (writeKnownType(val)) { +// return; +// } else { +// ObjectResolver resolver = null; +// if(val instanceof ObjectResolver) { +// resolver = (ObjectResolver)val; +// } +// else { +// resolver = this.resolver; +// } +// if (resolver != null) { +// Object tmpVal = resolver.resolve(val, this); +// if (tmpVal == null) return; // null means the resolver took care of it fully +// if (writeKnownType(tmpVal)) return; +// } +// } +// // Fallback to do *something*. +// // note: if the user of this codec doesn't want this (e.g. UpdateLog) it can supply an ObjectResolver that does +// // something else like throw an exception. +// writeVal(val.getClass().getName() + ':' + val.toString()); +// } +// +// protected static final Object END_OBJ = new Object(); +// +// protected byte tagByte; +// +// public Object readVal(DataInputInputStream dis) throws IOException { +// tagByte = dis.readByte(); +// return readObject(dis); +// } +// +// protected Object readObject(DataInputInputStream dis) throws IOException { +// // if ((tagByte & 0xe0) == 0) { +// // if top 3 bits are clear, this is a normal tag +// +// // OK, try type + size in single byte +// switch (tagByte >>> 5) { +// case STR >>> 5: +// return readStr(dis, stringCache, readStringAsCharSeq); +// case SINT >>> 5: +// return readSmallInt(dis); +// case SLONG >>> 5: +// return readSmallLong(dis); +// case ARR >>> 5: +// return readArray(dis); +// case ORDERED_MAP >>> 5: +// return readOrderedMap(dis); +// case NAMED_LST >>> 5: +// return readNamedList(dis); +// case EXTERN_STRING >>> 5: +// return readExternString(dis); +// } +// +// switch (tagByte) { +// case NULL: +// return null; +// case DATE: +// return new Date(dis.readLong()); +// case INT: +// return dis.readInt(); +// case BOOL_TRUE: +// return Boolean.TRUE; +// case BOOL_FALSE: +// return Boolean.FALSE; +// case FLOAT: +// return dis.readFloat(); +// case DOUBLE: +// return dis.readDouble(); +// case LONG: +// return dis.readLong(); +// case BYTE: +// return dis.readByte(); +// case SHORT: +// return dis.readShort(); +// case MAP: +// return readMap(dis); +// case SOLRDOC: +// return readSolrDocument(dis); +// case SOLRDOCLST: +// return readSolrDocumentList(dis); +// case BYTEARR: +// return readByteArray(dis); +// case ITERATOR: +// return readIterator(dis); +// case END: +// return END_OBJ; +// case SOLRINPUTDOC: +// return readSolrInputDocument(dis); +// case ENUM_FIELD_VALUE: +// return readEnumFieldValue(dis); +// case MAP_ENTRY: +// return readMapEntry(dis); +// case MAP_ENTRY_ITER: +// return readMapIter(dis); +// } +// +// throw new RuntimeException("Unknown type " + tagByte); +// } +// +// @SuppressWarnings({"unchecked", "rawtypes"}) +// public boolean writeKnownType(Object val) throws IOException { +// if (writePrimitive(val)) return true; +// if (val instanceof NamedList) { +// writeNamedList((NamedList) val); +// return true; +// } +// if (val instanceof SolrDocumentList) { // SolrDocumentList is a List, so must come before List check +// writeSolrDocumentList((SolrDocumentList) val); +// return true; +// } +// if (val instanceof SolrInputField) { +// return writeKnownType(((SolrInputField) val).getValue()); +// } +// if (val instanceof IteratorWriter) { +// writeIterator((IteratorWriter) val); +// return true; 
+// } +// if (val instanceof Collection) { +// writeArray((Collection) val); +// return true; +// } +// if (val instanceof Object[]) { +// writeArray((Object[]) val); +// return true; +// } +// if (val instanceof SolrDocument) { +// //this needs special treatment to know which fields are to be written +// writeSolrDocument((SolrDocument) val); +// return true; +// } +// if (val instanceof SolrInputDocument) { +// writeSolrInputDocument((SolrInputDocument)val); +// return true; +// } +// if (val instanceof MapWriter) { +// writeMap((MapWriter) val); +// return true; +// } +// if (val instanceof Map) { +// writeMap((Map) val); +// return true; +// } +// if (val instanceof Iterator) { +// writeIterator((Iterator) val); +// return true; +// } +// if (val instanceof Path) { +// writeStr(((Path) val).toAbsolutePath().toString()); +// return true; +// } +// if (val instanceof Iterable) { +// writeIterator(((Iterable) val).iterator()); +// return true; +// } +// if (val instanceof EnumFieldValue) { +// writeEnumFieldValue((EnumFieldValue) val); +// return true; +// } +// if (val instanceof Map.Entry) { +// writeMapEntry((Map.Entry)val); +// return true; +// } +// if (val instanceof MapSerializable) { +// //todo find a better way to reuse the map more efficiently +// writeMap(((MapSerializable) val).toMap(new NamedList().asShallowMap())); +// return true; +// } +// if (val instanceof AtomicInteger) { +// writeInt(((AtomicInteger) val).get()); +// return true; +// } +// if (val instanceof AtomicLong) { +// writeLong(((AtomicLong) val).get()); +// return true; +// } +// if (val instanceof AtomicBoolean) { +// writeBoolean(((AtomicBoolean) val).get()); +// return true; +// } +// return false; +// } +// +// public class BinEntryWriter implements MapWriter.EntryWriter { +// @Override +// public MapWriter.EntryWriter put(CharSequence k, Object v) throws IOException { +// writeExternString(k); +// JavaBinCodecOld.this.writeVal(v); +// return this; +// } +// +// @Override +// public MapWriter.EntryWriter put(CharSequence k, int v) throws IOException { +// writeExternString(k); +// JavaBinCodecOld.this.writeInt(v); +// return this; +// } +// +// @Override +// public MapWriter.EntryWriter put(CharSequence k, long v) throws IOException { +// writeExternString(k); +// JavaBinCodecOld.this.writeLong(v); +// return this; +// } +// +// @Override +// public MapWriter.EntryWriter put(CharSequence k, float v) throws IOException { +// writeExternString(k); +// JavaBinCodecOld.this.writeFloat(v); +// return this; +// } +// +// @Override +// public MapWriter.EntryWriter put(CharSequence k, double v) throws IOException { +// writeExternString(k); +// JavaBinCodecOld.this.writeDouble(v); +// return this; +// } +// +// @Override +// public MapWriter.EntryWriter put(CharSequence k, boolean v) throws IOException { +// writeExternString(k); +// writeBoolean(v); +// return this; +// } +// +// @Override +// public MapWriter.EntryWriter put(CharSequence k, CharSequence v) throws IOException { +// writeExternString(k); +// writeStr(v); +// return this; +// } +// +// private BiConsumer biConsumer; +// +// @Override +// public BiConsumer getBiConsumer() { +// if (biConsumer == null) biConsumer = MapWriter.EntryWriter.super.getBiConsumer(); +// return biConsumer; +// } +// } +// +// public final BinEntryWriter ew = new BinEntryWriter(); +// +// +// public void writeMap(MapWriter val) throws IOException { +// writeTag(MAP_ENTRY_ITER); +// val.writeMap(ew); +// writeTag(END); +// } +// +// +// public void writeTag(byte tag) throws 
IOException { +// daos.writeByte(tag); +// } +// +// public void writeTag(byte tag, int size) throws IOException { +// if ((tag & 0xe0) != 0) { +// if (size < 0x1f) { +// daos.writeByte(tag | size); +// } else { +// daos.writeByte(tag | 0x1f); +// writeVInt(size - 0x1f, daos); +// } +// } else { +// daos.writeByte(tag); +// writeVInt(size, daos); +// } +// } +// +// public void writeByteArray(byte[] arr, int offset, int len) throws IOException { +// writeTag(BYTEARR, len); +// daos.write(arr, offset, len); +// } +// +// public byte[] readByteArray(DataInputInputStream dis) throws IOException { +// byte[] arr = new byte[readVInt(dis)]; +// dis.readFully(arr); +// return arr; +// } +// //use this to ignore the writable interface because , child docs will ignore the fl flag +// // is it a good design? +// private boolean ignoreWritable =false; +// private MapWriter.EntryWriter cew; +// +// public void writeSolrDocument(SolrDocument doc) throws IOException { +// List children = doc.getChildDocuments(); +// int fieldsCount = 0; +// if(writableDocFields == null || writableDocFields.wantsAllFields() || ignoreWritable){ +// fieldsCount = doc.size(); +// } else { +// for (Entry e : doc) { +// if(toWrite(e.getKey())) fieldsCount++; +// } +// } +// int sz = fieldsCount + (children==null ? 0 : children.size()); +// writeTag(SOLRDOC); +// writeTag(ORDERED_MAP, sz); +// if (cew == null) cew = new ConditionalKeyMapWriter.EntryWriterWrapper(ew, (k) -> toWrite(k.toString())); +// doc.writeMap(cew); +// if (children != null) { +// try { +// ignoreWritable = true; +// for (SolrDocument child : children) { +// writeSolrDocument(child); +// } +// } finally { +// ignoreWritable = false; +// } +// } +// +// } +// +// protected boolean toWrite(String key) { +// return writableDocFields == null || ignoreWritable || writableDocFields.isWritable(key); +// } +// +// public SolrDocument readSolrDocument(DataInputInputStream dis) throws IOException { +// tagByte = dis.readByte(); +// int size = readSize(dis); +// SolrDocument doc = new SolrDocument(new LinkedHashMap<>(size)); +// for (int i = 0; i < size; i++) { +// String fieldName; +// Object obj = readVal(dis); // could be a field name, or a child document +// if (obj instanceof SolrDocument) { +// doc.addChildDocument((SolrDocument)obj); +// continue; +// } else { +// fieldName = (String)obj; +// } +// Object fieldVal = readVal(dis); +// doc.setField(fieldName, fieldVal); +// } +// return doc; +// } +// +// public SolrDocumentList readSolrDocumentList(DataInputInputStream dis) throws IOException { +// SolrDocumentList solrDocs = new SolrDocumentList(); +// @SuppressWarnings("unchecked") +// List list = (List) readVal(dis); +// solrDocs.setNumFound((Long) list.get(0)); +// solrDocs.setStart((Long) list.get(1)); +// solrDocs.setMaxScore((Float) list.get(2)); +// if (list.size() > 3) { //needed for back compatibility +// solrDocs.setNumFoundExact((Boolean)list.get(3)); +// } +// +// @SuppressWarnings("unchecked") +// List l = (List) readVal(dis); +// solrDocs.addAll(l); +// return solrDocs; +// } +// +// public void writeSolrDocumentList(SolrDocumentList docs) +// throws IOException { +// writeTag(SOLRDOCLST); +// List l = new ArrayList<>(4); +// l.add(docs.getNumFound()); +// l.add(docs.getStart()); +// l.add(docs.getMaxScore()); +// l.add(docs.getNumFoundExact()); +// writeArray(l); +// writeArray(docs); +// } +// +// public SolrInputDocument readSolrInputDocument(DataInputInputStream dis) throws IOException { +// int sz = readVInt(dis); +// float docBoost = 
(Float)readVal(dis); +// if (docBoost != 1f) { +// String message = "Ignoring document boost: " + docBoost + " as index-time boosts are not supported anymore"; +// if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) { +// log.warn(message); +// } else { +// log.debug(message); +// } +// } +// SolrInputDocument sdoc = createSolrInputDocument(sz); +// for (int i = 0; i < sz; i++) { +// String fieldName; +// Object obj = readVal(dis); // could be a boost, a field name, or a child document +// if (obj instanceof Float) { +// float boost = (Float)obj; +// if (boost != 1f) { +// String message = "Ignoring field boost: " + boost + " as index-time boosts are not supported anymore"; +// if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) { +// log.warn(message); +// } else { +// log.debug(message); +// } +// } +// fieldName = (String)readVal(dis); +// } else if (obj instanceof SolrInputDocument) { +// sdoc.addChildDocument((SolrInputDocument)obj); +// continue; +// } else { +// fieldName = (String)obj; +// } +// Object fieldVal = readVal(dis); +// sdoc.setField(fieldName, fieldVal); +// } +// return sdoc; +// } +// +// protected SolrInputDocument createSolrInputDocument(int sz) { +// return new SolrInputDocument(new LinkedHashMap<>(sz)); +// } +// static final Predicate IGNORECHILDDOCS = it -> !CommonParams.CHILDDOC.equals(it.toString()); +// +// public void writeSolrInputDocument(SolrInputDocument sdoc) throws IOException { +// List children = sdoc.getChildDocuments(); +// int sz = sdoc.size() + (children==null ? 0 : children.size()); +// writeTag(SOLRINPUTDOC, sz); +// writeFloat(1f); // document boost +// sdoc.writeMap(new ConditionalKeyMapWriter.EntryWriterWrapper(ew,IGNORECHILDDOCS)); +// if (children != null) { +// for (SolrInputDocument child : children) { +// writeSolrInputDocument(child); +// } +// } +// } +// +// +// public Map readMapIter(DataInputInputStream dis) throws IOException { +// Map m = newMap(-1); +// for (; ; ) { +// Object key = readVal(dis); +// if (key == END_OBJ) break; +// Object val = readVal(dis); +// m.put(key, val); +// } +// return m; +// } +// +// /** +// * create a new Map object +// * @param size expected size, -1 means unknown size +// */ +// protected Map newMap(int size) { +// return size < 0 ? 
new LinkedHashMap<>() : new LinkedHashMap<>(size); +// } +// +// public Map readMap(DataInputInputStream dis) +// throws IOException { +// int sz = readVInt(dis); +// return readMap(dis, sz); +// } +// +// protected Map readMap(DataInputInputStream dis, int sz) throws IOException { +// Map m = newMap(sz); +// for (int i = 0; i < sz; i++) { +// Object key = readVal(dis); +// Object val = readVal(dis); +// m.put(key, val); +// +// } +// return m; +// } +// +// public final ItemWriter itemWriter = new ItemWriter() { +// @Override +// public ItemWriter add(Object o) throws IOException { +// writeVal(o); +// return this; +// } +// +// @Override +// public ItemWriter add(int v) throws IOException { +// writeInt(v); +// return this; +// } +// +// @Override +// public ItemWriter add(long v) throws IOException { +// writeLong(v); +// return this; +// } +// +// @Override +// public ItemWriter add(float v) throws IOException { +// writeFloat(v); +// return this; +// } +// +// @Override +// public ItemWriter add(double v) throws IOException { +// writeDouble(v); +// return this; +// } +// +// @Override +// public ItemWriter add(boolean v) throws IOException { +// writeBoolean(v); +// return this; +// } +// }; +// +// @Override +// public void writeIterator(IteratorWriter val) throws IOException { +// writeTag(ITERATOR); +// val.writeIter(itemWriter); +// writeTag(END); +// } +// public void writeIterator(Iterator iter) throws IOException { +// writeTag(ITERATOR); +// while (iter.hasNext()) { +// writeVal(iter.next()); +// } +// writeTag(END); +// } +// +// public List readIterator(DataInputInputStream fis) throws IOException { +// ArrayList l = new ArrayList<>(); +// while (true) { +// Object o = readVal(fis); +// if (o == END_OBJ) break; +// l.add(o); +// } +// return l; +// } +// +// public void writeArray(List l) throws IOException { +// writeTag(ARR, l.size()); +// for (int i = 0; i < l.size(); i++) { +// writeVal(l.get(i)); +// } +// } +// +// public void writeArray(Collection coll) throws IOException { +// writeTag(ARR, coll.size()); +// for (Object o : coll) { +// writeVal(o); +// } +// +// } +// +// public void writeArray(Object[] arr) throws IOException { +// writeTag(ARR, arr.length); +// for (int i = 0; i < arr.length; i++) { +// Object o = arr[i]; +// writeVal(o); +// } +// } +// +// @SuppressWarnings({"unchecked"}) +// public List readArray(DataInputInputStream dis) throws IOException { +// int sz = readSize(dis); +// return readArray(dis, sz); +// } +// +// @SuppressWarnings({"rawtypes"}) +// protected List readArray(DataInputInputStream dis, int sz) throws IOException { +// ArrayList l = new ArrayList<>(sz); +// for (int i = 0; i < sz; i++) { +// l.add(readVal(dis)); +// } +// return l; +// } +// +// /** +// * write {@link EnumFieldValue} as tag+int value+string value +// * @param enumFieldValue to write +// */ +// public void writeEnumFieldValue(EnumFieldValue enumFieldValue) throws IOException { +// writeTag(ENUM_FIELD_VALUE); +// writeInt(enumFieldValue.toInt()); +// writeStr(enumFieldValue.toString()); +// } +// +// public void writeMapEntry(Map.Entry val) throws IOException { +// writeTag(MAP_ENTRY); +// writeVal(val.getKey()); +// writeVal(val.getValue()); +// } +// +// /** +// * read {@link EnumFieldValue} (int+string) from input stream +// * @param dis data input stream +// * @return {@link EnumFieldValue} +// */ +// public EnumFieldValue readEnumFieldValue(DataInputInputStream dis) throws IOException { +// Integer intValue = (Integer) readVal(dis); +// String stringValue = 
(String) convertCharSeq (readVal(dis)); +// return new EnumFieldValue(intValue, stringValue); +// } +// +// +// public Map.Entry readMapEntry(DataInputInputStream dis) throws IOException { +// final Object key = readVal(dis); +// final Object value = readVal(dis); +// return new Map.Entry() { +// +// @Override +// public Object getKey() { +// return key; +// } +// +// @Override +// public Object getValue() { +// return value; +// } +// +// @Override +// public String toString() { +// return "MapEntry[" + key + ":" + value + "]"; +// } +// +// @Override +// public Object setValue(Object value) { +// throw new UnsupportedOperationException(); +// } +// +// @Override +// public int hashCode() { +// int result = 31; +// result *=31 + getKey().hashCode(); +// result *=31 + getValue().hashCode(); +// return result; +// } +// +// @Override +// public boolean equals(Object obj) { +// if(this == obj) { +// return true; +// } +// if (obj instanceof Map.Entry) { +// Entry entry = (Entry) obj; +// return (this.getKey().equals(entry.getKey()) && this.getValue().equals(entry.getValue())); +// } +// return false; +// } +// }; +// } +// +// /** +// * write the string as tag+length, with length being the number of UTF-8 bytes +// */ +// public void writeStr(CharSequence s) throws IOException { +// if (s == null) { +// writeTag(NULL); +// return; +// } +// if (s instanceof Utf8CharSequence) { +// writeUTF8Str((Utf8CharSequence) s); +// return; +// } +// int end = s.length(); +// int maxSize = end * ByteUtils.MAX_UTF8_BYTES_PER_CHAR; +// +// if (maxSize <= MAX_UTF8_SIZE_FOR_ARRAY_GROW_STRATEGY) { +// if (bytes == null || bytes.length < maxSize) bytes = new byte[maxSize]; +// int sz = ByteUtils.UTF16toUTF8(s, 0, end, bytes, 0); +// writeTag(STR, sz); +// daos.write(bytes, 0, sz); +// } else { +// // double pass logic for large strings, see SOLR-7971 +// int sz = ByteUtils.calcUTF16toUTF8Length(s, 0, end); +// writeTag(STR, sz); +// if (bytes == null || bytes.length < 8192) bytes = new byte[8192]; +// ByteUtils.writeUTF16toUTF8(s, 0, end, daos, bytes); +// } +// } +// +// byte[] bytes; +// CharArr arr = new CharArr(); +// private StringBytes bytesRef = new StringBytes(bytes,0,0); +// +// public CharSequence readStr(DataInputInputStream dis, StringCache stringCache, boolean readStringAsCharSeq) throws IOException { +// if (readStringAsCharSeq) { +// return readUtf8(dis); +// } +// int sz = readSize(dis); +// return _readStr(dis, stringCache, sz); +// } +// +// private CharSequence _readStr(DataInputInputStream dis, StringCache stringCache, int sz) throws IOException { +// if (bytes == null || bytes.length < sz) bytes = new byte[sz]; +// dis.readFully(bytes, 0, sz); +// if (stringCache != null) { +// return stringCache.get(bytesRef.reset(bytes, 0, sz)); +// } else { +// arr.reset(); +// ByteUtils.UTF8toUTF16(bytes, 0, sz, arr); +// return arr.toString(); +// } +// } +// +// /////////// code to optimize reading UTF8 +// static final int MAX_UTF8_SZ = 1024 * 64;//too big strings can cause too much memory allocation +// private Function stringProvider; +// private BytesBlock bytesBlock; +// +// protected CharSequence readUtf8(DataInputInputStream dis) throws IOException { +// int sz = readSize(dis); +// return readUtf8(dis, sz); +// } +// +// protected CharSequence readUtf8(DataInputInputStream dis, int sz) throws IOException { +// ByteArrayUtf8CharSequence result = new ByteArrayUtf8CharSequence(null,0,0); +// if(dis.readDirectUtf8(result, sz)){ +// result.stringProvider= getStringProvider(); +// return result; +// 
} +// +// if (sz > MAX_UTF8_SZ) return _readStr(dis, null, sz); +// if (bytesBlock == null) bytesBlock = new BytesBlock(1024 * 4); +// BytesBlock block = this.bytesBlock.expand(sz); +// dis.readFully(block.getBuf(), block.getStartPos(), sz); +// +// result.reset(block.getBuf(), block.getStartPos(), sz,null); +// result.stringProvider = getStringProvider(); +// return result; +// } +// +// private Function getStringProvider() { +// if (stringProvider == null) { +// stringProvider = new Function<>() { +// final CharArr charArr = new CharArr(8); +// @Override +// public String apply(ByteArrayUtf8CharSequence butf8cs) { +// synchronized (charArr) { +// charArr.reset(); +// ByteUtils.UTF8toUTF16(butf8cs.buf, butf8cs.offset(), butf8cs.size(), charArr); +// return charArr.toString(); +// } +// } +// }; +// } +// return this.stringProvider; +// } +// +// public void writeInt(int val) throws IOException { +// if (val > 0) { +// int b = SINT | (val & 0x0f); +// +// if (val >= 0x0f) { +// b |= 0x10; +// daos.writeByte(b); +// writeVInt(val >>> 4, daos); +// } else { +// daos.writeByte(b); +// } +// +// } else { +// daos.writeByte(INT); +// daos.writeInt(val); +// } +// } +// +// public int readSmallInt(DataInputInputStream dis) throws IOException { +// int v = tagByte & 0x0F; +// if ((tagByte & 0x10) != 0) +// v = (readVInt(dis) << 4) | v; +// return v; +// } +// +// +// public void writeLong(long val) throws IOException { +// if ((val & 0xff00000000000000L) == 0) { +// int b = SLONG | ((int) val & 0x0f); +// if (val >= 0x0f) { +// b |= 0x10; +// daos.writeByte(b); +// writeVLong(val >>> 4, daos); +// } else { +// daos.writeByte(b); +// } +// } else { +// daos.writeByte(LONG); +// daos.writeLong(val); +// } +// } +// +// public long readSmallLong(DataInputInputStream dis) throws IOException { +// long v = tagByte & 0x0F; +// if ((tagByte & 0x10) != 0) +// v = (readVLong(dis) << 4) | v; +// return v; +// } +// +// public void writeFloat(float val) throws IOException { +// daos.writeByte(FLOAT); +// daos.writeFloat(val); +// } +// +// public boolean writePrimitive(Object val) throws IOException { +// if (val == null) { +// daos.writeByte(NULL); +// return true; +// } else if (val instanceof Utf8CharSequence) { +// writeUTF8Str((Utf8CharSequence) val); +// return true; +// } else if (val instanceof CharSequence) { +// writeStr((CharSequence) val); +// return true; +// } else if (val instanceof Number) { +// +// if (val instanceof Integer) { +// writeInt(((Integer) val).intValue()); +// return true; +// } else if (val instanceof Long) { +// writeLong(((Long) val).longValue()); +// return true; +// } else if (val instanceof Float) { +// writeFloat(((Float) val).floatValue()); +// return true; +// } else if (val instanceof Double) { +// writeDouble(((Double) val).doubleValue()); +// return true; +// } else if (val instanceof Byte) { +// daos.writeByte(BYTE); +// daos.writeByte(((Byte) val).intValue()); +// return true; +// } else if (val instanceof Short) { +// daos.writeByte(SHORT); +// daos.writeShort(((Short) val).intValue()); +// return true; +// } +// return false; +// +// } else if (val instanceof Date) { +// daos.writeByte(DATE); +// daos.writeLong(((Date) val).getTime()); +// return true; +// } else if (val instanceof Boolean) { +// writeBoolean((Boolean) val); +// return true; +// } else if (val instanceof byte[]) { +// writeByteArray((byte[]) val, 0, ((byte[]) val).length); +// return true; +// } else if (val instanceof ByteBuffer) { +// ByteBuffer buf = (ByteBuffer) val; +// 
writeByteArray(buf.array(),buf.position(),buf.limit() - buf.position()); +// return true; +// } else if (val == END_OBJ) { +// writeTag(END); +// return true; +// } +// return false; +// } +// +// protected void writeBoolean(boolean val) throws IOException { +// if (val) daos.writeByte(BOOL_TRUE); +// else daos.writeByte(BOOL_FALSE); +// } +// +// protected void writeDouble(double val) throws IOException { +// daos.writeByte(DOUBLE); +// daos.writeDouble(val); +// } +// +// +// public void writeMap(Map val) throws IOException { +// writeTag(MAP, val.size()); +// if (val instanceof MapWriter) { +// ((MapWriter) val).writeMap(ew); +// return; +// } +// for (Map.Entry entry : val.entrySet()) { +// Object key = entry.getKey(); +// if (key instanceof String) { +// writeExternString((String) key); +// } else { +// writeVal(key); +// } +// writeVal(entry.getValue()); +// } +// } +// +// +// public int readSize(DataInputInputStream in) throws IOException { +// int sz = tagByte & 0x1f; +// if (sz == 0x1f) sz += readVInt(in); +// return sz; +// } +// +// +// /** +// * Special method for variable length int (copied from lucene). Usually used for writing the length of a +// * collection/array/map In most of the cases the length can be represented in one byte (length < 127) so it saves 3 +// * bytes/object +// * +// * @throws IOException If there is a low-level I/O error. +// */ +// public static void writeVInt(int i, FastOutputStream out) throws IOException { +// while ((i & ~0x7F) != 0) { +// out.writeByte((byte) ((i & 0x7f) | 0x80)); +// i >>>= 7; +// } +// out.writeByte((byte) i); +// } +// +// /** +// * The counterpart for {@link #writeVInt(int, FastOutputStream)} +// * +// * @throws IOException If there is a low-level I/O error. +// */ +// public static int readVInt(DataInputInputStream in) throws IOException { +// byte b = in.readByte(); +// int i = b & 0x7F; +// for (int shift = 7; (b & 0x80) != 0; shift += 7) { +// b = in.readByte(); +// i |= (b & 0x7F) << shift; +// } +// return i; +// } +// +// +// public static void writeVLong(long i, FastOutputStream out) throws IOException { +// while ((i & ~0x7F) != 0) { +// out.writeByte((byte) ((i & 0x7f) | 0x80)); +// i >>>= 7; +// } +// out.writeByte((byte) i); +// } +// +// public static long readVLong(DataInputInputStream in) throws IOException { +// byte b = in.readByte(); +// long i = b & 0x7F; +// for (int shift = 7; (b & 0x80) != 0; shift += 7) { +// b = in.readByte(); +// i |= (long) (b & 0x7F) << shift; +// } +// return i; +// } +// +// private int stringsCount = 0; +// private Map stringsMap; +// private List stringsList; +// +// public void writeExternString(CharSequence s) throws IOException { +// if (s == null) { +// writeTag(NULL); +// return; +// } +// Integer idx = stringsMap == null ? 
null : stringsMap.get(s); +// if (idx == null) idx = 0; +// writeTag(EXTERN_STRING, idx); +// if (idx == 0) { +// writeStr(s); +// if (stringsMap == null) stringsMap = new HashMap<>(); +// stringsMap.put(s.toString(), ++stringsCount); +// } +// +// } +// +// public CharSequence readExternString(DataInputInputStream fis) throws IOException { +// int idx = readSize(fis); +// if (idx != 0) {// idx != 0 is the index of the extern string +// return stringsList.get(idx - 1); +// } else {// idx == 0 means it has a string value +// tagByte = fis.readByte(); +// CharSequence s = readStr(fis, stringCache, false); +// if (s != null) s = s.toString(); +// if (stringsList == null) stringsList = new ArrayList<>(); +// stringsList.add(s); +// return s; +// } +// } +// +// +// public void writeUTF8Str(Utf8CharSequence utf8) throws IOException { +// writeTag(STR, utf8.size()); +// daos.writeUtf8CharSeq(utf8); +// } +// +// public long getTotalBytesWritten() { +// if (daos != null) { +// return daos.written; +// } +// return 0; +// } +// +// public interface WritableDocFields { +// boolean isWritable(String name); +// boolean wantsAllFields(); +// } +// +// +// public static class StringCache { +// private final Cache cache; +// +// public StringCache(Cache cache) { +// this.cache = cache; +// } +// +// public String get(StringBytes b) { +// String result = cache.get(b); +// if (result == null) { +// //make a copy because the buffer received may be changed later by the caller +// StringBytes copy = new StringBytes(Arrays.copyOfRange(b.bytes, b.offset, b.offset + b.length), 0, b.length); +// CharArr arr = new CharArr(); +// ByteUtils.UTF8toUTF16(b.bytes, b.offset, b.length, arr); +// result = arr.toString(); +// cache.put(copy, result); +// } +// return result; +// } +// } +// +// @Override +// public void close() throws IOException { +// if (daos != null) { +// daos.flushBuffer(); +// } +// } +//} diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinInputStream.java b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinInputStream.java new file mode 100644 index 00000000000..ccc1f38bc8c --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinInputStream.java @@ -0,0 +1,311 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.common.util; + +import com.google.errorprone.annotations.DoNotCall; +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; + +/** Single threaded buffered InputStream + * Internal Solr use only, subject to change. 
+ */
+public final class JavaBinInputStream {
+  protected final InputStream in;
+  protected final byte[] buf;
+  protected int pos;
+  protected int end;
+  protected long readFromStream; // number of bytes read from the underlying inputstream
+
+  public JavaBinInputStream(InputStream in) {
+    // match jetty input buffer
+    this(in, new byte[16384], 0, 0);
+  }
+
+  public JavaBinInputStream(InputStream in, byte[] tempBuffer, int start, int end) {
+    this.in = in;
+    this.buf = tempBuffer;
+    this.pos = start;
+    this.end = end;
+  }
+
+  boolean readDirectUtf8(ByteArrayUtf8CharSequence utf8, int len) {
+    if (in != null || end < pos + len) return false;
+    utf8.reset(buf, pos, len, null);
+    pos = pos + len;
+    return true;
+  }
+
+  public static JavaBinInputStream wrap(InputStream in) {
+    return new JavaBinInputStream(in);
+  }
+
+  public int read() throws IOException {
+    if (pos >= end) {
+      // this will set end to -1 at EOF
+      int result;
+      if (in == null) {
+        result = -1;
+      } else {
+        result = in.read(buf, 0, buf.length);
+      }
+      end = result;
+      if (end > 0) readFromStream += end;
+      pos = 0;
+      if (pos >= end) return -1;
+    }
+    return buf[pos++] & 0xff; // mask so a raw 0xff byte reads as 255, not as the EOF marker -1
+  }
+
+  public int peek() throws IOException {
+    if (pos >= end) {
+      // this will set end to -1 at EOF
+      int result;
+      if (in == null) {
+        result = -1;
+      } else {
+        result = in.read(buf, 0, buf.length);
+      }
+      end = result;
+      if (end > 0) readFromStream += end;
+      pos = 0;
+      if (pos >= end) return -1;
+    }
+    return buf[pos] & 0xff;
+  }
+
+  public int readUnsignedByte() throws IOException {
+    if (pos >= end) {
+      // this will set end to -1 at EOF
+      int result;
+      if (in == null) {
+        result = -1;
+      } else {
+        result = in.read(buf, 0, buf.length);
+      }
+      end = result;
+      if (end > 0) readFromStream += end;
+      pos = 0;
+      if (pos >= end) {
+        throw new EOFException();
+      }
+    }
+    return buf[pos++] & 0xff;
+  }
+
+  public int readWrappedStream(byte[] target, int offset, int len) throws IOException {
+    if (in == null) return -1;
+    return in.read(target, offset, len);
+  }
+
+  public long position() {
+    return readFromStream - (end - pos);
+  }
+
+  public void refill() throws IOException {
+    // this will set end to -1 at EOF
+    int result;
+    if (in == null) {
+      result = -1;
+    } else {
+      result = in.read(buf, 0, buf.length);
+    }
+    end = result;
+    if (end > 0) readFromStream += end;
+    pos = 0;
+  }
+
+  public int available() throws IOException {
+    return end - pos;
+  }
+
+  /** Returns the internal buffer used for caching */
+  public byte[] getBuffer() {
+    return buf;
+  }
+
+  /** Current position within the internal buffer */
+  public int getPositionInBuffer() {
+    return pos;
+  }
+
+  /** Current end-of-data position within the internal buffer. This is one past the last valid byte. */
+  public int getEndInBuffer() {
+    return end;
+  }
+
+  public int read(byte b[], int off, int len) throws IOException {
+    int r = 0; // number of bytes we have read
+
+    // first read from our buffer;
+    if (end - pos > 0) {
+      r = Math.min(end - pos, len);
+      System.arraycopy(buf, pos, b, off, r);
+      pos += r;
+    }
+
+    if (r == len) return r;
+
+    // amount left to read is >= buffer size
+    if (len - r >= buf.length) {
+      int ret = readWrappedStream(b, off + r, len - r);
+      if (ret >= 0) {
+        readFromStream += ret;
+        r += ret;
+        return r;
+      } else {
+        // negative return code
+        return r > 0 ? r : -1;
+      }
+    }
+
+    return endRead(b, off, len, r);
+  }
+
+  private int endRead(byte[] b, int off, int len, int r) throws IOException {
+    // this will set end to -1 at EOF
+    end = readWrappedStream(buf, 0, buf.length);
+    if (end > 0) readFromStream += end;
+    pos = 0;
+
+    // read rest from our buffer
+    if (end - pos > 0) {
+      int toRead = Math.min(end - pos, len - r);
+      System.arraycopy(buf, pos, b, off + r, toRead);
+      pos += toRead;
+      r += toRead;
+      return r;
+    }
+
+    return r > 0 ? r : -1;
+  }
+
+  public void close() throws IOException {
+    if (in != null) in.close(); // in is null when reading from a raw byte[] buffer
+  }
+
+  public void readFully(byte b[]) throws IOException {
+    int len = b.length;
+    int off = 0;
+    while (len > 0) {
+      int ret = read(b, off, len); // advance by off; reading at 0 would clobber earlier chunks
+      if (ret == -1) {
+        throw new EOFException();
+      }
+      off += ret;
+      len -= ret;
+    }
+  }
+
+  public void readFully(byte b[], int off, int len) throws IOException {
+    while (len > 0) {
+      int ret = read(b, off, len);
+      if (ret == -1) {
+        throw new EOFException();
+      }
+      off += ret;
+      len -= ret;
+    }
+  }
+
+  public int skipBytes(int n) throws IOException {
+    if (end - pos >= n) {
+      pos += n;
+      return n;
+    }
+
+    if (end - pos < 0) return -1;
+
+    int r = end - pos;
+    pos = end;
+
+    while (r < n) {
+      // this will set end to -1 at EOF
+      int result;
+      if (in == null) {
+        result = -1;
+      } else {
+        result = in.read(buf, 0, buf.length);
+      }
+      end = result;
+      if (end > 0) readFromStream += end;
+      pos = 0;
+      if (end - pos <= 0) return r;
+      int toRead = Math.min(end - pos, n - r);
+      r += toRead;
+      pos += toRead;
+    }
+
+    return r;
+  }
+
+  public boolean readBoolean() throws IOException {
+    return readByte() == 1;
+  }
+
+  public byte readByte() throws IOException {
+    if (pos >= end) {
+      // this will set end to -1 at EOF
+      int result;
+      if (in == null) {
+        result = -1;
+      } else {
+        result = in.read(buf, 0, buf.length);
+      }
+      end = result;
+      if (end > 0) readFromStream += end;
+      pos = 0;
+      if (pos >= end) throw new EOFException();
+    }
+    return buf[pos++];
+  }
+
+  public short readShort() throws IOException {
+    return (short) ((readUnsignedByte() << 8) | readUnsignedByte());
+  }
+
+  public int readUnsignedShort() throws IOException {
+    return (readUnsignedByte() << 8) | readUnsignedByte();
+  }
+
+  public char readChar() throws IOException {
+    return (char) ((readUnsignedByte() << 8) | readUnsignedByte());
+  }
+
+  public int readInt() throws IOException {
+    return ((readUnsignedByte() << 24)
+        | (readUnsignedByte() << 16)
+        | (readUnsignedByte() << 8)
+        | readUnsignedByte());
+  }
+
+  public long readLong() throws IOException {
+    return (((long) readUnsignedByte()) << 56)
+        | (((long) readUnsignedByte()) << 48)
+        | (((long) readUnsignedByte()) << 40)
+        | (((long) readUnsignedByte()) << 32)
+        | (((long) readUnsignedByte()) << 24)
+        | (readUnsignedByte() << 16)
+        | (readUnsignedByte() << 8)
+        | (readUnsignedByte());
+  }
+
+  public float readFloat() throws IOException {
+    return Float.intBitsToFloat(readInt());
+  }
+
+  public double readDouble() throws IOException {
+    return Double.longBitsToDouble(readLong());
+  }
+
+  @DoNotCall
+  public String readLine() throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
+  public InputStream getIn() {
+    return in;
+  }
+}
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinOutputStream.java b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinOutputStream.java
new file mode 100644
index 00000000000..39d88fc3746
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinOutputStream.java
@@ -0,0 +1,359 @@
+/// *
+// * Licensed to the Apache Software Foundation (ASF) under one or more
+// * contributor license agreements. See the NOTICE file distributed with +// * this work for additional information regarding copyright ownership. +// * The ASF licenses this file to You under the Apache License, Version 2.0 +// * (the "License"); you may not use this file except in compliance with +// * the License. You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// */ +// package org.apache.solr.common.util; +// +// import com.google.errorprone.annotations.DoNotCall; +// +// import java.io.Closeable; +// import java.io.IOException; +// import java.io.OutputStream; +// +/// ** Single threaded buffered OutputStream +// * Internal Solr use only, subject to change. +// */ +// public final class JavaBinOutputStream { +// private final OutputStream out; +// +// private final boolean isFastOutputStream; +// private final byte[] buf; +// // private long written; // how many bytes written to the underlying stream +// private int pos; +// +// public JavaBinOutputStream(OutputStream w) { +// // match jetty output buffer +// this(w, new byte[32768], 0); +// } +// +// public JavaBinOutputStream(OutputStream sink, byte[] tempBuffer, int start) { +// this.out = sink; +// this.buf = tempBuffer; +// this.pos = start; +// if (sink instanceof FastOutputStream) { +// isFastOutputStream = true; +// if (tempBuffer != null) { +// throw new IllegalArgumentException("FastInputStream cannot bass a buffer to +// JavaBinInputStream"); +// } +// } else { +// isFastOutputStream = false; +// } +// } +// +// public static JavaBinOutputStream wrap(FastOutputStream sink) { +// return new JavaBinOutputStream(sink, null, 0); +// } +// +// public static JavaBinOutputStream wrap(OutputStream sink) { +// return new JavaBinOutputStream(sink); +// } +// +// public void write(int b) throws IOException { +// if (buf == null) { +// out.write((byte) b); +// return; +// } +// +// try { +// buf[pos++] = (byte) b; +// } catch (ArrayIndexOutOfBoundsException e) { +// +// flush(buf, 0, buf.length); +// pos=0; +// +// buf[pos++] = (byte) b; +// +// } +// } +// +// public void write(byte[] b) throws IOException { +// if (buf == null) { +// out.write(b, 0, b.length); +// return; +// } +// write(b,0,b.length); +// } +// +// public void write(byte b) throws IOException { +// if (buf == null) { +// out.write(b); +// return; +// } +// +// +//// try { +//// buf[pos++] = b; +//// } catch (ArrayIndexOutOfBoundsException e) { +//// flush(buf, 0, buf.length); +//// pos=0; +//// +//// buf[pos++] = b; +//// } +// if (pos >= buf.length) { +// //written += pos; +// flush(buf, 0, buf.length); +// pos=0; +// } +// buf[pos++] = b; +// } +// +// public void write(byte[] arr, int off, int len) throws IOException { +// if (buf == null) { +// out.write(arr, off, len); +// return; +// } +// +// for(;;) { +// int space = buf.length - pos; +// +// if (len <= space) { +// System.arraycopy(arr, off, buf, pos, len); +// pos += len; +// return; +// } else if (len > buf.length) { +// if (pos>0) { +// flush(buf,0,pos); // flush +// pos=0; +// } +// // don't buffer, just write to sink +// flush(arr, off, len); +// return; +// } +// +// // buffer is too big to fit in the free space, 
but +// // not big enough to warrant writing on its own. +// // write whatever we can fit, then flush and iterate. +// +// System.arraycopy(arr, off, buf, pos, space); +// flush(buf, 0, buf.length); +// pos = 0; +// off += space; +// len -= space; +// } +// } +// +// +// /** reserve at least len bytes at the end of the buffer. +// * Invalid if len > buffer.length +// */ +// public void reserve(int len) throws IOException { +// if (buf == null) { +// if (isFastOutputStream) { +// ((FastOutputStream)out).reserve(len); +// } +// return; +// } +// if (len > (buf.length - pos)) +// if (pos > 0) { +// flush(buf, 0, pos); +// pos=0; +// } +// } +// +// ////////////////// DataOutput methods /////////////////// +// public void writeBoolean(boolean v) throws IOException { +// if (buf == null) { +// if (v) { +// out.write((byte) 1); +// } else { +// out.write((byte) 0); +// } +// return; +// } +// +// write(v ? 1:0); +// } +// +// public void writeByte(int v) throws IOException { +// if (buf == null) { +// out.write((byte) v); +// return; +// } +// +// try { +// buf[pos++] = (byte) v; +// } catch (ArrayIndexOutOfBoundsException e) { +// flush(buf, 0, buf.length); +// pos=0; +// +// buf[pos++] = (byte) v; +// } +// } +// +// public void writeShort(int v) throws IOException { +// write((byte)(v >>> 8)); +// write((byte)v); +// } +// +// public void writeChar(int v) throws IOException { +// writeShort(v); +// } +// +// public void writeInt(int v) throws IOException { +// if (buf == null) { +// out.write((byte) (v >>> 24)); +// out.write((byte) (v >>> 16)); +// out.write((byte) (v >>> 8)); +// out.write((byte) (v)); +// pos += 4; +// return; +// } +// +// if (4 > (buf.length - pos)) +// if (pos > 0) { +// flush(buf, 0, pos); +// pos=0; +// } +// buf[pos] = (byte)(v>>>24); +// buf[pos+1] = (byte)(v>>>16); +// buf[pos+2] = (byte)(v>>>8); +// buf[pos+3] = (byte)(v); +// pos+=4; +// } +// +// public void writeLong(long v) throws IOException { +// if (buf == null) { +// out.write((byte) (v >>> 56)); +// out.write((byte) (v >>> 48)); +// out.write((byte) (v >>> 40)); +// out.write((byte) (v >>> 32)); +// out.write((byte) (v >>> 24)); +// out.write((byte) (v >>> 16)); +// out.write((byte) (v >>> 8)); +// out.write((byte) (v)); +// pos += 8; +// return; +// } +// +// +// if (8 > (buf.length - pos)) +// if (pos > 0) { +// flush(buf, 0, pos); +// pos=0; +// } +// buf[pos] = (byte)(v>>>56); +// buf[pos+1] = (byte)(v>>>48); +// buf[pos+2] = (byte)(v>>>40); +// buf[pos+3] = (byte)(v>>>32); +// buf[pos+4] = (byte)(v>>>24); +// buf[pos+5] = (byte)(v>>>16); +// buf[pos+6] = (byte)(v>>>8); +// buf[pos+7] = (byte)(v); +// pos+=8; +// } +// +// public void writeFloat(float v) throws IOException { +// writeInt(Float.floatToRawIntBits(v)); +// } +// +// public void writeDouble(double v) throws IOException { +// writeLong(Double.doubleToRawLongBits(v)); +// } +// +// @DoNotCall +// public void writeBytes(String s) throws IOException { +// throw new UnsupportedOperationException(); +// } +// +// @DoNotCall +// public void writeChars(String s) throws IOException { +// throw new UnsupportedOperationException(); +// } +// +// @DoNotCall +// public void writeUTF(String s) { +// throw new UnsupportedOperationException(); +// } +// +// public void flush() throws IOException { +// if (buf == null) { +// out.flush(); +// return; +// } +// +// if (pos > 0) { +// flush(buf, 0, pos); +// pos=0; +// } +// if (out != null) out.flush(); +// } +// +// public void close() throws IOException { +// +// if (buf == null) { +// if (out != null) 
out.close(); +// return; +// } +// +// if (pos > 0) { +// flush(buf, 0, pos); +// pos=0; +// } +// if (out != null) out.close(); +// } +// +// /** Only flushes the buffer of the FastOutputStream, not that of the +// * underlying stream. +// */ +// public void flushBuffer() throws IOException { +// if (buf == null) { +// if (isFastOutputStream) { +// ((FastOutputStream) out).flushBuffer(); +// } +// return; +// } +// +// if (pos > 0) { +// flush(buf, 0, pos); +// pos=0; +// } +// } +// +// /** All writes to the sink will go through this method */ +// public void flush(byte[] buf, int offset, int len) throws IOException { +// out.write(buf, offset, len); +// } +// +// +// /**Copies a {@link Utf8CharSequence} without making extra copies +// */ +// public void writeUtf8CharSeq(Utf8CharSequence utf8) throws IOException { +// if (buf == null) { +// if (utf8 instanceof ByteArrayUtf8CharSequence) { +// out.write(((ByteArrayUtf8CharSequence) utf8).getBuf()); +// return; +// } +// utf8.write(out); +// return; +// } +// +// int start = 0; +// int totalWritten = 0; +// while (true) { +// final int size = utf8.size(); +// if (!(totalWritten < size)) break; +// if (pos >= buf.length) flushBuffer(); +// int sz = utf8.write(start, buf, pos); +// pos += sz; +// totalWritten += sz; +// start += sz; +// } +// } +// +// public OutputStream getOutPut() { +// return out; +// } +// } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java b/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java index 07a76112123..ab2b2617318 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java @@ -33,7 +33,6 @@ import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.function.Function; - import org.apache.solr.cluster.api.SimpleMap; import org.apache.solr.common.MapWriter; import org.apache.solr.common.SolrException; @@ -43,62 +42,54 @@ /** * A simple container class for modeling an ordered list of name/value pairs. * - *

- * Unlike Maps: - *

+ *

Unlike Maps: + * *

    - *
  • Names may be repeated
  • - *
  • Order of elements is maintained
  • - *
  • Elements may be accessed by numeric index
  • - *
  • Names and Values can both be null
  • + *
  • Names may be repeated + *
  • Order of elements is maintained + *
  • Elements may be accessed by numeric index + *
  • Names and Values can both be null *
* - *

- * A NamedList provides fast access by element number, but not by name. - *

- *

- * When a NamedList is serialized, order is considered more important than access - * by key, so ResponseWriters that output to a format such as JSON will normally - * choose a data structure that allows order to be easily preserved in various - * clients (i.e. not a straight map). - * If access by key is more important for serialization, see {@link SimpleOrderedMap}, - * or simply use a regular {@link Map} - *

+ *

A NamedList provides fast access by element number, but not by name. * + *

When a NamedList is serialized, order is considered more important than access by key, so + * ResponseWriters that output to a format such as JSON will normally choose a data structure that + * allows order to be easily preserved in various clients (i.e. not a straight map). If access by + * key is more important for serialization, see {@link SimpleOrderedMap}, or simply use a regular + * {@link Map} */ @SuppressWarnings({"unchecked", "rawtypes"}) -public class NamedList implements Cloneable, Serializable, Iterable> , MapWriter, SimpleMap { +public class NamedList + implements Cloneable, Serializable, Iterable>, MapWriter, SimpleMap { private static final long serialVersionUID = 1957981902839867821L; protected final List nvPairs; /** Creates an empty instance */ public NamedList() { - nvPairs = new ArrayList<>(); + nvPairs = new ArrayList<>(10); } - public NamedList(int sz) { - nvPairs = new ArrayList<>(sz<<1); + nvPairs = new ArrayList<>(sz << 1); } @Override public void writeMap(EntryWriter ew) throws IOException { - for (int i = 0; i < nvPairs.size(); i+=2) { + final int size = nvPairs.size(); + for (int i = 0; i < size; i += 2) { ew.put((CharSequence) nvPairs.get(i), nvPairs.get(i + 1)); } } /** - * Creates a NamedList instance containing the "name,value" pairs contained in the - * Entry[]. + * Creates a NamedList instance containing the "name,value" pairs contained in the Entry[]. * - *

- * <p>
- * Modifying the contents of the Entry[] after calling this constructor may change
- * the NamedList (in future versions of Solr), but this is not guaranteed and should
- * not be relied upon. To modify the NamedList, refer to {@link #add(String, Object)}
- * or {@link #remove(String)}.
- * </p>
+ *
+ * <p>
Modifying the contents of the Entry[] after calling this constructor may change the + * NamedList (in future versions of Solr), but this is not guaranteed and should not be relied + * upon. To modify the NamedList, refer to {@link #add(String, Object)} or {@link + * #remove(String)}. * * @param nameValuePairs the name value pairs */ @@ -107,24 +98,20 @@ public NamedList(Map.Entry[] nameValuePairs) { } /** - * Creates a NamedList instance containing the "name,value" pairs contained in the - * Map. + * Creates a NamedList instance containing the "name,value" pairs contained in the Map. * - *

- * <p>
- * Modifying the contents of the Map after calling this constructor may change
- * the NamedList (in future versions of Solr), but this is not guaranteed and should
- * not be relied upon. To modify the NamedList, refer to {@link #add(String, Object)}
- * or {@link #remove(String)}.
- * </p>
+ *
+ * <p>
Modifying the contents of the Map after calling this constructor may change the NamedList + * (in future versions of Solr), but this is not guaranteed and should not be relied upon. To + * modify the NamedList, refer to {@link #add(String, Object)} or {@link #remove(String)}. * * @param nameValueMap the name value pairs */ - public NamedList(Map nameValueMap) { + public NamedList(Map nameValueMap) { if (null == nameValueMap) { - nvPairs = new ArrayList<>(); + nvPairs = new ArrayList<>(10); } else { nvPairs = new ArrayList<>(nameValueMap.size() << 1); - for (Map.Entry ent : nameValueMap.entrySet()) { + for (Map.Entry ent : nameValueMap.entrySet()) { nvPairs.add(ent.getKey()); nvPairs.add(ent.getValue()); } @@ -132,36 +119,30 @@ public NamedList(Map nameValueMap) { } /** - * Creates an instance backed by an explicitly specified list of - * pairwise names/values. + * Creates an instance backed by an explicitly specified list of pairwise names/values. + * + *

+ * <p>When using this constructor, runtime type safety is only guaranteed if all even numbered
+ * elements of the input list are of type "T".
+ *
+ * <p>This method is package protected and exists solely so SimpleOrderedMap and clone() can
+ * utilize it
+ *
+ * <p>TODO: this method was formerly public, now that it's not we can change the impl details of
+ * this class to be based on a Map.Entry[]
  *
- * <p>
- * When using this constructor, runtime type safety is only guaranteed if
- * all even numbered elements of the input list are of type "T".
- * </p>
- * <p>
- * This method is package protected and exists solely so SimpleOrderedMap and clone() can utilize it
- * </p>
- * <p>
- * TODO: this method was formerly public, now that it's not we can change the impl details of
- * this class to be based on a Map.Entry[]
- * </p>
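
An aside on this hunk: the "even numbered elements" wording reflects the flat backing layout, where names occupy even indices and values odd indices. A small sketch of the invariant (the list contents are illustrative only):

    // flat pairwise layout: [name0, val0, name1, val1, ...]
    List<Object> pairs = Arrays.asList("status", 0, "QTime", 5);
    // the accessors shown earlier in this diff then reduce to index arithmetic:
    //   getName(i) -> (String) pairs.get(i << 1)
    //   getVal(i)  -> (T)      pairs.get((i << 1) + 1)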
* @lucene.internal * @see #nameValueMapToList */ NamedList(List nameValuePairs) { - nvPairs=nameValuePairs; + nvPairs = nameValuePairs; } /** - * Method to serialize Map.Entry<String, ?> to a List in which the even - * indexed elements (0,2,4. ..etc) are Strings and odd elements (1,3,5,) are of - * the type "T". + * Method to serialize Map.Entry<String, ?> to a List in which the even indexed elements + * (0,2,4. ..etc) are Strings and odd elements (1,3,5,) are of the type "T". * - *

- * <p>
- * NOTE: This a temporary placeholder method until the guts of the class
- * are actually replaced by List<String, ?>.
- * </p>
+ *
+ * <p>
NOTE: This a temporary placeholder method until the guts of the class are actually replaced + * by List<String, ?>. * * @return Modified List as per the above description * @see SOLR-912 @@ -186,7 +167,7 @@ public int size() { * @return null if no name exists */ public String getName(int idx) { - return (String)nvPairs.get(idx << 1); + return (String) nvPairs.get(idx << 1); } /** @@ -196,22 +177,18 @@ public String getName(int idx) { */ @SuppressWarnings("unchecked") public T getVal(int idx) { - return (T)nvPairs.get((idx << 1) + 1); + return (T) nvPairs.get((idx << 1) + 1); } - /** - * Adds a name/value pair to the end of the list. - */ + /** Adds a name/value pair to the end of the list. */ public void add(String name, T val) { nvPairs.add(name); nvPairs.add(val); } - /** - * Modifies the name of the pair at the specified index. - */ + /** Modifies the name of the pair at the specified index. */ public void setName(int idx, String name) { - nvPairs.set(idx<<1, name); + nvPairs.set(idx << 1, name); } /** @@ -220,9 +197,9 @@ public void setName(int idx, String name) { * @return the value that used to be at index */ public T setVal(int idx, T val) { - int index = (idx<<1)+1; + int index = (idx << 1) + 1; @SuppressWarnings("unchecked") - T old = (T)nvPairs.get( index ); + T old = (T) nvPairs.get(index); nvPairs.set(index, val); return old; } @@ -233,16 +210,16 @@ public T setVal(int idx, T val) { * @return the value at the index removed */ public T remove(int idx) { - int index = (idx<<1); + int index = (idx << 1); nvPairs.remove(index); @SuppressWarnings("unchecked") - T result = (T)nvPairs.remove(index); // same index, as things shifted in previous remove + T result = (T) nvPairs.remove(index); // same index, as things shifted in previous remove return result; } /** - * Scans the list sequentially beginning at the specified index and - * returns the index of the first pair with the specified name. + * Scans the list sequentially beginning at the specified index and returns the index of the first + * pair with the specified name. * * @param name name to look for, may be null * @param start index to begin searching from @@ -250,10 +227,10 @@ public T remove(int idx) { */ public int indexOf(String name, int start) { int sz = size(); - for (int i=start; i - * NOTE: this runs in linear time (it scans starting at the - * beginning of the list until it finds the first pair with - * the specified name). + * Gets the value for the first instance of the specified name found. + * + *
+ * <p>
NOTE: this runs in linear time (it scans starting at the beginning of the list until it + * finds the first pair with the specified name). * * @return null if not found or if the value stored was null. * @see #indexOf * @see #get(String,int) - * */ public T get(String name) { - return get(name,0); + return get(name, 0); } /** - * Gets the value for the first instance of the specified name - * found starting at the specified index. - *

- * NOTE: this runs in linear time (it scans starting at the
- * specified position until it finds the first pair with
- * the specified name).
+ * Gets the value for the first instance of the specified name found starting at the specified
+ * index.
+ *
+ * <p>
NOTE: this runs in linear time (it scans starting at the specified position until it finds + * the first pair with the specified name). * * @return null if not found or if the value stored was null. * @see #indexOf */ public T get(String name, int start) { int sz = size(); - for (int i=start; i getAll(String name) { } return result; } - + /** * Removes all values matching the specified name * @@ -335,30 +308,27 @@ private void killAll(String name) { } } } - + /** - * Recursively parses the NamedList structure to arrive at a specific element. - * As you descend the NamedList tree, the last element can be any type, - * including NamedList, but the previous elements MUST be NamedList objects - * themselves. A null value is returned if the indicated hierarchy doesn't - * exist, but NamedList allows null values so that could be the actual value - * at the end of the path. - * - * This method is particularly useful for parsing the response from Solr's - * /admin/mbeans handler, but it also works for any complex structure. - * - * Explicitly casting the return value is recommended. An even safer option is - * to accept the return value as an object and then check its type. - * - * Usage examples: - * - * String coreName = (String) response.findRecursive - * ("solr-mbeans", "CORE", "core", "stats", "coreName"); - * long numDoc = (long) response.findRecursive - * ("solr-mbeans", "CORE", "searcher", "stats", "numDocs"); - * - * @param args - * One or more strings specifying the tree to navigate. + * Recursively parses the NamedList structure to arrive at a specific element. As you descend the + * NamedList tree, the last element can be any type, including NamedList, but the previous + * elements MUST be NamedList objects themselves. A null value is returned if the indicated + * hierarchy doesn't exist, but NamedList allows null values so that could be the actual value at + * the end of the path. + * + *
+ * <p>
This method is particularly useful for parsing the response from Solr's /admin/mbeans + * handler, but it also works for any complex structure. + * + *
+ * <p>
Explicitly casting the return value is recommended. An even safer option is to accept the + * return value as an object and then check its type. + * + *
+ * <p>
Usage examples: + * + *
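
The usage example in this javadoc was flattened by the reformat; restated as a compilable sketch (queryMBeans() is a hypothetical helper standing in for a request to the /admin/mbeans handler):

    NamedList<Object> response = queryMBeans();
    String coreName =
        (String) response.findRecursive("solr-mbeans", "CORE", "core", "stats", "coreName");
    long numDocs =
        (long) response.findRecursive("solr-mbeans", "CORE", "searcher", "stats", "numDocs");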
+ * <p>
String coreName = (String) response.findRecursive ("solr-mbeans", "CORE", "core", "stats", + * "coreName"); long numDoc = (long) response.findRecursive ("solr-mbeans", "CORE", "searcher", + * "stats", "numDocs"); + * + * @param args One or more strings specifying the tree to navigate. * @return the last entry in the given path hierarchy, null if not found. */ public Object findRecursive(String... args) { @@ -370,12 +340,12 @@ public Object findRecursive(String... args) { * The first time through the loop, the current list is null, so we assign * it to this list. Then we retrieve the first key from this list and * assign it to value. - * + * * On the next loop, we check whether the retrieved value is a NamedList. * If it is, then we drop down to that NamedList, grab the value of the * next key, and start the loop over. If it is not a NamedList, then we * assign the value to null and break out of the loop. - * + * * Assigning the value to null and then breaking out of the loop seems * like the wrong thing to do, but there's a very simple reason that it * works: If we have reached the last key, then the loop ends naturally @@ -406,7 +376,7 @@ public String toString() { StringBuilder sb = new StringBuilder(); sb.append('{'); int sz = size(); - for (int i=0; i removeConfigArgs(final String name) - throws SolrException { + public Collection removeConfigArgs(final String name) throws SolrException { List objects = getAll(name); - List collection = new ArrayList<>(size() / 2); - final String err = "init arg '" + name + "' must be a string " - + "(ie: 'str'), or an array (ie: 'arr') containing strings; found: "; - + List collection = new ArrayList<>(size() >> 1); + final String err = + "init arg '" + + name + + "' must be a string " + + "(ie: 'str'), or an array (ie: 'arr') containing strings; found: "; + for (Object o : objects) { if (o instanceof String) { collection.add((String) o); continue; } - + // If it's an array, convert to List (which is a Collection). if (o instanceof Object[]) { o = Arrays.asList((Object[]) o); } - + // If it's a Collection, collect each value. 
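      // e.g. (hypothetical init args): a section such as
      //   <lst name="defaults"><arr name="fq"><str>a</str><str>b</str></arr></lst>
      // arrives here as a List, so removeConfigArgs("fq") collects ["a", "b"].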
if (o instanceof Collection) { for (Object item : (Collection) o) { @@ -836,14 +791,14 @@ public Collection removeConfigArgs(final String name) } throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, err + o.getClass()); } - - if (collection.size() > 0) { + + if (!collection.isEmpty()) { killAll(name); } - + return collection; } - + public void clear() { nvPairs.clear(); } @@ -860,12 +815,11 @@ public boolean equals(Object obj) { return this.nvPairs.equals(nl.nvPairs); } - @Override public void abortableForEach(BiFunction fun) { int sz = size(); for (int i = 0; i < sz; i++) { - if(!fun.apply(getName(i), getVal(i))) break; + if (!fun.apply(getName(i), getVal(i))) break; } } @@ -873,7 +827,7 @@ public void abortableForEach(BiFunction fun) { public void abortableForEachKey(Function fun) { int sz = size(); for (int i = 0; i < sz; i++) { - if(!fun.apply(getName(i))) break; + if (!fun.apply(getName(i))) break; } } @@ -884,10 +838,18 @@ public void forEachKey(Consumer fun) { fun.accept(getName(i)); } } + public void forEach(BiConsumer action) { - int sz = size(); - for (int i = 0; i < sz; i++) { - action.accept(getName(i), getVal(i)); + int sz = nvPairs.size(); + for (int i = 0; i < sz; i += 2) { + action.accept((String) nvPairs.get(i), (T) nvPairs.get(i + 1)); + } + } + + public void forEachIO(IOBiConsumer action) throws IOException { + int sz = nvPairs.size(); + for (int i = 0; i < sz; i += 2) { + action.accept((String) nvPairs.get(i), (T) nvPairs.get(i + 1)); } } @@ -900,4 +862,8 @@ public int _size() { public void forEachEntry(BiConsumer fun) { forEach(fun); } + + public abstract static class IOBiConsumer { + abstract void accept(String t, Object u) throws IOException; + } } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utf8CharSequence.java b/solr/solrj/src/java/org/apache/solr/common/util/Utf8CharSequence.java index 1d4b861a3b0..42c3f5adf2e 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/Utf8CharSequence.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/Utf8CharSequence.java @@ -59,14 +59,15 @@ default int compareTo(Utf8CharSequence o) { * @param os The sink */ default void write(OutputStream os) throws IOException { - byte[] buf = new byte[1024]; + byte[] buf = new byte[8192]; int start = 0; int totalWritten = 0; - for (; ; ) { - if (totalWritten >= size()) break; + int size = size(); + while (totalWritten < size) { int sz = write(start, buf, 0); totalWritten += sz; - if (sz > 0) os.write(buf, 0, sz); + if (sz > 0) + os.write(buf, 0, sz); start += sz; } } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java index f15b67a5797..c1ba84bdd02 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java @@ -16,6 +16,10 @@ */ package org.apache.solr.common.util; +import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.Collections.singletonList; +import static java.util.concurrent.TimeUnit.NANOSECONDS; + import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -56,7 +60,6 @@ import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; - import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; @@ -87,64 +90,65 @@ import org.slf4j.LoggerFactory; import org.slf4j.MDC; -import static java.nio.charset.StandardCharsets.UTF_8; -import static 
java.util.Collections.singletonList; -import static java.util.concurrent.TimeUnit.NANOSECONDS; - - public class Utils { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @SuppressWarnings({"rawtypes"}) - public static Map getDeepCopy(Map map, int maxDepth) { + public static Map getDeepCopy(Map map, int maxDepth) { return getDeepCopy(map, maxDepth, true, false); } @SuppressWarnings({"rawtypes"}) - public static Map getDeepCopy(Map map, int maxDepth, boolean mutable) { + public static Map getDeepCopy(Map map, int maxDepth, boolean mutable) { return getDeepCopy(map, maxDepth, mutable, false); } @SuppressWarnings({"unchecked", "rawtypes"}) - public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) { + public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) { if (map == null) return null; if (maxDepth < 1) return map; Map copy; if (sorted) { copy = new TreeMap<>(); } else { - copy = map instanceof LinkedHashMap ? new LinkedHashMap<>(map.size()) : new HashMap<>(map.size()); + copy = + map instanceof LinkedHashMap + ? new LinkedHashMap<>(map.size()) + : new HashMap<>(map.size()); } for (Object o : map.entrySet()) { - Map.Entry e = (Map.Entry) o; + Map.Entry e = (Map.Entry) o; copy.put(e.getKey(), makeDeepCopy(e.getValue(), maxDepth, mutable, sorted)); } return mutable ? copy : Collections.unmodifiableMap(copy); } - public static void forEachMapEntry(Object o, String path, @SuppressWarnings({"rawtypes"})BiConsumer fun) { + public static void forEachMapEntry( + Object o, String path, @SuppressWarnings({"rawtypes"}) BiConsumer fun) { Object val = Utils.getObjectByPath(o, false, path); forEachMapEntry(val, fun); } - public static void forEachMapEntry(Object o, List path, @SuppressWarnings({"rawtypes"})BiConsumer fun) { + public static void forEachMapEntry( + Object o, List path, @SuppressWarnings({"rawtypes"}) BiConsumer fun) { Object val = Utils.getObjectByPath(o, false, path); forEachMapEntry(val, fun); } @SuppressWarnings({"unchecked"}) - public static void forEachMapEntry(Object o, @SuppressWarnings({"rawtypes"})BiConsumer fun) { + public static void forEachMapEntry(Object o, @SuppressWarnings({"rawtypes"}) BiConsumer fun) { if (o instanceof MapWriter) { MapWriter m = (MapWriter) o; try { - m.writeMap(new MapWriter.EntryWriter() { - @Override - public MapWriter.EntryWriter put(CharSequence k, Object v) { - fun.accept(k, v); - return this; - } - }); + m.writeMap( + new MapWriter.EntryWriter() { + @Override + public MapWriter.EntryWriter put(CharSequence k, Object v) { + fun.accept(k, v); + return this; + } + }); } catch (IOException e) { throw new RuntimeException(e); } @@ -179,7 +183,7 @@ public static InputStream toJavabin(Object o) throws IOException { return new ByteBufferInputStream(ByteBuffer.wrap(baos.getbuf(), 0, baos.size())); } } - + public static Object fromJavabin(byte[] bytes) throws IOException { try (JavaBinCodec jbc = new JavaBinCodec()) { return jbc.unmarshal(bytes); @@ -190,11 +194,13 @@ public static Collection getDeepCopy(Collection c, int maxDepth, boolean m return getDeepCopy(c, maxDepth, mutable, false); } - public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) { + public static Collection getDeepCopy( + Collection c, int maxDepth, boolean mutable, boolean sorted) { if (c == null || maxDepth < 1) return c; - Collection result = c instanceof Set ? - (sorted ? 
new TreeSet<>() : new HashSet<>()) : new ArrayList<>(); - for (Object o : c) result.add(makeDeepCopy(o, maxDepth, mutable, sorted)); // TODO should this be maxDepth - 1? + Collection result = + c instanceof Set ? (sorted ? new TreeSet<>() : new HashSet<>()) : new ArrayList<>(); + for (Object o : c) + result.add(makeDeepCopy(o, maxDepth, mutable, sorted)); // TODO should this be maxDepth - 1? if (sorted && (result instanceof List)) { ((List) result).sort(null); } @@ -202,14 +208,12 @@ public static Collection getDeepCopy(Collection c, int maxDepth, boolean m } public static void writeJson(Object o, OutputStream os, boolean indent) throws IOException { - writeJson(o, new OutputStreamWriter(os, UTF_8), indent) - .flush(); + writeJson(o, new OutputStreamWriter(os, UTF_8), indent).flush(); } public static Writer writeJson(Object o, Writer writer, boolean indent) throws IOException { try (SolrJSONWriter jsonWriter = new SolrJSONWriter(writer)) { - jsonWriter.setIndent(indent) - .writeObj(o); + jsonWriter.setIndent(indent).writeObj(o); } return writer; } @@ -230,17 +234,18 @@ public void handleUnknownClass(Object o) { final boolean[] first = new boolean[1]; first[0] = true; int sz = mapWriter._size(); - mapWriter._forEachEntry((k, v) -> { - if (first[0]) { - first[0] = false; - } else { - writeValueSeparator(); - } - if (sz > 1) indent(); - writeString(k.toString()); - writeNameSeparator(); - write(v); - }); + mapWriter._forEachEntry( + (k, v) -> { + if (first[0]) { + first[0] = false; + } else { + writeValueSeparator(); + } + if (sz > 1) indent(); + writeString(k.toString()); + writeNameSeparator(); + write(v); + }); endObject(); } else if (o instanceof IteratorWriter) { IteratorWriter iteratorWriter = (IteratorWriter) o; @@ -248,19 +253,20 @@ public void handleUnknownClass(Object o) { final boolean[] first = new boolean[1]; first[0] = true; try { - iteratorWriter.writeIter(new IteratorWriter.ItemWriter() { - @Override - public IteratorWriter.ItemWriter add(Object o) throws IOException { - if (first[0]) { - first[0] = false; - } else { - writeValueSeparator(); - } - indent(); - write(o); - return this; - } - }); + iteratorWriter.writeIter( + new IteratorWriter.ItemWriter() { + @Override + public IteratorWriter.ItemWriter add(Object o) throws IOException { + if (first[0]) { + first[0] = false; + } else { + writeValueSeparator(); + } + indent(); + write(o); + return this; + } + }); } catch (IOException e) { throw new RuntimeException("this should never happen", e); } @@ -273,20 +279,39 @@ public IteratorWriter.ItemWriter add(Object o) throws IOException { public static byte[] toJSON(Object o) { if (o == null) return new byte[0]; - CharArr out = new CharArr(); -// if (!(o instanceof List) && !(o instanceof Map)) { -// if (o instanceof MapWriter) { -// o = ((MapWriter) o).toMap(new LinkedHashMap<>()); -// } else if (o instanceof IteratorWriter) { -// o = ((IteratorWriter) o).toList(new ArrayList<>()); -// } -// } + CharArr out = new CharArr(64); + // if (!(o instanceof List) && !(o instanceof Map)) { + // if (o instanceof MapWriter) { + // o = ((MapWriter) o).toMap(new LinkedHashMap<>()); + // } else if (o instanceof IteratorWriter) { + // o = ((IteratorWriter) o).toList(new ArrayList<>()); + // } + // } new MapWriterJSONWriter(out, 2).write(o); // indentation by default return toUTF8(out); } public static String toJSONString(Object o) { - return new String(toJSON(o), StandardCharsets.UTF_8); + byte[] arr; + int nBytes; + if (o == null) { + arr = new byte[0]; + nBytes = 0; + } else { + CharArr out = 
new CharArr(64); + // if (!(o instanceof List) && !(o instanceof Map)) { + // if (o instanceof MapWriter) { + // o = ((MapWriter) o).toMap(new LinkedHashMap<>()); + // } else if (o instanceof IteratorWriter) { + // o = ((IteratorWriter) o).toList(new ArrayList<>()); + // } + // } + new MapWriterJSONWriter(out, 2).write(o); // indentation by default + arr = new byte[out.size() * 3]; + nBytes = ByteUtils.UTF16toUTF8(out, 0, out.size(), arr, 0); + } + + return new String(arr, 0, nBytes, StandardCharsets.UTF_8); } public static byte[] toUTF8(CharArr out) { @@ -298,17 +323,18 @@ public static byte[] toUTF8(CharArr out) { public static Object fromJSON(byte[] utf8) { return fromJSON(utf8, 0, utf8.length); } - + public static Object fromJSON(byte[] utf8, int offset, int length) { // convert directly from bytes to chars // and parse directly from that instead of going through // intermediate strings or readers - CharArr chars = new CharArr(); + CharArr chars = new CharArr(64); ByteUtils.UTF8toUTF16(utf8, offset, length, chars); JSONParser parser = new JSONParser(chars.getArray(), chars.getStart(), chars.length()); - parser.setFlags(parser.getFlags() | - JSONParser.ALLOW_MISSING_COLON_COMMA_BEFORE_OBJECT | - JSONParser.OPTIONAL_OUTER_BRACES); + parser.setFlags( + parser.getFlags() + | JSONParser.ALLOW_MISSING_COLON_COMMA_BEFORE_OBJECT + | JSONParser.OPTIONAL_OUTER_BRACES); try { return STANDARDOBJBUILDER.apply(parser).getValStrict(); } catch (IOException e) { @@ -335,7 +361,8 @@ private static Map _makeMap(T[] keyVals) { if ((keyVals.length & 0x01) != 0) { throw new IllegalArgumentException("arguments should be key,value"); } - Map propMap = new LinkedHashMap<>(); // Cost of oversizing LHM is low, don't compute initialCapacity + Map propMap = + new LinkedHashMap<>(); // Cost of oversizing LHM is low, don't compute initialCapacity for (int i = 0; i < keyVals.length; i += 2) { propMap.put(String.valueOf(keyVals[i]), keyVals[i + 1]); } @@ -354,49 +381,58 @@ public static Object fromJSON(Reader is) { } } - public static final Function STANDARDOBJBUILDER = jsonParser -> { - try { - return new ObjectBuilder(jsonParser); - } catch (IOException e) { - throw new RuntimeException(e); - } - }; - public static final Function MAPWRITEROBJBUILDER = jsonParser -> { - try { - return new ObjectBuilder(jsonParser) { - @Override - public Object newObject() { - return new LinkedHashMapWriter<>(); + public static final Function STANDARDOBJBUILDER = + jsonParser -> { + try { + return new ObjectBuilder(jsonParser); + } catch (IOException e) { + throw new RuntimeException(e); + } + }; + public static final Function MAPWRITEROBJBUILDER = + jsonParser -> { + try { + return new ObjectBuilder(jsonParser) { + @Override + public Object newObject() { + return new LinkedHashMapWriter<>(); + } + }; + } catch (IOException e) { + throw new RuntimeException(e); } }; - } catch (IOException e) { - throw new RuntimeException(e); - } - }; - public static final Function MAPOBJBUILDER = jsonParser -> { - try { - return new ObjectBuilder(jsonParser) { - @Override - public Object newObject() { - return new HashMap<>(); + public static final Function MAPOBJBUILDER = + jsonParser -> { + try { + return new ObjectBuilder(jsonParser) { + @Override + public Object newObject() { + return new HashMap<>(); + } + }; + } catch (IOException e) { + throw new RuntimeException(e); } }; - } catch (IOException e) { - throw new RuntimeException(e); - } - }; /** - * Util function to convert {@link Object} to {@link String} - * Specially handles {@link Date} to 
string conversion + * Util function to convert {@link Object} to {@link String} Specially handles {@link Date} to + * string conversion */ public static final Function OBJECT_TO_STRING = - obj -> ((obj instanceof Date) ? Objects.toString(((Date) obj).toInstant()) : Objects.toString(obj)); + obj -> + ((obj instanceof Date) + ? Objects.toString(((Date) obj).toInstant()) + : Objects.toString(obj)); - public static Object fromJSON(InputStream is, Function objBuilderProvider) { + public static Object fromJSON( + InputStream is, Function objBuilderProvider) { try { - return objBuilderProvider.apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))).getValStrict(); + return objBuilderProvider + .apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))) + .getValStrict(); } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e); } @@ -410,16 +446,17 @@ public static Object fromJSONResource(String resourceName) { try (InputStream stream = resource.openStream()) { return fromJSON(stream); } catch (IOException e) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Resource error: " + e.getMessage(), e); + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, "Resource error: " + e.getMessage(), e); } } public static JSONParser getJSONParser(Reader reader) { JSONParser parser = new JSONParser(reader); - parser.setFlags(parser.getFlags() | - JSONParser.ALLOW_MISSING_COLON_COMMA_BEFORE_OBJECT | - JSONParser.OPTIONAL_OUTER_BRACES); + parser.setFlags( + parser.getFlags() + | JSONParser.ALLOW_MISSING_COLON_COMMA_BEFORE_OBJECT + | JSONParser.OPTIONAL_OUTER_BRACES); return parser; } @@ -447,7 +484,7 @@ public static boolean setObjectByPath(Object root, List hierarchy, Objec if (!isMapLike(root)) throw new RuntimeException("must be a Map or NamedList"); Object obj = root; for (int i = 0; i < hierarchy.size(); i++) { - int idx = -2; //-1 means append to list, -2 means not found + int idx = -2; // -1 means append to list, -2 means not found String s = hierarchy.get(i); if (s.endsWith("]")) { Matcher matcher = ARRAY_ELEMENT_INDEX.matcher(s); @@ -499,10 +536,8 @@ public static boolean setObjectByPath(Object root, List hierarchy, Objec } return false; - } - public static Object getObjectByPath(Object root, boolean onlyPrimitive, List hierarchy) { if (root == null) return null; if (!isMapLike(root)) return null; @@ -555,29 +590,29 @@ public static Object getObjectByPath(Object root, boolean onlyPrimitive, List idx) return this; - if (i == idx) result[0] = o; - return this; - } - }); + iteratorWriter.writeIter( + new IteratorWriter.ItemWriter() { + int i = -1; + + @Override + public IteratorWriter.ItemWriter add(Object o) { + ++i; + if (i > idx) return this; + if (i == idx) result[0] = o; + return this; + } + }); } catch (IOException e) { throw new RuntimeException(e); } return result[0]; - } - static class MapWriterEntry extends AbstractMap.SimpleEntry implements MapWriter, Map.Entry { + static class MapWriterEntry extends AbstractMap.SimpleEntry + implements MapWriter, Map.Entry { MapWriterEntry(CharSequence key, V value) { super(key, value); } @@ -587,7 +622,6 @@ public void writeMap(EntryWriter ew) throws IOException { ew.put("key", getKey()); ew.put("value", getValue()); } - } private static boolean isMapLike(Object o) { @@ -598,31 +632,32 @@ private static Object getVal(Object obj, String key, int idx) { if (obj instanceof MapWriter) { Object[] result = new Object[1]; try { - ((MapWriter) 
obj).writeMap(new MapWriter.EntryWriter() { - int count = -1; - - @Override - public MapWriter.EntryWriter put(CharSequence k, Object v) { - if (result[0] != null) return this; - if (idx < 0) { - if (k.equals(key)) result[0] = v; - } else { - if (++count == idx) result[0] = new MapWriterEntry<>(k, v); - } - return this; - } - }); + ((MapWriter) obj) + .writeMap( + new MapWriter.EntryWriter() { + int count = -1; + + @Override + public MapWriter.EntryWriter put(CharSequence k, Object v) { + if (result[0] != null) return this; + if (idx < 0) { + if (k.equals(key)) result[0] = v; + } else { + if (++count == idx) result[0] = new MapWriterEntry<>(k, v); + } + return this; + } + }); } catch (IOException e) { throw new RuntimeException(e); } return result[0]; - } else if (obj instanceof Map) return ((Map) obj).get(key); + } else if (obj instanceof Map) return ((Map) obj).get(key); else throw new RuntimeException("must be a NamedList or Map"); } /** - * If the passed entity has content, make sure it is fully - * read and closed. + * If the passed entity has content, make sure it is fully read and closed. * * @param entity to consume or null */ @@ -650,32 +685,37 @@ public static void consumeFully(HttpEntity entity) { */ public static void readFully(InputStream is) throws IOException { is.skip(is.available()); - while (is.read() != -1) { - } + while (is.read() != -1) {} } @SuppressWarnings({"unchecked"}) - public static Map getJson(DistribStateManager distribStateManager, String path) throws InterruptedException, IOException, KeeperException { + public static Map getJson(DistribStateManager distribStateManager, String path) + throws InterruptedException, IOException, KeeperException { VersionedData data = null; try { data = distribStateManager.getData(path); } catch (KeeperException.NoNodeException | NoSuchElementException e) { return Collections.emptyMap(); } - if (data == null || data.getData() == null || data.getData().length == 0) return Collections.emptyMap(); + if (data == null || data.getData() == null || data.getData().length == 0) + return Collections.emptyMap(); return (Map) Utils.fromJSON(data.getData()); } /** * Assumes data in ZooKeeper is a JSON string, deserializes it and returns as a Map * - * @param zkClient the zookeeper client - * @param path the path to the znode being read - * @param retryOnConnLoss whether to retry the operation automatically on connection loss, see {@link org.apache.solr.common.cloud.ZkCmdExecutor#retryOperation(ZkOperation)} - * @return a Map if the node exists and contains valid JSON or an empty map if znode does not exist or has a null data + * @param zkClient the zookeeper client + * @param path the path to the znode being read + * @param retryOnConnLoss whether to retry the operation automatically on connection loss, see + * {@link org.apache.solr.common.cloud.ZkCmdExecutor#retryOperation(ZkOperation)} + * @return a Map if the node exists and contains valid JSON or an empty map if znode does not + * exist or has a null data */ @SuppressWarnings({"unchecked"}) - public static Map getJson(SolrZkClient zkClient, String path, boolean retryOnConnLoss) throws KeeperException, InterruptedException { + public static Map getJson( + SolrZkClient zkClient, String path, boolean retryOnConnLoss) + throws KeeperException, InterruptedException { try { byte[] bytes = zkClient.getData(path, null, null, retryOnConnLoss); if (bytes != null && bytes.length > 0) { @@ -687,12 +727,12 @@ public static Map getJson(SolrZkClient zkClient, String path, bo return 
Collections.emptyMap(); } - public static final Pattern ARRAY_ELEMENT_INDEX = Pattern - .compile("(\\S*?)\\[([-]?\\d+)\\]"); + public static final Pattern ARRAY_ELEMENT_INDEX = Pattern.compile("(\\S*?)\\[([-]?\\d+)\\]"); public static SpecProvider getSpec(final String name) { return () -> { - return ValidatingJsonMap.parse(CommonParams.APISPEC_LOCATION + name + ".json", CommonParams.APISPEC_LOCATION); + return ValidatingJsonMap.parse( + CommonParams.APISPEC_LOCATION + name + ".json", CommonParams.APISPEC_LOCATION); }; } @@ -716,11 +756,10 @@ public static String parseMetricsReplicaName(String collectionName, String coreN } /** - * Applies one json over other. The 'input' is applied over the sink - * The values in input isapplied over the values in 'sink' . If a value is 'null' - * that value is removed from sink + * Applies one json over other. The 'input' is applied over the sink The values in input isapplied + * over the values in 'sink' . If a value is 'null' that value is removed from sink * - * @param sink the original json object to start with. Ensure that this Map is mutable + * @param sink the original json object to start with. Ensure that this Map is mutable * @param input the json with new values * @return whether there was any change made to sink or not. */ @@ -746,13 +785,11 @@ public static boolean mergeJson(Map sink, Map in sink.put(e.getKey(), e.getValue()); isModified = true; } - } } else if (e.getValue() != null) { sink.put(e.getKey(), e.getValue()); isModified = true; } - } return isModified; @@ -761,19 +798,23 @@ public static boolean mergeJson(Map sink, Map in public static String getBaseUrlForNodeName(final String nodeName, String urlScheme) { return getBaseUrlForNodeName(nodeName, urlScheme, false); } - public static String getBaseUrlForNodeName(final String nodeName, String urlScheme, boolean isV2) { + + public static String getBaseUrlForNodeName( + final String nodeName, String urlScheme, boolean isV2) { final int colonAt = nodeName.indexOf(':'); if (colonAt == -1) { - throw new IllegalArgumentException("nodeName does not contain expected ':' separator: " + nodeName); + throw new IllegalArgumentException( + "nodeName does not contain expected ':' separator: " + nodeName); } - final int _offset = nodeName.indexOf("_", colonAt); + final int _offset = nodeName.indexOf('_', colonAt); if (_offset < 0) { - throw new IllegalArgumentException("nodeName does not contain expected '_' separator: " + nodeName); + throw new IllegalArgumentException( + "nodeName does not contain expected '_' separator: " + nodeName); } final String hostAndPort = nodeName.substring(0, _offset); final String path = URLDecoder.decode(nodeName.substring(1 + _offset), UTF_8); - return urlScheme + "://" + hostAndPort + (path.isEmpty() ? "" : ("/" + (isV2? "api": path))); + return urlScheme + "://" + hostAndPort + (path.isEmpty() ? "" : ('/' + (isV2 ? 
"api" : path))); } public static long time(TimeSource timeSource, TimeUnit unit) { @@ -806,10 +847,10 @@ public static T handleExp(Logger logger, T def, Callable c) { public interface InputStreamConsumer { T accept(InputStream is) throws IOException; - } - public static final InputStreamConsumer JAVABINCONSUMER = is -> new JavaBinCodec().unmarshal(is); + public static final InputStreamConsumer JAVABINCONSUMER = + is -> new JavaBinCodec().unmarshal(is); public static final InputStreamConsumer JSONCONSUMER = Utils::fromJSON; public static InputStreamConsumer newBytesConsumer(int maxSize) { @@ -828,15 +869,15 @@ public static InputStreamConsumer newBytesConsumer(int maxSize) { throw new RuntimeException(e); } }; - } - - public static T executeGET(HttpClient client, String url, InputStreamConsumer consumer) throws SolrException { + public static T executeGET(HttpClient client, String url, InputStreamConsumer consumer) + throws SolrException { return executeHttpMethod(client, url, consumer, new HttpGet(url)); } - public static T executeHttpMethod(HttpClient client, String url, InputStreamConsumer consumer, HttpRequestBase httpMethod) { + public static T executeHttpMethod( + HttpClient client, String url, InputStreamConsumer consumer, HttpRequestBase httpMethod) { T result = null; HttpResponse rsp = null; try { @@ -848,7 +889,11 @@ public static T executeHttpMethod(HttpClient client, String url, InputStream int statusCode = rsp.getStatusLine().getStatusCode(); if (statusCode != 200) { try { - log.error("Failed a request to: {}, status: {}, body: {}", url, rsp.getStatusLine(), EntityUtils.toString(rsp.getEntity(), StandardCharsets.UTF_8)); // nowarn + log.error( + "Failed a request to: {}, status: {}, body: {}", + url, + rsp.getStatusLine(), + EntityUtils.toString(rsp.getEntity(), StandardCharsets.UTF_8)); // nowarn } catch (IOException e) { log.error("could not print error", e); } @@ -869,7 +914,7 @@ public static T executeHttpMethod(HttpClient client, String url, InputStream return result; } - public static void reflectWrite(MapWriter.EntryWriter ew, Object o) throws IOException{ + public static void reflectWrite(MapWriter.EntryWriter ew, Object o) throws IOException { List fieldWriters = null; try { fieldWriters = getReflectData(o.getClass()); @@ -879,19 +924,20 @@ public static void reflectWrite(MapWriter.EntryWriter ew, Object o) throws IOExc for (FieldWriter fieldWriter : fieldWriters) { try { fieldWriter.write(ew, o); - } catch( Throwable e) { + } catch (Throwable e) { throw new RuntimeException(e); - //should not happen + // should not happen } } } private static List getReflectData(Class c) throws IllegalAccessException { boolean sameClassLoader = c.getClassLoader() == Utils.class.getClassLoader(); - //we should not cache the class references of objects loaded from packages because they will not get garbage collected - //TODO fix that later - List reflectData = sameClassLoader ? storedReflectData.get(c): null; - if(reflectData == null) { + // we should not cache the class references of objects loaded from packages because they will + // not get garbage collected + // TODO fix that later + List reflectData = sameClassLoader ? 
storedReflectData.get(c) : null; + if (reflectData == null) { ArrayList l = new ArrayList<>(); MethodHandles.Lookup lookup = MethodHandles.publicLookup(); for (Field field : lookup.accessClass(c).getFields()) { @@ -921,24 +967,22 @@ private static List getReflectData(Class c) throws IllegalAccess l.add((ew, inst) -> ew.putIfNotNull(fname, mh.invoke(inst))); } } catch (NoSuchFieldException e) { - //this is unlikely + // this is unlikely throw new RuntimeException(e); } - }} + } + } - if(sameClassLoader){ + if (sameClassLoader) { storedReflectData.put(c, reflectData = Collections.unmodifiableList(new ArrayList<>(l))); } } return reflectData; } - - private static Map, List> storedReflectData = new ConcurrentHashMap<>(); interface FieldWriter { void write(MapWriter.EntryWriter ew, Object inst) throws Throwable; } - } diff --git a/solr/solrj/src/java/org/noggit/CharArr.java b/solr/solrj/src/java/org/noggit/CharArr.java index 0431e107a1d..adeaaec2e88 100644 --- a/solr/solrj/src/java/org/noggit/CharArr.java +++ b/solr/solrj/src/java/org/noggit/CharArr.java @@ -17,16 +17,20 @@ package org.noggit; - import java.io.IOException; import java.io.Reader; import java.io.Writer; import java.nio.CharBuffer; +import java.util.Arrays; public class CharArr implements CharSequence, Appendable { + + private static final int M2 = 0x7A646E4D; + protected char[] buf; protected int start; protected int end; + private int hash; public CharArr() { this(32); @@ -75,14 +79,11 @@ public int length() { return size(); } - /** - * The capacity of the buffer when empty (getArray().size()) - */ + /** The capacity of the buffer when empty (getArray().size()) */ public int capacity() { return buf.length; } - @Override public char charAt(int index) { return buf[start + index]; @@ -99,7 +100,7 @@ public int read() throws IOException { } public int read(char cbuf[], int off, int len) { - //TODO + // TODO return 0; } @@ -160,15 +161,13 @@ public void write(String s, int stringOffset, int len) { end += len; } - public void flush() { - } + public void flush() {} public final void reset() { - start = end = 0; + start = end = hash = 0; } - public void close() { - } + public void close() {} public char[] toCharArray() { char newbuf[] = new char[size()]; @@ -176,7 +175,6 @@ public char[] toCharArray() { return newbuf; } - @Override public String toString() { return new String(buf, start, size()); @@ -184,11 +182,11 @@ public String toString() { public int read(CharBuffer cb) throws IOException { - /*** - int sz = size(); - if (sz<=0) return -1; - if (sz>0) cb.put(buf, start, sz); - return -1; + /* + * int sz = size(); + * if (sz<=0) return -1; + * if (sz>0) cb.put(buf, start, sz); + * return -1; ***/ int sz = size(); @@ -203,9 +201,44 @@ public int read(CharBuffer cb) throws IOException { } } - public int fill() throws IOException { - return 0; // or -1? + return 0; // or -1? 
+ } + + @SuppressWarnings(value = "EqualsWrongThing") + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null) { + return false; + } + if (getClass() != o.getClass()) { + if (o instanceof CharSequence) { + return o.equals(this); + } + return false; + } + CharArr charArr = (CharArr) o; + return Arrays.equals(buf, start, end, charArr.buf, charArr.start, charArr.end); + } + + @Override + public int hashCode() { + if (buf == null) return 0; + + if (hash != 0) { + return hash; + } + + long h = 37; + + for (int i = start; i < end; i++) h = h * M2 + buf[i]; + h *= M2; + + hash = (int) h; + return (int) h; } //////////////// Appendable methods ///////////// @@ -226,39 +259,31 @@ public final Appendable append(char c) throws IOException { return this; } - static class NullCharArr extends CharArr { public NullCharArr() { super(new char[1], 0, 0); } @Override - public void unsafeWrite(char b) { - } + public void unsafeWrite(char b) {} @Override - public void unsafeWrite(char b[], int off, int len) { - } + public void unsafeWrite(char b[], int off, int len) {} @Override - public void unsafeWrite(int b) { - } + public void unsafeWrite(int b) {} @Override - public void write(char b) { - } + public void write(char b) {} @Override - public void write(char b[], int off, int len) { - } + public void write(char b[], int off, int len) {} @Override - public void reserve(int num) { - } + public void reserve(int num) {} @Override - protected void resize(int len) { - } + protected void resize(int len) {} @Override public Appendable append(CharSequence csq, int start, int end) throws IOException { @@ -271,11 +296,9 @@ public char charAt(int index) { } @Override - public void write(String s, int stringOffset, int len) { - } + public void write(String s, int stringOffset, int len) {} } - // IDEA: a subclass that refills the array from a reader? class CharArrReader extends CharArr { protected final Reader in; @@ -310,23 +333,21 @@ public int fill() throws IOException { end = size(); start = 0; } - /*** - // fill fully or not??? - do { - int sz = in.read(buf,end,buf.length-end); - if (sz==-1) return; - end+=sz; - } while (end < buf.length); + /* + * // fill fully or not??? 
+ * do { + * int sz = in.read(buf,end,buf.length-end); + * if (sz==-1) return; + * end+=sz; + * } while (end < buf.length); ***/ int sz = in.read(buf, end, buf.length - end); if (sz > 0) end += sz; return sz; } - } - class CharArrWriter extends CharArr { protected Writer sink; @@ -387,7 +408,6 @@ public void write(String s, int stringOffset, int len) { } catch (IOException e) { throw new RuntimeException(e); } - } } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java index 2e9b5e8de30..8548677cb68 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java @@ -16,6 +16,8 @@ */ package org.apache.solr.client.solrj.request; +import static org.apache.solr.common.params.CommonParams.CHILDDOC; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -28,7 +30,6 @@ import java.util.List; import java.util.Map; import java.util.Set; - import junit.framework.Assert; import org.apache.solr.SolrTestCase; import org.apache.solr.common.IteratorWriter; @@ -39,8 +40,6 @@ import org.apache.solr.common.util.Utils; import org.junit.Test; -import static org.apache.solr.common.params.CommonParams.CHILDDOC; - /** * Test for UpdateRequestCodec * @@ -50,7 +49,8 @@ public class TestUpdateRequestCodec extends SolrTestCase { @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 + // commented out on: 24-Dec-2018 + // @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 public void simple() throws IOException { UpdateRequest updateRequest = new UpdateRequest(); updateRequest.deleteById("*:*"); @@ -83,19 +83,21 @@ public void simple() throws IOException { doc.addField("foobar", foobar); updateRequest.add(doc); -// updateRequest.setWaitFlush(true); + // updateRequest.setWaitFlush(true); updateRequest.deleteById("2"); updateRequest.deleteByQuery("id:3"); JavaBinUpdateRequestCodec codec = new JavaBinUpdateRequestCodec(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); codec.marshal(updateRequest, baos); final List docs = new ArrayList<>(); - JavaBinUpdateRequestCodec.StreamingUpdateHandler handler = (document, req, commitWithin, overwrite) -> { - Assert.assertNotNull(req.getParams()); - docs.add(document); - }; + JavaBinUpdateRequestCodec.StreamingUpdateHandler handler = + (document, req, commitWithin, overwrite) -> { + Assert.assertNotNull(req.getParams()); + docs.add(document); + }; - UpdateRequest updateUnmarshalled = codec.unmarshal(new ByteArrayInputStream(baos.toByteArray()), handler); + UpdateRequest updateUnmarshalled = + codec.unmarshal(new ByteArrayInputStream(baos.toByteArray()), handler); for (SolrInputDocument document : docs) { updateUnmarshalled.add(document); @@ -105,16 +107,17 @@ public void simple() throws IOException { SolrInputDocument outDoc = updateUnmarshalled.getDocuments().get(i); compareDocs("doc#" + i, inDoc, outDoc); } - Assert.assertEquals(updateUnmarshalled.getDeleteById().get(0), - updateRequest.getDeleteById().get(0)); - Assert.assertEquals(updateUnmarshalled.getDeleteQuery().get(0), - updateRequest.getDeleteQuery().get(0)); + Assert.assertEquals( + updateUnmarshalled.getDeleteById().get(0), updateRequest.getDeleteById().get(0)); + Assert.assertEquals( + 
updateUnmarshalled.getDeleteQuery().get(0), updateRequest.getDeleteQuery().get(0)); assertEquals("b", updateUnmarshalled.getParams().get("a")); } @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 + // commented out on: 24-Dec-2018 + // @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 public void testIteratable() throws IOException { final List values = new ArrayList<>(); values.add("iterItem1"); @@ -126,7 +129,7 @@ public void testIteratable() throws IOException { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", 1); doc.addField("desc", "one"); - // imagine someone adding a custom Bean that implements Iterable + // imagine someone adding a custom Bean that implements Iterable // but is not a Collection doc.addField("iter", (Iterable) values::iterator); doc.addField("desc", "1"); @@ -136,12 +139,14 @@ public void testIteratable() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); codec.marshal(updateRequest, baos); final List docs = new ArrayList<>(); - JavaBinUpdateRequestCodec.StreamingUpdateHandler handler = (document, req, commitWithin, overwrite) -> { - Assert.assertNotNull(req.getParams()); - docs.add(document); - }; + JavaBinUpdateRequestCodec.StreamingUpdateHandler handler = + (document, req, commitWithin, overwrite) -> { + Assert.assertNotNull(req.getParams()); + docs.add(document); + }; - UpdateRequest updateUnmarshalled = codec.unmarshal(new ByteArrayInputStream(baos.toByteArray()), handler); + UpdateRequest updateUnmarshalled = + codec.unmarshal(new ByteArrayInputStream(baos.toByteArray()), handler); for (SolrInputDocument document : docs) { updateUnmarshalled.add(document); @@ -151,32 +156,39 @@ public void testIteratable() throws IOException { SolrInputField iter = outDoc.getField("iter"); Assert.assertNotNull("iter field is null", iter); Object iterVal = iter.getValue(); - Assert.assertTrue("iterVal is not a Collection", - iterVal instanceof Collection); - Assert.assertEquals("iterVal contents", values, iterVal); - + Assert.assertTrue("iterVal is not a Collection", iterVal instanceof Collection); + Assert.assertEquals("iterVal contents", values.toString(), iterVal.toString()); } - //this format accepts a 1:1 mapping of the json format and javabin format + // this format accepts a 1:1 mapping of the json format and javabin format public void testStreamableInputDocFormat() throws IOException { Map m = new LinkedHashMap<>(); m.put("id", "1"); m.put("desc", "The desc 1"); - m.put(CHILDDOC, (MapWriter) ew -> { - ew.put("id","1.1"); - ew.put("desc" ,"The desc 1.1"); - ew.put(CHILDDOC, (IteratorWriter) iw -> { - iw.add(Map.of("id", "1.1.1","desc","The desc 1.1.1")); - iw.add((MapWriter) ew1 -> { - ew1.put("id", "1.1.2"); - ew1.put("desc", "The desc 1.1.2"); - }); - }); - }); - MapWriter m2 = ew -> { - ew.put("id", "2"); - ew.put("des", "The desc 2"); - }; + m.put( + CHILDDOC, + (MapWriter) + ew -> { + ew.put("id", "1.1"); + ew.put("desc", "The desc 1.1"); + ew.put( + CHILDDOC, + (IteratorWriter) + iw -> { + iw.add(Map.of("id", "1.1.1", "desc", "The desc 1.1.1")); + iw.add( + (MapWriter) + ew1 -> { + ew1.put("id", "1.1.2"); + ew1.put("desc", "The desc 1.1.2"); + }); + }); + }); + MapWriter m2 = + ew -> { + ew.put("id", "2"); + ew.put("des", "The desc 2"); + }; List l = new ArrayList<>(); l.add(m); @@ -184,22 +196,25 @@ public void testStreamableInputDocFormat() throws IOException { ByteArrayOutputStream baos = new 
ByteArrayOutputStream(); new JavaBinCodec().marshal(l.iterator(), baos); - List l2 = new ArrayList<>(); + List l2 = new ArrayList<>(); - new JavaBinUpdateRequestCodec().unmarshal(new ByteArrayInputStream(baos.toByteArray()), (document, req, commitWithin, override) -> l2.add(document)); + new JavaBinUpdateRequestCodec() + .unmarshal( + new ByteArrayInputStream(baos.toByteArray()), + (document, req, commitWithin, override) -> l2.add(document)); - assertEquals(l2.get(0).getChildDocuments().size(), 1); - - Object o = Utils.fromJSONString(Utils.writeJson(l.get(0), new StringWriter(), true).toString()); - Object cdoc = Utils.getObjectByPath(o, false, CHILDDOC); - assertEquals(Utils.writeJson(cdoc, new StringWriter(), true).toString(), - Utils.writeJson(l2.get(0).getChildDocuments().get(0) ,new StringWriter(), true).toString()); + assertEquals(l2.get(0).getChildDocuments().size(), 1); + Object o = Utils.fromJSONString(Utils.writeJson(l.get(0), new StringWriter(), true).toString()); + Object cdoc = Utils.getObjectByPath(o, false, CHILDDOC); + assertEquals( + Utils.writeJson(cdoc, new StringWriter(), true).toString(), + Utils.writeJson(l2.get(0).getChildDocuments().get(0), new StringWriter(), true).toString()); } - @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 + // commented out on: 24-Dec-2018 + // @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 public void testBackCompat4_5() throws IOException { UpdateRequest updateRequest = new UpdateRequest(); @@ -236,15 +251,24 @@ public void testBackCompat4_5() throws IOException { updateRequest.deleteById("2"); updateRequest.deleteByQuery("id:3"); - InputStream is = getClass().getResourceAsStream("/solrj/updateReq_4_5.bin"); assertNotNull("updateReq_4_5.bin was not found", is); - UpdateRequest updateUnmarshalled = new JavaBinUpdateRequestCodec().unmarshal(is, (document, req, commitWithin, override) -> { - if (commitWithin == null) { - req.add(document); - } - System.err.println("Doc" + document + " ,commitWithin:" + commitWithin + " , override:" + override); - }); + UpdateRequest updateUnmarshalled = + new JavaBinUpdateRequestCodec() + .unmarshal( + is, + (document, req, commitWithin, override) -> { + if (commitWithin == null) { + req.add(document); + } + System.err.println( + "Doc" + + document + + " ,commitWithin:" + + commitWithin + + " , override:" + + override); + }); System.err.println(updateUnmarshalled.getDocumentsMap()); System.err.println(updateUnmarshalled.getDocuments()); @@ -254,38 +278,41 @@ public void testBackCompat4_5() throws IOException { SolrInputDocument outDoc = updateUnmarshalled.getDocuments().get(i); compareDocs("doc#" + i, inDoc, outDoc); } - Assert.assertEquals(updateUnmarshalled.getDeleteById().get(0), - updateRequest.getDeleteById().get(0)); - Assert.assertEquals(updateUnmarshalled.getDeleteQuery().get(0), - updateRequest.getDeleteQuery().get(0)); + Assert.assertEquals( + updateUnmarshalled.getDeleteById().get(0), updateRequest.getDeleteById().get(0)); + Assert.assertEquals( + updateUnmarshalled.getDeleteQuery().get(0), updateRequest.getDeleteQuery().get(0)); assertEquals("b", updateUnmarshalled.getParams().get("a")); is.close(); } - - private void compareDocs(String m, - SolrInputDocument expectedDoc, - SolrInputDocument actualDoc) { + private void compareDocs(String m, SolrInputDocument expectedDoc, SolrInputDocument actualDoc) { for (String s : expectedDoc.getFieldNames()) { SolrInputField 
expectedField = expectedDoc.getField(s); SolrInputField actualField = actualDoc.getField(s); Object expectedVal = expectedField.getValue(); Object actualVal = actualField.getValue(); - if (expectedVal instanceof Set && - actualVal instanceof Collection) { - // unmarshaled documents never contain Sets, they are just a - // List in an arbitrary order based on what the iterator of + if (expectedVal instanceof Set && actualVal instanceof Collection) { + // unmarshaled documents never contain Sets, they are just a + // List in an arbitrary order based on what the iterator of // the original Set returned, so we need a comparison that is // order agnostic. actualVal = new HashSet<>((Collection) actualVal); m += " (Set comparison)"; } - Assert.assertEquals(m + " diff values for field: " + s, - expectedVal, actualVal); + Assert.assertEquals( + m + + " " + + expectedVal.getClass().getName() + + " diff values for field: " + + s + + " " + + actualVal.getClass().getName(), + expectedVal.toString(), + actualVal.toString()); } } - } diff --git a/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java b/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java index f22c7cdc5ca..ab3a7410f23 100644 --- a/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java +++ b/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java @@ -45,7 +45,7 @@ public class TestDocCollectionWatcher extends SolrCloudTestCase { private static final int CLUSTER_SIZE = 4; - private static final int MAX_WAIT_TIMEOUT = 120; // seconds, only use for await -- NO SLEEP!!! + private static final int MAX_WAIT_TIMEOUT = 20; // seconds, only use for await -- NO SLEEP!!! private ExecutorService executor = null; @@ -86,7 +86,7 @@ private void waitFor(String message, long timeout, TimeUnit unit, Callable= 0x01 && code <= 0x7F) - os.write(code); + if (code >= 0x01 && code <= 0x7F) os.write(code); else if (((code >= 0x80) && (code <= 0x7FF)) || code == 0) { os.write(0xC0 | (code >> 6)); os.write(0x80 | (code & 0x3F)); @@ -90,4 +88,12 @@ else if (((code >= 0x80) && (code <= 0x7FF)) || code == 0) { } } } + + private static void writeVInt(int i, FastOutputStream out) throws IOException { + while ((i & ~0x7F) != 0) { + out.writeByte((byte) ((i & 0x7f) | 0x80)); + i >>>= 7; + } + out.writeByte((byte) i); + } } diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestFastJavabinDecoder.java b/solr/solrj/src/test/org/apache/solr/common/util/TestFastJavabinDecoder.java index 3ea5f44c56a..543e8361b81 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestFastJavabinDecoder.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestFastJavabinDecoder.java @@ -25,7 +25,6 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; - import org.apache.commons.io.IOUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.FastStreamingDocsCallback; @@ -41,38 +40,37 @@ public void testTagRead() throws Exception { BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS(); FastOutputStream faos = FastOutputStream.wrap(baos); - try (JavaBinCodec codec = new JavaBinCodec(faos, null)) { - codec.writeVal(10); - codec.writeVal(100); - codec.writeVal("Hello!"); + try (JavaBinCodec codec = new JavaBinCodec(faos, null, true)) { + JavaBinCodec.writeVal(codec, 10); + JavaBinCodec.writeVal(codec, 100); + JavaBinCodec.writeVal(codec, "Hello!"); } faos.flushBuffer(); faos.close(); - FastInputStream fis = new 
FastInputStream(null, baos.getbuf(), 0, baos.size()); try (FastJavaBinDecoder.StreamCodec scodec = new FastJavaBinDecoder.StreamCodec(fis)) { scodec.start(); Tag tag = scodec.getTag(); assertEquals(Tag._SINT, tag); - assertEquals(10, scodec.readSmallInt(scodec.dis)); + assertEquals(10, JavaBinCodec.readSmallInt(scodec)); tag = scodec.getTag(); assertEquals(Tag._SINT, tag); - assertEquals(100, scodec.readSmallInt(scodec.dis)); + assertEquals(100, JavaBinCodec.readSmallInt(scodec)); tag = scodec.getTag(); assertEquals(Tag._STR, tag); - assertEquals("Hello!", scodec.readStr(fis)); + assertEquals("Hello!", JavaBinCodec.readStr(scodec)); } } public void testSimple() throws IOException { - String sampleObj = "{k : v , " + - "mapk : {k1: v1, k2 : [v2_1 , v2_2 ]}," + - "listk : [ 1, 2, 3 ]," + - "maps : [ {id: kov1}, {id : kov2} ,{id:kov3 , longv : 234} ]," + - "}"; - + String sampleObj = + "{k : v , " + + "mapk : {k1: v1, k2 : [v2_1 , v2_2 ]}," + + "listk : [ 1, 2, 3 ]," + + "maps : [ {id: kov1}, {id : kov2} ,{id:kov3 , longv : 234} ]," + + "}"; @SuppressWarnings({"rawtypes"}) Map m = (Map) Utils.fromJSONString(sampleObj); @@ -87,37 +85,47 @@ public void testSimple() throws IOException { m2 = (Map) jbc.unmarshal(new FastInputStream(null, baos.getbuf(), 0, baos.size())); } @SuppressWarnings({"rawtypes"}) - LinkedHashMap fastMap = (LinkedHashMap) new FastJavaBinDecoder() - .withInputStream(new FastInputStream(null, baos.getbuf(), 0, baos.size())) - .decode(FastJavaBinDecoder.getEntryListener()); - assertEquals(Utils.writeJson(m2, new StringWriter(), true).toString(), + LinkedHashMap fastMap = + (LinkedHashMap) + new FastJavaBinDecoder() + .withInputStream(new FastInputStream(null, baos.getbuf(), 0, baos.size())) + .decode(FastJavaBinDecoder.getEntryListener()); + assertEquals( + Utils.writeJson(m2, new StringWriter(), true).toString(), Utils.writeJson(fastMap, new StringWriter(), true).toString()); @SuppressWarnings({"unchecked", "rawtypes"}) - Object newMap = new FastJavaBinDecoder() - .withInputStream(new FastInputStream(null, baos.getbuf(), 0, baos.size())) - .decode(e -> { - e.listenContainer(new LinkedHashMap<>(), e_ -> { - Map rootMap = (Map) e_.ctx(); - if (e_.type() == DataEntry.Type.ENTRY_ITER) { - e_.listenContainer(rootMap.computeIfAbsent(e_.name(), o -> new ArrayList<>()), - FastJavaBinDecoder.getEntryListener()); - } else if (e_.type() == DataEntry.Type.KEYVAL_ITER) { - e_.listenContainer(rootMap.computeIfAbsent(e_.name(), o -> new LinkedHashMap<>()), e1 -> { - Map m1 = (Map) e1.ctx(); - if ("k1".equals(e1.name())) { - m1.put(e1.name(), e1.val().toString()); - } - //eat up k2 - }); - } else if (e_.type() == DataEntry.Type.STR) { - rootMap.put(e_.name(), e_.val().toString()); - } - - }); - }); + Object newMap = + new FastJavaBinDecoder() + .withInputStream(new FastInputStream(null, baos.getbuf(), 0, baos.size())) + .decode( + e -> { + e.listenContainer( + new LinkedHashMap<>(), + e_ -> { + Map rootMap = (Map) e_.ctx(); + if (e_.type() == DataEntry.Type.ENTRY_ITER) { + e_.listenContainer( + rootMap.computeIfAbsent(e_.name(), o -> new ArrayList<>()), + FastJavaBinDecoder.getEntryListener()); + } else if (e_.type() == DataEntry.Type.KEYVAL_ITER) { + e_.listenContainer( + rootMap.computeIfAbsent(e_.name(), o -> new LinkedHashMap<>()), + e1 -> { + Map m1 = (Map) e1.ctx(); + if ("k1".equals(e1.name())) { + m1.put(e1.name(), e1.val().toString()); + } + // eat up k2 + }); + } else if (e_.type() == DataEntry.Type.STR) { + rootMap.put(e_.name(), e_.val().toString()); + } + }); + }); ((Map) 
m2.get("mapk")).remove("k2"); - assertEquals(Utils.writeJson(m2, new StringWriter(), true).toString(), + assertEquals( + Utils.writeJson(m2, new StringWriter(), true).toString(), Utils.writeJson(newMap, new StringWriter(), true).toString()); } @@ -134,56 +142,58 @@ public void testFastJavabinStreamingDecoder() throws IOException { list = (SolrDocumentList) o.get("response"); } - System.out.println(" " + list.getNumFound() + " , " + list.getStart() + " , " + list.getMaxScore()); + System.out.println( + " " + list.getNumFound() + " , " + list.getStart() + " , " + list.getMaxScore()); class Pojo { long _idx; CharSequence id; boolean inStock; float price; + @SuppressWarnings({"rawtypes"}) List children; } - StreamingBinaryResponseParser parser = new StreamingBinaryResponseParser(new FastStreamingDocsCallback() { - - @Override - public Object initDocList(Long numFound, Long start, Float maxScore) { - assertEquals((Long) list.getNumFound(), numFound); - assertEquals((Long) list.getStart(), start); - assertEquals(list.getMaxScore(), maxScore); - return new int[1]; - } - - @Override - public Object startDoc(Object docListObj) { - Pojo pojo = new Pojo(); - pojo._idx = ((int[]) docListObj)[0]++; - return pojo; - } - - @Override - public void field(DataEntry field, Object docObj) { - Pojo pojo = (Pojo) docObj; - if ("id".equals(field.name())) { - pojo.id = ((Utf8CharSequence) field.val()).clone(); - } else if (field.type() == DataEntry.Type.BOOL && "inStock".equals(field.name())) { - pojo.inStock = field.boolVal(); - } else if (field.type() == DataEntry.Type.FLOAT && "price".equals(field.name())) { - pojo.price = field.floatVal(); - } - - } - - @Override - public void endDoc(Object docObj) { - Pojo pojo = (Pojo) docObj; - SolrDocument doc = list.get((int) pojo._idx); - assertEquals(doc.get("id"), pojo.id.toString()); - if (doc.get("inStock") != null) - assertEquals(doc.get("inStock"), pojo.inStock); - if (doc.get("price") != null) - assertEquals((Float) doc.get("price"), pojo.price, 0.001); - } - }); + StreamingBinaryResponseParser parser = + new StreamingBinaryResponseParser( + new FastStreamingDocsCallback() { + + @Override + public Object initDocList(Long numFound, Long start, Float maxScore) { + assertEquals((Long) list.getNumFound(), numFound); + assertEquals((Long) list.getStart(), start); + assertEquals(list.getMaxScore(), maxScore); + return new int[1]; + } + + @Override + public Object startDoc(Object docListObj) { + Pojo pojo = new Pojo(); + pojo._idx = ((int[]) docListObj)[0]++; + return pojo; + } + + @Override + public void field(DataEntry field, Object docObj) { + Pojo pojo = (Pojo) docObj; + if ("id".equals(field.name())) { + pojo.id = ((Utf8CharSequence) field.val()).clone(); + } else if (field.type() == DataEntry.Type.BOOL && "inStock".equals(field.name())) { + pojo.inStock = field.boolVal(); + } else if (field.type() == DataEntry.Type.FLOAT && "price".equals(field.name())) { + pojo.price = field.floatVal(); + } + } + + @Override + public void endDoc(Object docObj) { + Pojo pojo = (Pojo) docObj; + SolrDocument doc = list.get((int) pojo._idx); + assertEquals(doc.get("id"), pojo.id.toString()); + if (doc.get("inStock") != null) assertEquals(doc.get("inStock"), pojo.inStock); + if (doc.get("price") != null) + assertEquals((Float) doc.get("price"), pojo.price, 0.001); + } + }); parser.processResponse(new FastInputStream(null, baos.getbuf(), 0, baos.size()), null); } @@ -203,7 +213,7 @@ public void testParsingWithChildDocs() throws IOException { SimpleOrderedMap orderedMap = new 
SimpleOrderedMap<>(); orderedMap.add("response", sdocs); - BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS(); + BytesOutputStream baos = new BytesOutputStream(); try (JavaBinCodec jbc = new JavaBinCodec()) { jbc.marshal(orderedMap, baos); } @@ -224,7 +234,7 @@ public void compare(SolrDocument d) { assertEquals(d.getChildDocumentCount(), children.size()); @SuppressWarnings({"unchecked"}) List l = (List) d.getFieldValue("longs"); - if(l != null){ + if (l != null) { assertNotNull(longs); for (int i = 0; i < l.size(); i++) { Long v = l.get(i); @@ -236,62 +246,60 @@ public void compare(SolrDocument d) { for (int i = 0; i < childDocuments.size(); i++) { children.get(i).compare(childDocuments.get(i)); } - } - } List l = new ArrayList<>(); - StreamingBinaryResponseParser binaryResponseParser = new StreamingBinaryResponseParser(new FastStreamingDocsCallback() { - - @Override - public Object initDocList(Long numFound, Long start, Float maxScore) { - return l; - } - - @Override - @SuppressWarnings({"unchecked"}) - public Object startDoc(Object docListObj) { - Pojo pojo = new Pojo(); - ((List) docListObj).add(pojo); - return pojo; - } - - @Override - public void field(DataEntry field, Object docObj) { - Pojo pojo = (Pojo) docObj; - if (field.name().equals("id")) { - pojo.id = field.strValue(); - } else if (field.name().equals("subject")) { - pojo.subject = field.strValue(); - } else if (field.name().equals("cat")) { - pojo.cat = field.strValue(); - } else if (field.type() == DataEntry.Type.ENTRY_ITER && "longs".equals(field.name())) { - if(useListener[0]){ - field.listenContainer(pojo.longs = new long[field.length()], READLONGS); - } else { - @SuppressWarnings({"unchecked"}) - List longList = (List) field.val(); - pojo.longs = new long[longList.size()]; - for (int i = 0; i < longList.size(); i++) { - pojo.longs[i] = longList.get(i); - - } - - } - } - - } - - - @Override - public Object startChildDoc(Object parentDocObj) { - Pojo parent = (Pojo) parentDocObj; - Pojo child = new Pojo(); - parent.children.add(child); - return child; - } - }); - binaryResponseParser.processResponse(new FastInputStream(null, baos.getbuf(), 0, baos.size()), null); + StreamingBinaryResponseParser binaryResponseParser = + new StreamingBinaryResponseParser( + new FastStreamingDocsCallback() { + + @Override + public Object initDocList(Long numFound, Long start, Float maxScore) { + return l; + } + + @Override + @SuppressWarnings({"unchecked"}) + public Object startDoc(Object docListObj) { + Pojo pojo = new Pojo(); + ((List) docListObj).add(pojo); + return pojo; + } + + @Override + public void field(DataEntry field, Object docObj) { + Pojo pojo = (Pojo) docObj; + if (field.name().equals("id")) { + pojo.id = field.strValue(); + } else if (field.name().equals("subject")) { + pojo.subject = field.strValue(); + } else if (field.name().equals("cat")) { + pojo.cat = field.strValue(); + } else if (field.type() == DataEntry.Type.ENTRY_ITER + && "longs".equals(field.name())) { + if (useListener[0]) { + field.listenContainer(pojo.longs = new long[field.length()], READLONGS); + } else { + @SuppressWarnings({"unchecked"}) + List longList = (List) field.val(); + pojo.longs = new long[longList.size()]; + for (int i = 0; i < longList.size(); i++) { + pojo.longs[i] = longList.get(i); + } + } + } + } + + @Override + public Object startChildDoc(Object parentDocObj) { + Pojo parent = (Pojo) parentDocObj; + Pojo child = new Pojo(); + parent.children.add(child); + return child; + } + }); + binaryResponseParser.processResponse( + new 
FastInputStream(null, baos.toBytes(), 0, baos.size()), null); for (int i = 0; i < sdocs.size(); i++) { l.get(i).compare(sdocs.get(i)); } @@ -299,18 +307,17 @@ public Object startChildDoc(Object parentDocObj) { l.clear(); useListener[0] = false; - binaryResponseParser.processResponse(new FastInputStream(null, baos.getbuf(), 0, baos.size()), null); + binaryResponseParser.processResponse( + new FastInputStream(null, baos.toBytes(), 0, baos.size()), null); for (int i = 0; i < sdocs.size(); i++) { l.get(i).compare(sdocs.get(i)); } - - } - static final DataEntry.EntryListener READLONGS = e -> { - if (e.type() != DataEntry.Type.LONG) return; - long[] array = (long[]) e.ctx(); - array[(int) e.index()] = e.longVal(); - - }; + static final DataEntry.EntryListener READLONGS = + e -> { + if (e.type() != DataEntry.Type.LONG) return; + long[] array = (long[]) e.ctx(); + array[(int) e.index()] = e.longVal(); + }; } diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java b/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java index 9f789215ba2..ee1477229dc 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java @@ -16,19 +16,6 @@ */ package org.apache.solr.common.util; -import org.apache.commons.io.IOUtils; -import org.apache.lucene.util.TestUtil; -import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.common.EnumFieldValue; -import org.apache.solr.common.SolrDocument; -import org.apache.solr.common.SolrDocumentList; -import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.common.SolrInputField; -import org.apache.solr.util.ConcurrentLRUCache; -import org.apache.solr.util.RTimer; -import org.junit.Test; -import org.noggit.CharArr; - import java.io.BufferedOutputStream; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -43,14 +30,29 @@ import java.util.List; import java.util.Map; import java.util.Random; +import org.apache.commons.io.IOUtils; +import org.apache.lucene.util.TestUtil; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.EnumFieldValue; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; +import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.SolrInputField; +import org.apache.solr.util.ConcurrentLRUCache; +import org.apache.solr.util.RTimer; +import org.junit.Test; +import org.noggit.CharArr; public class TestJavaBinCodec extends SolrTestCaseJ4 { private static final String SOLRJ_JAVABIN_BACKCOMPAT_BIN = "/solrj/javabin_backcompat.bin"; - private static final String BIN_FILE_LOCATION = "./solr/solrj/src/test-files/solrj/javabin_backcompat.bin"; + private static final String BIN_FILE_LOCATION = + "./solr/solrj/src/test-files/solrj/javabin_backcompat.bin"; - private static final String SOLRJ_JAVABIN_BACKCOMPAT_BIN_CHILD_DOCS = "/solrj/javabin_backcompat_child_docs.bin"; - private static final String BIN_FILE_LOCATION_CHILD_DOCS = "./solr/solrj/src/test-files/solrj/javabin_backcompat_child_docs.bin"; + private static final String SOLRJ_JAVABIN_BACKCOMPAT_BIN_CHILD_DOCS = + "/solrj/javabin_backcompat_child_docs.bin"; + private static final String BIN_FILE_LOCATION_CHILD_DOCS = + "./solr/solrj/src/test-files/solrj/javabin_backcompat_child_docs.bin"; private static final String SOLRJ_DOCS_1 = "/solrj/docs1.xml"; private static final String SOLRJ_DOCS_2 = "/solrj/sampleClusteringResponse.xml"; @@ -58,9 +60,11 @@ public 
class TestJavaBinCodec extends SolrTestCaseJ4 { public void testStrings() throws Exception { for (int i = 0; i < 10000 * RANDOM_MULTIPLIER; i++) { String s = TestUtil.randomUnicodeString(random()); - try (JavaBinCodec jbcO = new JavaBinCodec(); ByteArrayOutputStream os = new ByteArrayOutputStream()) { + try (JavaBinCodec jbcO = new JavaBinCodec(); + ByteArrayOutputStream os = new ByteArrayOutputStream()) { jbcO.marshal(s, os); - try (JavaBinCodec jbcI = new JavaBinCodec(); ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray())) { + try (JavaBinCodec jbcI = new JavaBinCodec(); + ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray())) { Object o = jbcI.unmarshal(is); assertEquals(s, o); } @@ -72,14 +76,10 @@ public void testReadAsCharSeq() throws Exception { List types = new ArrayList<>(); SolrInputDocument idoc = new SolrInputDocument(); idoc.addField("foo", "bar"); - idoc.addField("foos", Arrays.asList("bar1","bar2")); + idoc.addField("foos", Arrays.asList("bar1", "bar2")); idoc.addField("enumf", new EnumFieldValue(1, "foo")); types.add(idoc); - compareObjects( - (List) getObject(getBytes(types, true)), - (List) types - ); - + compareObjects((List) getObject(getBytes(types, true)), (List) types); } public static SolrDocument generateSolrDocumentWithChildDocs() { @@ -108,7 +108,7 @@ public static SolrDocument generateSolrDocumentWithChildDocs() { private List generateAllDataTypes() { List types = new ArrayList<>(); - types.add(null); //NULL + types.add(null); // NULL types.add(true); types.add(false); types.add((byte) 1); @@ -142,14 +142,14 @@ private List generateAllDataTypes() { solrDocs.add(0, doc); types.add(solrDocs); - types.add(new byte[] {1,2,3,4,5}); + types.add(new byte[] {1, 2, 3, 4, 5}); // TODO? // List list = new ArrayList(); // list.add("one"); // types.add(list.iterator()); - types.add((byte) 15); //END + types.add((byte) 15); // END SolrInputDocument idoc = new SolrInputDocument(); idoc.addField("foo", "bar"); @@ -164,9 +164,9 @@ private List generateAllDataTypes() { types.add(new EnumFieldValue(1, "foo")); - types.add(map.entrySet().iterator().next()); //Map.Entry + types.add(map.entrySet().iterator().next()); // Map.Entry - types.add((byte) (1 << 5)); //TAG_AND_LEN + types.add((byte) (1 << 5)); // TAG_AND_LEN types.add("foo"); types.add(1); @@ -185,13 +185,14 @@ private List generateAllDataTypes() { @Test public void testBackCompat() throws IOException { - try (InputStream is = getClass().getResourceAsStream(SOLRJ_JAVABIN_BACKCOMPAT_BIN); JavaBinCodec javabin = new JavaBinCodec(){ - @Override - public List readIterator(DataInputInputStream fis) throws IOException { - return super.readIterator(fis); - } - };) - { + try (InputStream is = getClass().getResourceAsStream(SOLRJ_JAVABIN_BACKCOMPAT_BIN); + JavaBinCodec javabin = + new JavaBinCodec() { + @Override + public List readIterator(JavaBinCodec javaBinCodec) throws IOException { + return super.readIterator(javaBinCodec); + } + }; ) { @SuppressWarnings({"unchecked"}) List unmarshaledObj = (List) javabin.unmarshal(is); List matchObj = generateAllDataTypes(); @@ -199,7 +200,6 @@ public List readIterator(DataInputInputStream fis) throws IOException { } catch (IOException e) { throw e; } - } private void compareObjects(List unmarshaledObj, List matchObj) { @@ -210,30 +210,33 @@ private void compareObjects(List unmarshaledObj, List matchObj) { byte[] b1 = (byte[]) unmarshaledObj.get(i); byte[] b2 = (byte[]) matchObj.get(i); assertTrue(Arrays.equals(b1, b2)); - } else if (unmarshaledObj.get(i) 
instanceof SolrDocument && matchObj.get(i) instanceof SolrDocument) { + } else if (unmarshaledObj.get(i) instanceof SolrDocument + && matchObj.get(i) instanceof SolrDocument) { assertTrue(compareSolrDocument(unmarshaledObj.get(i), matchObj.get(i))); - } else if (unmarshaledObj.get(i) instanceof SolrDocumentList && matchObj.get(i) instanceof SolrDocumentList) { + } else if (unmarshaledObj.get(i) instanceof SolrDocumentList + && matchObj.get(i) instanceof SolrDocumentList) { assertTrue(compareSolrDocumentList(unmarshaledObj.get(i), matchObj.get(i))); - } else if (unmarshaledObj.get(i) instanceof SolrInputDocument && matchObj.get(i) instanceof SolrInputDocument) { + } else if (unmarshaledObj.get(i) instanceof SolrInputDocument + && matchObj.get(i) instanceof SolrInputDocument) { assertTrue(compareSolrInputDocument(unmarshaledObj.get(i), matchObj.get(i))); - } else if (unmarshaledObj.get(i) instanceof SolrInputField && matchObj.get(i) instanceof SolrInputField) { + } else if (unmarshaledObj.get(i) instanceof SolrInputField + && matchObj.get(i) instanceof SolrInputField) { assertTrue(assertSolrInputFieldEquals(unmarshaledObj.get(i), matchObj.get(i))); } else { assertEquals(unmarshaledObj.get(i), matchObj.get(i)); } - } } @Test public void testBackCompatForSolrDocumentWithChildDocs() throws IOException { - try (JavaBinCodec javabin = new JavaBinCodec(){ - @Override - public List readIterator(DataInputInputStream fis) throws IOException { - return super.readIterator(fis); - } - };) - { + try (JavaBinCodec javabin = + new JavaBinCodec() { + @Override + public List readIterator(JavaBinCodec javaBinCodec) throws IOException { + return super.readIterator(javaBinCodec); + } + }; ) { InputStream is = getClass().getResourceAsStream(SOLRJ_JAVABIN_BACKCOMPAT_BIN_CHILD_DOCS); SolrDocument sdoc = (SolrDocument) javabin.unmarshal(is); SolrDocument matchSolrDoc = generateSolrDocumentWithChildDocs(); @@ -245,7 +248,8 @@ public List readIterator(DataInputInputStream fis) throws IOException { @Test public void testForwardCompat() throws IOException { - try (JavaBinCodec javabin = new JavaBinCodec(); ByteArrayOutputStream os = new ByteArrayOutputStream()) { + try (JavaBinCodec javabin = new JavaBinCodec(); + ByteArrayOutputStream os = new ByteArrayOutputStream()) { Object data = generateAllDataTypes(); try { @@ -255,7 +259,9 @@ public void testForwardCompat() throws IOException { InputStream is = getClass().getResourceAsStream(SOLRJ_JAVABIN_BACKCOMPAT_BIN); byte[] currentFormatBytes = IOUtils.toByteArray(is); - for (int i = 1; i < currentFormatBytes.length; i++) {//ignore the first byte. It is version information + for (int i = 1; + i < currentFormatBytes.length; + i++) { // ignore the first byte. It is version information assertEquals(newFormatBytes[i], currentFormatBytes[i]); } @@ -268,14 +274,17 @@ public void testForwardCompat() throws IOException { @Test public void testForwardCompatForSolrDocumentWithChildDocs() throws IOException { SolrDocument sdoc = generateSolrDocumentWithChildDocs(); - try (JavaBinCodec javabin = new JavaBinCodec(); ByteArrayOutputStream os = new ByteArrayOutputStream()) { + try (JavaBinCodec javabin = new JavaBinCodec(); + ByteArrayOutputStream os = new ByteArrayOutputStream()) { javabin.marshal(sdoc, os); byte[] newFormatBytes = os.toByteArray(); InputStream is = getClass().getResourceAsStream(SOLRJ_JAVABIN_BACKCOMPAT_BIN_CHILD_DOCS); byte[] currentFormatBytes = IOUtils.toByteArray(is); - for (int i = 1; i < currentFormatBytes.length; i++) {//ignore the first byte. 
It is version information + for (int i = 1; + i < currentFormatBytes.length; + i++) { // ignore the first byte. It is version information assertEquals(newFormatBytes[i], currentFormatBytes[i]); } } catch (IOException e) { @@ -286,10 +295,7 @@ public void testForwardCompatForSolrDocumentWithChildDocs() throws IOException { @Test public void testAllTypes() throws IOException { List obj = generateAllDataTypes(); - compareObjects( - (List) getObject(getBytes(obj)), - (List) obj - ); + compareObjects((List) getObject(getBytes(obj)), (List) obj); } @Test @@ -301,40 +307,56 @@ public void testReadMapEntryTextStreamSource() throws IOException { Map.Entry entryFromTextDoc2_clone = getMapFromJavaBinCodec(SOLRJ_DOCS_2); // exactly same document read twice should have same content - assertEquals ("text-doc1 exactly same document read twice should have same content",entryFromTextDoc1,entryFromTextDoc1_clone); + assertEquals( + "text-doc1 exactly same document read twice should have same content", + entryFromTextDoc1, + entryFromTextDoc1_clone); // doc1 and doc2 are 2 text files with different content on line 1 - assertNotEquals ("2 text streams with 2 different contents should be unequal",entryFromTextDoc2,entryFromTextDoc1); + assertNotEquals( + "2 text streams with 2 different contents should be unequal", + entryFromTextDoc2, + entryFromTextDoc1); // exactly same document read twice should have same content - assertEquals ("text-doc2 exactly same document read twice should have same content",entryFromTextDoc2,entryFromTextDoc2_clone); + assertEquals( + "text-doc2 exactly same document read twice should have same content", + entryFromTextDoc2, + entryFromTextDoc2_clone); } @Test - public void testReadMapEntryBinaryStreamSource() throws IOException { + public void testReadMapEntryBinaryStreamSource() throws IOException { // now lets look at binary files - Map.Entry entryFromBinFileA = getMapFromJavaBinCodec(SOLRJ_JAVABIN_BACKCOMPAT_BIN); - Map.Entry entryFromBinFileA_clone = getMapFromJavaBinCodec(SOLRJ_JAVABIN_BACKCOMPAT_BIN); + Map.Entry entryFromBinFileA = + getMapFromJavaBinCodec(SOLRJ_JAVABIN_BACKCOMPAT_BIN); + Map.Entry entryFromBinFileA_clone = + getMapFromJavaBinCodec(SOLRJ_JAVABIN_BACKCOMPAT_BIN); - assertEquals("same map entry references should be equal",entryFromBinFileA,entryFromBinFileA); + assertEquals("same map entry references should be equal", entryFromBinFileA, entryFromBinFileA); // Commenting-out this test as it may have inadvertent effect on someone changing this in future // but keeping this in code to make a point, that even the same exact bin file, - // there could be sub-objects in the key or value of the maps, with types that do not implement equals - // and in these cases equals would fail as these sub-objects would be equated on their memory-references which is highly probbale to be unique + // there could be sub-objects in the key or value of the maps, with types that do not implement + // equals + // and in these cases equals would fail as these sub-objects would be equated on their + // memory-references which is highly probbale to be unique // and hence the top-level map's equals will also fail - // assertNotEquals("2 different references even though from same source are un-equal",entryFromBinFileA,entryFromBinFileA_clone); - + // assertNotEquals("2 different references even though from same source are + // un-equal",entryFromBinFileA,entryFromBinFileA_clone); // read in a different binary file and this should definitely not be equal to the other bi file - Map.Entry 
entryFromBinFileB = getMapFromJavaBinCodec(SOLRJ_JAVABIN_BACKCOMPAT_BIN_CHILD_DOCS); - assertNotEquals("2 different references from 2 different source bin streams should still be unequal",entryFromBinFileA,entryFromBinFileB); + Map.Entry entryFromBinFileB = + getMapFromJavaBinCodec(SOLRJ_JAVABIN_BACKCOMPAT_BIN_CHILD_DOCS); + assertNotEquals( + "2 different references from 2 different source bin streams should still be unequal", + entryFromBinFileA, + entryFromBinFileB); } private Map.Entry getMapFromJavaBinCodec(String fileName) throws IOException { try (InputStream is = getClass().getResourceAsStream(fileName)) { - try (DataInputInputStream dis = new FastInputStream(is)) { - try (JavaBinCodec javabin = new JavaBinCodec()) { - return javabin.readMapEntry(dis); - } + try (JavaBinCodec javabin = new JavaBinCodec()) { + javabin.init(is); + return JavaBinCodec.readMapEntry(javabin); } } } @@ -342,15 +364,18 @@ private Map.Entry getMapFromJavaBinCodec(String fileName) throws private static Object serializeAndDeserialize(Object o) throws IOException { return getObject(getBytes(o)); } + private static byte[] getBytes(Object o) throws IOException { - try (JavaBinCodec javabin = new JavaBinCodec(); ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + try (JavaBinCodec javabin = new JavaBinCodec(); + ByteArrayOutputStream baos = new ByteArrayOutputStream()) { javabin.marshal(o, baos); return baos.toByteArray(); } } private static byte[] getBytes(Object o, boolean readAsCharSeq) throws IOException { - try (JavaBinCodec javabin = new JavaBinCodec(); ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + try (JavaBinCodec javabin = new JavaBinCodec(); + ByteArrayOutputStream baos = new ByteArrayOutputStream()) { javabin.readStringAsCharSeq = readAsCharSeq; javabin.marshal(o, baos); return baos.toByteArray(); @@ -363,10 +388,10 @@ private static Object getObject(byte[] bytes) throws IOException { } } - @Test public void testResponseChildDocuments() throws IOException { - SolrDocument result = (SolrDocument) serializeAndDeserialize(generateSolrDocumentWithChildDocs()); + SolrDocument result = + (SolrDocument) serializeAndDeserialize(generateSolrDocumentWithChildDocs()); assertEquals(2, result.size()); assertEquals("1", result.getFieldValue("id")); assertEquals("parentDocument", result.getFieldValue("subject")); @@ -394,13 +419,14 @@ public void testResponseChildDocuments() throws IOException { assertFalse(grandChildDocuments.get(0).hasChildDocuments()); assertNull(grandChildDocuments.get(0).getChildDocuments()); } + @Test public void testStringCaching() throws Exception { Map m = Map.of("key1", "val1", "key2", "val2"); - byte[] b1 = getBytes(m);//copy 1 - byte[] b2 = getBytes(m);//copy 2 - Map m1 = (Map) getObject(b1); - Map m2 = (Map) getObject(b2); + byte[] b1 = getBytes(m); // copy 1 + byte[] b2 = getBytes(m); // copy 2 + Map m1 = (Map) getObject(b1); + Map m2 = (Map) getObject(b2); List l1 = new ArrayList<>(m1.keySet()); List l2 = new ArrayList<>(m2.keySet()); @@ -409,14 +435,14 @@ public void testStringCaching() throws Exception { assertNotSame(l1.get(0), l2.get(0)); assertNotSame(l1.get(1), l2.get(1)); - JavaBinCodec.StringCache stringCache = new JavaBinCodec.StringCache(new MapBackedCache<>(new HashMap<>())); - + JavaBinCodec.StringCache stringCache = + new JavaBinCodec.StringCache(new MapBackedCache<>(new HashMap<>())); try (JavaBinCodec c1 = new JavaBinCodec(null, stringCache); - JavaBinCodec c2 = new JavaBinCodec(null, stringCache)) { + JavaBinCodec c2 = new JavaBinCodec(null, 
stringCache)) { - m1 = (Map) c1.unmarshal(new ByteArrayInputStream(b1)); - m2 = (Map) c2.unmarshal(new ByteArrayInputStream(b2)); + m1 = (Map) c1.unmarshal(new ByteArrayInputStream(b1)); + m2 = (Map) c2.unmarshal(new ByteArrayInputStream(b2)); l1 = new ArrayList<>(m1.keySet()); l2 = new ArrayList<>(m2.keySet()); @@ -424,8 +450,6 @@ public void testStringCaching() throws Exception { assertEquals(l1, l2); assertSame(l1.get(0), l2.get(0)); assertSame(l1.get(1), l2.get(1)); - - } public void genBinaryFiles() throws IOException { @@ -437,19 +461,18 @@ public void genBinaryFiles() throws IOException { bos.write(out); bos.close(); - //Binary file with child documents + // Binary file with child documents SolrDocument sdoc = generateSolrDocumentWithChildDocs(); fs = new FileOutputStream(new File(BIN_FILE_LOCATION_CHILD_DOCS)); bos = new BufferedOutputStream(fs); bos.write(getBytes(sdoc)); bos.close(); - } private void testPerf() throws InterruptedException { final ArrayList l = new ArrayList<>(); Cache cache = null; - /* cache = new ConcurrentLRUCache(10000, 9000, 10000, 1000, false, true, null){ + /* cache = new ConcurrentLRUCache(10000, 9000, 10000, 1000, false, true, null){ @Override public String put(JavaBinCodec.StringBytes key, String val) { l.add(key); @@ -459,14 +482,14 @@ public String put(JavaBinCodec.StringBytes key, String val) { Runtime.getRuntime().gc(); printMem("before cache init"); - Cache cache1 = new MapBackedCache<>(new HashMap<>()) ; + Cache cache1 = new MapBackedCache<>(new HashMap<>()); final JavaBinCodec.StringCache STRING_CACHE = new JavaBinCodec.StringCache(cache1); -// STRING_CACHE = new JavaBinCodec.StringCache(cache); + // STRING_CACHE = new JavaBinCodec.StringCache(cache); byte[] bytes = new byte[0]; - StringBytes stringBytes = new StringBytes(null,0,0); + StringBytes stringBytes = new StringBytes(null, 0, 0); - for(int i=0;i<10000;i++) { + for (int i = 0; i < 10000; i++) { String s = String.valueOf(random().nextLong()); int end = s.length(); int maxSize = end * 4; @@ -480,85 +503,78 @@ public String put(JavaBinCodec.StringBytes key, String val) { final int ITERS = 1000000; int THREADS = 10; - runInThreads(THREADS, () -> { - StringBytes stringBytes1 = new StringBytes(new byte[0], 0, 0); - for (int i = 0; i < ITERS; i++) { - StringBytes b = l.get(i % l.size()); - stringBytes1.reset(b.bytes, 0, b.bytes.length); - if (STRING_CACHE.get(stringBytes1) == null) throw new RuntimeException("error"); - } - - }); - - + runInThreads( + THREADS, + () -> { + StringBytes stringBytes1 = new StringBytes(new byte[0], 0, 0); + for (int i = 0; i < ITERS; i++) { + StringBytes b = l.get(i % l.size()); + stringBytes1.reset(b.bytes, 0, b.bytes.length); + if (STRING_CACHE.get(stringBytes1) == null) throw new RuntimeException("error"); + } + }); printMem("after cache test"); System.out.println("time taken by LRUCACHE " + timer.getTime()); timer = new RTimer(); - runInThreads(THREADS, () -> { - String a = null; - CharArr arr = new CharArr(); - for (int i = 0; i < ITERS; i++) { - StringBytes sb = l.get(i % l.size()); - arr.reset(); - ByteUtils.UTF8toUTF16(sb.bytes, 0, sb.bytes.length, arr); - a = arr.toString(); - } - }); + runInThreads( + THREADS, + () -> { + String a = null; + CharArr arr = new CharArr(); + for (int i = 0; i < ITERS; i++) { + StringBytes sb = l.get(i % l.size()); + arr.reset(); + ByteUtils.UTF8toUTF16(sb.bytes, 0, sb.bytes.length, arr); + a = arr.toString(); + } + }); printMem("after new string test"); - System.out.println("time taken by string creation "+ timer.getTime()); - 
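For context on what this benchmark is measuring: JavaBinCodec.StringCache keys fully decoded Strings by their raw UTF-8 bytes, so a key that repeats across documents (such as a field name) pays the UTF-8 to UTF-16 conversion only once and is reused as the same String instance afterwards, which is also what testStringCaching's assertSame checks verify. A minimal self-contained sketch of that idea, using only JDK types; ByteKeyedStringCache and Key are illustrative names, not Solr's API (the real implementation keys a pluggable Cache by StringBytes):

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class ByteKeyedStringCache {
  // Hash key over a window of UTF-8 bytes; lookups can probe without copying.
  private static final class Key {
    final byte[] bytes;
    final int off;
    final int len;
    final int hash;

    Key(byte[] bytes, int off, int len) {
      this.bytes = bytes;
      this.off = off;
      this.len = len;
      int h = 0;
      for (int i = off; i < off + len; i++) h = 31 * h + bytes[i];
      this.hash = h;
    }

    @Override
    public int hashCode() {
      return hash;
    }

    @Override
    public boolean equals(Object o) {
      if (!(o instanceof Key)) return false;
      Key k = (Key) o;
      return k.len == len && Arrays.equals(k.bytes, k.off, k.off + k.len, bytes, off, off + len);
    }
  }

  private final Map<Key, String> cache = new HashMap<>();

  public String get(byte[] utf8, int off, int len) {
    String s = cache.get(new Key(utf8, off, len));
    if (s == null) {
      s = new String(utf8, off, len, StandardCharsets.UTF_8); // decode exactly once
      // Store under a private copy so the cache never pins the caller's buffer.
      byte[] copy = Arrays.copyOfRange(utf8, off, off + len);
      cache.put(new Key(copy, 0, len), s);
    }
    return s;
  }

  public static void main(String[] args) {
    ByteKeyedStringCache c = new ByteKeyedStringCache();
    byte[] payload = "id".getBytes(StandardCharsets.UTF_8);
    String first = c.get(payload, 0, payload.length);
    String second = c.get(payload, 0, payload.length);
    System.out.println(first == second); // true: second lookup reuses the cached instance
  }
}

The tradeoff the timings above explore is exactly this one: a shared cache adds a hash lookup per string but avoids allocating and converting a new String for every occurrence.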
- - + System.out.println("time taken by string creation " + timer.getTime()); } - private static void runInThreads(int count, Runnable runnable) throws InterruptedException { - ArrayList t =new ArrayList<>(); - for(int i=0;i t = new ArrayList<>(); + for (int i = 0; i < count; i++) t.add(new Thread(runnable)); for (Thread thread : t) thread.start(); for (Thread thread : t) thread.join(); } static void printMem(String head) { System.out.println("*************" + head + "***********"); - int mb = 1024*1024; - //Getting the runtime reference from system + int mb = 1024 * 1024; + // Getting the runtime reference from system Runtime runtime = Runtime.getRuntime(); - //Print used memory - System.out.println("Used Memory:" - + (runtime.totalMemory() - runtime.freeMemory()) / mb); - - //Print free memory - System.out.println("Free Memory:" - + runtime.freeMemory() / mb); - + // Print used memory + System.out.println("Used Memory:" + (runtime.totalMemory() - runtime.freeMemory()) / mb); + // Print free memory + System.out.println("Free Memory:" + runtime.freeMemory() / mb); } public static void main(String[] args) throws IOException { - TestJavaBinCodec test = new TestJavaBinCodec(); - test.genBinaryFiles(); -// try { -// doDecodePerf(args); -// } catch (Exception e) { -// throw new RuntimeException(e); -// } + TestJavaBinCodec test = new TestJavaBinCodec(); + test.genBinaryFiles(); + // try { + // doDecodePerf(args); + // } catch (Exception e) { + // throw new RuntimeException(e); + // } } // common-case ascii static String str(Random r, int sz) { StringBuffer sb = new StringBuffer(sz); - for (int i=0; i underlyingCache = cacheSz > 0 ? new ConcurrentLRUCache<>(cacheSz,cacheSz-cacheSz/10,cacheSz,cacheSz/10,false,true,null) : null; // the cache in the first version of the patch was 10000,9000,10000,1000,false,true,null - final JavaBinCodec.StringCache stringCache = underlyingCache==null ? null : new JavaBinCodec.StringCache(underlyingCache); + ConcurrentLRUCache underlyingCache = + cacheSz > 0 + ? new ConcurrentLRUCache<>( + cacheSz, cacheSz - cacheSz / 10, cacheSz, cacheSz / 10, false, true, null) + : null; // the cache in the first version of the patch was + // 10000,9000,10000,1000,false,true,null + final JavaBinCodec.StringCache stringCache = + underlyingCache == null ? 
null : new JavaBinCodec.StringCache(underlyingCache); if (nThreads <= 0) { ret += doDecode(buffers, iter, stringCache); } else { - runInThreads(nThreads, () -> { - try { - doDecode(buffers, iter, stringCache); - } catch (IOException e) { - e.printStackTrace(); - } - }); + runInThreads( + nThreads, + () -> { + try { + doDecode(buffers, iter, stringCache); + } catch (IOException e) { + e.printStackTrace(); + } + }); } - long n = iter * Math.max(1,nThreads); - System.out.println("ret=" + ret + " THROUGHPUT=" + (n*1000 / timer.getTime())); - if (underlyingCache != null) System.out.println("cache: hits=" + underlyingCache.getStats().getCumulativeHits() + " lookups=" + underlyingCache.getStats().getCumulativeLookups() + " size=" + underlyingCache.getStats().getCurrentSize()); + long n = iter * Math.max(1, nThreads); + System.out.println("ret=" + ret + " THROUGHPUT=" + (n * 1000 / timer.getTime())); + if (underlyingCache != null) + System.out.println( + "cache: hits=" + + underlyingCache.getStats().getCumulativeHits() + + " lookups=" + + underlyingCache.getStats().getCumulativeLookups() + + " size=" + + underlyingCache.getStats().getCurrentSize()); } - public static int doDecode(byte[][] buffers, long iter, JavaBinCodec.StringCache stringCache) throws IOException { + public static int doDecode(byte[][] buffers, long iter, JavaBinCodec.StringCache stringCache) + throws IOException { int ret = 0; int bufnum = -1; - InputStream empty = new InputStream() { - @Override - public int read() throws IOException { - return -1; - } - }; + InputStream empty = + new InputStream() { + @Override + public int read() throws IOException { + return -1; + } + }; while (--iter >= 0) { if (++bufnum >= buffers.length) bufnum = 0; @@ -639,7 +672,4 @@ public int read() throws IOException { } return ret; } - } - - diff --git a/solr/solrj/src/test/org/apache/solr/common/util/Utf8CharSequenceTest.java b/solr/solrj/src/test/org/apache/solr/common/util/Utf8CharSequenceTest.java index 24a7ea596c5..69f12ace3b3 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/Utf8CharSequenceTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/Utf8CharSequenceTest.java @@ -103,7 +103,7 @@ public void testUnMarshal() throws IOException { ByteArrayUtf8CharSequence val = (ByteArrayUtf8CharSequence) nl1.get("key" + i); assertEquals(buf, val.getBuf()); String s = val.toString(); - assertTrue(s.startsWith("" + i)); + assertTrue(s + " i=" + i, s.startsWith("" + i)); assertTrue(s, s.endsWith(str)); } diff --git a/solr/test-framework/src/java/org/apache/solr/SolrIgnoredThreadsFilter.java b/solr/test-framework/src/java/org/apache/solr/SolrIgnoredThreadsFilter.java index 3fce31db253..c7a439ab45f 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrIgnoredThreadsFilter.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrIgnoredThreadsFilter.java @@ -16,9 +16,9 @@ */ package org.apache.solr; -import org.apache.lucene.search.TimeLimitingCollector.TimerThread; - import com.carrotsearch.randomizedtesting.ThreadFilter; +import java.lang.Thread.State; +import org.apache.lucene.search.TimeLimitingCollector.TimerThread; /** @@ -36,6 +36,10 @@ public boolean reject(Thread t) { * test-dependent information. 
*/ + if (t.getState().equals(State.TERMINATED)) { + return true; + } + String threadName = t.getName(); if (threadName.equals(TimerThread.THREAD_NAME)) { return true; @@ -55,6 +59,22 @@ if (threadName.startsWith("ForkJoinPool.")) { return true; } + + // load balancer is leaky + if (threadName.startsWith("aliveCheckExecutor")) { + return true; + } + + + // we don't handle zk shutdown well, but these threads are harmless and will shortly go away + if (threadName.startsWith("SessionTracker")) { + return true; + } + + // tools + if (threadName.startsWith("Reference Handler") || threadName.startsWith("Signal Dispatcher") || threadName.startsWith("Monitor") || threadName.startsWith("YJPAgent-RequestListener")) { + return true; + } if (threadName.startsWith("Image Fetcher")) { return true; diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java index b215c69eae4..4b01d32972c 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCase.java @@ -17,11 +17,21 @@ package org.apache.solr; -import java.lang.invoke.MethodHandles; -import java.io.File; -import java.util.regex.Pattern; +import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; +import java.io.File; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.regex.Pattern; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.QuickPatchThreadsFilter; import org.apache.lucene.util.VerifyTestClassNamingConvention; @@ -38,118 +48,220 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; - -import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean; -import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; - /** - * All Solr test cases should derive from this class eventually. This is originally a result of async logging, see: - * SOLR-12055 and associated. To enable async logging, we must gracefully shut down logging. Many Solr tests subclass - * LuceneTestCase. + * All Solr test cases should derive from this class eventually. This is originally a result of + * async logging, see: SOLR-12055 and associated. To enable async logging, we must gracefully shut + * down logging. Many Solr tests subclass LuceneTestCase. * - * Rather than add the cruft from SolrTestCaseJ4 to all the Solr tests that currently subclass LuceneTestCase, - * we'll add the shutdown to this class and subclass it. + *
<p>
Rather than add the cruft from SolrTestCaseJ4 to all the Solr tests that currently subclass + * LuceneTestCase, we'll add the shutdown to this class and subclass it. * - * Other changes that should affect every Solr test case may go here if they don't require the added capabilities in - * SolrTestCaseJ4. + *
<p>
Other changes that should affect every Solr test case may go here if they don't require the + * added capabilities in SolrTestCaseJ4. */ - // ThreadLeakFilters are not additive. Any subclass that requires filters - // other than these must redefine these as well. -@ThreadLeakFilters(defaultFilters = true, filters = { - SolrIgnoredThreadsFilter.class, - QuickPatchThreadsFilter.class -}) -@ThreadLeakLingering(linger = 10000) +// ThreadLeakFilters are not additive. Any subclass that requires filters +// other than these must redefine these as well. +@ThreadLeakFilters( + defaultFilters = true, + filters = {SolrIgnoredThreadsFilter.class, QuickPatchThreadsFilter.class}) +@ThreadLeakLingering(linger = 0) +@ThreadLeakAction(Action.WARN) public class SolrTestCase extends LuceneTestCase { /** * DO NOT REMOVE THIS LOGGER - *
<p>
- * For reasons that aren't 100% obvious, the existence of this logger is neccessary to ensure - * that the logging framework is properly initialized (even if concrete subclasses do not - * themselves initialize any loggers) so that the async logger threads can be properly shutdown - * on completion of the test suite - *
</p>
+ * + *
<p>
For reasons that aren't 100% obvious, the existence of this logger is necessary to ensure + * that the logging framework is properly initialized (even if concrete subclasses do not + * themselves initialize any loggers) so that the async logger threads can be properly shutdown on + * completion of the test suite + * + * @see <a href="https://issues.apache.org/jira/browse/SOLR-14247">SOLR-14247</a> + * @see #shutdownLogger */ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static final Pattern NAMING_CONVENTION_TEST_SUFFIX = Pattern.compile("(.+\\.)([^.]+)(Test)"); + protected static final Pattern NAMING_CONVENTION_TEST_SUFFIX = + Pattern.compile("(.+\\.)([^.]+)(Test)"); - private static final Pattern NAMING_CONVENTION_TEST_PREFIX = Pattern.compile("(.+\\.)(Test)([^.]+)"); + protected static final Pattern NAMING_CONVENTION_TEST_PREFIX = + Pattern.compile("(.+\\.)(Test)([^.]+)"); @ClassRule - public static TestRule solrClassRules = - RuleChain.outerRule(new SystemPropertiesRestoreRule()) - .around( - new VerifyTestClassNamingConvention( - "org.apache.solr.analytics", NAMING_CONVENTION_TEST_SUFFIX)) - .around( - new VerifyTestClassNamingConvention( - "org.apache.solr.ltr", NAMING_CONVENTION_TEST_PREFIX)) - .around(new RevertDefaultThreadHandlerRule()); + public static TestRule solrClassRules = + RuleChain.outerRule(new SystemPropertiesRestoreRule()) + .around( + new VerifyTestClassNamingConvention( + "org.apache.solr.analytics", NAMING_CONVENTION_TEST_SUFFIX)) + .around( + new VerifyTestClassNamingConvention( + "org.apache.solr.ltr", NAMING_CONVENTION_TEST_PREFIX)) + .around(new RevertDefaultThreadHandlerRule()); /** - * Sets the solr.default.confdir system property to the value of - * {@link ExternalPaths#DEFAULT_CONFIGSET} if and only if the system property is not already set, - * and the DEFAULT_CONFIGSET exists and is a readable directory. - *
<p>
- * Logs INFO/WARNing messages as appropriate based on these 2 conditions. - *
<p>
+ * Sets the solr.default.confdir system property to the value of {@link + * ExternalPaths#DEFAULT_CONFIGSET} if and only if the system property is not already set, and the + * DEFAULT_CONFIGSET exists and is a readable directory. + * + *
<p>
Logs INFO/WARNing messages as appropriate based on these 2 conditions. + * * @see SolrDispatchFilter#SOLR_DEFAULT_CONFDIR_ATTRIBUTE */ @BeforeClass public static void setDefaultConfigDirSysPropIfNotSet() { - final String existingValue = System.getProperty(SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE); + final String existingValue = + System.getProperty(SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE); if (null != existingValue) { - log.info("Test env includes configset dir system property '{}'='{}'", SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE, existingValue); + log.info( + "Test env includes configset dir system property '{}'='{}'", + SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE, + existingValue); return; } final File extPath = new File(ExternalPaths.DEFAULT_CONFIGSET); - if (extPath.canRead(/* implies exists() */) && extPath.isDirectory()) { - log.info("Setting '{}' system property to test-framework derived value of '{}'", - SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE, ExternalPaths.DEFAULT_CONFIGSET); + if (extPath.canRead(/* implies exists() */ ) && extPath.isDirectory()) { + log.info( + "Setting '{}' system property to test-framework derived value of '{}'", + SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE, + ExternalPaths.DEFAULT_CONFIGSET); assert null == existingValue; - System.setProperty(SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE, ExternalPaths.DEFAULT_CONFIGSET); + System.setProperty( + SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE, ExternalPaths.DEFAULT_CONFIGSET); } else { - log.warn("System property '{}' is not already set, but test-framework derived value ('{}') either " + - "does not exist or is not a readable directory, you may need to set the property yourself " + - "for tests to run properly", - SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE, ExternalPaths.DEFAULT_CONFIGSET); + log.warn( + "System property '{}' is not already set, but test-framework derived value ('{}') either " + + "does not exist or is not a readable directory, you may need to set the property yourself " + + "for tests to run properly", + SolrDispatchFilter.SOLR_DEFAULT_CONFDIR_ATTRIBUTE, + ExternalPaths.DEFAULT_CONFIGSET); } } - - /** - * Special hook for sanity checking if any tests trigger failures when an - * Assumption failure occures in a {@link BeforeClass} method + + /** + * Special hook for sanity checking if any tests trigger failures when an Assumption failure + * occures in a {@link BeforeClass} method + * * @lucene.internal */ @BeforeClass public static void checkSyspropForceBeforeClassAssumptionFailure() { // ant test -Dargs="-Dtests.force.assumption.failure.beforeclass=true" final String PROP = "tests.force.assumption.failure.beforeclass"; - assumeFalse(PROP + " == true", - systemPropertyAsBoolean(PROP, false)); + assumeFalse(PROP + " == true", systemPropertyAsBoolean(PROP, false)); } - - /** - * Special hook for sanity checking if any tests trigger failures when an - * Assumption failure occures in a {@link Before} method + + /** + * Special hook for sanity checking if any tests trigger failures when an Assumption failure + * occures in a {@link Before} method + * * @lucene.internal */ @Before public void checkSyspropForceBeforeAssumptionFailure() { // ant test -Dargs="-Dtests.force.assumption.failure.before=true" final String PROP = "tests.force.assumption.failure.before"; - assumeFalse(PROP + " == true", - systemPropertyAsBoolean(PROP, false)); + assumeFalse(PROP + " == true", systemPropertyAsBoolean(PROP, false)); } - + @AfterClass - public 
static void shutdownLogger() throws Exception { + public static void shutdownLogger() { StartupLoggingUtils.shutdown(); } + + public static void interruptThreadsOnTearDown() { + + log.info("Checking leaked threads after test"); + + ThreadGroup tg = Thread.currentThread().getThreadGroup(); + + Set> threadSet = Thread.getAllStackTraces().entrySet(); + if (log.isInfoEnabled()) { + log.info("thread count={}", threadSet.size()); + } + Collection waitThreads = new ArrayList<>(); + for (Map.Entry threadEntry : threadSet) { + Thread thread = threadEntry.getKey(); + ThreadGroup threadGroup = thread.getThreadGroup(); + if (threadGroup != null + && !(thread.getName().startsWith("SUITE") + && thread.getName().charAt(thread.getName().length() - 1) == ']') + && !"main".equals(thread.getName())) { + if (log.isTraceEnabled()) { + log.trace("thread is {} state={}", thread.getName(), thread.getState()); + } + if (threadGroup.getName().equals(tg.getName()) && interrupt(thread)) { + waitThreads.add(thread); + } + } + + while (true) { + boolean cont = + threadGroup != null && threadGroup.getParent() != null && !( + thread.getName().startsWith("SUITE") + && thread.getName().charAt(thread.getName().length() - 1) == ']') + && !"main".equals(thread.getName()); + if (!cont) break; + threadGroup = threadGroup.getParent(); + + if (threadGroup.getName().equals(tg.getName())) { + if (log.isTraceEnabled()) { + log.trace("thread is {} state={}", thread.getName(), thread.getState()); + } + if (interrupt(thread)) { + waitThreads.add(thread); + } + } + } + } + + for (Thread thread : waitThreads) { + int cnt = 0; + do { + if (log.isDebugEnabled()) { + log.debug("waiting on {} {}", thread.getName(), thread.getState()); + } + thread.interrupt(); + try { + thread.join(5); + } catch (InterruptedException e) { + // ignore + } + } while (cnt++ < 20); + } + + waitThreads.clear(); + } + + private static boolean interrupt(Thread thread) { + + if (thread.getName().startsWith("Reference Handler") + || thread.getName().startsWith("Signal Dispatcher") + || thread.getName().startsWith("Monitor") + || thread.getName().startsWith("YJPAgent-RequestListener")) { + return false; + } + + if (thread.getName().startsWith("ForkJoinPool.") + || thread.getName().startsWith("Log4j2-") + || thread.getName().startsWith("SessionTracker")) { + return false; + } + + // pool is forkjoin + if (thread.getName().contains("pool-")) { + return false; + } + + Thread.State state = thread.getState(); + + if (state == Thread.State.TERMINATED) { + return false; + } + if (log.isDebugEnabled()) { + log.debug("Interrupt on {} state={}", thread.getName(), thread.getState()); + } + thread.interrupt(); + return true; + } } diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java index 3f4e62a3dc0..c9853526438 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java @@ -192,7 +192,7 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase { "java.lang.reflect.", "com.carrotsearch.randomizedtesting.", }); - + public static final String DEFAULT_TEST_COLLECTION_NAME = "collection1"; public static final String DEFAULT_TEST_CORENAME = DEFAULT_TEST_COLLECTION_NAME; protected static final String CORE_PROPERTIES_FILENAME = "core.properties"; @@ -204,9 +204,9 @@ public abstract class SolrTestCaseJ4 extends SolrTestCase { protected static String coreName = DEFAULT_TEST_CORENAME; public static int 
DEFAULT_CONNECTION_TIMEOUT = 60000; // default socket connection timeout in ms - + private static String initialRootLogLevel; - + protected volatile static ExecutorService testExecutor; protected void writeCoreProperties(Path coreDirectory, String corename) throws IOException { @@ -223,13 +223,13 @@ public static void writeCoreProperties(Path coreDirectory, Properties properties log.info("Writing core.properties file to {}", coreDirectory); Files.createDirectories(coreDirectory); try (Writer writer = - new OutputStreamWriter(Files.newOutputStream(coreDirectory.resolve(CORE_PROPERTIES_FILENAME)), Charset.forName("UTF-8"))) { + new OutputStreamWriter(Files.newOutputStream(coreDirectory.resolve(CORE_PROPERTIES_FILENAME)), Charset.forName("UTF-8"))) { properties.store(writer, testname); } } protected void assertExceptionThrownWithMessageContaining(Class expectedType, - List expectedStrings, ThrowingRunnable runnable) { + List expectedStrings, ThrowingRunnable runnable) { Throwable thrown = expectThrows(expectedType, runnable); if (expectedStrings != null) { @@ -250,7 +250,7 @@ protected void assertExceptionThrownWithMessageContaining(Class(), @@ -291,8 +291,8 @@ public static void setupTestCases() { // non-null after calling setupTestCases() initAndGetDataDir(); - System.setProperty("solr.zkclienttimeout", "90000"); - + System.setProperty("solr.zkclienttimeout", "90000"); + System.setProperty("solr.httpclient.retries", "1"); System.setProperty("solr.retries.on.forward", "1"); System.setProperty("solr.retries.to.followers", "1"); @@ -300,11 +300,10 @@ public static void setupTestCases() { System.setProperty("solr.v2RealPath", "true"); System.setProperty("zookeeper.forceSync", "no"); System.setProperty("jetty.testMode", "true"); - System.setProperty("enable.update.log", Boolean.toString(usually())); + System.setProperty("enable.update.log", usually() ? 
"true" : "false"); System.setProperty("tests.shardhandler.randomSeed", Long.toString(random().nextLong())); System.setProperty("solr.clustering.enabled", "false"); System.setProperty("solr.cloud.wait-for-updates-with-stale-state-pause", "500"); - System.setProperty("solr.filterCache.async", String.valueOf(random().nextBoolean())); System.setProperty("pkiHandlerPrivateKeyPath", SolrTestCaseJ4.class.getClassLoader().getResource("cryptokeys/priv_key512_pkcs8.pem").toExternalForm()); System.setProperty("pkiHandlerPublicKeyPath", SolrTestCaseJ4.class.getClassLoader().getResource("cryptokeys/pub_key512.der").toExternalForm()); @@ -365,7 +364,7 @@ public static void teardownTestCases() throws Exception { } catch (Exception e) { log.error("Error deleting SolrCore."); } - + if (null != testExecutor) { ExecutorUtil.shutdownAndAwaitTermination(testExecutor); testExecutor = null; @@ -413,7 +412,7 @@ public static void teardownTestCases() throws Exception { StartupLoggingUtils.changeLogLevel(initialRootLogLevel); } } - + /** * a "dead" host, if you try to connect to it, it will likely fail fast * please consider using mocks and not real networking to simulate failure @@ -442,14 +441,14 @@ public static void assumeWorkingMockito() { fail("ByteBuddy and Mockito are not available on classpath: " + e.toString()); } } - + /** * @return null if ok else error message */ public static String clearObjectTrackerAndCheckEmpty(int waitSeconds) { return clearObjectTrackerAndCheckEmpty(waitSeconds, false); } - + /** * @return null if ok else error message */ @@ -477,12 +476,12 @@ public static String clearObjectTrackerAndCheckEmpty(int waitSeconds, boolean tr } catch (InterruptedException e) { break; } } while (retries++ < waitSeconds); - - + + log.info("------------------------------------------------------- Done waiting for tracked resources to be released"); - + ObjectReleaseTracker.clear(); - + return result; } @@ -517,7 +516,7 @@ public void restoreMethodLogLevels() { LogLevel.Configurer.restoreLogLevels(savedMethodLogLevels); savedMethodLogLevels.clear(); } - + protected static boolean isSSLMode() { return sslConfig != null && sslConfig.isSSLMode(); } @@ -552,8 +551,8 @@ public static void resetFactory() throws Exception { private static SSLTestConfig buildSSLConfig() { SSLRandomizer sslRandomizer = - SSLRandomizer.getSSLRandomizerForClass(RandomizedContext.current().getTargetClass()); - + SSLRandomizer.getSSLRandomizerForClass(RandomizedContext.current().getTargetClass()); + if (Constants.MAC_OS_X) { // see SOLR-9039 // If a solution is found to remove this, please make sure to also update @@ -572,7 +571,7 @@ private static SSLTestConfig buildSSLConfig() { protected static JettyConfig buildJettyConfig(String context) { return JettyConfig.builder().setContext(context).withSSLConfig(sslConfig.buildServerSSLConfig()).build(); } - + protected static String buildUrl(final int port, final String context) { return (isSSLMode() ? "https" : "http") + "://127.0.0.1:" + port + context; } @@ -604,11 +603,11 @@ public static void setupNoCoreTest(Path solrHome, String xmlStr) throws Exceptio h = new TestHarness(SolrXmlConfig.fromSolrHome(solrHome, new Properties())); lrf = h.getRequestFactory("/select", 0, 20, CommonParams.VERSION, "2.2"); } - - /** + + /** * Sets system properties to allow generation of random configurations of - * solrconfig.xml and schema.xml. - * Sets properties used on + * solrconfig.xml and schema.xml. 
+ * Sets properties used on * {@link #newIndexWriterConfig(org.apache.lucene.analysis.Analyzer)} * and base schema.xml (Point Fields) */ @@ -654,12 +653,12 @@ public void tearDown() throws Exception { } /** - * Subclasses may call this method to access the "dataDir" that will be used by + * Subclasses may call this method to access the "dataDir" that will be used by * {@link #initCore} (either prior to or after the core is created). *
   * If the dataDir has not yet been initialized when this method is called, this method will do so.
-   * Calling {@link #deleteCore} will "reset" the value, such that subsequent calls will 
-   * re-initialize a new value.  All directories returned by any calls to this method will 
+   * Calling {@link #deleteCore} will "reset" the value, such that subsequent calls will
+   * re-initialize a new value.  All directories returned by any calls to this method will
   * automatically be cleaned up per {@link #createTempDir}
*
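For orientation, a hedged usage sketch of the initAndGetDataDir() contract documented above (illustrative only, not part of this patch; the subclass name is hypothetical):

    import java.io.File;
    import org.junit.BeforeClass;

    // Sketch: a SolrTestCaseJ4 subclass that grabs the managed dataDir before
    // the core exists. MyDataDirTest is a hypothetical name.
    public class MyDataDirTest extends SolrTestCaseJ4 {
      @BeforeClass
      public static void beforeClass() throws Exception {
        // The first call lazily creates the directory (via createTempDir);
        // deleteCore() resets it, so a later call would hand out a fresh one.
        File dataDir = initAndGetDataDir();
        assertTrue(dataDir.isDirectory());
        initCore("solrconfig.xml", "schema11.xml"); // the core reuses this dataDir
      }
    }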
@@ -682,17 +681,17 @@ protected static File initAndGetDataDir() {
     }
     return dataDir;
   }
-  /** 
-   * Counter for ensuring we don't ask {@link #createTempDir} to try and 
+  /**
+   * Counter for ensuring we don't ask {@link #createTempDir} to try and
   * re-create the same dir prefix over and over.
-   * (createTempDir has it's own counter for uniqueness, but it tries all numbers in a loop 
-   * until it finds one available.  No reason to force that O(N^2) behavior when we know we've 
+   * (createTempDir has its own counter for uniqueness, but it tries all numbers in a loop
+   * until it finds one available.  No reason to force that O(N^2) behavior when we know we've
   * already created N previous directories with the same prefix.)
*/ private static final AtomicInteger dataDirCount = new AtomicInteger(0); - + /** Call initCore in @BeforeClass to instantiate a solr core in your test class. * deleteCore will be called for you via SolrTestCaseJ4 @AfterClass */ public static void initCore(String config, String schema) throws Exception { @@ -716,7 +715,7 @@ public static void initCore(String config, String schema, String solrHome, Strin coreName=pCoreName; initCore(config,schema,solrHome); } - + static long numOpens; static long numCloses; public static void startTrackingSearchers() { @@ -728,7 +727,7 @@ public static void startTrackingSearchers() { numOpens = numCloses = 0; } } - + /** Causes an exception matching the regex pattern to not be logged. */ public static void ignoreException(String pattern) { if (SolrException.ignorePatterns == null) // usually initialized already but in case not... @@ -801,10 +800,10 @@ public static String getSolrConfigFile() { } /** - * The directory used as the dataDir for the TestHarness unless - * {@link #hdfsDataDir} is non null. + * The directory used as the dataDir for the TestHarness unless + * {@link #hdfsDataDir} is non null. *
-   * Will be set to null by {@link #deleteCore} and re-initialized as needed by {@link #createCore}. 
+   * Will be set to null by {@link #deleteCore} and re-initialized as needed by {@link #createCore}.
   * In the event of a test failure, the contents will be left on disk.
* @see #createTempDir(String) @@ -813,7 +812,7 @@ public static String getSolrConfigFile() { */ @Deprecated protected static volatile File initCoreDataDir; - + // hack due to File dataDir protected static String hdfsDataDir; @@ -855,10 +854,10 @@ public static void createCore() { assertNotNull(testSolrHome); solrConfig = TestHarness.createConfig(testSolrHome, coreName, getSolrConfigFile()); h = new TestHarness( coreName, hdfsDataDir == null ? initAndGetDataDir().getAbsolutePath() : hdfsDataDir, - solrConfig, - getSchemaFile()); + solrConfig, + getSchemaFile()); lrf = h.getRequestFactory - ("",0,20,CommonParams.VERSION,"2.2"); + ("",0,20,CommonParams.VERSION,"2.2"); } public static CoreContainer createCoreContainer(Path solrHome, String solrXML) { @@ -958,7 +957,7 @@ public static void deleteCore() { // clears the updatelog sysprop at the end of the test run System.clearProperty(UPDATELOG_SYSPROP); } - + solrConfig = null; h = null; lrf = null; @@ -997,11 +996,11 @@ private static void checkUpdateU(String message, String update, boolean shouldSu try { String m = (null == message) ? "" : message + " "; if (shouldSucceed) { - String res = h.validateUpdate(update); - if (res != null) fail(m + "update was not successful: " + res); + String res = h.validateUpdate(update); + if (res != null) fail(m + "update was not successful: " + res); } else { - String res = h.validateErrorUpdate(update); - if (res != null) fail(m + "update succeeded, but should have failed: " + res); + String res = h.validateErrorUpdate(update); + if (res != null) fail(m + "update succeeded, but should have failed: " + res); } } catch (SAXException e) { throw new RuntimeException("Invalid XML", e); @@ -1149,7 +1148,7 @@ public static String assertJQ(SolrQueryRequest req, double delta, String... test // restore the params if (params != null && params != req.getParams()) req.setParams(params); } - } + } /** Makes sure a query throws a SolrException with the listed response code */ @@ -1194,7 +1193,7 @@ public static void assertQEx(String failMessage, String exceptionMessage, SolrQu fail( failMessage ); } catch (SolrException e) { assertEquals( code.code, e.code() ); - assertTrue("Unexpected error message. Expecting \"" + exceptionMessage + + assertTrue("Unexpected error message. Expecting \"" + exceptionMessage + "\" but got \"" + e.getMessage() + "\"", e.getMessage()!= null && e.getMessage().contains(exceptionMessage)); } catch (Exception e2) { throw new RuntimeException("Exception during query", e2); @@ -1286,7 +1285,7 @@ public static String add(XmlDoc doc, String... args) { return r.getBuffer().toString(); } catch (IOException e) { throw new RuntimeException - ("this should never happen with a StringWriter", e); + ("this should never happen with a StringWriter", e); } } @@ -1387,11 +1386,11 @@ public static class XmlDoc { } /** - * Does a low level delete of all docs in the index. + * Does a low level delete of all docs in the index. * * The behavior of this method is slightly different then doing a normal *:* DBQ because it - * takes advantage of internal methods to ensure all index data is wiped, regardless of optimistic - * concurrency version constraints -- making it suitable for tests that create synthetic versions, + * takes advantage of internal methods to ensure all index data is wiped, regardless of optimistic + * concurrency version constraints -- making it suitable for tests that create synthetic versions, * and/or require a completely pristine index w/o any field metdata. 
* * @see #deleteByQueryAndGetVersion @@ -1404,7 +1403,7 @@ public void clearIndex() { } try { deleteByQueryAndGetVersion("*:*", params("_version_", Long.toString(-Long.MAX_VALUE), - DISTRIB_UPDATE_PARAM,DistribPhase.FROMLEADER.toString())); + DISTRIB_UPDATE_PARAM,DistribPhase.FROMLEADER.toString())); } catch (Exception e) { throw new RuntimeException(e); } @@ -1571,7 +1570,7 @@ public static String jsonAdd(SolrInputDocument... docs) { return out.toString(); } - /** Creates a JSON delete command from an id list */ + /** Creates a JSON delete command from an id list */ public static String jsonDelId(Object... ids) { CharArr out = new CharArr(); try { @@ -1661,7 +1660,7 @@ public static Long deleteByQueryAndGetVersion(String q, SolrParams params) throw ///////////////////////////////////////////////////////////////////////////////////// //////////////////////////// random document / index creation /////////////////////// ///////////////////////////////////////////////////////////////////////////////////// - + public abstract static class Vals { @SuppressWarnings({"rawtypes"}) public abstract Comparable get(); @@ -1869,7 +1868,7 @@ public FldType(String fname, Vals vals) { public FldType(String fname, IVals numValues, Vals vals) { this.fname = fname; this.numValues = numValues; - this.vals = vals; + this.vals = vals; } @SuppressWarnings({"rawtypes"}) @@ -1895,7 +1894,7 @@ public Fld createField() { Fld fld = new Fld(); fld.ftype = this; fld.vals = vals; - return fld; + return fld; } } @@ -1933,7 +1932,7 @@ public Map indexDocs(List descriptor, Map createSort(IndexSchema schema, List field if (comparators.size() == 0) { // default sort is by score desc - comparators.add(createComparator("score", false, false, false, false)); + comparators.add(createComparator("score", false, false, false, false)); } return createComparator(comparators); @@ -2024,7 +2023,7 @@ public static Comparator createComparator(final String field, final boolean final int mul = asc ? 1 : -1; if (field.equals("_docid_")) { - return (o1, o2) -> (o1.order - o2.order) * mul; + return (o1, o2) -> (o1.order - o2.order) * mul; } if (field.equals("score")) { @@ -2155,7 +2154,7 @@ public static File getFile(String name) { try { return new File(url.toURI()); } catch (Exception e) { - throw new RuntimeException("Resource was found on classpath, but cannot be resolved to a " + + throw new RuntimeException("Resource was found on classpath, but cannot be resolved to a " + "normal file (maybe it is part of a JAR file): " + name); } } @@ -2165,7 +2164,7 @@ public static File getFile(String name) { } throw new RuntimeException("Cannot find resource in classpath or in file-system (relative to CWD): " + name); } - + public static String TEST_HOME() { return getFile("solr/collection1").getParent(); } @@ -2199,7 +2198,7 @@ public static void assertXmlFile(final File file, String... xpath) throw new RuntimeException("XPath is invalid", e2); } } - + /** * Fails if the number of documents in the given SolrDocumentList differs * from the given number of expected values, or if any of the values in the @@ -2267,7 +2266,7 @@ public static void copyXmlToHome(File dstRoot, String fromFile) throws IOExcepti } File xmlF = new File(SolrTestCaseJ4.TEST_HOME(), fromFile); FileUtils.copyFile(xmlF, new File(dstRoot, "solr.xml")); - + } // Creates a consistent configuration, _including_ solr.xml at dstRoot. Creates collection1/conf and copies // the stock files in there. 
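As background for the clearIndex() javadoc in this hunk, a hedged sketch of the intended usage (illustrative only, not part of this patch; the test class and field are hypothetical):

    // Sketch: why a test that creates synthetic versions calls clearIndex()
    // instead of a plain "*:*" delete-by-query. MySyntheticVersionTest is
    // a hypothetical name.
    public class MySyntheticVersionTest extends SolrTestCaseJ4 {
      @Test
      public void testCleanupAfterSyntheticVersions() throws Exception {
        assertU(adoc("id", "1", "foo_i", "42"));
        assertU(commit());
        // Docs carrying artificial _version_ values can make a normal "*:*" DBQ
        // fail optimistic-concurrency checks; the helper bypasses them:
        clearIndex();
        assertU(commit());
        assertQ(req("q", "*:*"), "//result[@numFound='0']");
      }
    }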
@@ -2359,7 +2358,7 @@ public boolean compareSolrDocumentList(Object expected, Object actual) { if (list1.getMaxScore() == null) { if (list2.getMaxScore() != null) { return false; - } + } } else if (list2.getMaxScore() == null) { return false; } else { @@ -2475,7 +2474,7 @@ public boolean assertSolrInputFieldEquals(Object expected, Object actual) { return true; } - /** + /** * Returns likely most (1/10) of the time, otherwise unlikely */ public static Object skewed(Object likely, Object unlikely) { @@ -2542,27 +2541,27 @@ public static CloudHttp2SolrClient getCloudHttp2SolrClient(MiniSolrCloudCluster * some internal settings. */ public static class CloudSolrClientBuilder extends CloudSolrClient.Builder { - + public CloudSolrClientBuilder(List zkHosts, Optional zkChroot) { super(zkHosts, zkChroot); randomizeCloudSolrClient(); } - + public CloudSolrClientBuilder(ClusterStateProvider stateProvider) { this.stateProvider = stateProvider; randomizeCloudSolrClient(); } - + public CloudSolrClientBuilder(MiniSolrCloudCluster cluster) { if (random().nextBoolean()) { this.zkHosts.add(cluster.getZkServer().getZkAddress()); } else { populateSolrUrls(cluster); } - + randomizeCloudSolrClient(); } - + private void populateSolrUrls(MiniSolrCloudCluster cluster) { if (random().nextBoolean()) { final List solrNodes = cluster.getJettySolrRunners(); @@ -2573,7 +2572,7 @@ private void populateSolrUrls(MiniSolrCloudCluster cluster) { this.solrUrls.add(cluster.getRandomJetty(random()).getBaseUrl().toString()); } } - + private void randomizeCloudSolrClient() { this.directUpdatesToLeadersOnly = random().nextBoolean(); this.shardLeadersOnly = random().nextBoolean(); @@ -2585,7 +2584,7 @@ private void randomizeCloudSolrClient() { * This method may randomize unspecified aspects of the resulting SolrClient. * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.CloudSolrClient.Builder} class directly - */ + */ public static CloudSolrClient getCloudSolrClient(String zkHost) { return new CloudSolrClientBuilder(Collections.singletonList(zkHost), Optional.empty()).build(); } @@ -2601,20 +2600,20 @@ public static CloudSolrClient getCloudSolrClient(MiniSolrCloudCluster cluster) { /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.CloudSolrClient.Builder} class directly - */ + */ public static CloudSolrClient getCloudSolrClient(String zkHost, HttpClient httpClient) { return new CloudSolrClientBuilder(Collections.singletonList(zkHost), Optional.empty()) .withHttpClient(httpClient) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.CloudSolrClient.Builder} class directly - */ + */ public static CloudSolrClient getCloudSolrClient(String zkHost, boolean shardLeadersOnly) { if (shardLeadersOnly) { return new CloudSolrClientBuilder(Collections.singletonList(zkHost), Optional.empty()) @@ -2632,9 +2631,9 @@ public static CloudSolrClientBuilder newCloudSolrClient(String zkHost) { /** * This method may randomize unspecified aspects of the resulting SolrClient. 
- * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.CloudSolrClient.Builder} class directly - */ + */ public static CloudSolrClient getCloudSolrClient(String zkHost, boolean shardLeadersOnly, int socketTimeoutMillis) { if (shardLeadersOnly) { return new CloudSolrClientBuilder(Collections.singletonList(zkHost), Optional.empty()) @@ -2647,12 +2646,12 @@ public static CloudSolrClient getCloudSolrClient(String zkHost, boolean shardLea .withSocketTimeout(socketTimeoutMillis) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.CloudSolrClient.Builder} class directly - */ + */ public static CloudSolrClient getCloudSolrClient(String zkHost, boolean shardLeadersOnly, int connectionTimeoutMillis, int socketTimeoutMillis) { if (shardLeadersOnly) { return new CloudSolrClientBuilder(Collections.singletonList(zkHost), Optional.empty()) @@ -2667,14 +2666,14 @@ public static CloudSolrClient getCloudSolrClient(String zkHost, boolean shardLea .withSocketTimeout(socketTimeoutMillis) .build(); } - - - + + + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.CloudSolrClient.Builder} class directly - */ + */ public static CloudSolrClient getCloudSolrClient(String zkHost, boolean shardLeadersOnly, HttpClient httpClient) { if (shardLeadersOnly) { return new CloudSolrClientBuilder(Collections.singletonList(zkHost), Optional.empty()) @@ -2687,12 +2686,12 @@ public static CloudSolrClient getCloudSolrClient(String zkHost, boolean shardLea .sendUpdatesToAllReplicasInShard() .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.CloudSolrClient.Builder} class directly - */ + */ public static CloudSolrClient getCloudSolrClient(String zkHost, boolean shardLeadersOnly, HttpClient httpClient, int connectionTimeoutMillis, int socketTimeoutMillis) { if (shardLeadersOnly) { @@ -2710,24 +2709,24 @@ public static CloudSolrClient getCloudSolrClient(String zkHost, boolean shardLea .withSocketTimeout(socketTimeoutMillis) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient.Builder} class directly - */ + */ public static ConcurrentUpdateSolrClient getConcurrentUpdateSolrClient(String baseSolrUrl, int queueSize, int threadCount) { return new ConcurrentUpdateSolrClient.Builder(baseSolrUrl) .withQueueSize(queueSize) .withThreadCount(threadCount) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. 
- * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient.Builder} class directly - */ + */ public static ConcurrentUpdateSolrClient getConcurrentUpdateSolrClient(String baseSolrUrl, int queueSize, int threadCount, int connectionTimeoutMillis) { return new ConcurrentUpdateSolrClient.Builder(baseSolrUrl) .withQueueSize(queueSize) @@ -2735,12 +2734,12 @@ public static ConcurrentUpdateSolrClient getConcurrentUpdateSolrClient(String ba .withConnectionTimeout(connectionTimeoutMillis) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient.Builder} class directly - */ + */ public static ConcurrentUpdateSolrClient getConcurrentUpdateSolrClient(String baseSolrUrl, HttpClient httpClient, int queueSize, int threadCount) { return new ConcurrentUpdateSolrClient.Builder(baseSolrUrl) .withHttpClient(httpClient) @@ -2748,24 +2747,24 @@ public static ConcurrentUpdateSolrClient getConcurrentUpdateSolrClient(String ba .withThreadCount(threadCount) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.LBHttpSolrClient.Builder} class directly - */ + */ public static LBHttpSolrClient getLBHttpSolrClient(HttpClient client, String... solrUrls) { return new LBHttpSolrClient.Builder() .withHttpClient(client) .withBaseSolrUrls(solrUrls) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.LBHttpSolrClient.Builder} class directly - */ + */ public static LBHttpSolrClient getLBHttpSolrClient(HttpClient client, int connectionTimeoutMillis, int socketTimeoutMillis, String... solrUrls) { return new LBHttpSolrClient.Builder() @@ -2775,23 +2774,23 @@ public static LBHttpSolrClient getLBHttpSolrClient(HttpClient client, int connec .withSocketTimeout(socketTimeoutMillis) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.LBHttpSolrClient.Builder} class directly - */ + */ public static LBHttpSolrClient getLBHttpSolrClient(String... solrUrls) throws MalformedURLException { return new LBHttpSolrClient.Builder() .withBaseSolrUrls(solrUrls) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. 
- * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.HttpSolrClient.Builder} class directly - */ + */ public static HttpSolrClient getHttpSolrClient(String url, HttpClient httpClient, ResponseParser responseParser, boolean compression) { return new Builder(url) .withHttpClient(httpClient) @@ -2799,35 +2798,35 @@ public static HttpSolrClient getHttpSolrClient(String url, HttpClient httpClient .allowCompression(compression) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.HttpSolrClient.Builder} class directly - */ + */ public static HttpSolrClient getHttpSolrClient(String url, HttpClient httpClient, ResponseParser responseParser) { return new Builder(url) .withHttpClient(httpClient) .withResponseParser(responseParser) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.HttpSolrClient.Builder} class directly - */ + */ public static HttpSolrClient getHttpSolrClient(String url, HttpClient httpClient) { return new Builder(url) .withHttpClient(httpClient) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.HttpSolrClient.Builder} class directly - */ + */ public static HttpSolrClient getHttpSolrClient(String url, HttpClient httpClient, int connectionTimeoutMillis) { return new Builder(url) .withHttpClient(httpClient) @@ -2837,30 +2836,30 @@ public static HttpSolrClient getHttpSolrClient(String url, HttpClient httpClient /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.HttpSolrClient.Builder} class directly - */ + */ public static HttpSolrClient getHttpSolrClient(String url) { return new Builder(url) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. - * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.HttpSolrClient.Builder} class directly - */ + */ public static HttpSolrClient getHttpSolrClient(String url, int connectionTimeoutMillis) { return new Builder(url) .withConnectionTimeout(connectionTimeoutMillis) .build(); } - + /** * This method may randomize unspecified aspects of the resulting SolrClient. 
- * Tests that do not wish to have any randomized behavior should use the + * Tests that do not wish to have any randomized behavior should use the * {@link org.apache.solr.client.solrj.impl.HttpSolrClient.Builder} class directly - */ + */ public static HttpSolrClient getHttpSolrClient(String url, int connectionTimeoutMillis, int socketTimeoutMillis) { return new Builder(url) .withConnectionTimeout(connectionTimeoutMillis) @@ -2868,25 +2867,25 @@ public static HttpSolrClient getHttpSolrClient(String url, int connectionTimeout .build(); } - /** - * Returns a randomly generated Date in the appropriate Solr external (input) format + /** + * Returns a randomly generated Date in the appropriate Solr external (input) format * @see #randomSkewedDate */ public static String randomDate() { return Instant.ofEpochMilli(random().nextLong()).toString(); } - /** - * Returns a Date such that all results from this method always have the same values for - * year+month+day+hour+minute but the seconds are randomized. This can be helpful for - * indexing documents with random date values that are biased for a narrow window + /** + * Returns a Date such that all results from this method always have the same values for + * year+month+day+hour+minute but the seconds are randomized. This can be helpful for + * indexing documents with random date values that are biased for a narrow window * (one day) to test collisions/overlaps * * @see #randomDate */ public static String randomSkewedDate() { return String.format(Locale.ROOT, "2010-10-31T10:31:%02d.000Z", - TestUtil.nextInt(random(), 0, 59)); + TestUtil.nextInt(random(), 0, 59)); } /** @@ -2900,7 +2899,7 @@ public static String randomXmlUsableUnicodeString() { } return result; } - + protected static void waitForWarming(SolrCore core) throws InterruptedException { RefCounted registeredSearcher = core.getRegisteredSearcher(); RefCounted newestSearcher = core.getNewestSearcher(false); @@ -2931,7 +2930,7 @@ protected String getSaferTestName() { } return testName; } - + @BeforeClass public static void assertNonBlockingRandomGeneratorAvailable() throws InterruptedException { final String EGD = "java.security.egd"; @@ -2939,38 +2938,38 @@ public static void assertNonBlockingRandomGeneratorAvailable() throws Interrupte final String ALLOWED = "test.solr.allowed.securerandom"; final String allowedAlg = System.getProperty(ALLOWED); final String actualEGD = System.getProperty(EGD); - + log.info("SecureRandom sanity checks: {}={} & {}={}", ALLOWED, allowedAlg, EGD, actualEGD); if (null != allowedAlg) { // the user has explicitly requested to bypass our assertions and allow a particular alg // the only thing we should do is assert that the algorithm they have whitelisted is actaully used - - + + final String actualAlg = (new SecureRandom()).getAlgorithm(); assertEquals("Algorithm specified using "+ALLOWED+" system property " + - "does not match actual algorithm", allowedAlg, actualAlg); + "does not match actual algorithm", allowedAlg, actualAlg); return; } - // else: no user override, do the checks we want including - + // else: no user override, do the checks we want including + if (null == actualEGD) { System.setProperty(EGD, URANDOM); log.warn("System property {} was not set by test runner, forcibly set to expected: {}", EGD, URANDOM); } else if (! URANDOM.equals(actualEGD) ) { log.warn("System property {}={} .. 
test runner should use expected: {}", EGD, actualEGD, URANDOM); } - + final String algorithm = (new SecureRandom()).getAlgorithm(); - + assertFalse("SecureRandom algorithm '" + algorithm + "' is in use by your JVM, " + - "which is a potentially blocking algorithm on some environments. " + - "Please report the details of this failure (and your JVM vendor/version) to solr-user@lucene.apache.org. " + - "You can try to run your tests with -D"+EGD+"="+URANDOM+" or bypass this check using " + - "-Dtest.solr.allowed.securerandom="+ algorithm +" as a JVM option when running tests.", - // be permissive in our checks and blacklist only algorithms - // that are known to be blocking under some circumstances - algorithm.equals("NativePRNG") || algorithm.equals("NativePRNGBlocking")); + "which is a potentially blocking algorithm on some environments. " + + "Please report the details of this failure (and your JVM vendor/version) to solr-user@lucene.apache.org. " + + "You can try to run your tests with -D"+EGD+"="+URANDOM+" or bypass this check using " + + "-Dtest.solr.allowed.securerandom="+ algorithm +" as a JVM option when running tests.", + // be permissive in our checks and blacklist only algorithms + // that are known to be blocking under some circumstances + algorithm.equals("NativePRNG") || algorithm.equals("NativePRNGBlocking")); } protected static void systemSetPropertySolrTestsMergePolicyFactory(String value) { @@ -2980,7 +2979,7 @@ protected static void systemSetPropertySolrTestsMergePolicyFactory(String value) protected static void systemClearPropertySolrTestsMergePolicyFactory() { System.clearProperty(SYSTEM_PROPERTY_SOLR_TESTS_MERGEPOLICYFACTORY); } - + @Deprecated // For backwards compatibility only. Please do not use in new tests. protected static void systemSetPropertySolrDisableUrlAllowList(String value) { System.setProperty(AllowListUrlChecker.DISABLE_URL_ALLOW_LIST, value); @@ -2995,9 +2994,9 @@ protected static void systemClearPropertySolrDisableUrlAllowList() { protected static T pickRandom(T... options) { return options[random().nextInt(options.length)]; } - + /** - * The name of a sysprop that can be set by users when running tests to force the types of numerics + * The name of a sysprop that can be set by users when running tests to force the types of numerics * used for test classes that do not have the {@link SuppressPointFields} annotation: *
    *
  • If unset, then a random variable will be used to decide the type of numerics.
  • @@ -3007,23 +3006,23 @@ protected static T pickRandom(T... options) { * @see #NUMERIC_POINTS_SYSPROP */ public static final String USE_NUMERIC_POINTS_SYSPROP = "solr.tests.use.numeric.points"; - + /** - * The name of a sysprop that will either true or false indicating if - * numeric points fields are currently in use, depending on the user specified value of - * {@link #USE_NUMERIC_POINTS_SYSPROP} and/or the {@link SuppressPointFields} annotation and/or + * The name of a sysprop that will either true or false indicating if + * numeric points fields are currently in use, depending on the user specified value of + * {@link #USE_NUMERIC_POINTS_SYSPROP} and/or the {@link SuppressPointFields} annotation and/or * randomization. Tests can use Boolean.getBoolean(NUMERIC_POINTS_SYSPROP). * * @see #randomizeNumericTypesProperties */ public static final String NUMERIC_POINTS_SYSPROP = "solr.tests.numeric.points"; - + /** - * The name of a sysprop that will be either true or false indicating if - * docValues should be used on a numeric field. This property string should be used in the - * docValues attribute of (most) numeric fieldTypes in schemas, and by default will be - * randomized by this class in a @BeforeClass. Subclasses that need to force specific - * behavior can use System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true") + * The name of a sysprop that will be either true or false indicating if + * docValues should be used on a numeric field. This property string should be used in the + * docValues attribute of (most) numeric fieldTypes in schemas, and by default will be + * randomized by this class in a @BeforeClass. Subclasses that need to force specific + * behavior can use System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true") * to override the default randomization. * * @see #randomizeNumericTypesProperties @@ -3033,7 +3032,7 @@ protected static T pickRandom(T... options) { public static final String UPDATELOG_SYSPROP = "solr.tests.ulog"; /** - * Sets various sys props related to user specified or randomized choices regarding the types + * Sets various sys props related to user specified or randomized choices regarding the types * of numerics that should be used in tests. * * @see #NUMERIC_DOCVALUES_SYSPROP @@ -3046,17 +3045,17 @@ private static void randomizeNumericTypesProperties() { final boolean useDV = random().nextBoolean(); System.setProperty(NUMERIC_DOCVALUES_SYSPROP, ""+useDV); - + // consume a consistent amount of random data even if sysprop/annotation is set final boolean randUsePoints = 0 != random().nextInt(5); // 80% likelihood final String usePointsStr = System.getProperty(USE_NUMERIC_POINTS_SYSPROP); final boolean usePoints = (null == usePointsStr) ? randUsePoints : Boolean.parseBoolean(usePointsStr); - + if (RandomizedContext.current().getTargetClass().isAnnotationPresent(SolrTestCaseJ4.SuppressPointFields.class) || (! 
usePoints)) { log.info("Using TrieFields (NUMERIC_POINTS_SYSPROP=false) w/NUMERIC_DOCVALUES_SYSPROP={}", useDV); - + org.apache.solr.schema.PointField.TEST_HACK_IGNORE_USELESS_TRIEFIELD_ARGS = false; private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Integer.class, "solr.TrieIntField"); private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Float.class, "solr.TrieFloatField"); @@ -3064,7 +3063,7 @@ private static void randomizeNumericTypesProperties() { private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Double.class, "solr.TrieDoubleField"); private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Date.class, "solr.TrieDateField"); private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Enum.class, "solr.EnumField"); - + System.setProperty(NUMERIC_POINTS_SYSPROP, "false"); } else { log.info("Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP={}", useDV); @@ -3076,24 +3075,24 @@ private static void randomizeNumericTypesProperties() { private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Double.class, "solr.DoublePointField"); private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Date.class, "solr.DatePointField"); private_RANDOMIZED_NUMERIC_FIELDTYPES.put(Enum.class, "solr.EnumFieldType"); - + System.setProperty(NUMERIC_POINTS_SYSPROP, "true"); } for (Map.Entry,String> entry : RANDOMIZED_NUMERIC_FIELDTYPES.entrySet()) { System.setProperty("solr.tests." + entry.getKey().getSimpleName() + "FieldType", - entry.getValue()); + entry.getValue()); } } public static DistributedUpdateProcessor createDistributedUpdateProcessor(SolrQueryRequest req, SolrQueryResponse rsp, - UpdateRequestProcessor next) { + UpdateRequestProcessor next) { if(h.getCoreContainer().isZooKeeperAware()) { return new DistributedZkUpdateProcessor(req, rsp, next); } return new DistributedUpdateProcessor(req, rsp, next); } - + /** * Cleans up the randomized sysproperties and variables set by {@link #randomizeNumericTypesProperties} * @@ -3131,7 +3130,7 @@ private static boolean isChildDoc(Object o) { } private static final Map,String> private_RANDOMIZED_NUMERIC_FIELDTYPES = new HashMap<>(); - + /** * A Map of "primitive" java "numeric" types and the string name of the class used in the * corresponding schema fieldType declaration. 
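To make the numeric-type randomization above concrete, a hedged sketch (illustrative only, not from this patch) of how a test body can observe which types were chosen for the current run:

    // Sketch, inside a SolrTestCaseJ4 subclass: the @BeforeClass machinery has
    // already run randomizeNumericTypesProperties() by the time a test executes.
    @Test
    public void testWhichNumericsAreInUse() {
      boolean usingPoints = Boolean.getBoolean(NUMERIC_POINTS_SYSPROP); // "solr.tests.numeric.points"
      String intFieldClass = RANDOMIZED_NUMERIC_FIELDTYPES.get(Integer.class);
      // Points run -> "solr.IntPointField"; Trie ("legacy") run -> "solr.TrieIntField"
      assertEquals(usingPoints ? "solr.IntPointField" : "solr.TrieIntField", intFieldClass);
    }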
@@ -3142,6 +3141,6 @@ private static boolean isChildDoc(Object o) { * @see #randomizeNumericTypesProperties */ protected static final Map,String> RANDOMIZED_NUMERIC_FIELDTYPES - = Collections.unmodifiableMap(private_RANDOMIZED_NUMERIC_FIELDTYPES); + = Collections.unmodifiableMap(private_RANDOMIZED_NUMERIC_FIELDTYPES); } diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractIncrementalBackupTest.java b/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractIncrementalBackupTest.java index a854f911e2e..d7b1c34f19b 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractIncrementalBackupTest.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/api/collections/AbstractIncrementalBackupTest.java @@ -48,6 +48,7 @@ import org.apache.solr.core.backup.ShardBackupId; import org.apache.solr.core.backup.ShardBackupMetadata; import org.apache.solr.core.backup.repository.BackupRepository; +import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -95,11 +96,16 @@ public abstract class AbstractIncrementalBackupTest extends SolrCloudTestCase { protected String testSuffix = "test1"; @BeforeClass - public static void createCluster() throws Exception { + public static void beforeAbstractIncrementalBackupTest() throws Exception { docsSeed = random().nextLong(); System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory"); } + @AfterClass + public static void afterAbstractIncrementalBackupTest() throws Exception { + interruptThreadsOnTearDown(); // not closed properly + } + @Before public void setUpTrackingRepo() { TrackingBackupRepository.clear(); @@ -316,8 +322,8 @@ public void testBackupIncremental() throws Exception { .setIncremental(true) .setMaxNumberBackupPoints(3) .setRepositoryName(BACKUP_REPO_NAME); - if (random().nextBoolean()) { - RequestStatusState state = backup.processAndWait(cluster.getSolrClient(), 1000); + if (rarely()) { + RequestStatusState state = backup.processAndWait(cluster.getSolrClient(), 100); if (state != RequestStatusState.FAILED) { fail("This backup should be failed"); } diff --git a/solr/test-framework/src/java/org/apache/solr/util/NotSecurePseudoRandom.java b/solr/test-framework/src/java/org/apache/solr/util/NotSecurePseudoRandom.java new file mode 100644 index 00000000000..c4786050405 --- /dev/null +++ b/solr/test-framework/src/java/org/apache/solr/util/NotSecurePseudoRandom.java @@ -0,0 +1,2 @@ +package org.apache.solr.util;public class NotSecurePseudoRandom { +} diff --git a/solr/test-framework/src/java/org/apache/solr/util/NotSecurePseudoRandomSpi.java b/solr/test-framework/src/java/org/apache/solr/util/NotSecurePseudoRandomSpi.java new file mode 100644 index 00000000000..e69de29bb2d diff --git a/solr/test-framework/src/java/org/apache/solr/util/SolrTestNonSecureRandomProvider.java b/solr/test-framework/src/java/org/apache/solr/util/SolrTestNonSecureRandomProvider.java new file mode 100644 index 00000000000..f9c3210133c --- /dev/null +++ b/solr/test-framework/src/java/org/apache/solr/util/SolrTestNonSecureRandomProvider.java @@ -0,0 +1,25 @@ +//package org.apache.solr.util; +// +//import java.security.Provider; +// +//public class SolrTestNonSecureRandomProvider extends Provider { +// +// public SolrTestNonSecureRandomProvider() { +// super("LinuxPRNG", +// 1.0, +// "A Test only, non secure provider"); +// put("SecureRandom.SHA1PRNG", NotSecurePseudoRandomSpi.class.getName()); +// put("SecureRandom.NativePRNG", 
NotSecurePseudoRandomSpi.class.getName()); +// put("SecureRandom.DRBG", NotSecurePseudoRandomSpi.class.getName()); +// +// +// put("SecureRandom.SHA1PRNG ThreadSafe", "true"); +// put("SecureRandom.NativePRNG ThreadSafe", "true"); +// put("SecureRandom.DRBG ThreadSafe", "true"); +// +// +// put("SecureRandom.SHA1PRNG ImplementedIn", "Software"); +// put("SecureRandom.NativePRNG ImplementedIn", "Software"); +// put("SecureRandom.DRBG ImplementedIn", "Software"); +// } +// } \ No newline at end of file diff --git a/versions.props b/versions.props index 2c71a28b947..37c525956d2 100644 --- a/versions.props +++ b/versions.props @@ -62,6 +62,7 @@ io.opencensus:opencensus-contrib-http-util=0.21.0 io.opentracing:*=0.33.0 io.prometheus:*=0.2.0 io.sgr:s2-geometry-library-java=1.0.0 +it.unimi.dsi:fastutil-core=8.5.6 javax.servlet:javax.servlet-api=3.1.0 junit:junit=4.13.1 net.arnx:jsonic=1.2.7
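Since NotSecurePseudoRandomSpi.java is added empty in this patch and the provider class is still commented out, the following is only a hedged guess at the eventual shape of the SPI: a deterministic, never-blocking SecureRandomSpi for tests (assumed implementation, not code from this patch):

    package org.apache.solr.util;

    import java.security.SecureRandomSpi;
    import java.util.Random;

    // Sketch only: NOT secure. Backed by java.util.Random so it can never
    // block on entropy; the fixed seed keeps test runs deterministic.
    public class NotSecurePseudoRandomSpi extends SecureRandomSpi {
      private final Random rand = new Random(42);

      @Override
      protected void engineSetSeed(byte[] seed) {
        // no-op: honoring caller seeds would reintroduce nondeterminism in tests
      }

      @Override
      protected void engineNextBytes(byte[] bytes) {
        rand.nextBytes(bytes);
      }

      @Override
      protected byte[] engineGenerateSeed(int numBytes) {
        byte[] out = new byte[numBytes];
        rand.nextBytes(out);
        return out;
      }
    }

If the commented-out SolrTestNonSecureRandomProvider is ever enabled, it would presumably be registered early, e.g. via java.security.Security.insertProviderAt(new SolrTestNonSecureRandomProvider(), 1), so that SecureRandom instances for "SHA1PRNG", "NativePRNG", and "DRBG" resolve to this non-blocking SPI.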