diff --git a/stroom-app/src/main/resources/ui/noauth/swagger/stroom.json b/stroom-app/src/main/resources/ui/noauth/swagger/stroom.json index cb627d31bc..0234c066b3 100644 --- a/stroom-app/src/main/resources/ui/noauth/swagger/stroom.json +++ b/stroom-app/src/main/resources/ui/noauth/swagger/stroom.json @@ -4392,6 +4392,85 @@ "tags" : [ "Feed Status" ] } }, + "/fileTransfer/v1/fetchSnapshot" : { + "post" : { + "operationId" : "fetchSnapshot", + "requestBody" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/SnapshotRequest" + } + } + } + }, + "responses" : { + "default" : { + "content" : { + "application/octet-stream" : { } + }, + "description" : "default response" + } + }, + "summary" : "Fetch Plan B snapshot", + "tags" : [ "File Transfer" ] + } + }, + "/fileTransfer/v1/sendPart" : { + "post" : { + "operationId" : "sendPart", + "parameters" : [ { + "in" : "header", + "name" : "createTime", + "schema" : { + "type" : "integer", + "format" : "int64" + } + }, { + "in" : "header", + "name" : "metaId", + "schema" : { + "type" : "integer", + "format" : "int64" + } + }, { + "in" : "header", + "name" : "fileHash", + "schema" : { + "type" : "string" + } + }, { + "in" : "header", + "name" : "fileName", + "schema" : { + "type" : "string" + } + } ], + "requestBody" : { + "content" : { + "application/octet-stream" : { + "schema" : { + "type" : "object" + } + } + } + }, + "responses" : { + "default" : { + "content" : { + "application/json" : { + "schema" : { + "type" : "boolean" + } + } + }, + "description" : "default response" + } + }, + "summary" : "Send Plan B part", + "tags" : [ "File Transfer" ] + } + }, "/fsVolume/v1" : { "post" : { "operationId" : "createFsVolume", @@ -8993,85 +9072,6 @@ "tags" : [ "Session Info" ] } }, - "/snapshot/v1/fetchSnapshot" : { - "post" : { - "operationId" : "fetchSnapshot", - "requestBody" : { - "content" : { - "application/json" : { - "schema" : { - "$ref" : "#/components/schemas/SnapshotRequest" - } - } - } - }, - "responses" : { - "default" : { - "content" : { - "application/octet-stream" : { } - }, - "description" : "default response" - } - }, - "summary" : "Fetch Plan B snapshot", - "tags" : [ "File Transfer" ] - } - }, - "/snapshot/v1/sendPart" : { - "post" : { - "operationId" : "sendPart", - "parameters" : [ { - "in" : "header", - "name" : "createTime", - "schema" : { - "type" : "integer", - "format" : "int64" - } - }, { - "in" : "header", - "name" : "metaId", - "schema" : { - "type" : "integer", - "format" : "int64" - } - }, { - "in" : "header", - "name" : "fileHash", - "schema" : { - "type" : "string" - } - }, { - "in" : "header", - "name" : "fileName", - "schema" : { - "type" : "string" - } - } ], - "requestBody" : { - "content" : { - "application/octet-stream" : { - "schema" : { - "type" : "object" - } - } - } - }, - "responses" : { - "default" : { - "content" : { - "application/json" : { - "schema" : { - "type" : "boolean" - } - } - }, - "description" : "default response" - } - }, - "summary" : "Send Plan B part", - "tags" : [ "File Transfer" ] - } - }, "/solrIndex/v1/fetchSolrTypes" : { "post" : { "operationId" : "fetchSolrTypes", diff --git a/stroom-app/src/main/resources/ui/noauth/swagger/stroom.yaml b/stroom-app/src/main/resources/ui/noauth/swagger/stroom.yaml index 3637890d76..61b5b08064 100644 --- a/stroom-app/src/main/resources/ui/noauth/swagger/stroom.yaml +++ b/stroom-app/src/main/resources/ui/noauth/swagger/stroom.yaml @@ -3005,6 +3005,59 @@ paths: summary: Submit a request to get the status of a feed tags: 
- Feed Status + /fileTransfer/v1/fetchSnapshot: + post: + operationId: fetchSnapshot + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SnapshotRequest" + responses: + default: + content: + application/octet-stream: {} + description: default response + summary: Fetch Plan B snapshot + tags: + - File Transfer + /fileTransfer/v1/sendPart: + post: + operationId: sendPart + parameters: + - in: header + name: createTime + schema: + type: integer + format: int64 + - in: header + name: metaId + schema: + type: integer + format: int64 + - in: header + name: fileHash + schema: + type: string + - in: header + name: fileName + schema: + type: string + requestBody: + content: + application/octet-stream: + schema: + type: object + responses: + default: + content: + application/json: + schema: + type: boolean + description: default response + summary: Send Plan B part + tags: + - File Transfer /fsVolume/v1: post: operationId: createFsVolume @@ -6179,59 +6232,6 @@ paths: summary: Get information for the current session tags: - Session Info - /snapshot/v1/fetchSnapshot: - post: - operationId: fetchSnapshot - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/SnapshotRequest" - responses: - default: - content: - application/octet-stream: {} - description: default response - summary: Fetch Plan B snapshot - tags: - - File Transfer - /snapshot/v1/sendPart: - post: - operationId: sendPart - parameters: - - in: header - name: createTime - schema: - type: integer - format: int64 - - in: header - name: metaId - schema: - type: integer - format: int64 - - in: header - name: fileHash - schema: - type: string - - in: header - name: fileName - schema: - type: string - requestBody: - content: - application/octet-stream: - schema: - type: object - responses: - default: - content: - application/json: - schema: - type: boolean - description: default response - summary: Send Plan B part - tags: - - File Transfer /solrIndex/v1/fetchSolrTypes: post: operationId: fetchSolrTypes diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/PlanBConfig.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/PlanBConfig.java index c6ceee81ac..86c5112719 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/PlanBConfig.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/PlanBConfig.java @@ -10,40 +10,47 @@ import com.fasterxml.jackson.annotation.JsonPropertyDescription; import com.fasterxml.jackson.annotation.JsonPropertyOrder; -import java.util.ArrayList; +import java.util.Collections; import java.util.List; +import java.util.Objects; @JsonPropertyOrder(alphabetic = true) public class PlanBConfig extends AbstractConfig implements IsStroomConfig { private final CacheConfig stateDocCache; - private final CacheConfig readerCache; private final List nodeList; private final String path; + private final StroomDuration minTimeToKeepSnapshots; + private final StroomDuration minTimeToKeepEnvOpen; public PlanBConfig() { - stateDocCache = CacheConfig.builder() - .maximumSize(100L) - .expireAfterWrite(StroomDuration.ofMinutes(10)) - .build(); - readerCache = CacheConfig.builder() - .maximumSize(10L) - .expireAfterWrite(StroomDuration.ofMinutes(10)) - .build(); - nodeList = new ArrayList<>(); - path = "${stroom.home}/planb"; + this("${stroom.home}/planb"); + } + + public PlanBConfig(final String path) { + this(CacheConfig + .builder() + .maximumSize(100L) + 
.expireAfterWrite(StroomDuration.ofMinutes(10)) + .build(), + Collections.emptyList(), + path, + StroomDuration.ofMinutes(10), + StroomDuration.ofMinutes(1)); } @SuppressWarnings("unused") @JsonCreator public PlanBConfig(@JsonProperty("stateDocCache") final CacheConfig stateDocCache, - @JsonProperty("readerCache") final CacheConfig readerCache, @JsonProperty("nodeList") final List nodeList, - @JsonProperty("path") final String path) { + @JsonProperty("path") final String path, + @JsonProperty("minTimeToKeepSnapshots") final StroomDuration minTimeToKeepSnapshots, + @JsonProperty("minTimeToKeepEnvOpen") final StroomDuration minTimeToKeepEnvOpen) { this.stateDocCache = stateDocCache; - this.readerCache = readerCache; this.nodeList = nodeList; this.path = path; + this.minTimeToKeepSnapshots = minTimeToKeepSnapshots; + this.minTimeToKeepEnvOpen = minTimeToKeepEnvOpen; } @JsonProperty @@ -52,12 +59,6 @@ public CacheConfig getStateDocCache() { return stateDocCache; } - @JsonProperty - @JsonPropertyDescription("Cache for Plan B shard readers.") - public CacheConfig getReaderCache() { - return readerCache; - } - @JsonProperty @JsonPropertyDescription("Nodes to use to store Plan B shards. " + "If none are specified only the local node is used. " + @@ -72,13 +73,47 @@ public String getPath() { return path; } + @JsonProperty + @JsonPropertyDescription("How long should we keep snapshots before we fetch new ones.") + public StroomDuration getMinTimeToKeepSnapshots() { + return minTimeToKeepSnapshots; + } + + @JsonProperty + @JsonPropertyDescription("How long should we keep an environment open but inactive.") + public StroomDuration getMinTimeToKeepEnvOpen() { + return minTimeToKeepEnvOpen; + } + @Override public String toString() { return "PlanBConfig{" + "stateDocCache=" + stateDocCache + - ", readerCache=" + readerCache + ", nodeList=" + nodeList + ", path='" + path + '\'' + + ", minTimeToKeepSnapshots=" + minTimeToKeepSnapshots + + ", minTimeToKeepEnvOpen=" + minTimeToKeepEnvOpen + '}'; } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final PlanBConfig that = (PlanBConfig) o; + return Objects.equals(stateDocCache, that.stateDocCache) && + Objects.equals(nodeList, that.nodeList) && + Objects.equals(path, that.path) && + Objects.equals(minTimeToKeepSnapshots, that.minTimeToKeepSnapshots) && + Objects.equals(minTimeToKeepEnvOpen, that.minTimeToKeepEnvOpen); + } + + @Override + public int hashCode() { + return Objects.hash(stateDocCache, nodeList, path, minTimeToKeepSnapshots, minTimeToKeepEnvOpen); + } } diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/PlanBModule.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/PlanBModule.java index 11a29c0ede..f86d2c222f 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/PlanBModule.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/PlanBModule.java @@ -16,7 +16,6 @@ package stroom.planb.impl; -import stroom.cluster.lock.api.ClusterLockService; import stroom.datasource.api.v2.DataSourceProvider; import stroom.docstore.api.ContentIndexable; import stroom.docstore.api.DocumentActionHandlerBinder; @@ -30,6 +29,7 @@ import stroom.planb.impl.data.FileTransferService; import stroom.planb.impl.data.FileTransferServiceImpl; import stroom.planb.impl.data.MergeProcessor; +import stroom.planb.impl.data.ShardManager; import 
stroom.planb.impl.pipeline.PlanBElementModule; import stroom.planb.impl.pipeline.PlanBLookupImpl; import stroom.planb.impl.pipeline.StateProviderImpl; @@ -97,15 +97,28 @@ protected void configure() { .description("Plan B state store merge") .cronSchedule(CronExpressions.EVERY_MINUTE.getExpression()) .advanced(true)); + ScheduledJobsBinder.create(binder()) + .bindJobTo(ShardManagerCleanupRunnable.class, builder -> builder + .name(ShardManager.CLEANUP_TASK_NAME) + .description("Plan B shard cleanup") + .cronSchedule(CronExpressions.EVERY_10_MINUTES.getExpression()) + .advanced(true)); } private static class StateMergeRunnable extends RunnableWrapper { @Inject - StateMergeRunnable(final MergeProcessor mergeProcessor, - final ClusterLockService clusterLockService) { + StateMergeRunnable(final MergeProcessor mergeProcessor) { super(mergeProcessor::exec); } } + + private static class ShardManagerCleanupRunnable extends RunnableWrapper { + + @Inject + ShardManagerCleanupRunnable(final ShardManager shardManager) { + super(shardManager::cleanup); + } + } } diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/StateSearchProvider.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/StateSearchProvider.java index d10348b6a4..1d58679d3b 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/StateSearchProvider.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/StateSearchProvider.java @@ -22,8 +22,8 @@ import stroom.docref.DocRef; import stroom.entity.shared.ExpressionCriteria; import stroom.index.shared.IndexFieldImpl; -import stroom.planb.impl.data.ReaderCache; -import stroom.planb.impl.io.StateFieldUtil; +import stroom.planb.impl.data.ShardManager; +import stroom.planb.impl.db.StateFieldUtil; import stroom.planb.shared.PlanBDoc; import stroom.query.api.v2.ExpressionUtil; import stroom.query.api.v2.Query; @@ -74,7 +74,7 @@ public class StateSearchProvider implements SearchProvider, IndexFieldProvider { private final ResultStoreFactory resultStoreFactory; private final TaskManager taskManager; private final TaskContextFactory taskContextFactory; - private final ReaderCache readerCache; + private final ShardManager shardManager; private final ExpressionPredicateFactory expressionPredicateFactory; @Inject @@ -85,7 +85,7 @@ public StateSearchProvider(final Executor executor, final ResultStoreFactory resultStoreFactory, final TaskManager taskManager, final TaskContextFactory taskContextFactory, - final ReaderCache readerCache, + final ShardManager shardManager, final ExpressionPredicateFactory expressionPredicateFactory) { this.executor = executor; this.stateDocStore = stateDocStore; @@ -94,7 +94,7 @@ public StateSearchProvider(final Executor executor, this.resultStoreFactory = resultStoreFactory; this.taskManager = taskManager; this.taskContextFactory = taskContextFactory; - this.readerCache = readerCache; + this.shardManager = shardManager; this.expressionPredicateFactory = expressionPredicateFactory; } @@ -232,7 +232,7 @@ public void onTerminate() { final Instant queryStart = Instant.now(); try { - readerCache.get(doc.getName(), reader -> { + shardManager.get(doc.getName(), reader -> { reader.search( criteria, coprocessors.getFieldIndex(), diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/FileTransferClientImpl.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/FileTransferClientImpl.java index ee8e70c3b5..63139b6449 100644 --- 
a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/FileTransferClientImpl.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/FileTransferClientImpl.java @@ -82,7 +82,9 @@ public void storePart(final FileDescriptor fileDescriptor, if (enabledActiveNodes.contains(node)) { targetNodes.add(node); } else { - throw new RuntimeException("Plan B target node '" + node + "' is not enabled or active"); + throw new RuntimeException("Plan B target node '" + + node + + "' is not enabled or active"); } } } catch (final Exception e) { @@ -153,7 +155,11 @@ public void fetchSnapshot(final String nodeName, final Path snapshotDir) { securityContext.asProcessingUser(() -> { try { - LOGGER.info(() -> "Fetching snapshot from '" + nodeName + "' for '" + request.getMapName() + "'"); + LOGGER.info(() -> "Fetching snapshot from '" + + nodeName + + "' for '" + + request.getMapName() + + "'"); final String url = NodeCallUtil.getBaseEndpointUrl(nodeInfo, nodeService, nodeName) + ResourcePaths.buildAuthenticatedApiPath( FileTransferResource.BASE_PATH, diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/FileTransferServiceImpl.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/FileTransferServiceImpl.java index c245768bc4..8eed2b1ec4 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/FileTransferServiceImpl.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/FileTransferServiceImpl.java @@ -1,8 +1,5 @@ package stroom.planb.impl.data; -import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.PlanBDocCache; -import stroom.planb.impl.io.StatePaths; import stroom.security.api.SecurityContext; import stroom.util.io.StreamUtil; import stroom.util.logging.LambdaLogger; @@ -15,7 +12,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.nio.file.Path; @Singleton public class FileTransferServiceImpl implements FileTransferService { diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/MergeProcessor.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/MergeProcessor.java index fc0452597d..b45b317474 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/MergeProcessor.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/MergeProcessor.java @@ -1,6 +1,6 @@ package stroom.planb.impl.data; -import stroom.planb.impl.io.StatePaths; +import stroom.planb.impl.db.StatePaths; import stroom.security.api.SecurityContext; import stroom.task.api.TaskContext; import stroom.task.api.TaskContextFactory; @@ -117,8 +117,6 @@ private void merge(final SequentialFile sequentialFile) throws IOException { try { // Merge source. shardManager.merge(source); - // Delete source. 
- FileUtil.deleteDir(source); } catch (final IOException e) { throw new UncheckedIOException(e); } diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/ReaderCache.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/ReaderCache.java deleted file mode 100644 index 7a2fe02955..0000000000 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/ReaderCache.java +++ /dev/null @@ -1,284 +0,0 @@ -package stroom.planb.impl.data; - -import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.cache.api.CacheManager; -import stroom.cache.api.LoadingStroomCache; -import stroom.node.api.NodeInfo; -import stroom.planb.impl.PlanBConfig; -import stroom.planb.impl.PlanBDocCache; -import stroom.planb.impl.io.AbstractLmdbReader; -import stroom.planb.impl.io.RangedStateReader; -import stroom.planb.impl.io.SessionReader; -import stroom.planb.impl.io.StatePaths; -import stroom.planb.impl.io.StateReader; -import stroom.planb.impl.io.TemporalRangedStateReader; -import stroom.planb.impl.io.TemporalStateReader; -import stroom.planb.shared.PlanBDoc; -import stroom.util.NullSafe; -import stroom.util.io.FileUtil; -import stroom.util.logging.LambdaLogger; -import stroom.util.logging.LambdaLoggerFactory; - -import jakarta.inject.Inject; -import jakarta.inject.Provider; -import jakarta.inject.Singleton; -import org.lmdbjava.Env.AlreadyClosedException; - -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.locks.Condition; -import java.util.concurrent.locks.ReentrantLock; -import java.util.function.Function; - -@Singleton -public class ReaderCache { - - private static final LambdaLogger LOGGER = LambdaLoggerFactory.getLogger(ReaderCache.class); - - private static final String CACHE_NAME = "Plan B Reader Cache"; - - private final LoadingStroomCache cache; - private final ByteBufferFactory byteBufferFactory; - private final PlanBDocCache planBDocCache; - private final Provider configProvider; - private final StatePaths statePaths; - private final FileTransferClient fileTransferClient; - private final NodeInfo nodeInfo; - - @Inject - public ReaderCache(final Provider configProvider, - final CacheManager cacheManager, - final ByteBufferFactory byteBufferFactory, - final PlanBDocCache planBDocCache, - final StatePaths statePaths, - final FileTransferClient fileTransferClient, - final NodeInfo nodeInfo) { - this.byteBufferFactory = byteBufferFactory; - this.planBDocCache = planBDocCache; - this.configProvider = configProvider; - this.statePaths = statePaths; - this.fileTransferClient = fileTransferClient; - this.nodeInfo = nodeInfo; - - cache = cacheManager.createLoadingCache( - CACHE_NAME, - () -> configProvider.get().getReaderCache(), - this::create, - this::destroy); - } - - public R get(final String mapName, - final Function, R> function) { - while (true) { - final Shard shard = cache.get(mapName); - try { - return shard.get(function); - } catch (final AlreadyClosedException e) { - // Expected exception. 
- LOGGER.debug(e::getMessage, e); - } - } - } - - private Shard create(final String mapName) { - return new Shard( - mapName, - statePaths, - planBDocCache, - configProvider, - fileTransferClient, - byteBufferFactory, - nodeInfo); - } - - private void destroy(final String mapName, - final Shard shard) { - shard.close(); - } - - public static class Shard { - - private final String mapName; - private Path path; - private boolean snapshot; - private final PlanBDoc doc; - private final ByteBufferFactory byteBufferFactory; - - private RuntimeException exception; - private AbstractLmdbReader currentReader; - private final ReentrantLock readLock = new ReentrantLock(); - private final Condition readCondition = readLock.newCondition(); - private final AtomicInteger currentReadCount = new AtomicInteger(); - private volatile boolean closed; - - public Shard(final String mapName, - final StatePaths statePaths, - final PlanBDocCache planBDocCache, - final Provider configProvider, - final FileTransferClient fileTransferClient, - final ByteBufferFactory byteBufferFactory, - final NodeInfo nodeInfo) { - this.mapName = mapName; - this.byteBufferFactory = byteBufferFactory; - doc = planBDocCache.get(mapName); - if (doc == null) { - LOGGER.warn(() -> "No PlanB doc found for '" + mapName + "'"); - exception = new RuntimeException("No PlanB doc found for '" + mapName + "'"); - - } else { - final List nodes = NullSafe.list(configProvider.get().getNodeList()); - final boolean isStoreNode = nodes.contains(nodeInfo.getThisNodeName()); - - // See if we have it locally. - final Path shardDir = statePaths.getShardDir().resolve(mapName); - if (Files.exists(shardDir)) { - LOGGER.info(() -> "Found local shard for '" + mapName + "'"); - this.path = shardDir; - this.snapshot = false; - this.currentReader = createReader(); - - } else if (isStoreNode) { - // If this node is supposed to be a node that stores shards, but it doesn't have it, then error. - final String message = "Local Plan B shard not found for '" + - mapName + - "'"; - LOGGER.error(() -> message); - exception = new RuntimeException(message); - - } else if (nodes.isEmpty()) { - final String message = "Local Plan B shard not found for '" + - mapName + - "' and no remote nodes are configured"; - LOGGER.error(() -> message); - exception = new RuntimeException(message); - - } else { - - // See if we have a snapshot. - final Path snapshotDir = statePaths.getSnapshotDir().resolve(mapName); - if (Files.exists(snapshotDir)) { - LOGGER.info(() -> "Found local snapshot for '" + mapName + "'"); - this.path = snapshotDir; - this.snapshot = true; - this.currentReader = createReader(); - - } else { - // Go and get a snapshot. - final SnapshotRequest request = new SnapshotRequest(mapName, 0L); - for (final String node : configProvider.get().getNodeList()) { - try { - LOGGER.info(() -> "Fetching shard for '" + mapName + "'"); - fileTransferClient.fetchSnapshot(node, request, snapshotDir); - this.path = snapshotDir; - this.snapshot = true; - this.currentReader = createReader(); - - } catch (final Exception e) { - LOGGER.error(e::getMessage, e); - exception = new RuntimeException(e); - } - } - } - } - } - } - - public R get(final Function, R> function) { - if (exception != null) { - throw exception; - } - - // Count up readers. - readLock.lock(); - try { - if (closed) { - throw new AlreadyClosedException(); - } - currentReadCount.incrementAndGet(); - } finally { - readLock.unlock(); - } - - try { - return function.apply(currentReader); - } finally { - // Count down readers. 
- readLock.lock(); - try { - currentReadCount.decrementAndGet(); - readCondition.signalAll(); - } finally { - readLock.unlock(); - } - } - } - - private AbstractLmdbReader createReader() { - switch (doc.getStateType()) { - case STATE -> { - return new StateReader(path, byteBufferFactory); - } - case TEMPORAL_STATE -> { - return new TemporalStateReader(path, byteBufferFactory); - } - case RANGED_STATE -> { - return new RangedStateReader(path, byteBufferFactory); - } - case TEMPORAL_RANGED_STATE -> { - return new TemporalRangedStateReader(path, byteBufferFactory); - } - case SESSION -> { - return new SessionReader(path, byteBufferFactory); - } - default -> throw new RuntimeException("Unexpected state type: " + doc.getStateType()); - } - } - - public void close() { - try { - // Don't allow close until we have nobody reading. - readLock.lock(); - try { - // Make sure new reads will end up going and getting a new shard. - closed = true; - - // Wait for all current reads to stop. - while (currentReadCount.get() > 0) { - readCondition.await(); - } - } finally { - readLock.unlock(); - } - } catch (final InterruptedException e) { - LOGGER.debug(e::getMessage, e); - Thread.currentThread().interrupt(); - - } finally { - closeReader(); - if (snapshot) { - deleteSnapshot(); - } - } - } - - private void closeReader() { - try { - LOGGER.info(() -> "Closing reader for '" + mapName + "'"); - currentReader.close(); - } catch (final Exception e) { - LOGGER.error(e::getMessage, e); - } - } - - private void deleteSnapshot() { - try { - LOGGER.info(() -> "Deleting snapshot for '" + mapName + "'"); - FileUtil.deleteDir(path); - } catch (final Exception e) { - LOGGER.error(e::getMessage, e); - } - } - } -} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/SequentialFileStore.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/SequentialFileStore.java index 79d9481c4b..380e658a77 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/SequentialFileStore.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/SequentialFileStore.java @@ -1,6 +1,6 @@ package stroom.planb.impl.data; -import stroom.planb.impl.io.StatePaths; +import stroom.planb.impl.db.StatePaths; import stroom.util.concurrent.UncheckedInterruptedException; import stroom.util.io.FileUtil; import stroom.util.logging.LambdaLogger; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/ShardManager.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/ShardManager.java index 7257477c98..dfbfdacdee 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/ShardManager.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/data/ShardManager.java @@ -1,18 +1,26 @@ package stroom.planb.impl.data; import stroom.bytebuffer.impl6.ByteBufferFactory; +import stroom.node.api.NodeInfo; +import stroom.planb.impl.PlanBConfig; import stroom.planb.impl.PlanBDocCache; -import stroom.planb.impl.io.AbstractLmdbWriter; -import stroom.planb.impl.io.RangedStateWriter; -import stroom.planb.impl.io.SessionWriter; -import stroom.planb.impl.io.StatePaths; -import stroom.planb.impl.io.StateWriter; -import stroom.planb.impl.io.TemporalRangedStateWriter; -import stroom.planb.impl.io.TemporalStateWriter; +import stroom.planb.impl.db.AbstractLmdb; +import stroom.planb.impl.db.RangedStateDb; +import stroom.planb.impl.db.SessionDb; +import stroom.planb.impl.db.StateDb; +import 
stroom.planb.impl.db.StatePaths; +import stroom.planb.impl.db.TemporalRangedStateDb; +import stroom.planb.impl.db.TemporalStateDb; import stroom.planb.shared.PlanBDoc; +import stroom.util.NullSafe; +import stroom.util.date.DateUtil; +import stroom.util.io.FileUtil; +import stroom.util.logging.LambdaLogger; +import stroom.util.logging.LambdaLoggerFactory; import stroom.util.zip.ZipUtil; import jakarta.inject.Inject; +import jakarta.inject.Provider; import jakarta.inject.Singleton; import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream; @@ -22,130 +30,650 @@ import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; +import java.time.Instant; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Function; @Singleton public class ShardManager { - private final ByteBufferFactory byteBufferFactory; - private final Path shardDir; - private final PlanBDocCache planBDocCache; + private static final LambdaLogger LOGGER = LambdaLoggerFactory.getLogger(ShardManager.class); + + public static final String CLEANUP_TASK_NAME = "Plan B Cleanup"; + private static final String DATA_FILE_NAME = "data.mdb"; + private static final String LOCK_FILE_NAME = "lock.mdb"; - private final ReentrantLock reentrantLock = new ReentrantLock(); - private volatile String currentMapName; - private volatile AbstractLmdbWriter currentWriter; + private final ByteBufferFactory byteBufferFactory; + private final PlanBDocCache planBDocCache; + private final Map shardMap = new ConcurrentHashMap<>(); + private final NodeInfo nodeInfo; + private final Provider configProvider; + private final StatePaths statePaths; + private final FileTransferClient fileTransferClient; @Inject public ShardManager(final ByteBufferFactory byteBufferFactory, final PlanBDocCache planBDocCache, - final StatePaths statePaths) { + final NodeInfo nodeInfo, + final Provider configProvider, + final StatePaths statePaths, + final FileTransferClient fileTransferClient) { this.byteBufferFactory = byteBufferFactory; this.planBDocCache = planBDocCache; - shardDir = statePaths.getShardDir(); + this.nodeInfo = nodeInfo; + this.configProvider = configProvider; + this.statePaths = statePaths; + this.fileTransferClient = fileTransferClient; + + // Delete any existing shards that might have been left behind from the last use of Stroom. + FileUtil.deleteDir(statePaths.getShardDir()); + } + + private boolean isSnapshotNode() { + final List nodes = NullSafe.list(configProvider.get().getNodeList()); + // If we have no node info or no nodes are configured then treat this as a shard writer node and not a + // snapshot node. + return nodeInfo != null && !nodes.isEmpty() && !nodes.contains(nodeInfo.getThisNodeName()); } public void merge(final Path sourceDir) throws IOException { final String mapName = sourceDir.getFileName().toString(); - final PlanBDoc doc = planBDocCache.get(mapName); - if (doc != null) { - // Get shard dir. 
- final Path target = shardDir.resolve(mapName); + final Shard shard = getShard(mapName); + shard.merge(sourceDir); + } + + public void zip(final String mapName, final OutputStream outputStream) throws IOException { + final Shard shard = getShard(mapName); + shard.zip(outputStream); + } + + public R get(final String mapName, final Function, R> function) { + final Shard shard = getShard(mapName); + return shard.get(function); + } + + public void cleanup() { + shardMap.values().forEach(Shard::cleanup); + } + + private Shard getShard(final String mapName) { + return shardMap.computeIfAbsent(mapName, this::createShard); + } + + private Shard createShard(final String mapName) { + if (isSnapshotNode()) { + return new SnapshotShard(byteBufferFactory, + planBDocCache, + configProvider, + statePaths, + fileTransferClient, + mapName); + } + return new LocalShard( + byteBufferFactory, + planBDocCache, + configProvider, + statePaths, + mapName); + } - // If we don't already have the shard dir then just move the source to the target. - if (!Files.isDirectory(target)) { - lock(() -> { + + private static class LocalShard implements Shard { + + private final ByteBufferFactory byteBufferFactory; + private final PlanBDocCache planBDocCache; + private final Provider configProvider; + private final StatePaths statePaths; + private final String mapName; + + private final ReentrantLock lock = new ReentrantLock(); + + private volatile PlanBDoc doc; + private final AtomicInteger useCount = new AtomicInteger(); + private volatile AbstractLmdb db; + private volatile boolean open; + private volatile Instant openTime; + private volatile Instant lastAccessTime; + + public LocalShard(final ByteBufferFactory byteBufferFactory, + final PlanBDocCache planBDocCache, + final Provider configProvider, + final StatePaths statePaths, + final String mapName) { + this.byteBufferFactory = byteBufferFactory; + this.planBDocCache = planBDocCache; + this.configProvider = configProvider; + this.statePaths = statePaths; + this.mapName = mapName; + } + + private void incrementUseCount() { + lock.lock(); + try { + // Open if needed. + if (!open) { + open(); + open = true; + openTime = Instant.now(); + } + + final int count = useCount.incrementAndGet(); + if (count <= 0) { + throw new RuntimeException("Unexpected count"); + } + + lastAccessTime = Instant.now(); + + } finally { + lock.unlock(); + } + } + + private void decrementUseCount() { + lock.lock(); + try { + final int count = useCount.decrementAndGet(); + if (count < 0) { + throw new RuntimeException("Unexpected count"); + } + cleanup(); + } finally { + lock.unlock(); + } + } + + @Override + public void merge(final Path sourceDir) { + boolean success = false; + + // See if we can just merge by moving the file. + lock.lock(); + try { + // Get shard dir. + final PlanBDoc doc = getDoc(); + final Path targetDir = statePaths.getShardDir().resolve(doc.getName()); + + // If we don't already have the shard dir then just move the source to the target. + if (!Files.isDirectory(targetDir)) { try { - Files.createDirectories(shardDir); - Files.move(sourceDir, target); + success = true; + Files.createDirectories(statePaths.getShardDir()); + Files.move(sourceDir, targetDir); } catch (final IOException e) { throw new UncheckedIOException(e); } - }); + } + } finally { + lock.unlock(); + } - } else { - // If we do already have a target then merge the source to the target. 
- lock(() -> { - currentMapName = mapName; - currentWriter = getWriter(doc, target); - }); + // If the file already existed then we must open the DB and merge with LMDB. + if (!success) { + incrementUseCount(); try { - currentWriter.merge(sourceDir); + db.merge(sourceDir); } finally { - lock(() -> { - currentMapName = null; - currentWriter.close(); - }); + decrementUseCount(); } } } - } - public void zip(final String mapName, final OutputStream outputStream) throws IOException { - final PlanBDoc doc = planBDocCache.get(mapName); - if (doc != null) { + @Override + public void zip(final OutputStream outputStream) { + boolean success = false; + + // If the DB is not open then we can just create the zip from the dir. + lock.lock(); + try { + if (!open) { + success = true; + createZip(outputStream); + } + } finally { + lock.unlock(); + } + + // If the DB was open then we will need to lock the DB and zip the dir. + if (!success) { + incrementUseCount(); + try { + db.lock(() -> createZip(outputStream)); + } finally { + decrementUseCount(); + } + } + } + private void createZip(final OutputStream outputStream) { // Get shard dir. - final Path shard = shardDir.resolve(mapName); - if (!Files.exists(shard)) { + final Path dbDir = statePaths.getShardDir().resolve(mapName); + if (!Files.exists(dbDir)) { throw new RuntimeException("Shard not found"); } - final Path lmdbDataFile = shard.resolve("data.mdb"); + final Path lmdbDataFile = dbDir.resolve(DATA_FILE_NAME); if (!Files.exists(lmdbDataFile)) { throw new RuntimeException("LMDB data file not found"); } - lock(() -> { - if (currentMapName.equals(mapName)) { - if (currentWriter != null) { - currentWriter.lock(() -> zip(shard, outputStream)); - } else { - zip(shard, outputStream); + try (final ZipArchiveOutputStream zipOutputStream = + ZipUtil.createOutputStream(new BufferedOutputStream(outputStream))) { + ZipUtil.zip(dbDir, zipOutputStream); + } catch (final IOException e) { + throw new UncheckedIOException(e); + } + } + + @Override + public R get(final Function, R> function) { + incrementUseCount(); + try { + return function.apply(db); + } finally { + decrementUseCount(); + } + } + + @Override + public void cleanup() { + lock.lock(); + try { + if (useCount.get() == 0) { + if (open && isIdle()) { + db.close(); + open = false; + openTime = null; } - } else { - zip(shard, outputStream); } - }); + } finally { + lock.unlock(); + } } - } - private void lock(final Runnable runnable) { - reentrantLock.lock(); - try { - runnable.run(); - } finally { - reentrantLock.unlock(); + private boolean isIdle() { + return lastAccessTime.isBefore(Instant.now().minus( + configProvider.get().getMinTimeToKeepEnvOpen().getDuration())); + } + + private PlanBDoc getDoc() { + if (doc == null) { + doc = planBDocCache.get(mapName); + if (doc == null) { + LOGGER.warn(() -> "No PlanB doc found for '" + mapName + "'"); + throw new RuntimeException("No PlanB doc found for '" + mapName + "'"); + } + } + return doc; + } + + private void open() { + final PlanBDoc doc = getDoc(); + final String mapName = doc.getName(); + + final Path shardDir = statePaths.getShardDir().resolve(mapName); + if (Files.exists(shardDir)) { + LOGGER.info(() -> "Found local shard for '" + mapName + "'"); + db = openDb(doc, shardDir); + + + } else { + // If this node is supposed to be a node that stores shards, but it doesn't have it, then error. 
+ final String message = "Local Plan B shard not found for '" + + mapName + + "'"; + LOGGER.error(() -> message); + throw new RuntimeException(message); + } + } + + private AbstractLmdb openDb(final PlanBDoc doc, + final Path targetPath) { + switch (doc.getStateType()) { + case STATE -> { + return new StateDb(targetPath, byteBufferFactory, true, false); + } + case TEMPORAL_STATE -> { + return new TemporalStateDb(targetPath, byteBufferFactory, true, false); + } + case RANGED_STATE -> { + return new RangedStateDb(targetPath, byteBufferFactory, true, false); + } + case TEMPORAL_RANGED_STATE -> { + return new TemporalRangedStateDb(targetPath, byteBufferFactory, true, false); + } + case SESSION -> { + return new SessionDb(targetPath, byteBufferFactory, true, false); + } + default -> throw new RuntimeException("Unexpected state type: " + doc.getStateType()); + } } } - private void zip(final Path shard, final OutputStream outputStream) { - try (final ZipArchiveOutputStream zipOutputStream = - ZipUtil.createOutputStream(new BufferedOutputStream(outputStream))) { - ZipUtil.zip(shard, zipOutputStream); - } catch (final IOException e) { - throw new UncheckedIOException(e); + private static class SnapshotShard implements Shard { + + private final ByteBufferFactory byteBufferFactory; + private final PlanBDocCache planBDocCache; + private final Provider configProvider; + private final StatePaths statePaths; + private final FileTransferClient fileTransferClient; + private final String mapName; + private final Instant createTime; + + private final ReentrantLock lock = new ReentrantLock(); + + private volatile SnapshotInstance snapshotInstance; + + public SnapshotShard(final ByteBufferFactory byteBufferFactory, + final PlanBDocCache planBDocCache, + final Provider configProvider, + final StatePaths statePaths, + final FileTransferClient fileTransferClient, + final String mapName) { + this.byteBufferFactory = byteBufferFactory; + this.planBDocCache = planBDocCache; + this.configProvider = configProvider; + this.statePaths = statePaths; + this.fileTransferClient = fileTransferClient; + this.mapName = mapName; + this.createTime = Instant.now(); + } + + private SnapshotInstance getDBInstance() { + SnapshotInstance instance; + lock.lock(); + try { + instance = snapshotInstance; + if (instance == null) { + instance = new SnapshotInstance( + byteBufferFactory, + planBDocCache, + configProvider, + statePaths, + fileTransferClient, + mapName, + createTime); + + } else if (instance.isOldSnapshot()) { + instance.destroy(); + instance = new SnapshotInstance( + byteBufferFactory, + planBDocCache, + configProvider, + statePaths, + fileTransferClient, + mapName, + createTime); + + } else if (instance.destroy) { + instance = new SnapshotInstance( + byteBufferFactory, + planBDocCache, + configProvider, + statePaths, + fileTransferClient, + mapName, + createTime); + } + + snapshotInstance = instance; + } finally { + lock.unlock(); + } + return instance; + } + + @Override + public void merge(final Path sourceDir) { + throw new RuntimeException("Merge is not supported on snapshots"); + } + + @Override + public void zip(final OutputStream outputStream) { + throw new RuntimeException("Zip is not supported on snapshots"); + } + + @Override + public R get(final Function, R> function) { + R result = null; + + boolean success = false; + while (!success) { + try { + success = true; + final SnapshotInstance instance = getDBInstance(); + result = instance.get(function); + } catch (final DestroyedException e) { + 
LOGGER.debug(e::getMessage, e); + success = false; + } + } + + return result; + } + + @Override + public void cleanup() { + getDBInstance().cleanup(); } } + private static class SnapshotInstance { + + private final ByteBufferFactory byteBufferFactory; + private final PlanBDocCache planBDocCache; + private final Provider configProvider; + private final StatePaths statePaths; + private final FileTransferClient fileTransferClient; + private final String mapName; + private final Instant createTime; + + private final ReentrantLock lock = new ReentrantLock(); + + private volatile PlanBDoc doc; + private final AtomicInteger useCount = new AtomicInteger(); + private volatile AbstractLmdb db; + private volatile boolean open; + private volatile Instant openTime; + private volatile Instant lastAccessTime; + private volatile boolean destroy; + + public SnapshotInstance(final ByteBufferFactory byteBufferFactory, + final PlanBDocCache planBDocCache, + final Provider configProvider, + final StatePaths statePaths, + final FileTransferClient fileTransferClient, + final String mapName, + final Instant createTime) { + this.byteBufferFactory = byteBufferFactory; + this.planBDocCache = planBDocCache; + this.configProvider = configProvider; + this.statePaths = statePaths; + this.fileTransferClient = fileTransferClient; + this.mapName = mapName; + this.createTime = createTime; + } + + private void incrementUseCount() throws DestroyedException { + lock.lock(); + try { + if (destroy) { + throw new DestroyedException(); + } + + // Open if needed. + if (!open) { + open(); + open = true; + openTime = Instant.now(); + } + + final int count = useCount.incrementAndGet(); + if (count <= 0) { + throw new RuntimeException("Unexpected count"); + } + + lastAccessTime = Instant.now(); + + } finally { + lock.unlock(); + } + } + + private void decrementUseCount() { + lock.lock(); + try { + final int count = useCount.decrementAndGet(); + if (count < 0) { + throw new RuntimeException("Unexpected count"); + } + cleanup(); + } finally { + lock.unlock(); + } + } + + public R get(final Function, R> function) throws DestroyedException { + incrementUseCount(); + try { + return function.apply(db); + } finally { + decrementUseCount(); + } + } - private AbstractLmdbWriter getWriter(final PlanBDoc doc, final Path targetPath) { - switch (doc.getStateType()) { - case STATE -> { - return new StateWriter(targetPath, byteBufferFactory); + public void destroy() { + lock.lock(); + try { + destroy = true; + cleanup(); + } finally { + lock.unlock(); } - case TEMPORAL_STATE -> { - return new TemporalStateWriter(targetPath, byteBufferFactory); + } + + + private void cleanup() { + lock.lock(); + try { + if (useCount.get() == 0) { + if (open && (destroy || isIdle())) { + db.close(); + open = false; + openTime = null; + } + + if (!open && destroy) { + // Delete if this is an old snapshot, i.e. readonly. 
+ try { + LOGGER.info(() -> "Deleting snapshot for '" + getDoc().getName() + "'"); + db.delete(); + } catch (final Exception e) { + LOGGER.error(e::getMessage, e); + } + } + } + } finally { + lock.unlock(); } - case RANGED_STATE -> { - return new RangedStateWriter(targetPath, byteBufferFactory); + } + + private boolean isIdle() { + return lastAccessTime.isBefore(Instant.now().minus( + configProvider.get().getMinTimeToKeepEnvOpen().getDuration())); + } + + public boolean isOldSnapshot() { + return createTime.isBefore(Instant.now().minus( + configProvider.get().getMinTimeToKeepSnapshots().getDuration())); + } + + private PlanBDoc getDoc() { + if (doc == null) { + doc = planBDocCache.get(mapName); + if (doc == null) { + LOGGER.warn(() -> "No PlanB doc found for '" + mapName + "'"); + throw new RuntimeException("No PlanB doc found for '" + mapName + "'"); + } } - case TEMPORAL_RANGED_STATE -> { - return new TemporalRangedStateWriter(targetPath, byteBufferFactory); + return doc; + } + + private void open() { + final PlanBDoc doc = getDoc(); + final String mapName = doc.getName(); + + // See if we have a snapshot. + final Path snapshotDir = statePaths + .getSnapshotDir() + .resolve(mapName) + .resolve(DateUtil.createFileDateTimeString(createTime)); + + final Path lmdbDataFile = snapshotDir.resolve(DATA_FILE_NAME); + if (Files.exists(snapshotDir) && !Files.exists(lmdbDataFile)) { + LOGGER.info(() -> "Found local snapshot for '" + mapName + "'"); + db = openDb(doc, snapshotDir); + + } else { + // Go and get a snapshot. + final SnapshotRequest request = new SnapshotRequest(mapName, 0L); + for (final String node : configProvider.get().getNodeList()) { + try { + LOGGER.info(() -> "Fetching shard for '" + mapName + "'"); + + // Create dir. + Files.createDirectories(snapshotDir); + + fileTransferClient.fetchSnapshot(node, request, snapshotDir); + db = openDb(doc, snapshotDir); + + } catch (final Exception e) { + LOGGER.warn(e::getMessage, e); + } + } } - case SESSION -> { - return new SessionWriter(targetPath, byteBufferFactory); + + throw new RuntimeException("Unable to get snapshot shard for '" + mapName + "'"); + } + + private AbstractLmdb openDb(final PlanBDoc doc, + final Path targetPath) { + switch (doc.getStateType()) { + case STATE -> { + return new StateDb(targetPath, byteBufferFactory, false, true); + } + case TEMPORAL_STATE -> { + return new TemporalStateDb(targetPath, byteBufferFactory, false, true); + } + case RANGED_STATE -> { + return new RangedStateDb(targetPath, byteBufferFactory, false, true); + } + case TEMPORAL_RANGED_STATE -> { + return new TemporalRangedStateDb(targetPath, byteBufferFactory, false, true); + } + case SESSION -> { + return new SessionDb(targetPath, byteBufferFactory, false, true); + } + default -> throw new RuntimeException("Unexpected state type: " + doc.getStateType()); } - default -> throw new RuntimeException("Unexpected state type: " + doc.getStateType()); } } + + private interface Shard { + + void merge(Path sourceDir); + + void zip(OutputStream outputStream); + + R get(Function, R> function); + + void cleanup(); + } + + private static class DestroyedException extends Exception { + + } } diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/AbstractLmdb.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/AbstractLmdb.java new file mode 100644 index 0000000000..ab5330a17f --- /dev/null +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/AbstractLmdb.java @@ -0,0 +1,382 @@ +package 
stroom.planb.impl.db; + +import stroom.bytebuffer.impl6.ByteBufferFactory; +import stroom.entity.shared.ExpressionCriteria; +import stroom.expression.api.DateTimeSettings; +import stroom.lmdb.LmdbConfig; +import stroom.lmdb2.LmdbEnvDir; +import stroom.query.api.v2.Column; +import stroom.query.api.v2.Format; +import stroom.query.common.v2.ExpressionPredicateFactory; +import stroom.query.common.v2.ExpressionPredicateFactory.ValueFunctionFactories; +import stroom.query.common.v2.ValFunctionFactory; +import stroom.query.language.functions.FieldIndex; +import stroom.query.language.functions.Val; +import stroom.query.language.functions.ValuesConsumer; +import stroom.util.concurrent.UncheckedInterruptedException; +import stroom.util.io.FileUtil; +import stroom.util.logging.LambdaLogger; +import stroom.util.logging.LambdaLoggerFactory; + +import org.lmdbjava.CursorIterable; +import org.lmdbjava.CursorIterable.KeyVal; +import org.lmdbjava.Dbi; +import org.lmdbjava.DbiFlags; +import org.lmdbjava.Env; +import org.lmdbjava.EnvFlags; +import org.lmdbjava.KeyRange; +import org.lmdbjava.PutFlags; +import org.lmdbjava.Txn; + +import java.nio.ByteBuffer; +import java.nio.file.Path; +import java.util.Iterator; +import java.util.Optional; +import java.util.concurrent.Semaphore; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Function; +import java.util.function.Predicate; + +import static java.nio.charset.StandardCharsets.UTF_8; + +public abstract class AbstractLmdb<K, V> implements AutoCloseable { + + private static final LambdaLogger LOGGER = LambdaLoggerFactory.getLogger(AbstractLmdb.class); + + private static final byte[] NAME = "db".getBytes(UTF_8); + private static final int CONCURRENT_READERS = 10; + + private final Semaphore concurrentReaderSemaphore; + + final Serde<K, V> serde; + final ByteBufferFactory byteBufferFactory; + final Env<ByteBuffer> env; + final Dbi<ByteBuffer> dbi; + private Txn<ByteBuffer> writeTxn; + private int commitCount = 0; + private int hashClashes = 0; + private final DBWriter dbWriter; + private final ReentrantLock lock = new ReentrantLock(); + private final boolean readOnly; + private final Path path; + + public AbstractLmdb(final Path path, + final ByteBufferFactory byteBufferFactory, + final Serde<K, V> serde, + final boolean overwrite, + final boolean readOnly) { + final LmdbEnvDir lmdbEnvDir = new LmdbEnvDir(path, true); + this.byteBufferFactory = byteBufferFactory; + this.serde = serde; + this.path = path; + this.readOnly = readOnly; + concurrentReaderSemaphore = new Semaphore(CONCURRENT_READERS); + + if (readOnly) { + LOGGER.info(() -> "Opening: " + path); + } else { + LOGGER.info(() -> "Creating: " + path); + } + + final Env.Builder<ByteBuffer> builder = Env.create() + .setMapSize(LmdbConfig.DEFAULT_MAX_STORE_SIZE.getBytes()) + .setMaxDbs(1) + .setMaxReaders(CONCURRENT_READERS); + + if (readOnly) { + env = builder.open(lmdbEnvDir.getEnvDir().toFile(), + EnvFlags.MDB_NOTLS, + EnvFlags.MDB_NOLOCK, + EnvFlags.MDB_RDONLY_ENV); + } else { + env = builder.open(lmdbEnvDir.getEnvDir().toFile(), + EnvFlags.MDB_NOTLS); + } + dbi = env.openDbi(NAME, getDbiFlags()); + + if (readOnly) { + dbWriter = null; + + } else { + // If we do not prefix values then we can simply put rows. + if (!serde.hasPrefix()) { + // If the value has no key prefix, i.e. we are not using key hashes then just try to put. + if (overwrite) { + // Put and overwrite any existing key/value. + dbWriter = dbi::put; + } else { + // Put but do not overwrite any existing key/value.
+ dbWriter = (writeTxn, keyByteBuffer, valueByteBuffer) -> + dbi.put(writeTxn, keyByteBuffer, valueByteBuffer, PutFlags.MDB_NOOVERWRITE); + } + } else { + if (overwrite) { + dbWriter = (writeTxn, keyByteBuffer, valueByteBuffer) -> { + // First try to put without overwriting existing values. + if (!dbi.put(writeTxn, keyByteBuffer, valueByteBuffer, PutFlags.MDB_NOOVERWRITE)) { + serde.createPrefixPredicate(keyByteBuffer, valueByteBuffer, predicate -> { + // Delete current value if there is one. + if (!delete(writeTxn, keyByteBuffer, predicate)) { + // We must have had a hash clash here because we didn't find a row for the key even + // though the db contains the key hash. + hashClashes++; + } + + // Put new value allowing for duplicate keys as we are only using a hash key. + dbi.put(writeTxn, keyByteBuffer, valueByteBuffer); + return true; + }); + } + }; + } else { + dbWriter = (writeTxn, keyByteBuffer, valueByteBuffer) -> { + // First try to put without overwriting existing values. + if (!dbi.put(writeTxn, keyByteBuffer, valueByteBuffer, PutFlags.MDB_NOOVERWRITE)) { + serde.createPrefixPredicate(keyByteBuffer, valueByteBuffer, predicate -> { + if (!exists(writeTxn, keyByteBuffer, predicate)) { + // We must have had a hash clash here because we didn't find a row for the key even + // though the db contains the key hash. + hashClashes++; + + // Put the value as another row for the same key hash as we didn't find a row for + // the full key value. + dbi.put(writeTxn, keyByteBuffer, valueByteBuffer); + } + return true; + }); + } + }; + } + } + } + } + + DbiFlags[] getDbiFlags() { + if (serde.hasPrefix()) { + return new DbiFlags[]{DbiFlags.MDB_CREATE, DbiFlags.MDB_DUPSORT}; + } + return new DbiFlags[]{DbiFlags.MDB_CREATE}; + } + + public void merge(final Path source) { + final Env.Builder<ByteBuffer> builder = Env.create() + .setMaxDbs(1) + .setMaxReaders(1); + try (final Env<ByteBuffer> sourceEnv = builder.open(source.toFile(), + EnvFlags.MDB_NOTLS, + EnvFlags.MDB_NOLOCK, + EnvFlags.MDB_RDONLY_ENV)) { + final Dbi<ByteBuffer> sourceDbi = sourceEnv.openDbi(NAME); + try (final Txn<ByteBuffer> readTxn = sourceEnv.txnRead()) { + try (final CursorIterable<ByteBuffer> cursorIterable = sourceDbi.iterate(readTxn)) { + for (final KeyVal<ByteBuffer> keyVal : cursorIterable) { + insert(keyVal.key(), keyVal.val()); + } + } + } + } + + // Always commit after a merge. + commit(); + + // Delete source now we have merged.
+ FileUtil.deleteDir(source); + } + + public boolean insert(final KV<K, V> kv) { + return insert(kv.key(), kv.value()); + } + + public boolean insert(final K key, final V value) { + return serde.createKeyByteBuffer(key, keyByteBuffer -> + serde.createValueByteBuffer(key, value, valueByteBuffer -> + insert(keyByteBuffer, valueByteBuffer))); + } + + public boolean insert(final ByteBuffer keyByteBuffer, + final ByteBuffer valueByteBuffer) { + final Txn<ByteBuffer> writeTxn = getOrCreateWriteTxn(); + dbWriter.write(writeTxn, keyByteBuffer, valueByteBuffer); + + commitCount++; + if (commitCount > 10000) { + commit(); + } + + return true; + } + + private boolean delete(final Txn<ByteBuffer> txn, + final ByteBuffer keyByteBuffer, + final Predicate<KeyVal<ByteBuffer>> predicate) { + final KeyRange<ByteBuffer> keyRange = KeyRange.closed(keyByteBuffer, keyByteBuffer); + try (final CursorIterable<ByteBuffer> cursor = dbi.iterate(txn, keyRange)) { + final Iterator<KeyVal<ByteBuffer>> iterator = cursor.iterator(); + while (iterator.hasNext()) { + final KeyVal<ByteBuffer> keyVal = iterator.next(); + if (predicate.test(keyVal)) { + iterator.remove(); + return true; + } + } + } + return false; + } + + private boolean exists(final Txn<ByteBuffer> txn, + final ByteBuffer keyByteBuffer, + final Predicate<KeyVal<ByteBuffer>> predicate) { + final KeyRange<ByteBuffer> keyRange = KeyRange.closed(keyByteBuffer, keyByteBuffer); + try (final CursorIterable<ByteBuffer> cursor = dbi.iterate(txn, keyRange)) { + for (final KeyVal<ByteBuffer> keyVal : cursor) { + if (predicate.test(keyVal)) { + return true; + } + } + } + return false; + } + + + Txn<ByteBuffer> getOrCreateWriteTxn() { + if (writeTxn == null) { + writeTxn = env.txnWrite(); + } + return writeTxn; + } + + void commit() { + lock(() -> { + if (writeTxn != null) { + try { + writeTxn.commit(); + } finally { + try { + writeTxn.close(); + } finally { + writeTxn = null; + } + } + } + + commitCount = 0; + + if (hashClashes > 0) { + // We prob don't want to warn but will keep for now until we know how big the issue is.
+ LOGGER.warn(() -> "We had " + hashClashes + " hash clashes since last commit"); + hashClashes = 0; + } + }); + } + + public void lock(final Runnable runnable) { + lock.lock(); + try { + runnable.run(); + } finally { + lock.unlock(); + } + } + + <R> R read(final Function<Txn<ByteBuffer>, R> function) { + try { + concurrentReaderSemaphore.acquire(); + try { + try (final Txn<ByteBuffer> readTxn = env.txnRead()) { + return function.apply(readTxn); + } + } finally { + concurrentReaderSemaphore.release(); + } + } catch (final InterruptedException e) { + LOGGER.error(e::getMessage, e); + Thread.currentThread().interrupt(); + throw new UncheckedInterruptedException(e); + } + } + + public Optional<V> get(final K key) { + return read(readTxn -> get(readTxn, key)); + } + + private Optional<V> get(final Txn<ByteBuffer> readTxn, final K key) { + return serde.createKeyByteBuffer(key, keyByteBuffer -> + serde.createPrefixPredicate(key, predicate -> { + final KeyRange<ByteBuffer> keyRange = KeyRange.closed(keyByteBuffer, keyByteBuffer); + try (final CursorIterable<ByteBuffer> cursor = dbi.iterate(readTxn, keyRange)) { + final Iterator<KeyVal<ByteBuffer>> iterator = cursor.iterator(); + while (iterator.hasNext() + && !Thread.currentThread().isInterrupted()) { + final KeyVal<ByteBuffer> keyVal = iterator.next(); + if (predicate.test(keyVal)) { + return Optional.of(serde.getVal(keyVal)); + } + } + } + return Optional.empty(); + })); + } + + public void search(final ExpressionCriteria criteria, + final FieldIndex fieldIndex, + final DateTimeSettings dateTimeSettings, + final ExpressionPredicateFactory expressionPredicateFactory, + final ValuesConsumer consumer) { + final ValueFunctionFactories<Val[]> valueFunctionFactories = createValueFunctionFactories(fieldIndex); + final Optional<Predicate<Val[]>> optionalPredicate = expressionPredicateFactory + .create(criteria.getExpression(), valueFunctionFactories, dateTimeSettings); + final Predicate<Val[]> predicate = optionalPredicate.orElse(vals -> true); + final Function<KeyVal<ByteBuffer>, Val>[] valExtractors = serde.getValExtractors(fieldIndex); + + // TODO : It would be faster if we limit the iteration to keys based on the criteria.
+ read(readTxn -> { + try (final CursorIterable<ByteBuffer> cursorIterable = dbi.iterate(readTxn)) { + for (final KeyVal<ByteBuffer> keyVal : cursorIterable) { + final Val[] vals = new Val[valExtractors.length]; + for (int i = 0; i < vals.length; i++) { + vals[i] = valExtractors[i].apply(keyVal); + } + if (predicate.test(vals)) { + consumer.accept(vals); + } + } + } + return null; + }); + } + + ValueFunctionFactories<Val[]> createValueFunctionFactories(final FieldIndex fieldIndex) { + return fieldName -> { + final Integer index = fieldIndex.getPos(fieldName); + if (index == null) { + throw new RuntimeException("Unexpected field: " + fieldName); + } + return new ValFunctionFactory(Column.builder().format(Format.TEXT).build(), index); + }; + } + + public long count() { + return read(readTxn -> dbi.stat(readTxn).entries); + } + + public boolean isReadOnly() { + return readOnly; + } + + @Override + public void close() { + commit(); + env.close(); + } + + public void delete() { + FileUtil.deleteDir(path); + } + + private interface DBWriter { + + void write(Txn<ByteBuffer> writeTxn, + ByteBuffer keyByteBuffer, + ByteBuffer valueByteBuffer); + } +} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/KV.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/KV.java similarity index 67% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/KV.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/KV.java index b2d2d57f1f..cdcac9e00b 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/KV.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/KV.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; public interface KV<K, V> { diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedState.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedState.java similarity index 95% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedState.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedState.java index 4ffe5b587c..5cebd5dc51 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedState.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedState.java @@ -1,6 +1,6 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; -import stroom.planb.impl.io.RangedState.Key; +import stroom.planb.impl.db.RangedState.Key; public record RangedState(Key key, StateValue value) implements KV<Key, StateValue> { diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateReader.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateDb.java similarity index 76% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateReader.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateDb.java index 212a56665c..9118ba317e 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateReader.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateDb.java @@ -1,7 +1,7 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.RangedState.Key; +import stroom.planb.impl.db.RangedState.Key; import org.lmdbjava.CursorIterable; import org.lmdbjava.CursorIterable.KeyVal; @@ -12,11 +12,18
@@ import java.util.Iterator; import java.util.Optional; -public class RangedStateReader extends AbstractLmdbReader { +public class RangedStateDb extends AbstractLmdb { - public RangedStateReader(final Path path, - final ByteBufferFactory byteBufferFactory) { - super(path, byteBufferFactory, new RangedStateSerde(byteBufferFactory)); + public RangedStateDb(final Path path, + final ByteBufferFactory byteBufferFactory) { + this(path, byteBufferFactory, true, false); + } + + public RangedStateDb(final Path path, + final ByteBufferFactory byteBufferFactory, + final boolean overwrite, + final boolean readOnly) { + super(path, byteBufferFactory, new RangedStateSerde(byteBufferFactory), overwrite, readOnly); } public Optional getState(final RangedStateRequest request) { diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateFields.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateFields.java similarity index 96% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateFields.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateFields.java index 9aaba88aa7..ddc8d14cbf 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateFields.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateFields.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.datasource.api.v2.QueryField; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateRequest.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateRequest.java similarity index 62% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateRequest.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateRequest.java index 48ff5c3292..affd146ae4 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateRequest.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateRequest.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; public record RangedStateRequest(long key) { diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateSerde.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateSerde.java similarity index 98% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateSerde.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateSerde.java index 0f7554062a..a8e8bce5ba 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateSerde.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/RangedStateSerde.java @@ -1,8 +1,8 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.bytebuffer.ByteBufferUtils; import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.RangedState.Key; +import stroom.planb.impl.db.RangedState.Key; import stroom.query.language.functions.FieldIndex; import stroom.query.language.functions.Val; import stroom.query.language.functions.ValLong; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SearchHelper.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SearchHelper.java similarity index 99% rename 
from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SearchHelper.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SearchHelper.java index 56a6ea4ec5..7ea0283d57 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SearchHelper.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SearchHelper.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package stroom.planb.impl.io; +package stroom.planb.impl.db; public class SearchHelper { // private final Map columnMap; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/Serde.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/Serde.java similarity index 96% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/Serde.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/Serde.java index 957e2e335a..fff3ffae2a 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/Serde.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/Serde.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.query.language.functions.FieldIndex; import stroom.query.language.functions.Val; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/Session.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/Session.java similarity index 98% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/Session.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/Session.java index efaeb8ddf2..88f00aa926 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/Session.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/Session.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import java.nio.charset.StandardCharsets; import java.time.Instant; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionReader.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionDb.java similarity index 95% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionReader.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionDb.java index b87507b26e..b18c44f014 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionReader.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionDb.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.bytebuffer.ByteBufferUtils; import stroom.bytebuffer.impl6.ByteBufferFactory; @@ -27,13 +27,21 @@ import java.util.function.Function; import java.util.function.Predicate; -public class SessionReader extends AbstractLmdbReader { +public class SessionDb extends AbstractLmdb { - public SessionReader(final Path path, - final ByteBufferFactory byteBufferFactory) { - super(path, byteBufferFactory, new SessionSerde(byteBufferFactory)); + public SessionDb(final Path path, + final ByteBufferFactory byteBufferFactory) { + this(path, byteBufferFactory, true, false); } + public SessionDb(final Path path, + final ByteBufferFactory byteBufferFactory, + final boolean overwrite, + final boolean readOnly) { + super(path, byteBufferFactory, new SessionSerde(byteBufferFactory), overwrite, readOnly); + } + + public void 
search(final ExpressionCriteria criteria, final FieldIndex fieldIndex, final DateTimeSettings dateTimeSettings, diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionFields.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionFields.java similarity index 96% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionFields.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionFields.java index f9c474b4cd..e7cf674ab4 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionFields.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionFields.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.datasource.api.v2.QueryField; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionRequest.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionRequest.java similarity index 97% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionRequest.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionRequest.java index c129bcbfc7..cb0eb8e4c5 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionRequest.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionRequest.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import java.nio.charset.StandardCharsets; import java.time.Instant; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionSerde.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionSerde.java similarity index 99% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionSerde.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionSerde.java index 271c0ac142..51b0d87174 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionSerde.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/SessionSerde.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.bytebuffer.ByteBufferUtils; import stroom.bytebuffer.impl6.ByteBufferFactory; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/ShardWriters.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/ShardWriters.java similarity index 85% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/ShardWriters.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/ShardWriters.java index 81c18454f3..26dcd63b16 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/ShardWriters.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/ShardWriters.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.bytebuffer.impl6.ByteBufferFactory; import stroom.meta.shared.Meta; @@ -66,7 +66,7 @@ public static class ShardWriter implements AutoCloseable { private final FileTransferClient fileTransferClient; private final Path dir; private final Meta meta; - private final Map> writers = new HashMap<>(); + private final Map> writers = new HashMap<>(); private final Map> stateDocMap = new HashMap<>(); private final boolean overwrite = true; @@ -110,36 +110,36 @@ 
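The hunks above replace the separate read-only *Reader and write-only *Writer classes with a single *Db class per state type, whose extra constructor takes overwrite and readOnly flags. A minimal lookup sketch against the new API follows; the directory, key and printed output are placeholders rather than values from this change, and only members visible in this diff (the four-argument constructor, getState, count, close) are used.

import stroom.bytebuffer.impl6.ByteBufferFactory;
import stroom.bytebuffer.impl6.ByteBufferFactoryImpl;
import stroom.planb.impl.db.State;
import stroom.planb.impl.db.StateDb;
import stroom.planb.impl.db.StateRequest;

import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Optional;

public class StateDbLookupSketch {

    public static void main(final String[] args) {
        final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl();
        // Placeholder directory of an existing Plan B map shard.
        final Path dbDir = Path.of("/tmp/planb/example-map");

        // overwrite = false, readOnly = true: open an existing store purely for lookups,
        // the same flags the updated tests pass when they reopen a freshly written store.
        try (final StateDb db = new StateDb(dbDir, byteBufferFactory, false, true)) {
            final StateRequest request =
                    new StateRequest("TEST_KEY".getBytes(StandardCharsets.UTF_8)); // placeholder key
            final Optional<State> state = db.getState(request);
            System.out.println("Key present: " + state.isPresent() + ", entries: " + db.count());
        }
    }
}

Writing goes through the same class: construct it with readOnly = false and call insert, which is what replaces the deleted StateWriter.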
public Optional getStateType(final String mapName, final Consumer - new StateWriter(getLmdbEnvDir(k), byteBufferFactory, overwrite)); + final StateDb writer = (StateDb) writers.computeIfAbsent(mapName, k -> + new StateDb(getLmdbEnvDir(k), byteBufferFactory, overwrite, false)); writer.insert(state); } public void addTemporalState(final String mapName, final TemporalState temporalState) { - final TemporalStateWriter writer = (TemporalStateWriter) writers.computeIfAbsent(mapName, k -> - new TemporalStateWriter(getLmdbEnvDir(k), byteBufferFactory, overwrite)); + final TemporalStateDb writer = (TemporalStateDb) writers.computeIfAbsent(mapName, k -> + new TemporalStateDb(getLmdbEnvDir(k), byteBufferFactory, overwrite, false)); writer.insert(temporalState); } public void addRangedState(final String mapName, final RangedState rangedState) { - final RangedStateWriter writer = (RangedStateWriter) writers.computeIfAbsent(mapName, k -> - new RangedStateWriter(getLmdbEnvDir(k), byteBufferFactory, overwrite)); + final RangedStateDb writer = (RangedStateDb) writers.computeIfAbsent(mapName, k -> + new RangedStateDb(getLmdbEnvDir(k), byteBufferFactory, overwrite, false)); writer.insert(rangedState); } public void addTemporalRangedState(final String mapName, final TemporalRangedState temporalRangedState) { - final TemporalRangedStateWriter writer = (TemporalRangedStateWriter) writers.computeIfAbsent(mapName, k -> - new TemporalRangedStateWriter(getLmdbEnvDir(k), byteBufferFactory, overwrite)); + final TemporalRangedStateDb writer = (TemporalRangedStateDb) writers.computeIfAbsent(mapName, k -> + new TemporalRangedStateDb(getLmdbEnvDir(k), byteBufferFactory, overwrite, false)); writer.insert(temporalRangedState); } public void addSession(final String mapName, final Session session) { - final SessionWriter writer = (SessionWriter) writers.computeIfAbsent(mapName, k -> - new SessionWriter(getLmdbEnvDir(k), byteBufferFactory, overwrite)); + final SessionDb writer = (SessionDb) writers.computeIfAbsent(mapName, k -> + new SessionDb(getLmdbEnvDir(k), byteBufferFactory, overwrite, false)); writer.insert(session, session); } @@ -157,7 +157,7 @@ private Path getLmdbEnvDir(final String name) { public void close() throws IOException { Path zipFile = null; try { - writers.values().forEach(AbstractLmdbWriter::close); + writers.values().forEach(AbstractLmdb::close); // Zip all and delete dir. 
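The addState/addRangedState/addSession methods above build each per-map Db with the writer's overwrite flag and readOnly = false. As a rough sketch of what overwrite means for repeated inserts of the same key, assuming dir1, dir2, byteBufferFactory, kv1 and kv2 are set up elsewhere (imports as in the sketch above; kv1 and kv2 are State instances sharing one key but carrying different values):

// Sketch only: the overwrite flag decides which value survives a duplicate key.
try (final StateDb db = new StateDb(dir1, byteBufferFactory, true, false)) {   // overwrite = true
    db.insert(kv1);
    db.insert(kv2); // the later value replaces kv1's value for the shared key
}
try (final StateDb db = new StateDb(dir2, byteBufferFactory, false, false)) {  // overwrite = false
    db.insert(kv1);
    db.insert(kv2); // ignored: the first value stored for the key is kept
}

Commit batching and hash-clash handling are presumed to sit in the shared base class, carried over from the deleted AbstractLmdbWriter shown later in this diff, so callers only deal with insert and close.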
zipFile = dir.getParent().resolve(dir.getFileName().toString() + ".zip"); diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/State.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/State.java similarity index 96% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/State.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/State.java index 7cef33033c..5f8e54d0e4 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/State.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/State.java @@ -1,6 +1,6 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; -import stroom.planb.impl.io.State.Key; +import stroom.planb.impl.db.State.Key; import java.nio.charset.StandardCharsets; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateDb.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateDb.java new file mode 100644 index 0000000000..192c5a3b8c --- /dev/null +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateDb.java @@ -0,0 +1,28 @@ +package stroom.planb.impl.db; + +import stroom.bytebuffer.impl6.ByteBufferFactory; +import stroom.planb.impl.db.State.Key; + +import java.nio.file.Path; +import java.util.Optional; + +public class StateDb extends AbstractLmdb { + + public StateDb(final Path path, + final ByteBufferFactory byteBufferFactory) { + this(path, byteBufferFactory, true, false); + } + + public StateDb(final Path path, + final ByteBufferFactory byteBufferFactory, + final boolean overwrite, + final boolean readOnly) { + super(path, byteBufferFactory, new StateSerde(byteBufferFactory), overwrite, readOnly); + } + + public Optional getState(final StateRequest request) { + final Key key = Key.builder().name(request.key()).build(); + final Optional optional = get(key); + return optional.map(value -> new State(key, value)); + } +} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateFieldUtil.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateFieldUtil.java similarity index 97% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateFieldUtil.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateFieldUtil.java index e3f1bbd9ec..5006e7a16b 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateFieldUtil.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateFieldUtil.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.datasource.api.v2.QueryField; import stroom.planb.shared.StateType; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateFields.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateFields.java similarity index 95% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateFields.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateFields.java index 6268185cef..df5f83c276 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateFields.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateFields.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.datasource.api.v2.QueryField; diff --git 
a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StatePaths.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StatePaths.java similarity index 98% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StatePaths.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StatePaths.java index f5dea6493c..26804ef68e 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StatePaths.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StatePaths.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.planb.impl.PlanBConfig; import stroom.util.io.PathCreator; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateRequest.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateRequest.java similarity index 60% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateRequest.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateRequest.java index dd6877c9e5..41ca1c0e67 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateRequest.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateRequest.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; public record StateRequest(byte[] key) { diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateSerde.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateSerde.java similarity index 98% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateSerde.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateSerde.java index 29de50bfac..4edaeaa5d0 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateSerde.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateSerde.java @@ -1,8 +1,8 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.bytebuffer.ByteBufferUtils; import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.State.Key; +import stroom.planb.impl.db.State.Key; import stroom.query.language.functions.FieldIndex; import stroom.query.language.functions.Val; import stroom.query.language.functions.ValNull; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateValue.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateValue.java similarity index 97% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateValue.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateValue.java index acc8f1a63b..54abff4468 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateValue.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/StateValue.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.pipeline.refdata.store.FastInfosetUtil; import stroom.pipeline.refdata.store.FastInfosetValue; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedState.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedState.java similarity index 96% rename from 
stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedState.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedState.java index a129b41721..c7a245796f 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedState.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedState.java @@ -1,6 +1,6 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; -import stroom.planb.impl.io.TemporalRangedState.Key; +import stroom.planb.impl.db.TemporalRangedState.Key; import java.time.Instant; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateReader.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateDb.java similarity index 78% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateReader.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateDb.java index bda1cb5c06..70533ccffd 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateReader.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateDb.java @@ -1,7 +1,7 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.TemporalRangedState.Key; +import stroom.planb.impl.db.TemporalRangedState.Key; import org.lmdbjava.CursorIterable; import org.lmdbjava.CursorIterable.KeyVal; @@ -12,13 +12,21 @@ import java.util.Iterator; import java.util.Optional; -public class TemporalRangedStateReader extends AbstractLmdbReader { +public class TemporalRangedStateDb extends AbstractLmdb { - public TemporalRangedStateReader(final Path path, - final ByteBufferFactory byteBufferFactory) { - super(path, byteBufferFactory, new TemporalRangedStateSerde(byteBufferFactory)); + public TemporalRangedStateDb(final Path path, + final ByteBufferFactory byteBufferFactory) { + this(path, byteBufferFactory, true, false); } + public TemporalRangedStateDb(final Path path, + final ByteBufferFactory byteBufferFactory, + final boolean overwrite, + final boolean readOnly) { + super(path, byteBufferFactory, new TemporalRangedStateSerde(byteBufferFactory), overwrite, readOnly); + } + + public Optional getState(final TemporalRangedStateRequest request) { final ByteBuffer start = byteBufferFactory.acquire(Long.BYTES); try { diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateFields.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateFields.java similarity index 97% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateFields.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateFields.java index 9ab8e245fc..dc7237df52 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateFields.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateFields.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.datasource.api.v2.QueryField; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateRequest.java 
b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateRequest.java similarity index 71% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateRequest.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateRequest.java index 6ca26339d5..f84e6fc839 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateRequest.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateRequest.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; public record TemporalRangedStateRequest(long key, long effectiveTime) { diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateSerde.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateSerde.java similarity index 98% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateSerde.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateSerde.java index 930b21ad32..6c0581a1bf 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateSerde.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalRangedStateSerde.java @@ -1,8 +1,8 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.bytebuffer.ByteBufferUtils; import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.TemporalRangedState.Key; +import stroom.planb.impl.db.TemporalRangedState.Key; import stroom.query.language.functions.FieldIndex; import stroom.query.language.functions.Val; import stroom.query.language.functions.ValDate; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalState.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalState.java similarity index 96% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalState.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalState.java index 427d8046ed..0bf0700a26 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalState.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalState.java @@ -1,6 +1,6 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; -import stroom.planb.impl.io.TemporalState.Key; +import stroom.planb.impl.db.TemporalState.Key; import java.nio.charset.StandardCharsets; import java.time.Instant; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateReader.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateDb.java similarity index 81% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateReader.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateDb.java index c60909805f..26010cc218 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateReader.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateDb.java @@ -1,7 +1,7 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.TemporalState.Key; +import 
stroom.planb.impl.db.TemporalState.Key; import net.openhft.hashing.LongHashFunction; import org.lmdbjava.CursorIterable; @@ -14,13 +14,21 @@ import java.util.Iterator; import java.util.Optional; -public class TemporalStateReader extends AbstractLmdbReader { +public class TemporalStateDb extends AbstractLmdb { - public TemporalStateReader(final Path path, - final ByteBufferFactory byteBufferFactory) { - super(path, byteBufferFactory, new TemporalStateSerde(byteBufferFactory)); + public TemporalStateDb(final Path path, + final ByteBufferFactory byteBufferFactory) { + this(path, byteBufferFactory, true, false); } + public TemporalStateDb(final Path path, + final ByteBufferFactory byteBufferFactory, + final boolean overwrite, + final boolean readOnly) { + super(path, byteBufferFactory, new TemporalStateSerde(byteBufferFactory), overwrite, readOnly); + } + + public Optional getState(final TemporalStateRequest request) { final long rowHash = LongHashFunction.xx3().hashBytes(request.key()); final ByteBuffer start = byteBufferFactory.acquire(Long.BYTES + Long.BYTES); diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateFields.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateFields.java similarity index 96% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateFields.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateFields.java index f197c9ac35..e19f01c5bb 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateFields.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateFields.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.datasource.api.v2.QueryField; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateRequest.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateRequest.java similarity index 70% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateRequest.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateRequest.java index badf57624d..d12c9cb478 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateRequest.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateRequest.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; public record TemporalStateRequest(byte[] key, long effectiveTime) { diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateSerde.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateSerde.java similarity index 98% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateSerde.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateSerde.java index 4c938f2466..991e692a9e 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateSerde.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/TemporalStateSerde.java @@ -1,8 +1,8 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.bytebuffer.ByteBufferUtils; import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.TemporalState.Key; +import 
stroom.planb.impl.db.TemporalState.Key; import stroom.query.language.functions.FieldIndex; import stroom.query.language.functions.Val; import stroom.query.language.functions.ValDate; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/ValUtil.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/ValUtil.java similarity index 98% rename from stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/ValUtil.java rename to stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/ValUtil.java index 899562ff85..f2eabbe71e 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/ValUtil.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/db/ValUtil.java @@ -1,4 +1,4 @@ -package stroom.planb.impl.io; +package stroom.planb.impl.db; import stroom.query.language.functions.Val; import stroom.query.language.functions.ValNull; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/experiment/StateSerde2.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/experiment/StateSerde2.java index 0fb846fcdf..2f07c61a01 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/experiment/StateSerde2.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/experiment/StateSerde2.java @@ -2,10 +2,10 @@ import stroom.bytebuffer.ByteBufferUtils; import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.State.Key; -import stroom.planb.impl.io.StateFields; -import stroom.planb.impl.io.StateValue; -import stroom.planb.impl.io.ValUtil; +import stroom.planb.impl.db.State.Key; +import stroom.planb.impl.db.StateFields; +import stroom.planb.impl.db.StateValue; +import stroom.planb.impl.db.ValUtil; import stroom.query.language.functions.FieldIndex; import stroom.query.language.functions.Val; import stroom.query.language.functions.ValNull; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/experiment/StateWriter2.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/experiment/StateWriter2.java index f9f05427fe..7b4e7b625b 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/experiment/StateWriter2.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/experiment/StateWriter2.java @@ -1,8 +1,8 @@ package stroom.planb.impl.experiment; import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.State.Key; -import stroom.planb.impl.io.StateValue; +import stroom.planb.impl.db.State.Key; +import stroom.planb.impl.db.StateValue; import stroom.util.logging.LambdaLogger; import stroom.util.logging.LambdaLoggerFactory; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/AbstractLmdbReader.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/AbstractLmdbReader.java deleted file mode 100644 index 9749669316..0000000000 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/AbstractLmdbReader.java +++ /dev/null @@ -1,158 +0,0 @@ -package stroom.planb.impl.io; - -import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.entity.shared.ExpressionCriteria; -import stroom.expression.api.DateTimeSettings; -import stroom.lmdb.LmdbConfig; -import stroom.lmdb2.LmdbEnvDir; -import stroom.query.api.v2.Column; -import stroom.query.api.v2.Format; -import stroom.query.common.v2.ExpressionPredicateFactory; -import 
stroom.query.common.v2.ExpressionPredicateFactory.ValueFunctionFactories; -import stroom.query.common.v2.ValFunctionFactory; -import stroom.query.language.functions.FieldIndex; -import stroom.query.language.functions.Val; -import stroom.query.language.functions.ValuesConsumer; -import stroom.util.concurrent.UncheckedInterruptedException; -import stroom.util.logging.LambdaLogger; -import stroom.util.logging.LambdaLoggerFactory; - -import org.lmdbjava.CursorIterable; -import org.lmdbjava.CursorIterable.KeyVal; -import org.lmdbjava.Dbi; -import org.lmdbjava.Env; -import org.lmdbjava.EnvFlags; -import org.lmdbjava.KeyRange; -import org.lmdbjava.Txn; - -import java.nio.ByteBuffer; -import java.nio.file.Path; -import java.util.Iterator; -import java.util.Optional; -import java.util.concurrent.Semaphore; -import java.util.function.Function; -import java.util.function.Predicate; - -import static java.nio.charset.StandardCharsets.UTF_8; - -public abstract class AbstractLmdbReader implements AutoCloseable { - - private static final LambdaLogger LOGGER = LambdaLoggerFactory.getLogger(AbstractLmdbReader.class); - - private static final byte[] NAME = "db".getBytes(UTF_8); - private static final int CONCURRENT_READERS = 10; - - private final Semaphore concurrentReaderSemaphore; - - final ByteBufferFactory byteBufferFactory; - private final Env env; - final Dbi dbi; - final Serde serde; - - public AbstractLmdbReader(final Path path, - final ByteBufferFactory byteBufferFactory, - final Serde serde) { - final LmdbEnvDir lmdbEnvDir = new LmdbEnvDir(path, true); - this.byteBufferFactory = byteBufferFactory; - this.serde = serde; - LOGGER.info(() -> "Opening: " + path); - - final Env.Builder builder = Env.create() - .setMapSize(LmdbConfig.DEFAULT_MAX_STORE_SIZE.getBytes()) - .setMaxDbs(1) - .setMaxReaders(CONCURRENT_READERS); - - env = builder.open(lmdbEnvDir.getEnvDir().toFile(), - EnvFlags.MDB_NOTLS, - EnvFlags.MDB_NOLOCK, - EnvFlags.MDB_RDONLY_ENV); - dbi = env.openDbi(NAME); - concurrentReaderSemaphore = new Semaphore(CONCURRENT_READERS); - } - - R read(final Function, R> function) { - try { - concurrentReaderSemaphore.acquire(); - try { - try (final Txn readTxn = env.txnRead()) { - return function.apply(readTxn); - } - } finally { - concurrentReaderSemaphore.release(); - } - } catch (final InterruptedException e) { - LOGGER.error(e::getMessage, e); - Thread.currentThread().interrupt(); - throw new UncheckedInterruptedException(e); - } - } - - public Optional get(final K key) { - return read(readTxn -> get(readTxn, key)); - } - - private Optional get(final Txn readTxn, final K key) { - return serde.createKeyByteBuffer(key, keyByteBuffer -> - serde.createPrefixPredicate(key, predicate -> { - final KeyRange keyRange = KeyRange.closed(keyByteBuffer, keyByteBuffer); - try (final CursorIterable cursor = dbi.iterate(readTxn, keyRange)) { - final Iterator> iterator = cursor.iterator(); - while (iterator.hasNext() - && !Thread.currentThread().isInterrupted()) { - final KeyVal keyVal = iterator.next(); - if (predicate.test(keyVal)) { - return Optional.of(serde.getVal(keyVal)); - } - } - } - return Optional.empty(); - })); - } - - public void search(final ExpressionCriteria criteria, - final FieldIndex fieldIndex, - final DateTimeSettings dateTimeSettings, - final ExpressionPredicateFactory expressionPredicateFactory, - final ValuesConsumer consumer) { - final ValueFunctionFactories valueFunctionFactories = createValueFunctionFactories(fieldIndex); - final Optional> optionalPredicate = expressionPredicateFactory - 
.create(criteria.getExpression(), valueFunctionFactories, dateTimeSettings); - final Predicate predicate = optionalPredicate.orElse(vals -> true); - final Function, Val>[] valExtractors = serde.getValExtractors(fieldIndex); - - // TODO : It would be faster if we limit the iteration to keys based on the criteria. - read(readTxn -> { - try (final CursorIterable cursorIterable = dbi.iterate(readTxn)) { - for (final KeyVal keyVal : cursorIterable) { - final Val[] vals = new Val[valExtractors.length]; - for (int i = 0; i < vals.length; i++) { - vals[i] = valExtractors[i].apply(keyVal); - } - if (predicate.test(vals)) { - consumer.accept(vals); - } - } - } - return null; - }); - } - - ValueFunctionFactories createValueFunctionFactories(final FieldIndex fieldIndex) { - return fieldName -> { - final Integer index = fieldIndex.getPos(fieldName); - if (index == null) { - throw new RuntimeException("Unexpected field: " + fieldName); - } - return new ValFunctionFactory(Column.builder().format(Format.TEXT).build(), index); - }; - } - - public long count() { - return read(readTxn -> dbi.stat(readTxn).entries); - } - - @Override - public void close() { - env.close(); - } -} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/AbstractLmdbWriter.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/AbstractLmdbWriter.java deleted file mode 100644 index fa10345535..0000000000 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/AbstractLmdbWriter.java +++ /dev/null @@ -1,246 +0,0 @@ -package stroom.planb.impl.io; - -import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.lmdb.LmdbConfig; -import stroom.lmdb2.LmdbEnvDir; -import stroom.util.logging.LambdaLogger; -import stroom.util.logging.LambdaLoggerFactory; - -import org.lmdbjava.CursorIterable; -import org.lmdbjava.CursorIterable.KeyVal; -import org.lmdbjava.Dbi; -import org.lmdbjava.DbiFlags; -import org.lmdbjava.Env; -import org.lmdbjava.EnvFlags; -import org.lmdbjava.KeyRange; -import org.lmdbjava.PutFlags; -import org.lmdbjava.Txn; - -import java.nio.ByteBuffer; -import java.nio.file.Path; -import java.util.Iterator; -import java.util.concurrent.locks.ReentrantLock; -import java.util.function.Predicate; - -import static java.nio.charset.StandardCharsets.UTF_8; - -public abstract class AbstractLmdbWriter implements AutoCloseable { - - private static final LambdaLogger LOGGER = LambdaLoggerFactory.getLogger(AbstractLmdbWriter.class); - - private static final byte[] NAME = "db".getBytes(UTF_8); - - private final Serde serde; - final ByteBufferFactory byteBufferFactory; - final Env env; - final Dbi dbi; - private Txn writeTxn; - private int commitCount = 0; - private int hashClashes = 0; - private final DBWriter dbWriter; - private final ReentrantLock lock = new ReentrantLock(); - - public AbstractLmdbWriter(final Path path, - final ByteBufferFactory byteBufferFactory, - final Serde serde, - final boolean overwrite) { - final LmdbEnvDir lmdbEnvDir = new LmdbEnvDir(path, true); - this.byteBufferFactory = byteBufferFactory; - this.serde = serde; - - LOGGER.info(() -> "Creating: " + path); - - final Env.Builder builder = Env.create() - .setMapSize(LmdbConfig.DEFAULT_MAX_STORE_SIZE.getBytes()) - .setMaxDbs(1) - .setMaxReaders(1); - - env = builder.open(lmdbEnvDir.getEnvDir().toFile(), EnvFlags.MDB_NOTLS); - dbi = env.openDbi(NAME, getDbiFlags()); - - // If we do not prefix values then we can simply put rows. - if (!serde.hasPrefix()) { - // If the value has no key prefix, i.e. 
we are not using key hashes then just try to put. - if (overwrite) { - // Put and overwrite any existing key/value. - dbWriter = dbi::put; - } else { - // Put but do not overwrite any existing key/value. - dbWriter = (writeTxn, keyByteBuffer, valueByteBuffer) -> - dbi.put(writeTxn, keyByteBuffer, valueByteBuffer, PutFlags.MDB_NOOVERWRITE); - } - } else { - if (overwrite) { - dbWriter = (writeTxn, keyByteBuffer, valueByteBuffer) -> { - // First try to put without overwriting existing values. - if (!dbi.put(writeTxn, keyByteBuffer, valueByteBuffer, PutFlags.MDB_NOOVERWRITE)) { - serde.createPrefixPredicate(keyByteBuffer, valueByteBuffer, predicate -> { - // Delete current value if there is one. - if (!delete(writeTxn, keyByteBuffer, predicate)) { - // We must have had a hash clash here because we didn't find a row for the key even - // though the db contains the key hash. - hashClashes++; - } - - // Put new value allowing for duplicate keys as we are only using a hash key. - dbi.put(writeTxn, keyByteBuffer, valueByteBuffer); - return true; - }); - } - }; - } else { - dbWriter = (writeTxn, keyByteBuffer, valueByteBuffer) -> { - // First try to put without overwriting existing values. - if (!dbi.put(writeTxn, keyByteBuffer, valueByteBuffer, PutFlags.MDB_NOOVERWRITE)) { - serde.createPrefixPredicate(keyByteBuffer, valueByteBuffer, predicate -> { - if (!exists(writeTxn, keyByteBuffer, predicate)) { - // We must have had a hash clash here because we didn't find a row for the key even - // though the db contains the key hash. - hashClashes++; - - // Put the value as another row for the same key hash as we didn't find a row for the - // full key value. - dbi.put(writeTxn, keyByteBuffer, valueByteBuffer); - } - return true; - }); - } - }; - } - } - } - - DbiFlags[] getDbiFlags() { - if (serde.hasPrefix()) { - return new DbiFlags[]{DbiFlags.MDB_CREATE, DbiFlags.MDB_DUPSORT}; - } - return new DbiFlags[]{DbiFlags.MDB_CREATE}; - } - - public void merge(final Path source) { - final Env.Builder builder = Env.create() - .setMaxDbs(1) - .setMaxReaders(1); - try (final Env sourceEnv = builder.open(source.toFile(), - EnvFlags.MDB_NOTLS, - EnvFlags.MDB_NOLOCK, - EnvFlags.MDB_RDONLY_ENV)) { - final Dbi sourceDbi = sourceEnv.openDbi(NAME); - try (final Txn readTxn = sourceEnv.txnRead()) { - try (final CursorIterable cursorIterable = sourceDbi.iterate(readTxn)) { - for (final KeyVal keyVal : cursorIterable) { - insert(keyVal.key(), keyVal.val()); - } - } - } - } - } - - public boolean insert(final KV kv) { - return insert(kv.key(), kv.value()); - } - - public boolean insert(final K key, final V value) { - return serde.createKeyByteBuffer(key, keyByteBuffer -> - serde.createValueByteBuffer(key, value, valueByteBuffer -> - insert(keyByteBuffer, valueByteBuffer))); - } - - public boolean insert(final ByteBuffer keyByteBuffer, - final ByteBuffer valueByteBuffer) { - final Txn writeTxn = getOrCreateWriteTxn(); - dbWriter.write(writeTxn, keyByteBuffer, valueByteBuffer); - - commitCount++; - if (commitCount > 10000) { - commit(); - } - - return true; - } - - private boolean delete(final Txn txn, - final ByteBuffer keyByteBuffer, - final Predicate> predicate) { - final KeyRange keyRange = KeyRange.closed(keyByteBuffer, keyByteBuffer); - try (final CursorIterable cursor = dbi.iterate(txn, keyRange)) { - final Iterator> iterator = cursor.iterator(); - while (iterator.hasNext()) { - final KeyVal keyVal = iterator.next(); - if (predicate.test(keyVal)) { - iterator.remove(); - return true; - } - } - } - return false; - } - - 
private boolean exists(final Txn txn, - final ByteBuffer keyByteBuffer, - final Predicate> predicate) { - final KeyRange keyRange = KeyRange.closed(keyByteBuffer, keyByteBuffer); - try (final CursorIterable cursor = dbi.iterate(txn, keyRange)) { - for (final KeyVal keyVal : cursor) { - if (predicate.test(keyVal)) { - return true; - } - } - } - return false; - } - - - Txn getOrCreateWriteTxn() { - if (writeTxn == null) { - writeTxn = env.txnWrite(); - } - return writeTxn; - } - - void commit() { - lock(() -> { - if (writeTxn != null) { - try { - writeTxn.commit(); - } finally { - try { - writeTxn.close(); - } finally { - writeTxn = null; - } - } - } - - commitCount = 0; - - if (hashClashes > 0) { - // We prob don't want to warn but will keep for now until we know how big the issue is. - LOGGER.warn(() -> "We had " + hashClashes + " hash clashes since last commit"); - hashClashes = 0; - } - }); - } - - public void lock(final Runnable runnable) { - lock.lock(); - try { - runnable.run(); - } finally { - lock.unlock(); - } - } - - @Override - public void close() { - commit(); - lock(env::close); - } - - private interface DBWriter { - - void write(Txn writeTxn, - ByteBuffer keyByteBuffer, - ByteBuffer valueByteBuffer); - } -} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateWriter.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateWriter.java deleted file mode 100644 index 2e6ab6330a..0000000000 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/RangedStateWriter.java +++ /dev/null @@ -1,20 +0,0 @@ -package stroom.planb.impl.io; - -import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.RangedState.Key; - -import java.nio.file.Path; - -public class RangedStateWriter extends AbstractLmdbWriter { - - public RangedStateWriter(final Path path, - final ByteBufferFactory byteBufferFactory) { - this(path, byteBufferFactory, true); - } - - public RangedStateWriter(final Path path, - final ByteBufferFactory byteBufferFactory, - final boolean overwrite) { - super(path, byteBufferFactory, new RangedStateSerde(byteBufferFactory), overwrite); - } -} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionWriter.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionWriter.java deleted file mode 100644 index 1b05df3d0b..0000000000 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/SessionWriter.java +++ /dev/null @@ -1,19 +0,0 @@ -package stroom.planb.impl.io; - -import stroom.bytebuffer.impl6.ByteBufferFactory; - -import java.nio.file.Path; - -public class SessionWriter extends AbstractLmdbWriter { - - public SessionWriter(final Path path, - final ByteBufferFactory byteBufferFactory) { - this(path, byteBufferFactory, true); - } - - public SessionWriter(final Path path, - final ByteBufferFactory byteBufferFactory, - final boolean overwrite) { - super(path, byteBufferFactory, new SessionSerde(byteBufferFactory), overwrite); - } -} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateReader.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateReader.java deleted file mode 100644 index e174335fb2..0000000000 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateReader.java +++ /dev/null @@ -1,21 +0,0 @@ -package stroom.planb.impl.io; - -import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.State.Key; - -import 
java.nio.file.Path; -import java.util.Optional; - -public class StateReader extends AbstractLmdbReader { - - public StateReader(final Path path, - final ByteBufferFactory byteBufferFactory) { - super(path, byteBufferFactory, new StateSerde(byteBufferFactory)); - } - - public Optional getState(final StateRequest request) { - final Key key = Key.builder().name(request.key()).build(); - final Optional optional = get(key); - return optional.map(value -> new State(key, value)); - } -} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateWriter.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateWriter.java deleted file mode 100644 index ca9e5db303..0000000000 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/StateWriter.java +++ /dev/null @@ -1,20 +0,0 @@ -package stroom.planb.impl.io; - -import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.State.Key; - -import java.nio.file.Path; - -public class StateWriter extends AbstractLmdbWriter { - - public StateWriter(final Path path, - final ByteBufferFactory byteBufferFactory) { - this(path, byteBufferFactory, true); - } - - public StateWriter(final Path path, - final ByteBufferFactory byteBufferFactory, - final boolean overwrite) { - super(path, byteBufferFactory, new StateSerde(byteBufferFactory), overwrite); - } -} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateWriter.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateWriter.java deleted file mode 100644 index 1e02d015d8..0000000000 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalRangedStateWriter.java +++ /dev/null @@ -1,20 +0,0 @@ -package stroom.planb.impl.io; - -import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.TemporalRangedState.Key; - -import java.nio.file.Path; - -public class TemporalRangedStateWriter extends AbstractLmdbWriter { - - public TemporalRangedStateWriter(final Path path, - final ByteBufferFactory byteBufferFactory) { - this(path, byteBufferFactory, true); - } - - public TemporalRangedStateWriter(final Path path, - final ByteBufferFactory byteBufferFactory, - final boolean overwrite) { - super(path, byteBufferFactory, new TemporalRangedStateSerde(byteBufferFactory), overwrite); - } -} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateWriter.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateWriter.java deleted file mode 100644 index e0fb6af931..0000000000 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/io/TemporalStateWriter.java +++ /dev/null @@ -1,20 +0,0 @@ -package stroom.planb.impl.io; - -import stroom.bytebuffer.impl6.ByteBufferFactory; -import stroom.planb.impl.io.TemporalState.Key; - -import java.nio.file.Path; - -public class TemporalStateWriter extends AbstractLmdbWriter { - - public TemporalStateWriter(final Path path, - final ByteBufferFactory byteBufferFactory) { - this(path, byteBufferFactory, true); - } - - public TemporalStateWriter(final Path path, - final ByteBufferFactory byteBufferFactory, - final boolean overwrite) { - super(path, byteBufferFactory, new TemporalStateSerde(byteBufferFactory), overwrite); - } -} diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/PlanBFilter.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/PlanBFilter.java index 
26677decaa..ed1e6ef66b 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/PlanBFilter.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/PlanBFilter.java @@ -28,14 +28,14 @@ import stroom.pipeline.shared.data.PipelineElementType; import stroom.pipeline.shared.data.PipelineElementType.Category; import stroom.pipeline.state.MetaHolder; -import stroom.planb.impl.io.RangedState; -import stroom.planb.impl.io.Session; -import stroom.planb.impl.io.ShardWriters; -import stroom.planb.impl.io.ShardWriters.ShardWriter; -import stroom.planb.impl.io.State; -import stroom.planb.impl.io.StateValue; -import stroom.planb.impl.io.TemporalRangedState; -import stroom.planb.impl.io.TemporalState; +import stroom.planb.impl.db.RangedState; +import stroom.planb.impl.db.Session; +import stroom.planb.impl.db.ShardWriters; +import stroom.planb.impl.db.ShardWriters.ShardWriter; +import stroom.planb.impl.db.State; +import stroom.planb.impl.db.StateValue; +import stroom.planb.impl.db.TemporalRangedState; +import stroom.planb.impl.db.TemporalState; import stroom.planb.shared.StateType; import stroom.svg.shared.SvgImage; import stroom.util.CharBuffer; diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/PlanBLookupImpl.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/PlanBLookupImpl.java index f40f2cb3f4..54646f44c6 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/PlanBLookupImpl.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/PlanBLookupImpl.java @@ -7,19 +7,19 @@ import stroom.pipeline.refdata.store.StringValue; import stroom.pipeline.xsltfunctions.PlanBLookup; import stroom.planb.impl.PlanBDocCache; -import stroom.planb.impl.data.ReaderCache; -import stroom.planb.impl.io.RangedStateReader; -import stroom.planb.impl.io.RangedStateRequest; -import stroom.planb.impl.io.SessionReader; -import stroom.planb.impl.io.SessionRequest; -import stroom.planb.impl.io.StateReader; -import stroom.planb.impl.io.StateRequest; -import stroom.planb.impl.io.StateValue; -import stroom.planb.impl.io.TemporalRangedStateReader; -import stroom.planb.impl.io.TemporalRangedStateRequest; -import stroom.planb.impl.io.TemporalState; -import stroom.planb.impl.io.TemporalStateReader; -import stroom.planb.impl.io.TemporalStateRequest; +import stroom.planb.impl.data.ShardManager; +import stroom.planb.impl.db.RangedStateDb; +import stroom.planb.impl.db.RangedStateRequest; +import stroom.planb.impl.db.SessionDb; +import stroom.planb.impl.db.SessionRequest; +import stroom.planb.impl.db.StateDb; +import stroom.planb.impl.db.StateRequest; +import stroom.planb.impl.db.StateValue; +import stroom.planb.impl.db.TemporalRangedStateDb; +import stroom.planb.impl.db.TemporalRangedStateRequest; +import stroom.planb.impl.db.TemporalState; +import stroom.planb.impl.db.TemporalStateDb; +import stroom.planb.impl.db.TemporalStateRequest; import stroom.planb.shared.PlanBDoc; import stroom.util.pipeline.scope.PipelineScoped; @@ -40,14 +40,14 @@ public class PlanBLookupImpl implements PlanBLookup { private final PlanBDocCache stateDocCache; private final Cache> cache; - private final ReaderCache readerCache; + private final ShardManager shardManager; private final Map> stateDocMap = new HashMap<>(); @Inject public PlanBLookupImpl(final PlanBDocCache stateDocCache, - final ReaderCache readerCache) { + final ShardManager shardManager) { this.stateDocCache = stateDocCache; - 
this.readerCache = readerCache; + this.shardManager = shardManager; cache = Caffeine.newBuilder().maximumSize(1000).build(); } @@ -88,11 +88,11 @@ private Optional getState(final PlanBDoc doc, final String mapName, final String keyName, final Instant eventTime) { - return readerCache.get(mapName, reader -> { - if (reader instanceof final StateReader stateReader) { + return shardManager.get(mapName, reader -> { + if (reader instanceof final StateDb db) { final StateRequest request = new StateRequest(keyName.getBytes(StandardCharsets.UTF_8)); - return stateReader + return db .getState(request) .map(state -> new TemporalState(TemporalState .Key @@ -101,16 +101,16 @@ private Optional getState(final PlanBDoc doc, .effectiveTime(0) .build(), state.value())); - } else if (reader instanceof final TemporalStateReader stateReader) { + } else if (reader instanceof final TemporalStateDb db) { final TemporalStateRequest request = new TemporalStateRequest(keyName.getBytes(StandardCharsets.UTF_8), eventTime.toEpochMilli()); - return stateReader + return db .getState(request); - } else if (reader instanceof final RangedStateReader stateReader) { + } else if (reader instanceof final RangedStateDb db) { final RangedStateRequest request = new RangedStateRequest(Long.parseLong(keyName)); - return stateReader + return db .getState(request) .map(state -> new TemporalState(TemporalState .Key @@ -119,10 +119,10 @@ private Optional getState(final PlanBDoc doc, .effectiveTime(0) .build(), state.value())); - } else if (reader instanceof final TemporalRangedStateReader stateReader) { + } else if (reader instanceof final TemporalRangedStateDb db) { final TemporalRangedStateRequest request = new TemporalRangedStateRequest(Long.parseLong(keyName), eventTime.toEpochMilli()); - return stateReader + return db .getState(request) .map(state -> new TemporalState(TemporalState .Key @@ -131,10 +131,10 @@ private Optional getState(final PlanBDoc doc, .effectiveTime(0) .build(), state.value())); - } else if (reader instanceof final SessionReader stateReader) { + } else if (reader instanceof final SessionDb db) { final SessionRequest request = new SessionRequest(keyName.getBytes(StandardCharsets.UTF_8), eventTime.toEpochMilli()); - return stateReader + return db .getState(request) .map(state -> new TemporalState(TemporalState .Key diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/StateProviderImpl.java b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/StateProviderImpl.java index 41c0fc1abc..cf100867ed 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/StateProviderImpl.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/StateProviderImpl.java @@ -1,22 +1,22 @@ package stroom.planb.impl.pipeline; import stroom.planb.impl.PlanBDocCache; -import stroom.planb.impl.data.ReaderCache; -import stroom.planb.impl.io.RangedState; -import stroom.planb.impl.io.RangedStateReader; -import stroom.planb.impl.io.RangedStateRequest; -import stroom.planb.impl.io.SessionReader; -import stroom.planb.impl.io.SessionRequest; -import stroom.planb.impl.io.State; -import stroom.planb.impl.io.StateReader; -import stroom.planb.impl.io.StateRequest; -import stroom.planb.impl.io.StateValue; -import stroom.planb.impl.io.TemporalRangedState; -import stroom.planb.impl.io.TemporalRangedStateReader; -import stroom.planb.impl.io.TemporalRangedStateRequest; -import stroom.planb.impl.io.TemporalState; -import 
stroom.planb.impl.io.TemporalStateReader; -import stroom.planb.impl.io.TemporalStateRequest; +import stroom.planb.impl.data.ShardManager; +import stroom.planb.impl.db.RangedState; +import stroom.planb.impl.db.RangedStateDb; +import stroom.planb.impl.db.RangedStateRequest; +import stroom.planb.impl.db.SessionDb; +import stroom.planb.impl.db.SessionRequest; +import stroom.planb.impl.db.State; +import stroom.planb.impl.db.StateDb; +import stroom.planb.impl.db.StateRequest; +import stroom.planb.impl.db.StateValue; +import stroom.planb.impl.db.TemporalRangedState; +import stroom.planb.impl.db.TemporalRangedStateDb; +import stroom.planb.impl.db.TemporalRangedStateRequest; +import stroom.planb.impl.db.TemporalState; +import stroom.planb.impl.db.TemporalStateDb; +import stroom.planb.impl.db.TemporalStateRequest; import stroom.planb.shared.PlanBDoc; import stroom.query.language.functions.StateProvider; import stroom.query.language.functions.Val; @@ -44,13 +44,13 @@ public class StateProviderImpl implements StateProvider { private final PlanBDocCache stateDocCache; private final Cache cache; private final Map> stateDocMap = new HashMap<>(); - private final ReaderCache readerCache; + private final ShardManager shardManager; @Inject public StateProviderImpl(final PlanBDocCache stateDocCache, - final ReaderCache readerCache) { + final ShardManager shardManager) { this.stateDocCache = stateDocCache; - this.readerCache = readerCache; + this.shardManager = shardManager; cache = Caffeine.newBuilder().maximumSize(1000).build(); } @@ -73,36 +73,36 @@ private Val getState(final PlanBDoc doc, final String keyName, final Instant eventTime) { try { - return readerCache.get(mapName, reader -> { - if (reader instanceof final StateReader stateReader) { + return shardManager.get(mapName, reader -> { + if (reader instanceof final StateDb db) { final StateRequest request = new StateRequest(keyName.getBytes(StandardCharsets.UTF_8)); - return getVal(stateReader + return getVal(db .getState(request) .map(State::value)); - } else if (reader instanceof final TemporalStateReader stateReader) { + } else if (reader instanceof final TemporalStateDb db) { final TemporalStateRequest request = new TemporalStateRequest(keyName.getBytes(StandardCharsets.UTF_8), eventTime.toEpochMilli()); - return getVal(stateReader + return getVal(db .getState(request) .map(TemporalState::value)); - } else if (reader instanceof final RangedStateReader stateReader) { + } else if (reader instanceof final RangedStateDb db) { final RangedStateRequest request = new RangedStateRequest(Long.parseLong(keyName)); - return getVal(stateReader + return getVal(db .getState(request) .map(RangedState::value)); - } else if (reader instanceof final TemporalRangedStateReader stateReader) { + } else if (reader instanceof final TemporalRangedStateDb db) { final TemporalRangedStateRequest request = new TemporalRangedStateRequest(Long.parseLong(keyName), eventTime.toEpochMilli()); - return getVal(stateReader + return getVal(db .getState(request) .map(TemporalRangedState::value)); - } else if (reader instanceof final SessionReader stateReader) { + } else if (reader instanceof final SessionDb db) { final SessionRequest request = new SessionRequest(keyName.getBytes(StandardCharsets.UTF_8), eventTime.toEpochMilli()); - return stateReader + return db .getState(request) .map(session -> ValBoolean.create(true)) .orElse(ValBoolean.create(false)); diff --git a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/StateValueProxy.java 
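Illustrative sketch (not part of the patch): the lookup hunks above swap the injected ReaderCache for ShardManager and the per-type Reader classes for single Db classes. Based only on what this diff shows, a caller passes the map name and a callback to shardManager.get and dispatches on the concrete Db type; the callback's generic return type, the sketch class name, and the exact ShardManager.get contract are assumptions, since generic parameters are elided in the diff text.

import stroom.planb.impl.data.ShardManager;
import stroom.planb.impl.db.StateDb;
import stroom.planb.impl.db.StateRequest;
import stroom.planb.impl.db.TemporalStateDb;
import stroom.planb.impl.db.TemporalStateRequest;

import java.nio.charset.StandardCharsets;
import java.time.Instant;

class ShardManagerLookupSketch {

    // Dispatch on the concrete Db type held by the shard, mirroring PlanBLookupImpl/StateProviderImpl above.
    Boolean exists(final ShardManager shardManager,
                   final String mapName,
                   final String keyName,
                   final Instant eventTime) {
        return shardManager.get(mapName, reader -> {
            if (reader instanceof final StateDb db) {
                // Plain state: key only.
                return db.getState(new StateRequest(keyName.getBytes(StandardCharsets.UTF_8)))
                        .isPresent();
            } else if (reader instanceof final TemporalStateDb db) {
                // Temporal state: key plus effective time.
                return db.getState(new TemporalStateRequest(
                                keyName.getBytes(StandardCharsets.UTF_8),
                                eventTime.toEpochMilli()))
                        .isPresent();
            }
            // RangedStateDb, TemporalRangedStateDb and SessionDb follow the same pattern (see hunks above).
            return false;
        });
    }
}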
b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/StateValueProxy.java index fcce8fc764..c19dd3732b 100644 --- a/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/StateValueProxy.java +++ b/stroom-state/stroom-planb-impl/src/main/java/stroom/planb/impl/pipeline/StateValueProxy.java @@ -12,7 +12,7 @@ import stroom.pipeline.refdata.store.UnknownRefDataValue; import stroom.pipeline.refdata.store.offheapstore.RefDataValueProxyConsumer; import stroom.pipeline.refdata.store.offheapstore.TypedByteBuffer; -import stroom.planb.impl.io.TemporalState; +import stroom.planb.impl.db.TemporalState; import stroom.util.logging.LogUtil; import net.sf.saxon.trans.XPathException; diff --git a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestRangedState.java b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestRangedStateDb.java similarity index 83% rename from stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestRangedState.java rename to stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestRangedStateDb.java index e718dfb729..04fe184047 100644 --- a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestRangedState.java +++ b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestRangedStateDb.java @@ -21,13 +21,12 @@ import stroom.bytebuffer.impl6.ByteBufferFactoryImpl; import stroom.entity.shared.ExpressionCriteria; import stroom.pipeline.refdata.store.StringValue; -import stroom.planb.impl.io.RangedState; -import stroom.planb.impl.io.RangedState.Key; -import stroom.planb.impl.io.RangedStateFields; -import stroom.planb.impl.io.RangedStateReader; -import stroom.planb.impl.io.RangedStateRequest; -import stroom.planb.impl.io.RangedStateWriter; -import stroom.planb.impl.io.StateValue; +import stroom.planb.impl.db.RangedState; +import stroom.planb.impl.db.RangedState.Key; +import stroom.planb.impl.db.RangedStateDb; +import stroom.planb.impl.db.RangedStateFields; +import stroom.planb.impl.db.RangedStateRequest; +import stroom.planb.impl.db.StateValue; import stroom.query.api.v2.ExpressionOperator; import stroom.query.common.v2.ExpressionPredicateFactory; import stroom.query.language.functions.FieldIndex; @@ -47,20 +46,20 @@ import static org.assertj.core.api.Assertions.assertThat; -class TestRangedState { +class TestRangedStateDb { @Test void test(@TempDir Path tempDir) { testWrite(tempDir); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final RangedStateReader reader = new RangedStateReader(tempDir, byteBufferFactory)) { - assertThat(reader.count()).isEqualTo(1); - testGet(reader); + try (final RangedStateDb db = new RangedStateDb(tempDir, byteBufferFactory, false, true)) { + assertThat(db.count()).isEqualTo(1); + testGet(db); final RangedStateRequest stateRequest = new RangedStateRequest(11); - final Optional optional = reader.getState(stateRequest); + final Optional optional = db.getState(stateRequest); assertThat(optional).isNotEmpty(); final RangedState res = optional.get(); assertThat(res.key().keyStart()).isEqualTo(10); @@ -83,7 +82,7 @@ void test(@TempDir Path tempDir) { fieldIndex.create(RangedStateFields.VALUE); final List results = new ArrayList<>(); final ExpressionPredicateFactory expressionPredicateFactory = new ExpressionPredicateFactory(null); - reader.search( + db.search( new ExpressionCriteria(ExpressionOperator.builder().build()), fieldIndex, null, @@ -108,21 +107,21 @@ void testMerge(@TempDir final Path rootDir) throws IOException 
{ testWrite(db2); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final RangedStateWriter writer = new RangedStateWriter(db1, byteBufferFactory)) { + try (final RangedStateDb writer = new RangedStateDb(db1, byteBufferFactory)) { writer.merge(db2); } } private void testWrite(final Path dbDir) { final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final RangedStateWriter writer = new RangedStateWriter(dbDir, byteBufferFactory)) { - insertData(writer, 100); + try (final RangedStateDb db = new RangedStateDb(dbDir, byteBufferFactory)) { + insertData(db, 100); } } - private void testGet(final RangedStateReader reader) { + private void testGet(final RangedStateDb db) { final Key k = Key.builder().keyStart(10).keyEnd(30).build(); - final Optional optional = reader.get(k); + final Optional optional = db.get(k); assertThat(optional).isNotEmpty(); final StateValue res = optional.get(); assertThat(res.typeId()).isEqualTo(StringValue.TYPE_ID); @@ -147,7 +146,7 @@ private void testGet(final RangedStateReader reader) { // }); // } - private void insertData(final RangedStateWriter writer, + private void insertData(final RangedStateDb writer, final int rows) { for (int i = 0; i < rows; i++) { final ByteBuffer byteBuffer = ByteBuffer.wrap(("test" + i).getBytes(StandardCharsets.UTF_8)); diff --git a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestSession.java b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestSessionDb.java similarity index 93% rename from stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestSession.java rename to stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestSessionDb.java index e68a04111f..0922b2ac79 100644 --- a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestSession.java +++ b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestSessionDb.java @@ -20,10 +20,9 @@ import stroom.bytebuffer.impl6.ByteBufferFactory; import stroom.bytebuffer.impl6.ByteBufferFactoryImpl; import stroom.entity.shared.ExpressionCriteria; -import stroom.planb.impl.io.Session; -import stroom.planb.impl.io.SessionFields; -import stroom.planb.impl.io.SessionReader; -import stroom.planb.impl.io.SessionWriter; +import stroom.planb.impl.db.Session; +import stroom.planb.impl.db.SessionDb; +import stroom.planb.impl.db.SessionFields; import stroom.query.api.v2.ExpressionOperator; import stroom.query.api.v2.ExpressionTerm.Condition; import stroom.query.common.v2.ExpressionPredicateFactory; @@ -45,7 +44,7 @@ import static org.assertj.core.api.Assertions.assertThat; -class TestSession { +class TestSessionDb { @Test void test(@TempDir Path tempDir) { @@ -61,9 +60,9 @@ void test(@TempDir Path tempDir) { // lowRange = insertData(writer, key, refTime, 10, -10); // } - try (final SessionReader reader = new SessionReader(tempDir, byteBufferFactory)) { - assertThat(reader.count()).isEqualTo(109); - testGet(reader, key, refTime, 10); + try (final SessionDb db = new SessionDb(tempDir, byteBufferFactory, false, true)) { + assertThat(db.count()).isEqualTo(109); + testGet(db, key, refTime, 10); // final SessionRequest sessionRequest = SessionRequest.builder().name("TEST").time(refTime).build(); @@ -89,7 +88,7 @@ void test(@TempDir Path tempDir) { final ValDate maxTime = ValDate.create(outerRange.max()); final List results = new ArrayList<>(); final ExpressionPredicateFactory expressionPredicateFactory = new ExpressionPredicateFactory(null); - reader.search( + db.search( 
criteria, fieldIndex, null, @@ -198,7 +197,7 @@ void testMerge(@TempDir final Path rootDir) throws IOException { testWrite(db2); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final SessionWriter writer = new SessionWriter(db1, byteBufferFactory)) { + try (final SessionDb writer = new SessionDb(db1, byteBufferFactory)) { writer.merge(db2); } } @@ -209,7 +208,7 @@ private Ranges testWrite(final Path dbDir) { final InstantRange highRange; final InstantRange lowRange; final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final SessionWriter writer = new SessionWriter(dbDir, byteBufferFactory)) { + try (final SessionDb writer = new SessionDb(dbDir, byteBufferFactory)) { highRange = insertData(writer, key, refTime, 100, 10); lowRange = insertData(writer, key, refTime, 10, -10); } @@ -221,12 +220,12 @@ private record Ranges(InstantRange highRange, } - private void testGet(final SessionReader reader, + private void testGet(final SessionDb db, final byte[] key, final Instant refTime, final long deltaSeconds) { final Session k = Session.builder().start(refTime).end(refTime.plusSeconds(deltaSeconds)).key(key).build(); - final Optional optional = reader.get(k); + final Optional optional = db.get(k); assertThat(optional).isNotEmpty(); final Session res = optional.get(); assertThat(res.key()).isEqualTo(key); @@ -251,7 +250,7 @@ private void testGet(final SessionReader reader, // }); // } - private InstantRange insertData(final SessionWriter writer, + private InstantRange insertData(final SessionDb writer, final byte[] key, final Instant refTime, final int rows, diff --git a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestState.java b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestStateDb.java similarity index 87% rename from stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestState.java rename to stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestStateDb.java index 3005af0e5d..b584ad99c8 100644 --- a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestState.java +++ b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestStateDb.java @@ -29,12 +29,11 @@ import stroom.planb.impl.data.MergeProcessor; import stroom.planb.impl.data.SequentialFileStore; import stroom.planb.impl.data.ShardManager; -import stroom.planb.impl.io.State.Key; -import stroom.planb.impl.io.StateFields; -import stroom.planb.impl.io.StatePaths; -import stroom.planb.impl.io.StateReader; -import stroom.planb.impl.io.StateValue; -import stroom.planb.impl.io.StateWriter; +import stroom.planb.impl.db.State.Key; +import stroom.planb.impl.db.StateDb; +import stroom.planb.impl.db.StateFields; +import stroom.planb.impl.db.StatePaths; +import stroom.planb.impl.db.StateValue; import stroom.planb.shared.PlanBDoc; import stroom.planb.shared.StateType; import stroom.query.api.v2.ExpressionOperator; @@ -43,9 +42,7 @@ import stroom.query.language.functions.Val; import stroom.security.mock.MockSecurityContext; import stroom.task.api.SimpleTaskContextFactory; -import stroom.util.cache.CacheConfig; import stroom.util.io.FileUtil; -import stroom.util.time.StroomDuration; import stroom.util.zip.ZipUtil; import org.junit.jupiter.api.Disabled; @@ -69,7 +66,7 @@ import static org.assertj.core.api.Assertions.assertThat; -class TestState { +class TestStateDb { @Test void testReadWrite(@TempDir Path tempDir) { @@ -103,7 +100,7 @@ void testMerge(@TempDir final Path rootDir) throws IOException { 
testWrite(db2, 100, keyFunction2, valueFunction2); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final StateWriter writer = new StateWriter(db1, byteBufferFactory)) { + try (final StateDb writer = new StateDb(db1, byteBufferFactory)) { writer.merge(db2); } } @@ -133,21 +130,8 @@ void testFullProcess(@TempDir final Path rootDir) throws IOException { Mockito.when(planBDocStore.readDocument(Mockito.any(DocRef.class))) .thenReturn(doc); - final CacheConfig stateDocCache = CacheConfig.builder() - .maximumSize(100L) - .expireAfterWrite(StroomDuration.ofMinutes(10)) - .build(); - final CacheConfig readerCache = CacheConfig.builder() - .maximumSize(10L) - .expireAfterWrite(StroomDuration.ofMinutes(10)) - .build(); - final List nodeList = Collections.emptyList(); final String path = rootDir.toAbsolutePath().toString(); - final PlanBConfig planBConfig = new PlanBConfig( - stateDocCache, - readerCache, - nodeList, - path); + final PlanBConfig planBConfig = new PlanBConfig(path); final PlanBDocCache planBDocCache = new PlanBDocCacheImpl( cacheManager, planBDocStore, @@ -156,7 +140,10 @@ void testFullProcess(@TempDir final Path rootDir) throws IOException { final ShardManager shardManager = new ShardManager( new ByteBufferFactoryImpl(), planBDocCache, - statePaths); + null, + () -> planBConfig, + statePaths, + null); final MergeProcessor mergeProcessor = new MergeProcessor( fileStore, statePaths, @@ -170,9 +157,9 @@ void testFullProcess(@TempDir final Path rootDir) throws IOException { // Read merged final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final StateReader reader = new StateReader(statePaths.getShardDir().resolve("map-name"), - byteBufferFactory)) { - assertThat(reader.count()).isEqualTo(2); + try (final StateDb db = new StateDb(statePaths.getShardDir().resolve("map-name"), + byteBufferFactory, false, true)) { + assertThat(db.count()).isEqualTo(2); } } @@ -195,7 +182,7 @@ void testZipUnzip(@TempDir final Path rootDir) throws IOException { final AtomicBoolean writeComplete = new AtomicBoolean(); final List> list = new ArrayList<>(); - try (final StateWriter writer = new StateWriter(source, byteBufferFactory)) { + try (final StateDb writer = new StateDb(source, byteBufferFactory)) { list.add(CompletableFuture.runAsync(() -> { insertData(writer, 1000000, keyFunction, valueFunction); writeComplete.set(true); @@ -221,8 +208,8 @@ void testZipUnzip(@TempDir final Path rootDir) throws IOException { ZipUtil.unzip(zipFile, target); Files.delete(zipFile); // Read. - try (final StateReader reader = new StateReader(target, byteBufferFactory)) { - assertThat(reader.count()).isGreaterThanOrEqualTo(0); + try (final StateDb db = new StateDb(target, byteBufferFactory, false, true)) { + assertThat(db.count()).isGreaterThanOrEqualTo(0); } // Cleanup. FileUtil.deleteDir(target); @@ -247,12 +234,12 @@ void testDeleteWhileRead(@TempDir Path tempDir) { testWrite(tempDir, 100, keyFunction, valueFunction); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final StateReader reader = new StateReader(tempDir, byteBufferFactory)) { - assertThat(reader.count()).isEqualTo(1); + try (final StateDb db = new StateDb(tempDir, byteBufferFactory, false, true)) { + assertThat(db.count()).isEqualTo(1); final Key key = Key.builder().name("TEST_KEY").build(); // Read the data. 
- Optional optional = reader.get(key); + Optional optional = db.get(key); assertThat(optional).isNotEmpty(); StateValue res = optional.get(); assertThat(res.typeId()).isEqualTo(StringValue.TYPE_ID); @@ -262,7 +249,7 @@ void testDeleteWhileRead(@TempDir Path tempDir) { FileUtil.deleteDir(tempDir); // Try and read. - optional = reader.get(key); + optional = db.get(key); assertThat(optional).isNotEmpty(); res = optional.get(); assertThat(res.typeId()).isEqualTo(StringValue.TYPE_ID); @@ -326,18 +313,18 @@ private void testWrite(final Path dbDir, final Function keyFunction, final Function valueFunction) { final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final StateWriter writer = new StateWriter(dbDir, byteBufferFactory)) { - insertData(writer, insertRows, keyFunction, valueFunction); + try (final StateDb db = new StateDb(dbDir, byteBufferFactory)) { + insertData(db, insertRows, keyFunction, valueFunction); } } private void testRead(final Path tempDir, final int expectedRows) { final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final StateReader reader = new StateReader(tempDir, byteBufferFactory)) { - assertThat(reader.count()).isEqualTo(1); + try (final StateDb db = new StateDb(tempDir, byteBufferFactory)) { + assertThat(db.count()).isEqualTo(1); final Key key = Key.builder().name("TEST_KEY").build(); - final Optional optional = reader.get(key); + final Optional optional = db.get(key); assertThat(optional).isNotEmpty(); final StateValue res = optional.get(); assertThat(res.typeId()).isEqualTo(StringValue.TYPE_ID); @@ -349,7 +336,7 @@ private void testRead(final Path tempDir, fieldIndex.create(StateFields.VALUE); final List results = new ArrayList<>(); final ExpressionPredicateFactory expressionPredicateFactory = new ExpressionPredicateFactory(null); - reader.search( + db.search( new ExpressionCriteria(ExpressionOperator.builder().build()), fieldIndex, null, @@ -380,7 +367,7 @@ private void testRead(final Path tempDir, // }); // } - private void insertData(final StateWriter writer, + private void insertData(final StateDb writer, final int rows, final Function keyFunction, final Function valueFunction) { diff --git a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalRangedState.java b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalRangedStateDb.java similarity index 86% rename from stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalRangedState.java rename to stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalRangedStateDb.java index 62e41fcc10..ec61569c6d 100644 --- a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalRangedState.java +++ b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalRangedStateDb.java @@ -21,13 +21,12 @@ import stroom.bytebuffer.impl6.ByteBufferFactoryImpl; import stroom.entity.shared.ExpressionCriteria; import stroom.pipeline.refdata.store.StringValue; -import stroom.planb.impl.io.StateValue; -import stroom.planb.impl.io.TemporalRangedState; -import stroom.planb.impl.io.TemporalRangedState.Key; -import stroom.planb.impl.io.TemporalRangedStateFields; -import stroom.planb.impl.io.TemporalRangedStateReader; -import stroom.planb.impl.io.TemporalRangedStateRequest; -import stroom.planb.impl.io.TemporalRangedStateWriter; +import stroom.planb.impl.db.StateValue; +import stroom.planb.impl.db.TemporalRangedState; +import stroom.planb.impl.db.TemporalRangedState.Key; 
+import stroom.planb.impl.db.TemporalRangedStateDb; +import stroom.planb.impl.db.TemporalRangedStateFields; +import stroom.planb.impl.db.TemporalRangedStateRequest; import stroom.query.api.v2.ExpressionOperator; import stroom.query.common.v2.ExpressionPredicateFactory; import stroom.query.language.functions.FieldIndex; @@ -48,7 +47,7 @@ import static org.assertj.core.api.Assertions.assertThat; -class TestTemporalRangedState { +class TestTemporalRangedStateDb { @Test void test(@TempDir Path tempDir) { @@ -56,14 +55,18 @@ void test(@TempDir Path tempDir) { final Instant refTime = Instant.parse("2000-01-01T00:00:00.000Z"); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final TemporalRangedStateReader reader = new TemporalRangedStateReader(tempDir, byteBufferFactory)) { - assertThat(reader.count()).isEqualTo(100); - testGet(reader); + try (final TemporalRangedStateDb db = new TemporalRangedStateDb( + tempDir, + byteBufferFactory, + false, + true)) { + assertThat(db.count()).isEqualTo(100); + testGet(db); final TemporalRangedStateRequest stateRequest = new TemporalRangedStateRequest(11, refTime.toEpochMilli()); - final Optional optional = reader.getState(stateRequest); + final Optional optional = db.getState(stateRequest); assertThat(optional).isNotEmpty(); final TemporalRangedState res = optional.get(); assertThat(res.key().keyStart()).isEqualTo(10); @@ -98,7 +101,7 @@ void test(@TempDir Path tempDir) { fieldIndex.create(TemporalRangedStateFields.VALUE); final List results = new ArrayList<>(); final ExpressionPredicateFactory expressionPredicateFactory = new ExpressionPredicateFactory(null); - reader.search( + db.search( new ExpressionCriteria(ExpressionOperator.builder().build()), fieldIndex, null, @@ -124,7 +127,7 @@ void testMerge(@TempDir final Path rootDir) throws IOException { testWrite(db2); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final TemporalRangedStateWriter writer = new TemporalRangedStateWriter(db1, byteBufferFactory)) { + try (final TemporalRangedStateDb writer = new TemporalRangedStateDb(db1, byteBufferFactory)) { writer.merge(db2); } } @@ -133,16 +136,16 @@ private void testWrite(final Path dbDir) { final Instant refTime = Instant.parse("2000-01-01T00:00:00.000Z"); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final TemporalRangedStateWriter writer = - new TemporalRangedStateWriter(dbDir, byteBufferFactory)) { + try (final TemporalRangedStateDb writer = + new TemporalRangedStateDb(dbDir, byteBufferFactory)) { insertData(writer, refTime, "test", 100, 10); } } - private void testGet(final TemporalRangedStateReader reader) { + private void testGet(final TemporalRangedStateDb db) { final Instant refTime = Instant.parse("2000-01-01T00:00:00.000Z"); final Key k = Key.builder().keyStart(10).keyEnd(30).effectiveTime(refTime).build(); - final Optional optional = reader.get(k); + final Optional optional = db.get(k); assertThat(optional).isNotEmpty(); final StateValue res = optional.get(); assertThat(res.typeId()).isEqualTo(StringValue.TYPE_ID); @@ -188,7 +191,7 @@ private void testGet(final TemporalRangedStateReader reader) { // }); // } // - private void insertData(final TemporalRangedStateWriter writer, + private void insertData(final TemporalRangedStateDb db, final Instant refTime, final String value, final int rows, @@ -198,7 +201,7 @@ private void insertData(final TemporalRangedStateWriter writer, final Instant effectiveTime = refTime.plusSeconds(i * deltaSeconds); final Key k 
= Key.builder().keyStart(10).keyEnd(30).effectiveTime(effectiveTime).build(); final StateValue v = StateValue.builder().typeId(StringValue.TYPE_ID).byteBuffer(byteBuffer).build(); - writer.insert(k, v); + db.insert(k, v); } } } diff --git a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalState.java b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalStateDb.java similarity index 89% rename from stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalState.java rename to stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalStateDb.java index 905b4a4b4d..9217b78c1a 100644 --- a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalState.java +++ b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/TestTemporalStateDb.java @@ -21,11 +21,10 @@ import stroom.bytebuffer.impl6.ByteBufferFactoryImpl; import stroom.entity.shared.ExpressionCriteria; import stroom.pipeline.refdata.store.StringValue; -import stroom.planb.impl.io.StateValue; -import stroom.planb.impl.io.TemporalState; -import stroom.planb.impl.io.TemporalStateFields; -import stroom.planb.impl.io.TemporalStateReader; -import stroom.planb.impl.io.TemporalStateWriter; +import stroom.planb.impl.db.StateValue; +import stroom.planb.impl.db.TemporalState; +import stroom.planb.impl.db.TemporalStateDb; +import stroom.planb.impl.db.TemporalStateFields; import stroom.query.api.v2.ExpressionOperator; import stroom.query.common.v2.ExpressionPredicateFactory; import stroom.query.language.functions.FieldIndex; @@ -46,7 +45,7 @@ import static org.assertj.core.api.Assertions.assertThat; -class TestTemporalState { +class TestTemporalStateDb { @Test void test(@TempDir Path tempDir) { @@ -54,12 +53,12 @@ void test(@TempDir Path tempDir) { final Instant refTime = Instant.parse("2000-01-01T00:00:00.000Z"); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final TemporalStateReader reader = new TemporalStateReader(tempDir, byteBufferFactory)) { - assertThat(reader.count()).isEqualTo(100); + try (final TemporalStateDb db = new TemporalStateDb(tempDir, byteBufferFactory, false, true)) { + assertThat(db.count()).isEqualTo(100); // final TemporalStateRequest stateRequest = // new TemporalStateRequest("TEST_MAP", "TEST_KEY", refTime); final TemporalState.Key key = TemporalState.Key.builder().name("TEST_KEY").effectiveTime(refTime).build(); - final Optional optional = reader.get(key); + final Optional optional = db.get(key); assertThat(optional).isNotEmpty(); final StateValue res = optional.get(); // assertThat(res.key()).isEqualTo("TEST_KEY"); @@ -74,7 +73,7 @@ void test(@TempDir Path tempDir) { fieldIndex.create(TemporalStateFields.VALUE); final List results = new ArrayList<>(); final ExpressionPredicateFactory expressionPredicateFactory = new ExpressionPredicateFactory(null); - reader.search( + db.search( new ExpressionCriteria(ExpressionOperator.builder().build()), fieldIndex, null, @@ -105,7 +104,7 @@ void testMerge(@TempDir final Path rootDir) throws IOException { testWrite(db2); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final TemporalStateWriter writer = new TemporalStateWriter(db1, byteBufferFactory)) { + try (final TemporalStateDb writer = new TemporalStateDb(db1, byteBufferFactory)) { writer.merge(db2); } } @@ -113,7 +112,7 @@ void testMerge(@TempDir final Path rootDir) throws IOException { private void testWrite(final Path dbDir) { final ByteBufferFactory 
byteBufferFactory = new ByteBufferFactoryImpl(); final Instant refTime = Instant.parse("2000-01-01T00:00:00.000Z"); - try (final TemporalStateWriter writer = new TemporalStateWriter(dbDir, byteBufferFactory)) { + try (final TemporalStateDb writer = new TemporalStateDb(dbDir, byteBufferFactory)) { insertData(writer, refTime, "test", 100, 10); } } @@ -158,7 +157,7 @@ private void testWrite(final Path dbDir) { // } // - private void insertData(final TemporalStateWriter writer, + private void insertData(final TemporalStateDb db, final Instant refTime, final String value, final int rows, @@ -178,7 +177,7 @@ private void insertData(final TemporalStateWriter writer, .byteBuffer(byteBuffer) .build(); - writer.insert(k, v); + db.insert(k, v); } } } diff --git a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/data/TestSequentialFileStore.java b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/data/TestSequentialFileStore.java index 65827be117..d9324e699e 100644 --- a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/data/TestSequentialFileStore.java +++ b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/data/TestSequentialFileStore.java @@ -1,6 +1,6 @@ package stroom.planb.impl.data; -import stroom.planb.impl.io.StatePaths; +import stroom.planb.impl.db.StatePaths; import stroom.util.io.FileUtil; import org.junit.jupiter.api.Test; diff --git a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/experiment/TestState2.java b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/experiment/TestState2.java index d778eadaac..b3c7db7a52 100644 --- a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/experiment/TestState2.java +++ b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/experiment/TestState2.java @@ -21,10 +21,10 @@ import stroom.bytebuffer.impl6.ByteBufferFactoryImpl; import stroom.entity.shared.ExpressionCriteria; import stroom.pipeline.refdata.store.StringValue; -import stroom.planb.impl.io.State.Key; -import stroom.planb.impl.io.StateFields; -import stroom.planb.impl.io.StateReader; -import stroom.planb.impl.io.StateValue; +import stroom.planb.impl.db.State.Key; +import stroom.planb.impl.db.StateDb; +import stroom.planb.impl.db.StateFields; +import stroom.planb.impl.db.StateValue; import stroom.query.api.v2.ExpressionOperator; import stroom.query.common.v2.ExpressionPredicateFactory; import stroom.query.language.functions.FieldIndex; @@ -98,10 +98,10 @@ private void testWrite(final Path tempDir, private void testRead(final Path tempDir, final int expectedRows) { final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final StateReader reader = new StateReader(tempDir, byteBufferFactory)) { - assertThat(reader.count()).isEqualTo(1); + try (final StateDb db = new StateDb(tempDir, byteBufferFactory, false, true)) { + assertThat(db.count()).isEqualTo(1); final Key key = Key.builder().name("TEST_KEY").build(); - final Optional optional = reader.get(key); + final Optional optional = db.get(key); assertThat(optional).isNotEmpty(); final StateValue res = optional.get(); assertThat(res.typeId()).isEqualTo(StringValue.TYPE_ID); @@ -113,7 +113,7 @@ private void testRead(final Path tempDir, fieldIndex.create(StateFields.VALUE); final List results = new ArrayList<>(); final ExpressionPredicateFactory expressionPredicateFactory = new ExpressionPredicateFactory(null); - reader.search( + db.search( new ExpressionCriteria(ExpressionOperator.builder().build()), fieldIndex, null, diff 
--git a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/pipeline/TestStateLookupImpl.java b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/pipeline/TestStateLookupImpl.java index cfc4f69155..2a713c0ba9 100644 --- a/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/pipeline/TestStateLookupImpl.java +++ b/stroom-state/stroom-planb-impl/src/test/java/stroom/planb/impl/pipeline/TestStateLookupImpl.java @@ -3,12 +3,11 @@ import stroom.bytebuffer.impl6.ByteBufferFactory; import stroom.bytebuffer.impl6.ByteBufferFactoryImpl; import stroom.pipeline.refdata.store.StringValue; -import stroom.planb.impl.io.StateValue; -import stroom.planb.impl.io.TemporalState; -import stroom.planb.impl.io.TemporalState.Key; -import stroom.planb.impl.io.TemporalStateReader; -import stroom.planb.impl.io.TemporalStateRequest; -import stroom.planb.impl.io.TemporalStateWriter; +import stroom.planb.impl.db.StateValue; +import stroom.planb.impl.db.TemporalState; +import stroom.planb.impl.db.TemporalState.Key; +import stroom.planb.impl.db.TemporalStateDb; +import stroom.planb.impl.db.TemporalStateRequest; import stroom.util.logging.DurationTimer; import stroom.util.logging.LambdaLogger; import stroom.util.logging.LambdaLoggerFactory; @@ -60,7 +59,7 @@ void perfTest(@TempDir Path tempDir) { final List lookupTimes = new ArrayList<>(refStreamDefCount); final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl(); - try (final TemporalStateWriter writer = new TemporalStateWriter(tempDir, byteBufferFactory)) { + try (final TemporalStateDb writer = new TemporalStateDb(tempDir, byteBufferFactory)) { for (int refStrmIdx = 0; refStrmIdx < refStreamDefCount; refStrmIdx++) { final List mapNames = mapNamesMap.computeIfAbsent(refStrmIdx, k -> new ArrayList<>(keyValueMapCount)); @@ -92,7 +91,7 @@ void perfTest(@TempDir Path tempDir) { } } - try (final TemporalStateReader reader = new TemporalStateReader(tempDir, byteBufferFactory)) { + try (final TemporalStateDb db = new TemporalStateDb(tempDir, byteBufferFactory, false, true)) { final Random random = new Random(892374809); final Runnable work = () -> { final int refStrmIdx = random.nextInt(refStreamDefCount); @@ -106,7 +105,7 @@ void perfTest(@TempDir Path tempDir) { key.getBytes(StandardCharsets.UTF_8), time.toEpochMilli()); - final TemporalState state = reader.getState(request) + final TemporalState state = db.getState(request) .orElseThrow(() -> new RuntimeException(LogUtil.message( "No entry found for map: {}, key: {}, time: {}", mapName, key, time)));
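Illustrative sketch (not part of the patch): the test changes above collapse each StateWriter/StateReader pair into a single StateDb, opened with two arguments for writing and with two extra boolean flags for read access (the flags' meaning is not stated in this diff, so they are passed through verbatim here). The sketch class name is illustrative, and insert(key, value) on StateDb is assumed to mirror the insert calls shown for the temporal Db classes.

import stroom.bytebuffer.impl6.ByteBufferFactory;
import stroom.bytebuffer.impl6.ByteBufferFactoryImpl;
import stroom.pipeline.refdata.store.StringValue;
import stroom.planb.impl.db.State.Key;
import stroom.planb.impl.db.StateDb;
import stroom.planb.impl.db.StateValue;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;

class StateDbUsageSketch {

    void writeMergeRead(final Path db1, final Path db2) {
        final ByteBufferFactory byteBufferFactory = new ByteBufferFactoryImpl();

        // Open for writing (the role StateWriter used to play), insert one entry, then merge another shard dir.
        try (final StateDb writer = new StateDb(db1, byteBufferFactory)) {
            final Key key = Key.builder().name("TEST_KEY").build();
            final ByteBuffer byteBuffer = ByteBuffer.wrap("test".getBytes(StandardCharsets.UTF_8));
            final StateValue value = StateValue.builder()
                    .typeId(StringValue.TYPE_ID)
                    .byteBuffer(byteBuffer)
                    .build();
            writer.insert(key, value); // assumed to match the temporal Db insert calls above
            writer.merge(db2);
        }

        // Open for reading (the role StateReader used to play); the two booleans match the tests above.
        try (final StateDb db = new StateDb(db1, byteBufferFactory, false, true)) {
            final var count = db.count(); // count() is asserted against in TestStateDb above
            db.get(Key.builder().name("TEST_KEY").build())
                    .ifPresent(value -> {
                        // value.typeId() / value.byteBuffer() as asserted in TestStateDb above.
                    });
        }
    }
}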