diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQTasksTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQTasksTest.java
index eec5e9d4ebe4..6c543f862532 100644
--- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQTasksTest.java
+++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQTasksTest.java
@@ -24,7 +24,6 @@
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.errorprone.annotations.concurrent.GuardedBy;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.client.indexing.TaskStatusResponse;
 import org.apache.druid.common.guava.FutureUtils;
 import org.apache.druid.indexer.RunnerTaskState;
@@ -48,6 +47,7 @@
 import org.apache.druid.msq.indexing.error.TooManyWorkersFault;
 import org.apache.druid.msq.indexing.error.UnknownFault;
 import org.apache.druid.msq.indexing.error.WorkerRpcFailedFault;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.segment.column.ColumnHolder;
 import org.apache.druid.utils.CollectionUtils;
 import org.junit.Assert;
diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestControllerContext.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestControllerContext.java
index 22c7cff88477..86051bc4b380 100644
--- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestControllerContext.java
+++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestControllerContext.java
@@ -29,7 +29,6 @@
 import com.google.inject.Injector;
 import org.apache.druid.client.ImmutableSegmentLoadInfo;
 import org.apache.druid.client.coordinator.CoordinatorClient;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.client.indexing.TaskStatusResponse;
 import org.apache.druid.common.guava.FutureUtils;
 import org.apache.druid.indexer.RunnerTaskState;
@@ -67,6 +66,7 @@
 import org.apache.druid.msq.util.MultiStageQueryContext;
 import org.apache.druid.query.Query;
 import org.apache.druid.query.QueryContext;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.server.DruidNode;
 import org.mockito.ArgumentMatchers;
diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestOverlordServiceClient.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestOverlordServiceClient.java
index b35c074fa060..590a086c8e43 100644
--- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestOverlordServiceClient.java
+++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestOverlordServiceClient.java
@@ -28,7 +28,6 @@
 import com.google.inject.Injector;
 import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.druid.client.ImmutableSegmentLoadInfo;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.client.indexing.TaskPayloadResponse;
 import org.apache.druid.client.indexing.TaskStatusResponse;
 import org.apache.druid.indexer.TaskStatus;
@@ -48,6 +47,7 @@
 import org.apache.druid.msq.indexing.report.MSQStatusReport;
 import org.apache.druid.msq.indexing.report.MSQTaskReport;
 import org.apache.druid.msq.indexing.report.MSQTaskReportPayload;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.joda.time.DateTime;
 
 import javax.annotation.Nullable;
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/compact/LocalOverlordClient.java b/indexing-service/src/main/java/org/apache/druid/indexing/compact/LocalOverlordClient.java
index dcce2f4615ae..3f1427c7c34e 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/compact/LocalOverlordClient.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/compact/LocalOverlordClient.java
@@ -25,33 +25,25 @@
 import com.google.common.util.concurrent.ListenableFuture;
 import org.apache.druid.client.indexing.ClientCompactionTaskQuery;
 import org.apache.druid.client.indexing.IndexingTotalWorkerCapacityInfo;
-import org.apache.druid.client.indexing.IndexingWorkerInfo;
 import org.apache.druid.client.indexing.TaskPayloadResponse;
-import org.apache.druid.client.indexing.TaskStatusResponse;
 import org.apache.druid.error.DruidException;
 import org.apache.druid.indexer.TaskStatus;
 import org.apache.druid.indexer.TaskStatusPlus;
-import org.apache.druid.indexer.report.TaskReport;
 import org.apache.druid.indexing.common.task.CompactionTask;
 import org.apache.druid.indexing.overlord.TaskMaster;
 import org.apache.druid.indexing.overlord.TaskQueryTool;
 import org.apache.druid.indexing.overlord.TaskQueue;
 import org.apache.druid.indexing.overlord.http.TotalWorkerCapacityResponse;
-import org.apache.druid.indexing.overlord.supervisor.SupervisorStatus;
 import org.apache.druid.java.util.common.CloseableIterators;
 import org.apache.druid.java.util.common.concurrent.Execs;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.java.util.common.parsers.CloseableIterator;
 import org.apache.druid.metadata.LockFilterPolicy;
-import org.apache.druid.rpc.ServiceRetryPolicy;
-import org.apache.druid.rpc.indexing.OverlordClient;
-import org.apache.druid.server.compaction.CompactionProgressResponse;
-import org.apache.druid.server.compaction.CompactionStatusResponse;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.joda.time.Interval;
 
 import javax.annotation.Nullable;
 import java.io.IOException;
-import java.net.URI;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -62,7 +54,7 @@
  * task related info. This client simply redirects all queries to the
  * {@link TaskQueryTool} and all updates to the {@link TaskQueue}.
  */
-class LocalOverlordClient implements OverlordClient
+class LocalOverlordClient extends NoopOverlordClient
 {
   private static final Logger log = new Logger(LocalOverlordClient.class);
@@ -199,66 +191,4 @@ private <V> V convertTask(Object taskPayload, Class<?> inputType, Class<V> outputType)
       );
     }
   }
-
-  // Unsupported methods as these are not used by the CompactionScheduler / CompactSegments duty
-
-  @Override
-  public ListenableFuture<URI> findCurrentLeader()
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public ListenableFuture<TaskStatusResponse> taskStatus(String taskId)
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public ListenableFuture<TaskReport.ReportMap> taskReportAsMap(String taskId)
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public ListenableFuture<CloseableIterator<SupervisorStatus>> supervisorStatuses()
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public ListenableFuture<Integer> killPendingSegments(String dataSource, Interval interval)
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public ListenableFuture<List<IndexingWorkerInfo>> getWorkers()
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public ListenableFuture<CompactionStatusResponse> getCompactionSnapshots(@Nullable String dataSource)
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public ListenableFuture<Long> getBytesAwaitingCompaction(String dataSource)
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public ListenableFuture<Boolean> isCompactionSupervisorEnabled()
-  {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public OverlordClient withRetryPolicy(ServiceRetryPolicy retryPolicy)
-  {
-    return this;
-  }
 }
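Editor's note, not part of the patch: switching from `implements OverlordClient` to `extends NoopOverlordClient` lets the in-process client keep real implementations only for the methods the compaction scheduler actually calls, inheriting throwing stubs for everything else. A hypothetical sketch of the pattern; the `cancelTask` body below is illustrative and assumes `TaskQueue.shutdown(taskId, reason)`, it is not the class's actual code:

```java
// Hypothetical sketch: overrides delegate to in-process services; everything
// else falls through to NoopOverlordClient's UnsupportedOperationException.
class InProcessOverlordClient extends NoopOverlordClient
{
  private final TaskQueue taskQueue;

  InProcessOverlordClient(TaskQueue taskQueue)
  {
    this.taskQueue = taskQueue;
  }

  @Override
  public ListenableFuture<Void> cancelTask(String taskId)
  {
    // Updates go straight to the TaskQueue rather than over HTTP.
    taskQueue.shutdown(taskId, "Shutdown requested by CompactionScheduler");
    return Futures.immediateVoidFuture();
  }
}
```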
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordDataSourcesResource.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordDataSourcesResource.java
new file mode 100644
index 000000000000..bf9ff43981cd
--- /dev/null
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordDataSourcesResource.java
@@ -0,0 +1,285 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexing.overlord.http;
+
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableMap;
+import com.google.inject.Inject;
+import com.sun.jersey.spi.container.ResourceFilters;
+import org.apache.druid.audit.AuditEntry;
+import org.apache.druid.audit.AuditManager;
+import org.apache.druid.error.DruidException;
+import org.apache.druid.error.InvalidInput;
+import org.apache.druid.indexing.overlord.TaskMaster;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.metadata.SegmentsMetadataManager;
+import org.apache.druid.rpc.indexing.SegmentUpdateResponse;
+import org.apache.druid.server.http.SegmentsToUpdateFilter;
+import org.apache.druid.server.http.ServletResourceUtils;
+import org.apache.druid.server.http.security.DatasourceResourceFilter;
+import org.apache.druid.server.security.AuthorizationUtils;
+import org.apache.druid.timeline.SegmentId;
+import org.joda.time.Interval;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * Datasource APIs exposed by the Overlord to update segments.
+ * Some of these APIs are also exposed by the Coordinator, but they have been
+ * deprecated and the Overlord APIs must be used for all update operations.
+ */
+@Path("/druid/indexer/v1/datasources")
+public class OverlordDataSourcesResource
+{
+  private static final Logger log = new Logger(OverlordDataSourcesResource.class);
+
+  private final SegmentsMetadataManager segmentsMetadataManager;
+  private final TaskMaster taskMaster;
+  private final AuditManager auditManager;
+
+  @Inject
+  public OverlordDataSourcesResource(
+      TaskMaster taskMaster,
+      SegmentsMetadataManager segmentsMetadataManager,
+      AuditManager auditManager
+  )
+  {
+    this.taskMaster = taskMaster;
+    this.auditManager = auditManager;
+    this.segmentsMetadataManager = segmentsMetadataManager;
+  }
+
+  private interface SegmentUpdateOperation
+  {
+    int perform();
+  }
+
+  @POST
+  @Path("/{dataSourceName}")
+  @Consumes(MediaType.APPLICATION_JSON)
+  @ResourceFilters(DatasourceResourceFilter.class)
+  public Response markAllNonOvershadowedSegmentsAsUsed(
+      @PathParam("dataSourceName") final String dataSourceName,
+      @Context HttpServletRequest req
+  )
+  {
+    SegmentUpdateOperation operation = () -> segmentsMetadataManager
+        .markAsUsedAllNonOvershadowedSegmentsInDataSource(dataSourceName);
+    return performSegmentUpdate(dataSourceName, operation);
+  }
+
+  @DELETE
+  @Path("/{dataSourceName}")
+  @ResourceFilters(DatasourceResourceFilter.class)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response markAllSegmentsAsUnused(
+      @PathParam("dataSourceName") final String dataSourceName,
+      @Context HttpServletRequest req
+  )
+  {
+    SegmentUpdateOperation operation = () -> segmentsMetadataManager
+        .markAsUnusedAllSegmentsInDataSource(dataSourceName);
+    final Response response = performSegmentUpdate(dataSourceName, operation);
+
+    final int responseCode = response.getStatus();
+    if (responseCode >= 200 && responseCode < 300) {
+      auditMarkUnusedOperation(response.getEntity(), dataSourceName, req);
+    }
+
+    return response;
+  }
+
+  @POST
+  @Path("/{dataSourceName}/markUsed")
+  @Consumes(MediaType.APPLICATION_JSON)
+  @ResourceFilters(DatasourceResourceFilter.class)
+  public Response markNonOvershadowedSegmentsAsUsed(
+      @PathParam("dataSourceName") final String dataSourceName,
+      final SegmentsToUpdateFilter payload
+  )
+  {
+    if (payload == null || !payload.isValid()) {
+      return Response
+          .status(Response.Status.BAD_REQUEST)
+          .entity(SegmentsToUpdateFilter.INVALID_PAYLOAD_ERROR_MESSAGE)
+          .build();
+    } else {
+      SegmentUpdateOperation operation = () -> {
+        final Interval interval = payload.getInterval();
+        final List<String> versions = payload.getVersions();
+        if (interval != null) {
+          return segmentsMetadataManager.markAsUsedNonOvershadowedSegmentsInInterval(dataSourceName, interval, versions);
+        } else {
+          final Set<String> segmentIds = payload.getSegmentIds();
+          if (segmentIds == null || segmentIds.isEmpty()) {
+            return 0;
+          }
+
+          // Validate segmentIds
+          final List<String> invalidSegmentIds = new ArrayList<>();
+          for (String segmentId : segmentIds) {
+            if (SegmentId.iteratePossibleParsingsWithDataSource(dataSourceName, segmentId).isEmpty()) {
+              invalidSegmentIds.add(segmentId);
+            }
+          }
+          if (!invalidSegmentIds.isEmpty()) {
+            throw InvalidInput.exception("Could not parse invalid segment IDs[%s]", invalidSegmentIds);
+          }
+
+          return segmentsMetadataManager.markAsUsedNonOvershadowedSegments(dataSourceName, segmentIds);
+        }
+      };
+
+      return performSegmentUpdate(dataSourceName, operation);
+    }
+  }
+
+  @POST
+  @Path("/{dataSourceName}/markUnused")
+  @ResourceFilters(DatasourceResourceFilter.class)
+  @Produces(MediaType.APPLICATION_JSON)
+  @Consumes(MediaType.APPLICATION_JSON)
+  public Response markSegmentsAsUnused(
+      @PathParam("dataSourceName") final String dataSourceName,
+      final SegmentsToUpdateFilter payload,
+      @Context final HttpServletRequest req
+  )
+  {
+    if (payload == null || !payload.isValid()) {
+      return Response
+          .status(Response.Status.BAD_REQUEST)
+          .entity(SegmentsToUpdateFilter.INVALID_PAYLOAD_ERROR_MESSAGE)
+          .build();
+    } else {
+      SegmentUpdateOperation operation = () -> {
+        final Interval interval = payload.getInterval();
+        final List<String> versions = payload.getVersions();
+        final int numUpdatedSegments;
+        if (interval != null) {
+          numUpdatedSegments = segmentsMetadataManager.markAsUnusedSegmentsInInterval(dataSourceName, interval, versions);
+        } else {
+          final Set<SegmentId> segmentIds =
+              payload.getSegmentIds()
+                     .stream()
+                     .map(id -> SegmentId.tryParse(dataSourceName, id))
+                     .filter(Objects::nonNull)
+                     .collect(Collectors.toSet());
+
+          // Filter out segmentIds that do not belong to this datasource
+          numUpdatedSegments = segmentsMetadataManager.markSegmentsAsUnused(
+              segmentIds.stream()
+                        .filter(segmentId -> segmentId.getDataSource().equals(dataSourceName))
+                        .collect(Collectors.toSet())
+          );
+        }
+        auditMarkUnusedOperation(payload, dataSourceName, req);
+        return numUpdatedSegments;
+      };
+      return performSegmentUpdate(dataSourceName, operation);
+    }
+  }
+
+  @POST
+  @Path("/{dataSourceName}/segments/{segmentId}")
+  @Consumes(MediaType.APPLICATION_JSON)
+  @ResourceFilters(DatasourceResourceFilter.class)
+  public Response markSegmentAsUsed(
+      @PathParam("dataSourceName") String dataSourceName,
+      @PathParam("segmentId") String segmentId
+  )
+  {
+    SegmentUpdateOperation operation =
+        () -> segmentsMetadataManager.markSegmentAsUsed(segmentId) ? 1 : 0;
+    return performSegmentUpdate(dataSourceName, operation);
+  }
+
+  @DELETE
+  @Path("/{dataSourceName}/segments/{segmentId}")
+  @ResourceFilters(DatasourceResourceFilter.class)
+  public Response markSegmentAsUnused(
+      @PathParam("dataSourceName") String dataSourceName,
+      @PathParam("segmentId") String segmentIdString
+  )
+  {
+    final SegmentId segmentId = SegmentId.tryParse(dataSourceName, segmentIdString);
+    if (segmentId == null) {
+      return Response.status(Response.Status.BAD_REQUEST).entity(
+          StringUtils.format("Could not parse Segment ID[%s] for DataSource[%s]", segmentIdString, dataSourceName)
+      ).build();
+    }
+
+    SegmentUpdateOperation operation =
+        () -> segmentsMetadataManager.markSegmentAsUnused(segmentId) ? 1 : 0;
+    return performSegmentUpdate(dataSourceName, operation);
+  }
+
+  private Response performSegmentUpdate(String dataSourceName, SegmentUpdateOperation operation)
+  {
+    if (!taskMaster.isHalfOrFullLeader()) {
+      return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity("I am not leader").build();
+    }
+
+    try {
+      int numChangedSegments = operation.perform();
+      return Response.ok(new SegmentUpdateResponse(numChangedSegments)).build();
+    }
+    catch (DruidException e) {
+      return ServletResourceUtils.buildErrorResponseFrom(e);
+    }
+    catch (Exception e) {
+      log.error(e, "Error occurred while updating segments for datasource[%s]", dataSourceName);
+      return Response
+          .serverError()
+          .entity(ImmutableMap.of("error", "Server error", "message", Throwables.getRootCause(e).toString()))
+          .build();
+    }
+  }
+
+  private void auditMarkUnusedOperation(
+      Object auditPayload,
+      String dataSourceName,
+      HttpServletRequest request
+  )
+  {
+    auditManager.doAudit(
+        AuditEntry.builder()
+                  .key(dataSourceName)
+                  .type("segment.markUnused")
+                  .payload(auditPayload)
+                  .auditInfo(AuthorizationUtils.buildAuditInfo(request))
+                  .request(AuthorizationUtils.buildRequestInfo("overlord", request))
+                  .build()
+    );
+  }
+}
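Editor's note, not part of the patch: a sketch of how the new resource is driven in-process, mirroring the unit test added later in this diff. The `dataSourcesResource` and `httpRequest` objects are assumed to exist; per the 400-handling above, exactly one of `interval`/`segmentIds` may be set in `SegmentsToUpdateFilter`, and `versions` is only valid alongside `interval`.

```java
// Sketch: mark three days of "wiki" segments unused via the new resource.
SegmentsToUpdateFilter filter = new SegmentsToUpdateFilter(
    Intervals.of("2024-01-01/P3D"),  // interval to update
    null,                            // segmentIds: must be null when interval is set
    null                             // versions: optional, only valid with interval
);
Response response = dataSourcesResource.markSegmentsAsUnused("wiki", filter, httpRequest);

// performSegmentUpdate() wraps the changed-segment count in the entity.
SegmentUpdateResponse update = (SegmentUpdateResponse) response.getEntity();
int numChanged = update.getNumChangedSegments();
```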
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/TaskToolboxTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/TaskToolboxTest.java
index 78cbb88c3cf4..25aa3c4a0899 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/TaskToolboxTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/TaskToolboxTest.java
@@ -24,7 +24,6 @@
 import org.apache.druid.client.cache.CacheConfig;
 import org.apache.druid.client.cache.CachePopulatorStats;
 import org.apache.druid.client.coordinator.NoopCoordinatorClient;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.indexing.common.actions.TaskActionClientFactory;
 import org.apache.druid.indexing.common.config.TaskConfig;
 import org.apache.druid.indexing.common.config.TaskConfigBuilder;
@@ -39,6 +38,7 @@
 import org.apache.druid.query.DruidProcessingConfigTest;
 import org.apache.druid.query.QueryProcessingPool;
 import org.apache.druid.query.QueryRunnerFactoryConglomerate;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.segment.IndexIO;
 import org.apache.druid.segment.IndexMergerV9;
 import org.apache.druid.segment.IndexMergerV9Factory;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/TestUtils.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/TestUtils.java
index ae5ba44f7f09..ea58e21d035f 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/TestUtils.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/TestUtils.java
@@ -25,19 +25,18 @@
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.ImmutableMap;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.data.input.impl.NoopInputFormat;
 import org.apache.druid.data.input.impl.NoopInputSource;
 import org.apache.druid.guice.DruidSecondaryModule;
 import org.apache.druid.indexing.common.stats.DropwizardRowIngestionMetersFactory;
 import org.apache.druid.indexing.common.task.TestAppenderatorsManager;
-import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexSupervisorTaskClientProvider;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.math.expr.ExprMacroTable;
 import org.apache.druid.query.expression.LookupEnabledTestExprMacroTable;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.segment.IndexIO;
 import org.apache.druid.segment.IndexMergerV9;
@@ -63,10 +62,6 @@ public class TestUtils
 {
   public static final OverlordClient OVERLORD_SERVICE_CLIENT = new NoopOverlordClient();
-  public static final ParallelIndexSupervisorTaskClientProvider TASK_CLIENT_PROVIDER =
-      (supervisorTaskId, httpTimeout, numRetries) -> {
-        throw new UnsupportedOperationException();
-      };
   public static final AppenderatorsManager APPENDERATORS_MANAGER = new TestAppenderatorsManager();
 
   private static final Logger log = new Logger(TestUtils.class);
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/ClientCompactionTaskQuerySerdeTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/ClientCompactionTaskQuerySerdeTest.java
index 55898357d663..a354cb20db28 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/ClientCompactionTaskQuerySerdeTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/ClientCompactionTaskQuerySerdeTest.java
@@ -35,7 +35,6 @@
 import org.apache.druid.client.indexing.ClientCompactionTaskQueryTuningConfig;
 import org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec;
 import org.apache.druid.client.indexing.ClientTaskQuery;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.data.input.SegmentsSplitHintSpec;
 import org.apache.druid.data.input.impl.DimensionsSpec;
 import org.apache.druid.guice.GuiceAnnotationIntrospector;
@@ -53,6 +52,7 @@
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.CountAggregatorFactory;
 import org.apache.druid.query.filter.SelectorDimFilter;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.segment.IndexSpec;
 import org.apache.druid.segment.TestIndex;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java
index a3fb807604fa..e6e9230439cc 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java
@@ -32,7 +32,6 @@
 import org.apache.druid.client.coordinator.NoopCoordinatorClient;
 import org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec;
 import org.apache.druid.client.indexing.ClientCompactionTaskTransformSpec;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.data.input.impl.CSVParseSpec;
 import org.apache.druid.data.input.impl.DimensionSchema;
 import org.apache.druid.data.input.impl.DimensionsSpec;
@@ -70,6 +69,7 @@
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
 import org.apache.druid.query.dimension.DefaultDimensionSpec;
 import org.apache.druid.query.filter.SelectorDimFilter;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.segment.AutoTypeColumnSchema;
 import org.apache.druid.segment.ColumnSelectorFactory;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java
index d96a4d2a37e9..9dd6fe3f88a9 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java
@@ -33,7 +33,6 @@
 import org.apache.druid.client.ImmutableDruidDataSource;
 import org.apache.druid.client.coordinator.CoordinatorClient;
 import org.apache.druid.client.coordinator.NoopCoordinatorClient;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.client.indexing.TaskStatusResponse;
 import org.apache.druid.data.input.InputFormat;
 import org.apache.druid.data.input.MaxSizeSplitHintSpec;
@@ -83,6 +82,7 @@
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
 import org.apache.druid.query.expression.LookupEnabledTestExprMacroTable;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.segment.DataSegmentsWithSchemas;
 import org.apache.druid.segment.IndexIO;
 import org.apache.druid.segment.TestIndex;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/TaskMonitorTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/TaskMonitorTest.java
index 4d04dbd86cd8..fdbb7bb0a526 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/TaskMonitorTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/TaskMonitorTest.java
@@ -21,7 +21,6 @@
 
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.client.indexing.TaskStatusResponse;
 import org.apache.druid.data.input.InputSplit;
 import org.apache.druid.indexer.RunnerTaskState;
@@ -35,6 +34,7 @@
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.concurrent.Execs;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/SingleTaskBackgroundRunnerTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/SingleTaskBackgroundRunnerTest.java
index 3acf9ff6f0e5..5590ce608ec8 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/SingleTaskBackgroundRunnerTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/SingleTaskBackgroundRunnerTest.java
@@ -21,7 +21,6 @@
 
 import com.google.common.util.concurrent.ListenableFuture;
 import org.apache.druid.client.coordinator.NoopCoordinatorClient;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.indexer.TaskLocation;
 import org.apache.druid.indexer.TaskState;
 import org.apache.druid.indexer.TaskStatus;
@@ -45,6 +44,7 @@
 import org.apache.druid.query.QueryRunner;
 import org.apache.druid.query.scan.ScanResultValue;
 import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.segment.TestIndex;
 import org.apache.druid.segment.join.NoopJoinableFactory;
 import org.apache.druid.segment.loading.NoopDataSegmentArchiver;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java
index abab38bf0c9b..49fa9a9790a4 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java
@@ -35,7 +35,6 @@
 import org.apache.druid.client.cache.CachePopulatorStats;
 import org.apache.druid.client.cache.MapCache;
 import org.apache.druid.client.coordinator.NoopCoordinatorClient;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.data.input.AbstractInputSource;
 import org.apache.druid.data.input.InputRow;
 import org.apache.druid.data.input.InputRowListPlusRawValues;
@@ -110,6 +109,7 @@
 import org.apache.druid.query.QueryRunnerFactoryConglomerate;
 import org.apache.druid.query.SegmentDescriptor;
 import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.segment.IndexIO;
 import org.apache.druid.segment.IndexMergerV9Factory;
 import org.apache.druid.segment.IndexSpec;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TestTaskToolboxFactory.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TestTaskToolboxFactory.java
index 19e4bd33fd8e..3f3081b0e69e 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TestTaskToolboxFactory.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TestTaskToolboxFactory.java
@@ -26,7 +26,6 @@
 import org.apache.druid.client.cache.CachePopulatorStats;
 import org.apache.druid.client.coordinator.CoordinatorClient;
 import org.apache.druid.client.coordinator.NoopCoordinatorClient;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.discovery.DataNodeService;
 import org.apache.druid.discovery.DruidNodeAnnouncer;
 import org.apache.druid.discovery.LookupNodeService;
@@ -44,6 +43,7 @@
 import org.apache.druid.java.util.metrics.MonitorScheduler;
 import org.apache.druid.query.QueryProcessingPool;
 import org.apache.druid.query.QueryRunnerFactoryConglomerate;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.segment.IndexIO;
 import org.apache.druid.segment.IndexMergerV9Factory;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordDataSourcesResourceTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordDataSourcesResourceTest.java
new file mode 100644
index 000000000000..7bea4b2aec1b
--- /dev/null
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/http/OverlordDataSourcesResourceTest.java
@@ -0,0 +1,370 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexing.overlord.http;
+
+import com.google.common.collect.ImmutableSet;
+import org.apache.druid.audit.AuditManager;
+import org.apache.druid.client.ImmutableDruidDataSource;
+import org.apache.druid.indexing.overlord.TaskMaster;
+import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.granularity.Granularities;
+import org.apache.druid.rpc.indexing.SegmentUpdateResponse;
+import org.apache.druid.segment.TestDataSource;
+import org.apache.druid.server.coordinator.CreateDataSegments;
+import org.apache.druid.server.coordinator.simulate.TestSegmentsMetadataManager;
+import org.apache.druid.server.http.SegmentsToUpdateFilter;
+import org.apache.druid.server.security.AuthConfig;
+import org.apache.druid.timeline.DataSegment;
+import org.easymock.EasyMock;
+import org.joda.time.Interval;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.Response;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+public class OverlordDataSourcesResourceTest
+{
+  private static final String WIKI_SEGMENTS_START = "2024-01-01";
+  private static final List<DataSegment> WIKI_SEGMENTS_10X1D
+      = CreateDataSegments.ofDatasource(TestDataSource.WIKI)
+                          .forIntervals(10, Granularities.DAY)
+                          .startingAt(WIKI_SEGMENTS_START)
+                          .eachOfSizeInMb(500);
+
+  private TestSegmentsMetadataManager segmentsMetadataManager;
+
+  private OverlordDataSourcesResource dataSourcesResource;
+
+  @Before
+  public void setup()
+  {
+    AuditManager auditManager = EasyMock.createStrictMock(AuditManager.class);
+    segmentsMetadataManager = new TestSegmentsMetadataManager();
+
+    TaskMaster taskMaster = new TaskMaster(null, null);
+    dataSourcesResource = new OverlordDataSourcesResource(
+        taskMaster,
+        segmentsMetadataManager,
+        auditManager
+    );
+    taskMaster.becomeFullLeader();
+
+    WIKI_SEGMENTS_10X1D.forEach(segmentsMetadataManager::addSegment);
+  }
+
+  @Test
+  public void testMarkSegmentAsUnused()
+  {
+    Response response = dataSourcesResource.markSegmentAsUnused(
+        TestDataSource.WIKI,
+        WIKI_SEGMENTS_10X1D.get(0).getId().toString()
+    );
+    verifyNumSegmentsUpdated(1, response);
+  }
+
+  @Test
+  public void testMarkSegmentAsUnused_withInvalidSegmentId()
+  {
+    Response response = dataSourcesResource.markSegmentAsUnused(
+        TestDataSource.WIKI,
+        "someSegment"
+    );
+    Assert.assertEquals(400, response.getStatus());
+    Assert.assertEquals(
+        "Could not parse Segment ID[someSegment] for DataSource[wiki]",
+        response.getEntity()
+    );
+  }
+
+  @Test
+  public void testMarkAllSegmentsAsUnused()
+  {
+    Response response = dataSourcesResource.markAllSegmentsAsUnused(
+        TestDataSource.WIKI,
+        createHttpServletRequest()
+    );
+    verifyNumSegmentsUpdated(10, response);
+  }
+
+  @Test
+  public void testMarkSegmentsAsUnused_bySegmentIds()
+  {
+    final Set<String> segmentIdsToUpdate = ImmutableSet.of(
+        WIKI_SEGMENTS_10X1D.get(0).getId().toString(),
+        WIKI_SEGMENTS_10X1D.get(8).getId().toString()
+    );
+
+    Response response = dataSourcesResource.markSegmentsAsUnused(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(null, segmentIdsToUpdate, null),
+        createHttpServletRequest()
+    );
+    verifyNumSegmentsUpdated(2, response);
+  }
+
+  @Test
+  public void testMarkSegmentsAsUnused_byInterval()
+  {
+    final Interval nonOverlappingInterval = Intervals.of("1000/2000");
+    Response response = dataSourcesResource.markSegmentsAsUnused(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(nonOverlappingInterval, null, null),
+        createHttpServletRequest()
+    );
+    verifyNumSegmentsUpdated(0, response);
+
+    final Interval overlappingInterval3Days = Intervals.of(WIKI_SEGMENTS_START + "/P3D");
+    response = dataSourcesResource.markSegmentsAsUnused(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(overlappingInterval3Days, null, null),
+        createHttpServletRequest()
+    );
+    verifyNumSegmentsUpdated(3, response);
+  }
+
+  @Test
+  public void testMarkSegmentsAsUnused_byIntervalAndVersion()
+  {
+    final Interval overlappingInterval3Days = Intervals.of(WIKI_SEGMENTS_START + "/P3D");
+    Response response = dataSourcesResource.markSegmentsAsUnused(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(
+            overlappingInterval3Days,
+            null,
+            Collections.singletonList("invalidVersion")
+        ),
+        createHttpServletRequest()
+    );
+    verifyNumSegmentsUpdated(0, response);
+
+    final String wikiSegmentVersion = WIKI_SEGMENTS_10X1D.get(0).getVersion();
+    response = dataSourcesResource.markSegmentsAsUnused(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(
+            overlappingInterval3Days,
+            null,
+            Collections.singletonList(wikiSegmentVersion)
+        ),
+        createHttpServletRequest()
+    );
+    verifyNumSegmentsUpdated(3, response);
+  }
+
+  @Test
+  public void testMarkSegmentAsUsed()
+  {
+    final DataSegment segment = WIKI_SEGMENTS_10X1D.get(0);
+    final String segmentId = segment.getId().toString();
+
+    // Verify that segment which is already "used" is not updated
+    Response response = dataSourcesResource.markSegmentAsUsed(TestDataSource.WIKI, segmentId);
+    verifyNumSegmentsUpdated(0, response);
+
+    // Mark segment as unused and then mark it as used
+    dataSourcesResource.markSegmentAsUnused(TestDataSource.WIKI, segmentId);
+    response = dataSourcesResource.markSegmentAsUsed(TestDataSource.WIKI, segmentId);
+    verifyNumSegmentsUpdated(1, response);
+  }
+
+  @Test
+  public void testMarkAllNonOvershadowedSegmentsAsUsed()
+  {
+    // Create higher version segments
+    final String version2 = WIKI_SEGMENTS_10X1D.get(0).getVersion() + "__2";
+    final List<DataSegment> wikiSegmentsV2 = WIKI_SEGMENTS_10X1D.stream().map(
+        segment -> DataSegment.builder(segment).version(version2).build()
+    ).collect(Collectors.toList());
+
+    wikiSegmentsV2.forEach(segmentsMetadataManager::addSegment);
+
+    // Mark all segments as unused
+    Response response = dataSourcesResource.markAllSegmentsAsUnused(
+        TestDataSource.WIKI,
+        createHttpServletRequest()
+    );
+    verifyNumSegmentsUpdated(20, response);
+
+    // Verify that only higher version segments are marked as used
+    response = dataSourcesResource.markAllNonOvershadowedSegmentsAsUsed(
+        TestDataSource.WIKI,
+        createHttpServletRequest()
+    );
+    verifyNumSegmentsUpdated(10, response);
+
+    final ImmutableDruidDataSource dataSource = segmentsMetadataManager
+        .getImmutableDataSourceWithUsedSegments(TestDataSource.WIKI);
+    Assert.assertNotNull(dataSource);
+
+    final Collection<DataSegment> usedSegments = dataSource.getSegments();
+    Assert.assertEquals(10, usedSegments.size());
+    for (DataSegment segment : usedSegments) {
+      Assert.assertEquals(version2, segment.getVersion());
+    }
+  }
+
+  @Test
+  public void testMarkNonOvershadowedSegmentsAsUsed_byInterval()
+  {
+    dataSourcesResource.markAllSegmentsAsUnused(TestDataSource.WIKI, createHttpServletRequest());
+
+    final Interval disjointInterval = Intervals.of("1000/2000");
+    Response response = dataSourcesResource.markNonOvershadowedSegmentsAsUsed(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(disjointInterval, null, null)
+    );
+    verifyNumSegmentsUpdated(0, response);
+
+    final Interval overlappingInterval3Days = Intervals.of(WIKI_SEGMENTS_START + "/P3D");
+    response = dataSourcesResource.markNonOvershadowedSegmentsAsUsed(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(overlappingInterval3Days, null, null)
+    );
+    verifyNumSegmentsUpdated(3, response);
+  }
+
+  @Test
+  public void testMarkNonOvershadowedSegmentsAsUsed_byIntervalAndVersion()
+  {
+    dataSourcesResource.markAllSegmentsAsUnused(TestDataSource.WIKI, createHttpServletRequest());
+
+    final Interval overlappingInterval4Days = Intervals.of(WIKI_SEGMENTS_START + "/P4D");
+    Response response = dataSourcesResource.markNonOvershadowedSegmentsAsUsed(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(
+            overlappingInterval4Days,
+            null,
+            Collections.singletonList("invalidVersion")
+        )
+    );
+    verifyNumSegmentsUpdated(0, response);
+
+    final String wikiSegmentsVersion = WIKI_SEGMENTS_10X1D.get(0).getVersion();
+    response = dataSourcesResource.markNonOvershadowedSegmentsAsUsed(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(
+            overlappingInterval4Days,
+            null,
+            Collections.singletonList(wikiSegmentsVersion)
+        )
+    );
+    verifyNumSegmentsUpdated(4, response);
+  }
+
+  @Test
+  public void testMarkNonOvershadowedSegmentsAsUsed_bySegmentIds()
+  {
+    dataSourcesResource.markAllSegmentsAsUnused(TestDataSource.WIKI, createHttpServletRequest());
+
+    final Set<String> segmentIdsToUpdate = ImmutableSet.of(
+        WIKI_SEGMENTS_10X1D.get(0).getId().toString(),
+        WIKI_SEGMENTS_10X1D.get(1).getId().toString()
+    );
+    Response response = dataSourcesResource.markNonOvershadowedSegmentsAsUsed(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(null, segmentIdsToUpdate, null)
+    );
+    verifyNumSegmentsUpdated(2, response);
+  }
+
+  @Test
+  public void testMarkNonOvershadowedSegmentsAsUsed_withNullPayload_throwsBadRequestError()
+  {
+    final Response response
+        = dataSourcesResource.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, null);
+    Assert.assertEquals(400, response.getStatus());
+    Assert.assertEquals(
+        "Invalid request payload. Specify either 'interval' or 'segmentIds', but not both."
+        + " Optionally, include 'versions' only when 'interval' is provided.",
+        response.getEntity()
+    );
+  }
+
+  @Test
+  public void testMarkNonOvershadowedSegmentsAsUsed_withInvalidPayload_throwsBadRequestError()
+  {
+    final String segmentId = WIKI_SEGMENTS_10X1D.get(0).getId().toString();
+    final String expectedErrorMessage
+        = "Invalid request payload. Specify either 'interval' or 'segmentIds', but not both."
+          + " Optionally, include 'versions' only when 'interval' is provided.";
+
+    // Both interval and segmentIds are null
+    Response response = dataSourcesResource.markNonOvershadowedSegmentsAsUsed(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(null, null, null)
+    );
+    Assert.assertEquals(400, response.getStatus());
+    Assert.assertEquals(expectedErrorMessage, response.getEntity());
+
+    // interval is null and segmentIds is empty
+    response = dataSourcesResource.markNonOvershadowedSegmentsAsUsed(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(null, Collections.emptySet(), null)
+    );
+    Assert.assertEquals(400, response.getStatus());
+    Assert.assertEquals(expectedErrorMessage, response.getEntity());
+
+    // Both interval and segmentIds are specified
+    response = dataSourcesResource.markNonOvershadowedSegmentsAsUsed(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(Intervals.of("1000/2000"), Collections.singleton(segmentId), null)
+    );
+    Assert.assertEquals(400, response.getStatus());
+    Assert.assertEquals(expectedErrorMessage, response.getEntity());
+
+    // versions are specified with segmentIds
+    response = dataSourcesResource.markNonOvershadowedSegmentsAsUsed(
+        TestDataSource.WIKI,
+        new SegmentsToUpdateFilter(null, Collections.singleton(segmentId), Collections.singletonList("v1"))
+    );
+    Assert.assertEquals(400, response.getStatus());
+    Assert.assertEquals(expectedErrorMessage, response.getEntity());
+  }
+
+  private void verifyNumSegmentsUpdated(int expectedUpdatedCount, Response response)
+  {
+    Assert.assertEquals(200, response.getStatus());
+    Assert.assertEquals(new SegmentUpdateResponse(expectedUpdatedCount), response.getEntity());
+  }
+
+  private static HttpServletRequest createHttpServletRequest()
+  {
+    final HttpServletRequest request = EasyMock.createStrictMock(HttpServletRequest.class);
+
+    EasyMock.expect(request.getHeader(AuditManager.X_DRUID_AUTHOR)).andReturn("author").anyTimes();
+    EasyMock.expect(request.getHeader(AuditManager.X_DRUID_COMMENT)).andReturn("comment").anyTimes();
+    EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)).andReturn(null).anyTimes();
+    EasyMock.expect(request.getRemoteAddr()).andReturn("127.0.0.1").anyTimes();
+
+    EasyMock.expect(request.getMethod()).andReturn("POST").anyTimes();
+    EasyMock.expect(request.getRequestURI()).andReturn("/request/uri").anyTimes();
+    EasyMock.expect(request.getQueryString()).andReturn("query=string").anyTimes();
+
+    EasyMock.replay(request);
+
+    return request;
+  }
+}
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/SeekableStreamIndexTaskTestBase.java b/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/SeekableStreamIndexTaskTestBase.java
index 7a4fd7dadb3a..b815a48d71e6 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/SeekableStreamIndexTaskTestBase.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/SeekableStreamIndexTaskTestBase.java
@@ -34,7 +34,6 @@
 import org.apache.druid.client.cache.CachePopulatorStats;
 import org.apache.druid.client.cache.MapCache;
 import org.apache.druid.client.coordinator.NoopCoordinatorClient;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.data.input.InputFormat;
 import org.apache.druid.data.input.impl.ByteEntity;
@@ -100,6 +99,7 @@
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
 import org.apache.druid.query.timeseries.TimeseriesQuery;
 import org.apache.druid.query.timeseries.TimeseriesResultValue;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.segment.DimensionHandlerUtils;
 import org.apache.druid.segment.IndexIO;
 import org.apache.druid.segment.QueryableIndex;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java
index e1822505b977..c8a754625358 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java
@@ -27,7 +27,6 @@
 import org.apache.curator.retry.ExponentialBackoffRetry;
 import org.apache.curator.test.TestingCluster;
 import org.apache.druid.client.coordinator.NoopCoordinatorClient;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.curator.PotentiallyGzippedCompressionProvider;
 import org.apache.druid.indexer.TaskState;
 import org.apache.druid.indexing.common.IndexingServiceCondition;
@@ -48,6 +47,7 @@
 import org.apache.druid.indexing.worker.config.WorkerConfig;
 import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.segment.IndexIO;
 import org.apache.druid.segment.IndexMergerV9Factory;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManagerAutoCleanupTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManagerAutoCleanupTest.java
index 5505160d989d..be07a3ef4858 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManagerAutoCleanupTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManagerAutoCleanupTest.java
@@ -24,13 +24,13 @@
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import org.apache.commons.io.FileUtils;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.indexer.TaskState;
 import org.apache.druid.indexer.TaskStatus;
 import org.apache.druid.indexing.common.config.TaskConfig;
 import org.apache.druid.indexing.common.config.TaskConfigBuilder;
 import org.apache.druid.indexing.worker.config.WorkerConfig;
 import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.segment.loading.StorageLocationConfig;
 import org.apache.druid.timeline.DataSegment;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManagerManualAddAndDeleteTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManagerManualAddAndDeleteTest.java
index cc14d4505e95..59f194350a0d 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManagerManualAddAndDeleteTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/LocalIntermediaryDataManagerManualAddAndDeleteTest.java
@@ -23,7 +23,6 @@
 import com.google.common.io.ByteSource;
 import com.google.common.primitives.Ints;
 import org.apache.commons.io.FileUtils;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.error.DruidException;
 import org.apache.druid.error.DruidExceptionMatcher;
 import org.apache.druid.indexing.common.config.TaskConfig;
@@ -31,6 +30,7 @@
 import org.apache.druid.indexing.worker.config.WorkerConfig;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.segment.loading.StorageLocationConfig;
 import org.apache.druid.timeline.DataSegment;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/ShuffleDataSegmentPusherTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/ShuffleDataSegmentPusherTest.java
index c8c9b7fe0665..70e734f1b224 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/ShuffleDataSegmentPusherTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/ShuffleDataSegmentPusherTest.java
@@ -28,7 +28,6 @@
 import com.google.common.primitives.Ints;
 import com.google.inject.Injector;
 import org.apache.commons.io.FileUtils;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.guice.GuiceAnnotationIntrospector;
 import org.apache.druid.guice.GuiceInjectableValues;
 import org.apache.druid.guice.GuiceInjectors;
@@ -37,6 +36,7 @@
 import org.apache.druid.indexing.worker.config.WorkerConfig;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.segment.loading.LoadSpec;
 import org.apache.druid.segment.loading.LocalDataSegmentPuller;
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/ShuffleResourceTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/ShuffleResourceTest.java
index 69d48417deee..159800b4d4ee 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/ShuffleResourceTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/shuffle/ShuffleResourceTest.java
@@ -24,7 +24,6 @@
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import org.apache.commons.io.FileUtils;
-import org.apache.druid.client.indexing.NoopOverlordClient;
 import org.apache.druid.indexer.TaskState;
 import org.apache.druid.indexer.TaskStatus;
 import org.apache.druid.indexing.common.config.TaskConfig;
@@ -32,6 +31,7 @@
 import org.apache.druid.indexing.worker.config.WorkerConfig;
 import org.apache.druid.indexing.worker.shuffle.ShuffleMetrics.PerDatasourceShuffleMetrics;
 import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.rpc.indexing.NoopOverlordClient;
 import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.segment.loading.StorageLocationConfig;
 import org.apache.druid.timeline.DataSegment;
diff --git a/server/src/test/java/org/apache/druid/client/indexing/NoopOverlordClient.java b/server/src/main/java/org/apache/druid/rpc/indexing/NoopOverlordClient.java
similarity index 73%
rename from server/src/test/java/org/apache/druid/client/indexing/NoopOverlordClient.java
rename to server/src/main/java/org/apache/druid/rpc/indexing/NoopOverlordClient.java
index 8ccc32e435ce..7ccdcedb2bd3 100644
--- a/server/src/test/java/org/apache/druid/client/indexing/NoopOverlordClient.java
+++ b/server/src/main/java/org/apache/druid/rpc/indexing/NoopOverlordClient.java
@@ -17,9 +17,13 @@
  * under the License.
  */
 
-package org.apache.druid.client.indexing;
+package org.apache.druid.rpc.indexing;
 
 import com.google.common.util.concurrent.ListenableFuture;
+import org.apache.druid.client.indexing.IndexingTotalWorkerCapacityInfo;
+import org.apache.druid.client.indexing.IndexingWorkerInfo;
+import org.apache.druid.client.indexing.TaskPayloadResponse;
+import org.apache.druid.client.indexing.TaskStatusResponse;
 import org.apache.druid.indexer.TaskStatus;
 import org.apache.druid.indexer.TaskStatusPlus;
 import org.apache.druid.indexer.report.TaskReport;
@@ -27,9 +31,10 @@
 import org.apache.druid.java.util.common.parsers.CloseableIterator;
 import org.apache.druid.metadata.LockFilterPolicy;
 import org.apache.druid.rpc.ServiceRetryPolicy;
-import org.apache.druid.rpc.indexing.OverlordClient;
 import org.apache.druid.server.compaction.CompactionProgressResponse;
 import org.apache.druid.server.compaction.CompactionStatusResponse;
+import org.apache.druid.server.http.SegmentsToUpdateFilter;
+import org.apache.druid.timeline.SegmentId;
 import org.joda.time.Interval;
 
 import javax.annotation.Nullable;
@@ -38,6 +43,10 @@
 import java.util.Map;
 import java.util.Set;
 
+/**
+ * Implementation of {@link OverlordClient} that throws
+ * {@link UnsupportedOperationException} for every method.
+ */
 public class NoopOverlordClient implements OverlordClient
 {
   @Override
@@ -130,6 +139,45 @@ public ListenableFuture<CompactionStatusResponse> getCompactionSnapshots(@Nullable String dataSource)
     throw new UnsupportedOperationException();
   }
 
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markNonOvershadowedSegmentsAsUsed(String dataSource)
+  {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markNonOvershadowedSegmentsAsUsed(
+      String dataSource,
+      SegmentsToUpdateFilter filter
+  )
+  {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markSegmentAsUsed(SegmentId segmentId)
+  {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markSegmentsAsUnused(String dataSource)
+  {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markSegmentsAsUnused(String dataSource, SegmentsToUpdateFilter filter)
+  {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markSegmentAsUnused(SegmentId segmentId)
+  {
+    throw new UnsupportedOperationException();
+  }
+
   @Override
   public ListenableFuture<Long> getBytesAwaitingCompaction(String dataSource)
   {
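Editor's note, not part of the patch: since the relocated `NoopOverlordClient` now lives in `server`'s main sources, both production helpers (such as `LocalOverlordClient` above) and tests can extend it and override only the calls they exercise. A minimal sketch using one of the methods added in this hunk:

```java
// Sketch: a test double that supports only single-segment unmarking; every
// other OverlordClient method still throws UnsupportedOperationException.
OverlordClient stubClient = new NoopOverlordClient()
{
  @Override
  public ListenableFuture<SegmentUpdateResponse> markSegmentAsUnused(SegmentId segmentId)
  {
    // Pretend exactly one segment changed state.
    return Futures.immediateFuture(new SegmentUpdateResponse(1));
  }
};
```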
diff --git a/server/src/main/java/org/apache/druid/rpc/indexing/OverlordClient.java b/server/src/main/java/org/apache/druid/rpc/indexing/OverlordClient.java
index fcc93d39c8de..b14c287e734f 100644
--- a/server/src/main/java/org/apache/druid/rpc/indexing/OverlordClient.java
+++ b/server/src/main/java/org/apache/druid/rpc/indexing/OverlordClient.java
@@ -37,6 +37,8 @@
 import org.apache.druid.rpc.ServiceRetryPolicy;
 import org.apache.druid.server.compaction.CompactionProgressResponse;
 import org.apache.druid.server.compaction.CompactionStatusResponse;
+import org.apache.druid.server.http.SegmentsToUpdateFilter;
+import org.apache.druid.timeline.SegmentId;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
 
@@ -239,6 +241,58 @@ ListenableFuture<Map<String, List<Interval>>> findLockedIntervals(
    */
  ListenableFuture<CompactionStatusResponse> getCompactionSnapshots(@Nullable String dataSource);
 
+  /**
+   * Marks all non-overshadowed segments of the datasource as used.
+   * <p>
+   * API: {@code POST /druid/indexer/v1/datasources/{dataSourceName}}
+   */
+  ListenableFuture<SegmentUpdateResponse> markNonOvershadowedSegmentsAsUsed(String dataSource);
+
+  /**
+   * Marks non-overshadowed segments that satisfy the given filter as used.
+   * <p>
+   * API: {@code POST /druid/indexer/v1/datasources/{dataSourceName}/markUsed}
+   *
+   * @param filter Must be non-null
+   */
+  ListenableFuture<SegmentUpdateResponse> markNonOvershadowedSegmentsAsUsed(
+      String dataSource,
+      SegmentsToUpdateFilter filter
+  );
+
+  /**
+   * Marks the given segment as used.
+   * <p>
+   * API: {@code POST /druid/indexer/v1/datasources/{dataSourceName}/segments/{segmentId}}
+   */
+  ListenableFuture<SegmentUpdateResponse> markSegmentAsUsed(SegmentId segmentId);
+
+  /**
+   * Marks all segments of the datasource as unused.
+   * <p>
+   * API: {@code DELETE /druid/indexer/v1/datasources/{dataSourceName}}
+   */
+  ListenableFuture<SegmentUpdateResponse> markSegmentsAsUnused(String dataSource);
+
+  /**
+   * Marks segments that satisfy the given filter as unused.
+   * <p>
+   * API: {@code POST /druid/indexer/v1/datasources/{dataSourceName}/markUnused}
+   *
+   * @param filter Must be non-null
+   */
+  ListenableFuture<SegmentUpdateResponse> markSegmentsAsUnused(
+      String dataSource,
+      SegmentsToUpdateFilter filter
+  );
+
+  /**
+   * Marks the given segment as unused.
+   * <p>
+   * API: {@code DELETE /druid/indexer/v1/datasources/{dataSourceName}/segments/{segmentId}}
+   */
+  ListenableFuture<SegmentUpdateResponse> markSegmentAsUnused(SegmentId segmentId);
+
   /**
    * Returns a copy of this client with a different retry policy.
    */
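Editor's note, not part of the patch: the caller-side shape of the new interface methods, assuming an injected `OverlordClient` named `overlordClient`; `FutureUtils.getUnchecked` is the blocking helper Druid code commonly wraps around these futures.

```java
// Sketch: mark three days of segments unused, then restore the ones that are
// not overshadowed, reading the changed-segment count from each response.
SegmentsToUpdateFilter filter =
    new SegmentsToUpdateFilter(Intervals.of("2024-01-01/P3D"), null, null);

SegmentUpdateResponse unused =
    FutureUtils.getUnchecked(overlordClient.markSegmentsAsUnused("wiki", filter), true);
SegmentUpdateResponse restored =
    FutureUtils.getUnchecked(overlordClient.markNonOvershadowedSegmentsAsUsed("wiki", filter), true);
```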
diff --git a/server/src/main/java/org/apache/druid/rpc/indexing/OverlordClientImpl.java b/server/src/main/java/org/apache/druid/rpc/indexing/OverlordClientImpl.java
index 1653962e1940..097a1efb7947 100644
--- a/server/src/main/java/org/apache/druid/rpc/indexing/OverlordClientImpl.java
+++ b/server/src/main/java/org/apache/druid/rpc/indexing/OverlordClientImpl.java
@@ -47,6 +47,8 @@
 import org.apache.druid.rpc.ServiceRetryPolicy;
 import org.apache.druid.server.compaction.CompactionProgressResponse;
 import org.apache.druid.server.compaction.CompactionStatusResponse;
+import org.apache.druid.server.http.SegmentsToUpdateFilter;
+import org.apache.druid.timeline.SegmentId;
 import org.jboss.netty.handler.codec.http.HttpMethod;
 import org.joda.time.Interval;
 
@@ -335,6 +337,112 @@ public ListenableFuture<CompactionStatusResponse> getCompactionSnapshots(@Nullable String dataSource)
     );
   }
 
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markNonOvershadowedSegmentsAsUsed(String dataSource)
+  {
+    final String path = StringUtils.format(
+        "/druid/indexer/v1/datasources/%s",
+        StringUtils.urlEncode(dataSource)
+    );
+    return FutureUtils.transform(
+        client.asyncRequest(
+            new RequestBuilder(HttpMethod.POST, path),
+            new BytesFullResponseHandler()
+        ),
+        holder -> JacksonUtils.readValue(jsonMapper, holder.getContent(), SegmentUpdateResponse.class)
+    );
+  }
+
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markNonOvershadowedSegmentsAsUsed(
+      String dataSource,
+      SegmentsToUpdateFilter filter
+  )
+  {
+    final String path = StringUtils.format(
+        "/druid/indexer/v1/datasources/%s/markUsed",
+        StringUtils.urlEncode(dataSource)
+    );
+    return FutureUtils.transform(
+        client.asyncRequest(
+            new RequestBuilder(HttpMethod.POST, path)
+                .jsonContent(jsonMapper, filter),
+            new BytesFullResponseHandler()
+        ),
+        holder -> JacksonUtils.readValue(jsonMapper, holder.getContent(), SegmentUpdateResponse.class)
+    );
+  }
+
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markSegmentAsUsed(SegmentId segmentId)
+  {
+    final String path = StringUtils.format(
+        "/druid/indexer/v1/datasources/%s/segments/%s",
+        StringUtils.urlEncode(segmentId.getDataSource()),
+        StringUtils.urlEncode(segmentId.toString())
+    );
+    return FutureUtils.transform(
+        client.asyncRequest(
+            new RequestBuilder(HttpMethod.POST, path),
+            new BytesFullResponseHandler()
+        ),
+        holder -> JacksonUtils.readValue(jsonMapper, holder.getContent(), SegmentUpdateResponse.class)
+    );
+  }
+
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markSegmentsAsUnused(String dataSource)
+  {
+    final String path = StringUtils.format(
+        "/druid/indexer/v1/datasources/%s",
+        StringUtils.urlEncode(dataSource)
+    );
+    return FutureUtils.transform(
+        client.asyncRequest(
+            new RequestBuilder(HttpMethod.DELETE, path),
+            new BytesFullResponseHandler()
+        ),
+        holder -> JacksonUtils.readValue(jsonMapper, holder.getContent(), SegmentUpdateResponse.class)
+    );
+  }
+
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markSegmentsAsUnused(
+      String dataSource,
+      SegmentsToUpdateFilter filter
+  )
+  {
+    final String path = StringUtils.format(
+        "/druid/indexer/v1/datasources/%s/markUnused",
+        StringUtils.urlEncode(dataSource)
+    );
+    return FutureUtils.transform(
+        client.asyncRequest(
+            new RequestBuilder(HttpMethod.POST, path)
+                .jsonContent(jsonMapper, filter),
+            new BytesFullResponseHandler()
+        ),
+        holder -> JacksonUtils.readValue(jsonMapper, holder.getContent(), SegmentUpdateResponse.class)
+    );
+  }
+
+  @Override
+  public ListenableFuture<SegmentUpdateResponse> markSegmentAsUnused(SegmentId segmentId)
+  {
+    final String path = StringUtils.format(
+        "/druid/indexer/v1/datasources/%s/segments/%s",
+        StringUtils.urlEncode(segmentId.getDataSource()),
+        StringUtils.urlEncode(segmentId.toString())
+    );
+    return FutureUtils.transform(
+        client.asyncRequest(
+            new RequestBuilder(HttpMethod.DELETE, path),
+            new BytesFullResponseHandler()
+        ),
+        holder -> JacksonUtils.readValue(jsonMapper, holder.getContent(), SegmentUpdateResponse.class)
+    );
+  }
+
   @Override
   public ListenableFuture<Long> getBytesAwaitingCompaction(String dataSource)
   {
+ */ + +package org.apache.druid.rpc.indexing; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.Objects; + +public class SegmentUpdateResponse +{ + private final int numChangedSegments; + private final boolean segmentStateChanged; + + @JsonCreator + public SegmentUpdateResponse( + @JsonProperty("numChangedSegments") int numChangedSegments + ) + { + this.numChangedSegments = numChangedSegments; + this.segmentStateChanged = numChangedSegments > 0; + } + + @JsonProperty + public int getNumChangedSegments() + { + return numChangedSegments; + } + + /** + * This field is required for backward compatibility of responses of + * {@link org.apache.druid.server.http.DataSourcesResource#markSegmentAsUsed} + * and {@link org.apache.druid.server.http.DataSourcesResource#markSegmentAsUnused} + */ + @JsonProperty + public boolean isSegmentStateChanged() + { + return segmentStateChanged; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SegmentUpdateResponse response = (SegmentUpdateResponse) o; + return numChangedSegments == response.numChangedSegments; + } + + @Override + public int hashCode() + { + return Objects.hashCode(numChangedSegments); + } + + @Override + public String toString() + { + return "SegmentUpdateResponse{numChangedSegments=" + numChangedSegments + "}"; + } +} diff --git a/server/src/main/java/org/apache/druid/server/compaction/CompactionRunSimulator.java b/server/src/main/java/org/apache/druid/server/compaction/CompactionRunSimulator.java index db0a5f8c98c4..b74735e6aa3a 100644 --- a/server/src/main/java/org/apache/druid/server/compaction/CompactionRunSimulator.java +++ b/server/src/main/java/org/apache/druid/server/compaction/CompactionRunSimulator.java @@ -25,17 +25,13 @@ import org.apache.druid.client.indexing.ClientCompactionTaskQuery; import org.apache.druid.client.indexing.ClientCompactionTaskQueryTuningConfig; import org.apache.druid.client.indexing.IndexingTotalWorkerCapacityInfo; -import org.apache.druid.client.indexing.IndexingWorkerInfo; import org.apache.druid.client.indexing.TaskPayloadResponse; -import org.apache.druid.client.indexing.TaskStatusResponse; import org.apache.druid.indexer.CompactionEngine; import org.apache.druid.indexer.TaskStatus; import org.apache.druid.indexer.TaskStatusPlus; -import org.apache.druid.indexer.report.TaskReport; -import org.apache.druid.indexing.overlord.supervisor.SupervisorStatus; import org.apache.druid.java.util.common.parsers.CloseableIterator; import org.apache.druid.metadata.LockFilterPolicy; -import org.apache.druid.rpc.ServiceRetryPolicy; +import org.apache.druid.rpc.indexing.NoopOverlordClient; import org.apache.druid.rpc.indexing.OverlordClient; import org.apache.druid.server.coordinator.ClusterCompactionConfig; import org.apache.druid.server.coordinator.DataSourceCompactionConfig; @@ -45,7 +41,6 @@ import org.joda.time.Interval; import javax.annotation.Nullable; -import java.net.URI; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -192,7 +187,7 @@ private Object[] createRow( /** * Dummy overlord client that returns empty results for all APIs. 
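Aside for reviewers: the JSON shape of the new SegmentUpdateResponse is worth pinning down, since Coordinator and Overlord must agree on it across versions. numChangedSegments is the source of truth, while segmentStateChanged is derived and serialized only for compatibility with the old per-segment responses. The demo below is illustrative only and not part of the patch; it uses a bare Jackson mapper with unknown-property failures disabled, which approximates how Druid's DefaultObjectMapper tolerates the derived field on read.

    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.druid.rpc.indexing.SegmentUpdateResponse;

    class SegmentUpdateResponseJsonDemo
    {
      public static void main(String[] args) throws Exception
      {
        final ObjectMapper mapper = new ObjectMapper()
            .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

        // Both properties are written; "segmentStateChanged" is derived from the count.
        final String json = mapper.writeValueAsString(new SegmentUpdateResponse(3));
        System.out.println(json); // e.g. {"numChangedSegments":3,"segmentStateChanged":true}

        // On read, only "numChangedSegments" is consumed; the flag is recomputed.
        final SegmentUpdateResponse parsed = mapper.readValue(json, SegmentUpdateResponse.class);
        System.out.println(parsed); // SegmentUpdateResponse{numChangedSegments=3}
      }
    }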
*/ - private static class ReadOnlyOverlordClient implements OverlordClient + private static class ReadOnlyOverlordClient extends NoopOverlordClient { final OverlordClient delegate; @@ -249,67 +244,5 @@ public ListenableFuture cancelTask(String taskId) { return Futures.immediateVoidFuture(); } - - // Unsupported methods as these are not used by the CompactionScheduler / CompactSegments duty - - @Override - public ListenableFuture findCurrentLeader() - { - throw new UnsupportedOperationException(); - } - - @Override - public ListenableFuture taskStatus(String taskId) - { - throw new UnsupportedOperationException(); - } - - @Override - public ListenableFuture taskReportAsMap(String taskId) - { - throw new UnsupportedOperationException(); - } - - @Override - public ListenableFuture> supervisorStatuses() - { - throw new UnsupportedOperationException(); - } - - @Override - public ListenableFuture killPendingSegments(String dataSource, Interval interval) - { - throw new UnsupportedOperationException(); - } - - @Override - public ListenableFuture> getWorkers() - { - throw new UnsupportedOperationException(); - } - - @Override - public ListenableFuture getCompactionSnapshots(@Nullable String dataSource) - { - throw new UnsupportedOperationException(); - } - - @Override - public ListenableFuture getBytesAwaitingCompaction(String dataSource) - { - throw new UnsupportedOperationException(); - } - - @Override - public ListenableFuture isCompactionSupervisorEnabled() - { - throw new UnsupportedOperationException(); - } - - @Override - public OverlordClient withRetryPolicy(ServiceRetryPolicy retryPolicy) - { - return this; - } } } diff --git a/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinator.java b/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinator.java index f8c29cd79849..fe3f889b5a82 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinator.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinator.java @@ -33,6 +33,7 @@ import org.apache.druid.client.ImmutableDruidDataSource; import org.apache.druid.client.ServerInventoryView; import org.apache.druid.client.coordinator.Coordinator; +import org.apache.druid.common.guava.FutureUtils; import org.apache.druid.curator.discovery.ServiceAnnouncer; import org.apache.druid.discovery.DruidLeaderSelector; import org.apache.druid.guice.ManageLifecycle; @@ -48,6 +49,7 @@ import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; import org.apache.druid.metadata.SegmentsMetadataManager; import org.apache.druid.rpc.indexing.OverlordClient; +import org.apache.druid.rpc.indexing.SegmentUpdateResponse; import org.apache.druid.segment.metadata.CentralizedDatasourceSchemaConfig; import org.apache.druid.segment.metadata.CoordinatorSegmentMetadataCache; import org.apache.druid.server.DruidNode; @@ -89,6 +91,7 @@ import org.apache.druid.server.coordinator.stats.Dimension; import org.apache.druid.server.coordinator.stats.RowKey; import org.apache.druid.server.coordinator.stats.Stats; +import org.apache.druid.server.http.SegmentsToUpdateFilter; import org.apache.druid.server.lookup.cache.LookupCoordinatorManager; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.SegmentId; @@ -445,16 +448,14 @@ private void becomeLeader() config.getCoordinatorPeriod() ) ); - if (overlordClient != null) { - dutiesRunnables.add( - new DutiesRunnable( - makeIndexingServiceDuties(), - startingLeaderCounter, - INDEXING_SERVICE_DUTIES_DUTY_GROUP, - 
config.getCoordinatorIndexingPeriod() - ) - ); - } + dutiesRunnables.add( + new DutiesRunnable( + makeIndexingServiceDuties(), + startingLeaderCounter, + INDEXING_SERVICE_DUTIES_DUTY_GROUP, + config.getCoordinatorIndexingPeriod() + ) + ); dutiesRunnables.add( new DutiesRunnable( makeMetadataStoreManagementDuties(), @@ -535,8 +536,7 @@ private void stopAllDutyGroups() private List makeHistoricalManagementDuties() { - final MetadataAction.DeleteSegments deleteSegments - = segments -> metadataManager.segments().markSegmentsAsUnused(segments); + final MetadataAction.DeleteSegments deleteSegments = this::markSegmentsAsUnused; final MetadataAction.GetDatasourceRules getRules = dataSource -> metadataManager.rules().getRulesWithDefault(dataSource); @@ -628,6 +628,30 @@ private List getCompactSegmentsDutyFromCustomGroups() .collect(Collectors.toList()); } + /** + * Makes an API call to Overlord to mark segments of a datasource as unused. + * + * @return Number of segments updated. + */ + private int markSegmentsAsUnused(String datasource, Set segmentIds) + { + try { + final Set segmentIdsToUpdate + = segmentIds.stream().map(SegmentId::toString).collect(Collectors.toSet()); + final SegmentsToUpdateFilter filter + = new SegmentsToUpdateFilter(null, segmentIdsToUpdate, null); + SegmentUpdateResponse response = FutureUtils.getUnchecked( + overlordClient.markSegmentsAsUnused(datasource, filter), + true + ); + return response.getNumChangedSegments(); + } + catch (Exception e) { + log.error(e, "Could not mark segments as unused"); + return 0; + } + } + /** * Used by {@link CoordinatorDutyGroup} to check leadership and emit stats. */ diff --git a/server/src/main/java/org/apache/druid/server/coordinator/duty/MarkEternityTombstonesAsUnused.java b/server/src/main/java/org/apache/druid/server/coordinator/duty/MarkEternityTombstonesAsUnused.java index 78704166ce9e..f332f11020c5 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/duty/MarkEternityTombstonesAsUnused.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/duty/MarkEternityTombstonesAsUnused.java @@ -89,7 +89,7 @@ public DruidCoordinatorRuntimeParams run(final DruidCoordinatorRuntimeParams par dataSourcesSnapshot ); - if (datasourceToNonOvershadowedEternityTombstones.size() == 0) { + if (datasourceToNonOvershadowedEternityTombstones.isEmpty()) { log.debug("No non-overshadowed eternity tombstones found."); return params; } @@ -102,7 +102,7 @@ public DruidCoordinatorRuntimeParams run(final DruidCoordinatorRuntimeParams par datasourceToNonOvershadowedEternityTombstones.forEach((datasource, nonOvershadowedEternityTombstones) -> { final RowKey datasourceKey = RowKey.of(Dimension.DATASOURCE, datasource); stats.add(Stats.Segments.UNNEEDED_ETERNITY_TOMBSTONE, datasourceKey, nonOvershadowedEternityTombstones.size()); - final int unusedCount = deleteHandler.markSegmentsAsUnused(nonOvershadowedEternityTombstones); + final int unusedCount = deleteHandler.markSegmentsAsUnused(datasource, nonOvershadowedEternityTombstones); log.info( "Successfully marked [%d] non-overshadowed eternity tombstones[%s] of datasource[%s] as unused.", unusedCount, @@ -127,7 +127,7 @@ public DruidCoordinatorRuntimeParams run(final DruidCoordinatorRuntimeParams par *
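The coordinator change above is the heart of the patch: duties now route segment-state updates through the Overlord API instead of writing to the metadata store directly. A condensed, self-contained sketch of that delegation path follows; class and method names here are illustrative, not the patch's exact code.

    import com.google.common.util.concurrent.ListenableFuture;
    import org.apache.druid.common.guava.FutureUtils;
    import org.apache.druid.rpc.indexing.OverlordClient;
    import org.apache.druid.rpc.indexing.SegmentUpdateResponse;
    import org.apache.druid.server.http.SegmentsToUpdateFilter;
    import org.apache.druid.timeline.SegmentId;

    import java.util.Set;
    import java.util.stream.Collectors;

    class MarkUnusedViaOverlordSketch
    {
      /**
       * Stringify the segment IDs into a SegmentsToUpdateFilter, POST it to the
       * Overlord, and block on the response, mirroring the delegation added in
       * DruidCoordinator#markSegmentsAsUnused.
       */
      static int markUnused(OverlordClient overlordClient, String dataSource, Set<SegmentId> segmentIds)
      {
        final SegmentsToUpdateFilter filter = new SegmentsToUpdateFilter(
            null,
            segmentIds.stream().map(SegmentId::toString).collect(Collectors.toSet()),
            null
        );
        final ListenableFuture<SegmentUpdateResponse> future =
            overlordClient.markSegmentsAsUnused(dataSource, filter);
        return FutureUtils.getUnchecked(future, true).getNumChangedSegments();
      }
    }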

* * @param dataSourcesSnapshot the datasources snapshot for segments timeline - * @return the set of non-overshadowed eternity tombstones grouped by datasource + * @return Map from datasource to set of non-overshadowed eternity tombstones */ private Map> determineNonOvershadowedEternityTombstones(final DataSourcesSnapshot dataSourcesSnapshot) { diff --git a/server/src/main/java/org/apache/druid/server/coordinator/duty/MarkOvershadowedSegmentsAsUnused.java b/server/src/main/java/org/apache/druid/server/coordinator/duty/MarkOvershadowedSegmentsAsUnused.java index 9574df1b720b..596f9df7017f 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/duty/MarkOvershadowedSegmentsAsUnused.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/duty/MarkOvershadowedSegmentsAsUnused.java @@ -119,7 +119,7 @@ public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params) stats.add(Stats.Segments.OVERSHADOWED, datasourceKey, unusedSegments.size()); final Stopwatch updateTime = Stopwatch.createStarted(); - int updatedCount = deleteHandler.markSegmentsAsUnused(unusedSegments); + int updatedCount = deleteHandler.markSegmentsAsUnused(datasource, unusedSegments); log.info( "Marked [%d] segments of datasource[%s] as unused in [%,d]ms.", updatedCount, datasource, updateTime.millisElapsed() diff --git a/server/src/main/java/org/apache/druid/server/coordinator/duty/MetadataAction.java b/server/src/main/java/org/apache/druid/server/coordinator/duty/MetadataAction.java index f890298cb880..dd6e886a4bbd 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/duty/MetadataAction.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/duty/MetadataAction.java @@ -34,7 +34,7 @@ public final class MetadataAction @FunctionalInterface public interface DeleteSegments { - int markSegmentsAsUnused(Set segmentIds); + int markSegmentsAsUnused(String datasource, Set segmentIds); } @FunctionalInterface diff --git a/server/src/main/java/org/apache/druid/server/coordinator/duty/RunRules.java b/server/src/main/java/org/apache/druid/server/coordinator/duty/RunRules.java index c2f8751f19ea..f51ea95673c2 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/duty/RunRules.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/duty/RunRules.java @@ -125,7 +125,7 @@ private void processSegmentDeletes( { segmentAssigner.getSegmentsToDelete().forEach((datasource, segmentIds) -> { final Stopwatch stopwatch = Stopwatch.createStarted(); - int numUpdatedSegments = deleteHandler.markSegmentsAsUnused(segmentIds); + int numUpdatedSegments = deleteHandler.markSegmentsAsUnused(datasource, segmentIds); RowKey rowKey = RowKey.of(Dimension.DATASOURCE, datasource); runStats.add(Stats.Segments.DELETED, rowKey, numUpdatedSegments); diff --git a/server/src/main/java/org/apache/druid/server/http/DataSourcesResource.java b/server/src/main/java/org/apache/druid/server/http/DataSourcesResource.java index 21bf0baa5bde..52fe5dbb7285 100644 --- a/server/src/main/java/org/apache/druid/server/http/DataSourcesResource.java +++ b/server/src/main/java/org/apache/druid/server/http/DataSourcesResource.java @@ -19,13 +19,11 @@ package org.apache.druid.server.http; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import 
com.google.common.collect.Iterables; +import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; import com.sun.jersey.spi.container.ResourceFilters; import it.unimi.dsi.fastutil.objects.Object2LongMap; @@ -53,7 +51,9 @@ import org.apache.druid.metadata.SegmentsMetadataManager; import org.apache.druid.query.SegmentDescriptor; import org.apache.druid.query.TableDataSource; +import org.apache.druid.rpc.HttpResponseException; import org.apache.druid.rpc.indexing.OverlordClient; +import org.apache.druid.rpc.indexing.SegmentUpdateResponse; import org.apache.druid.server.coordination.DruidServerMetadata; import org.apache.druid.server.coordinator.DruidCoordinator; import org.apache.druid.server.coordinator.rules.LoadRule; @@ -67,7 +67,7 @@ import org.apache.druid.timeline.TimelineObjectHolder; import org.apache.druid.timeline.VersionedIntervalTimeline; import org.apache.druid.timeline.partition.PartitionChunk; -import org.apache.druid.utils.CollectionUtils; +import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -122,7 +122,7 @@ public DataSourcesResource( CoordinatorServerView serverInventoryView, SegmentsMetadataManager segmentsMetadataManager, MetadataRuleManager metadataRuleManager, - @Nullable OverlordClient overlordClient, + OverlordClient overlordClient, AuthorizerMapper authorizerMapper, DruidCoordinator coordinator, AuditManager auditManager @@ -189,17 +189,33 @@ private interface SegmentUpdateOperation int perform(); } + private interface RemoteSegmentUpdateOperation + { + ListenableFuture perform(); + } + + /** + * @deprecated Use {@code OverlordDataSourcesResource#markAllNonOvershadowedSegmentsAsUsed} instead. + */ + @Deprecated @POST @Path("/{dataSourceName}") @Consumes(MediaType.APPLICATION_JSON) @ResourceFilters(DatasourceResourceFilter.class) public Response markAsUsedAllNonOvershadowedSegments(@PathParam("dataSourceName") final String dataSourceName) { - SegmentUpdateOperation operation = () -> segmentsMetadataManager + SegmentUpdateOperation metadataOperation = () -> segmentsMetadataManager .markAsUsedAllNonOvershadowedSegmentsInDataSource(dataSourceName); - return performSegmentUpdate(dataSourceName, operation); + RemoteSegmentUpdateOperation remoteOperation = () -> overlordClient + .markNonOvershadowedSegmentsAsUsed(dataSourceName); + return updateSegmentsViaOverlord(dataSourceName, remoteOperation) + .orUpdateMetadataIf404(metadataOperation); } + /** + * @deprecated Use {@code OverlordDataSourcesResource#markNonOvershadowedSegmentsAsUsed} instead. 
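Since the deprecated coordinator endpoints keep their paths and payload shape, existing callers need no changes; only the backing implementation moves to the Overlord. A sketch of the markUsed request body as a client might build it; this is illustrative, and DefaultObjectMapper is assumed here because it knows how to render Joda intervals as ISO strings.

    import org.apache.druid.jackson.DefaultObjectMapper;
    import org.apache.druid.java.util.common.Intervals;
    import org.apache.druid.server.http.SegmentsToUpdateFilter;

    class MarkUsedPayloadDemo
    {
      public static void main(String[] args) throws Exception
      {
        // Body for POST /druid/indexer/v1/datasources/{dataSourceName}/markUsed
        final SegmentsToUpdateFilter byInterval =
            new SegmentsToUpdateFilter(Intervals.of("2024-01-01/2024-02-01"), null, null);
        // Prints something like: {"interval":"2024-01-01T00:00:00.000Z/2024-02-01T00:00:00.000Z"}
        System.out.println(DefaultObjectMapper.INSTANCE.writeValueAsString(byInterval));
      }
    }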
+ */ + @Deprecated @POST @Path("/{dataSourceName}/markUsed") @Produces(MediaType.APPLICATION_JSON) @@ -215,7 +231,7 @@ public Response markAsUsedNonOvershadowedSegments( .entity(SegmentsToUpdateFilter.INVALID_PAYLOAD_ERROR_MESSAGE) .build(); } else { - SegmentUpdateOperation operation = () -> { + SegmentUpdateOperation metadataOperation = () -> { final Interval interval = payload.getInterval(); final List versions = payload.getVersions(); if (interval != null) { @@ -241,10 +257,17 @@ public Response markAsUsedNonOvershadowedSegments( } }; - return performSegmentUpdate(dataSourceName, operation); + RemoteSegmentUpdateOperation remoteOperation + = () -> overlordClient.markNonOvershadowedSegmentsAsUsed(dataSourceName, payload); + return updateSegmentsViaOverlord(dataSourceName, remoteOperation) + .orUpdateMetadataIf404(metadataOperation); } } + /** + * @deprecated Use {@code OverlordDataSourcesResource#markSegmentsAsUnused} instead. + */ + @Deprecated @POST @Path("/{dataSourceName}/markUnused") @ResourceFilters(DatasourceResourceFilter.class) @@ -262,7 +285,7 @@ public Response markSegmentsAsUnused( .entity(SegmentsToUpdateFilter.INVALID_PAYLOAD_ERROR_MESSAGE) .build(); } else { - SegmentUpdateOperation operation = () -> { + SegmentUpdateOperation metadataOperation = () -> { final Interval interval = payload.getInterval(); final List versions = payload.getVersions(); final int numUpdatedSegments; @@ -294,7 +317,11 @@ public Response markSegmentsAsUnused( ); return numUpdatedSegments; }; - return performSegmentUpdate(dataSourceName, operation); + + RemoteSegmentUpdateOperation remoteOperation + = () -> overlordClient.markSegmentsAsUnused(dataSourceName, payload); + return updateSegmentsViaOverlord(dataSourceName, remoteOperation) + .orUpdateMetadataIf404(metadataOperation); } } @@ -308,7 +335,7 @@ private static Response performSegmentUpdate(String dataSourceName, SegmentUpdat { try { int numChangedSegments = operation.perform(); - return Response.ok(ImmutableMap.of("numChangedSegments", numChangedSegments)).build(); + return Response.ok(new SegmentUpdateResponse(numChangedSegments)).build(); } catch (DruidException e) { return ServletResourceUtils.buildErrorResponseFrom(e); @@ -317,17 +344,65 @@ private static Response performSegmentUpdate(String dataSourceName, SegmentUpdat log.error(e, "Error occurred while updating segments for datasource[%s]", dataSourceName); return Response .serverError() - .entity(ImmutableMap.of("error", "Exception occurred.", "message", Throwables.getRootCause(e).toString())) + .entity(ImmutableMap.of("error", "Server error", "message", Throwables.getRootCause(e).toString())) .build(); } } + private static RemoteOrMetadataUpdate updateSegmentsViaOverlord( + String dataSourceName, + RemoteSegmentUpdateOperation operation + ) + { + return new RemoteOrMetadataUpdate(dataSourceName, operation); + } + + private static class RemoteOrMetadataUpdate + { + private final String dataSourceName; + private final RemoteSegmentUpdateOperation remoteOperation; + + private RemoteOrMetadataUpdate( + String dataSourceName, + RemoteSegmentUpdateOperation remoteOperation + ) + { + this.dataSourceName = dataSourceName; + this.remoteOperation = remoteOperation; + } + + Response orUpdateMetadataIf404(SegmentUpdateOperation operation) + { + try { + SegmentUpdateResponse response = FutureUtils.getUnchecked(remoteOperation.perform(), true); + return Response.ok(response).build(); + } + catch (DruidException e) { + return ServletResourceUtils.buildErrorResponseFrom(e); + } + catch (Exception e) 
{ + log.error(e, "Error occurred while updating segments for datasource[%s]", dataSourceName); + + final Throwable rootCause = Throwables.getRootCause(e); + if (rootCause instanceof HttpResponseException) { + HttpResponseStatus status = ((HttpResponseException) rootCause).getResponse().getStatus(); + if (status.getCode() == 404) { + // Overlord is probably still on old version, update metadata store directly + return performSegmentUpdate(dataSourceName, operation); + } + } + + return Response + .serverError() + .entity(ImmutableMap.of("error", "Unknown server error", "message", rootCause.toString())) + .build(); + } + } + } + /** - * When this method is removed, a new method needs to be introduced corresponding to - * the end point "DELETE /druid/coordinator/v1/datasources/{dataSourceName}" (with no query parameters). - * Ultimately we want to have no method with kill parameter - - * DELETE `{dataSourceName}` to mark all segments belonging to a data source as unused, and - * DELETE `{dataSourceName}/intervals/{interval}` to kill unused segments within an interval + * @deprecated Use {@code OverlordDataSourcesResource#markAllSegmentsAsUnused} + * or {@link #killUnusedSegmentsInInterval} instead. */ @DELETE @Deprecated @@ -341,31 +416,29 @@ public Response markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval( @Context HttpServletRequest req ) { - if (overlordClient == null) { - return Response.ok(ImmutableMap.of("error", "no indexing service found")).build(); - } - - boolean killSegments = kill != null && Boolean.valueOf(kill); - if (killSegments) { + if (Boolean.parseBoolean(kill)) { return killUnusedSegmentsInInterval(dataSourceName, interval, req); } else { - SegmentUpdateOperation operation = () -> segmentsMetadataManager.markAsUnusedAllSegmentsInDataSource(dataSourceName); - final Response response = performSegmentUpdate(dataSourceName, operation); - - final int responseCode = response.getStatus(); - if (responseCode >= 200 && responseCode < 300) { - auditManager.doAudit( - AuditEntry.builder() - .key(dataSourceName) - .type("segment.markUnused") - .payload(response.getEntity()) - .auditInfo(AuthorizationUtils.buildAuditInfo(req)) - .request(AuthorizationUtils.buildRequestInfo("coordinator", req)) - .build() - ); - } + SegmentUpdateOperation metadataOperation = () -> { + int numUpdatedSegments = segmentsMetadataManager.markAsUnusedAllSegmentsInDataSource(dataSourceName); + if (numUpdatedSegments > 0) { + auditManager.doAudit( + AuditEntry.builder() + .key(dataSourceName) + .type("segment.markUnused") + .payload(new SegmentUpdateResponse(numUpdatedSegments)) + .auditInfo(AuthorizationUtils.buildAuditInfo(req)) + .request(AuthorizationUtils.buildRequestInfo("coordinator", req)) + .build() + ); + } + return numUpdatedSegments; + }; - return response; + RemoteSegmentUpdateOperation remoteOperation + = () -> overlordClient.markSegmentsAsUnused(dataSourceName); + return updateSegmentsViaOverlord(dataSourceName, remoteOperation) + .orUpdateMetadataIf404(metadataOperation); } } @@ -379,9 +452,6 @@ public Response killUnusedSegmentsInInterval( @Context final HttpServletRequest req ) { - if (overlordClient == null) { - return Response.ok(ImmutableMap.of("error", "no indexing service found")).build(); - } if (StringUtils.contains(interval, '_')) { log.warn("Use interval with '/', not '_': [%s] given", interval); } @@ -696,6 +766,10 @@ public Response getServedSegment( return Response.noContent().build(); } + /** + * @deprecated Use {@code OverlordDataSourcesResource#markSegmentAsUnused} instead. 
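The rolling-upgrade behavior above bears spelling out: the resource tries the Overlord first and falls back to the metadata store only when the root cause is an HTTP 404, which indicates an Overlord that predates these endpoints. A reduced sketch of that control flow; the functional types are illustrative, not the patch's helper classes.

    import com.google.common.base.Throwables;
    import org.apache.druid.rpc.HttpResponseException;

    import java.util.concurrent.Callable;
    import java.util.function.IntSupplier;

    class RemoteWithMetadataFallbackSketch
    {
      // Try the Overlord API; on a 404 root cause (old Overlord), fall back to the
      // direct metadata-store update so the coordinator keeps working mid-upgrade.
      static int updateSegments(Callable<Integer> remoteCall, IntSupplier metadataFallback) throws Exception
      {
        try {
          return remoteCall.call();
        }
        catch (Exception e) {
          final Throwable rootCause = Throwables.getRootCause(e);
          if (rootCause instanceof HttpResponseException
              && ((HttpResponseException) rootCause).getResponse().getStatus().getCode() == 404) {
            return metadataFallback.getAsInt();
          }
          throw e;
        }
      }
    }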
+ */ + @Deprecated @DELETE @Path("/{dataSourceName}/segments/{segmentId}") @ResourceFilters(DatasourceResourceFilter.class) @@ -705,21 +779,54 @@ public Response markSegmentAsUnused( ) { final SegmentId segmentId = SegmentId.tryParse(dataSourceName, segmentIdString); - final boolean segmentStateChanged = segmentId != null && segmentsMetadataManager.markSegmentAsUnused(segmentId); - return Response.ok(ImmutableMap.of("segmentStateChanged", segmentStateChanged)).build(); + if (segmentId == null) { + return Response.status(Response.Status.BAD_REQUEST).entity( + org.apache.druid.java.util.common.StringUtils.format( + "Could not parse Segment ID[%s] for DataSource[%s]", + segmentIdString, dataSourceName + ) + ).build(); + } + + SegmentUpdateOperation metadataOperation + = () -> segmentsMetadataManager.markSegmentAsUnused(segmentId) ? 1 : 0; + RemoteSegmentUpdateOperation remoteOperation + = () -> overlordClient.markSegmentAsUnused(segmentId); + + return updateSegmentsViaOverlord(dataSourceName, remoteOperation) + .orUpdateMetadataIf404(metadataOperation); } + /** + * @deprecated Use {@code OverlordDataSourcesResource#markSegmentAsUsed} instead. + */ + @Deprecated @POST @Path("/{dataSourceName}/segments/{segmentId}") @Consumes(MediaType.APPLICATION_JSON) @ResourceFilters(DatasourceResourceFilter.class) public Response markSegmentAsUsed( @PathParam("dataSourceName") String dataSourceName, - @PathParam("segmentId") String segmentId + @PathParam("segmentId") String segmentIdString ) { - boolean segmentStateChanged = segmentsMetadataManager.markSegmentAsUsed(segmentId); - return Response.ok().entity(ImmutableMap.of("segmentStateChanged", segmentStateChanged)).build(); + final SegmentId segmentId = SegmentId.tryParse(dataSourceName, segmentIdString); + if (segmentId == null) { + return Response.status(Response.Status.BAD_REQUEST).entity( + org.apache.druid.java.util.common.StringUtils.format( + "Could not parse Segment ID[%s] for DataSource[%s]", + segmentIdString, dataSourceName + ) + ).build(); + } + + SegmentUpdateOperation metadataOperation + = () -> segmentsMetadataManager.markSegmentAsUsed(segmentIdString) ? 1 : 0; + RemoteSegmentUpdateOperation remoteOperation + = () -> overlordClient.markSegmentAsUsed(segmentId); + + return updateSegmentsViaOverlord(dataSourceName, remoteOperation) + .orUpdateMetadataIf404(metadataOperation); } @GET @@ -986,61 +1093,4 @@ static boolean isSegmentLoaded(Iterable servedSegments return false; } - /** - * Either {@code interval} or {@code segmentIds} array must be specified, but not both. - * {@code versions} may be optionally specified only when {@code interval} is provided. - */ - @VisibleForTesting - static class SegmentsToUpdateFilter - { - private final Interval interval; - private final Set segmentIds; - private final List versions; - - private static final String INVALID_PAYLOAD_ERROR_MESSAGE = "Invalid request payload. Specify either 'interval' or 'segmentIds', but not both." 
- + " Optionally, include 'versions' only when 'interval' is provided."; - - @JsonCreator - public SegmentsToUpdateFilter( - @JsonProperty("interval") @Nullable Interval interval, - @JsonProperty("segmentIds") @Nullable Set segmentIds, - @JsonProperty("versions") @Nullable List versions - ) - { - this.interval = interval; - this.segmentIds = segmentIds; - this.versions = versions; - } - - @Nullable - @JsonProperty - public Interval getInterval() - { - return interval; - } - - @Nullable - @JsonProperty - public Set getSegmentIds() - { - return segmentIds; - } - - @Nullable - @JsonProperty - public List getVersions() - { - return versions; - } - - private boolean isValid() - { - final boolean hasSegmentIds = !CollectionUtils.isNullOrEmpty(segmentIds); - if (interval == null) { - return hasSegmentIds && CollectionUtils.isNullOrEmpty(versions); - } else { - return !hasSegmentIds; - } - } - } } diff --git a/server/src/main/java/org/apache/druid/server/http/SegmentsToUpdateFilter.java b/server/src/main/java/org/apache/druid/server/http/SegmentsToUpdateFilter.java new file mode 100644 index 000000000000..9c8f17946182 --- /dev/null +++ b/server/src/main/java/org/apache/druid/server/http/SegmentsToUpdateFilter.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.server.http; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.druid.utils.CollectionUtils; +import org.joda.time.Interval; + +import javax.annotation.Nullable; +import java.util.List; +import java.util.Set; + +/** + * Filter to identify segments that need to be updated via REST APIs. + */ +public class SegmentsToUpdateFilter +{ + private final Interval interval; + private final Set segmentIds; + private final List versions; + + public static final String INVALID_PAYLOAD_ERROR_MESSAGE = + "Invalid request payload. Specify either 'interval' or 'segmentIds', but not both." + + " Optionally, include 'versions' only when 'interval' is provided."; + + @JsonCreator + public SegmentsToUpdateFilter( + @JsonProperty("interval") @Nullable Interval interval, + @JsonProperty("segmentIds") @Nullable Set segmentIds, + @JsonProperty("versions") @Nullable List versions + ) + { + this.interval = interval; + this.segmentIds = segmentIds; + this.versions = versions; + } + + @Nullable + @JsonProperty + public Interval getInterval() + { + return interval; + } + + @Nullable + @JsonProperty + public Set getSegmentIds() + { + return segmentIds; + } + + @Nullable + @JsonProperty + public List getVersions() + { + return versions; + } + + /** + * The filter is valid if either {@code interval} or {@code segmentIds} is specified, but not both. 
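The validity rule is easy to misread, so a few concrete cases may help; this demo is illustrative only and the segment ID string is fake.

    import org.apache.druid.java.util.common.Intervals;
    import org.apache.druid.server.http.SegmentsToUpdateFilter;

    import java.util.Collections;

    class SegmentsToUpdateFilterValidityDemo
    {
      public static void main(String[] args)
      {
        // Valid: interval alone, optionally narrowed by versions
        System.out.println(new SegmentsToUpdateFilter(
            Intervals.of("2024/P1Y"), null, null).isValid());                                  // true
        System.out.println(new SegmentsToUpdateFilter(
            Intervals.of("2024/P1Y"), null, Collections.singletonList("v1")).isValid());      // true

        // Valid: segment IDs alone
        System.out.println(new SegmentsToUpdateFilter(
            null, Collections.singleton("someSegmentId"), null).isValid());                   // true

        // Invalid: both interval and segment IDs, or versions without an interval
        System.out.println(new SegmentsToUpdateFilter(
            Intervals.of("2024/P1Y"), Collections.singleton("someSegmentId"), null).isValid()); // false
        System.out.println(new SegmentsToUpdateFilter(
            null, Collections.singleton("someSegmentId"),
            Collections.singletonList("v1")).isValid());                                      // false
      }
    }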
+ * {@code versions} may be optionally specified only when {@code interval} is provided. + */ + public boolean isValid() + { + final boolean hasSegmentIds = !CollectionUtils.isNullOrEmpty(segmentIds); + if (interval == null) { + return hasSegmentIds && CollectionUtils.isNullOrEmpty(versions); + } else { + return !hasSegmentIds; + } + } +} diff --git a/server/src/test/java/org/apache/druid/rpc/indexing/OverlordClientImplTest.java b/server/src/test/java/org/apache/druid/rpc/indexing/OverlordClientImplTest.java index 1167d10502d7..7ef15c931176 100644 --- a/server/src/test/java/org/apache/druid/rpc/indexing/OverlordClientImplTest.java +++ b/server/src/test/java/org/apache/druid/rpc/indexing/OverlordClientImplTest.java @@ -47,9 +47,12 @@ import org.apache.druid.rpc.HttpResponseException; import org.apache.druid.rpc.MockServiceClient; import org.apache.druid.rpc.RequestBuilder; +import org.apache.druid.segment.TestDataSource; import org.apache.druid.server.compaction.CompactionProgressResponse; import org.apache.druid.server.compaction.CompactionStatusResponse; import org.apache.druid.server.coordinator.AutoCompactionSnapshot; +import org.apache.druid.server.http.SegmentsToUpdateFilter; +import org.apache.druid.timeline.DataSegment; import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import org.jboss.netty.handler.codec.http.DefaultHttpResponse; @@ -531,4 +534,216 @@ public void test_getBytesAwaitingCompaction() overlordClient.getBytesAwaitingCompaction("ds1").get() ); } + + @Test + public void test_markSegmentAsUsed() throws Exception + { + final DataSegment wikiSegment = DataSegment.builder() + .dataSource(TestDataSource.WIKI) + .interval(Intervals.of("2024-12-01/P1D")) + .version("v1") + .size(100) + .build(); + + final String url = StringUtils.format( + "/druid/indexer/v1/datasources/%s/segments/%s", + TestDataSource.WIKI, StringUtils.urlEncode(wikiSegment.getId().toString()) + ); + serviceClient.expectAndRespond( + new RequestBuilder(HttpMethod.POST, url), + HttpResponseStatus.OK, + Collections.emptyMap(), + DefaultObjectMapper.INSTANCE.writeValueAsBytes(new SegmentUpdateResponse(1)) + ); + + Assert.assertEquals( + new SegmentUpdateResponse(1), + overlordClient.markSegmentAsUsed(wikiSegment.getId()).get() + ); + } + + @Test + public void test_markSegmentAsUnused() throws Exception + { + final DataSegment wikiSegment = DataSegment.builder() + .dataSource(TestDataSource.WIKI) + .interval(Intervals.of("2024-12-01/P1D")) + .version("v1") + .size(100) + .build(); + + final String url = StringUtils.format( + "/druid/indexer/v1/datasources/%s/segments/%s", + TestDataSource.WIKI, StringUtils.urlEncode(wikiSegment.getId().toString()) + ); + serviceClient.expectAndRespond( + new RequestBuilder(HttpMethod.DELETE, url), + HttpResponseStatus.OK, + Collections.emptyMap(), + DefaultObjectMapper.INSTANCE.writeValueAsBytes(new SegmentUpdateResponse(1)) + ); + + Assert.assertEquals( + new SegmentUpdateResponse(1), + overlordClient.markSegmentAsUnused(wikiSegment.getId()).get() + ); + } + + @Test + public void test_markNonOvershadowedSegmentsAsUsed_bySegmentIds() throws Exception + { + final DataSegment wikiSegment = DataSegment.builder() + .dataSource(TestDataSource.WIKI) + .interval(Intervals.of("2024-12-01/P1D")) + .version("v1") + .size(100) + .build(); + + final String url = StringUtils.format( + "/druid/indexer/v1/datasources/%s/markUsed", + TestDataSource.WIKI + ); + + final SegmentsToUpdateFilter segmentFilter = new SegmentsToUpdateFilter( + null, + 
Collections.singleton(wikiSegment.getId().toString()), + null + ); + serviceClient.expectAndRespond( + new RequestBuilder(HttpMethod.POST, url) + .jsonContent(jsonMapper, segmentFilter), + HttpResponseStatus.OK, + Collections.emptyMap(), + DefaultObjectMapper.INSTANCE.writeValueAsBytes(new SegmentUpdateResponse(1)) + ); + + Assert.assertEquals( + new SegmentUpdateResponse(1), + overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter).get() + ); + } + + @Test + public void test_markSegmentsAsUnused_bySegmentIds() throws Exception + { + final DataSegment wikiSegment = DataSegment.builder() + .dataSource(TestDataSource.WIKI) + .interval(Intervals.of("2024-12-01/P1D")) + .version("v1") + .size(100) + .build(); + + final String url = StringUtils.format( + "/druid/indexer/v1/datasources/%s/markUnused", + TestDataSource.WIKI + ); + + final SegmentsToUpdateFilter segmentFilter = new SegmentsToUpdateFilter( + null, + Collections.singleton(wikiSegment.getId().toString()), + null + ); + serviceClient.expectAndRespond( + new RequestBuilder(HttpMethod.POST, url) + .jsonContent(jsonMapper, segmentFilter), + HttpResponseStatus.OK, + Collections.emptyMap(), + DefaultObjectMapper.INSTANCE.writeValueAsBytes(new SegmentUpdateResponse(1)) + ); + + Assert.assertEquals( + new SegmentUpdateResponse(1), + overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter).get() + ); + } + + @Test + public void test_markNonOvershadowedSegmentsAsUsed_byInterval() throws Exception + { + final String url = StringUtils.format( + "/druid/indexer/v1/datasources/%s/markUsed", + TestDataSource.WIKI + ); + + final SegmentsToUpdateFilter segmentFilter + = new SegmentsToUpdateFilter(Intervals.of("2024/P1Y"), null, null); + serviceClient.expectAndRespond( + new RequestBuilder(HttpMethod.POST, url) + .jsonContent(jsonMapper, segmentFilter), + HttpResponseStatus.OK, + Collections.emptyMap(), + DefaultObjectMapper.INSTANCE.writeValueAsBytes(new SegmentUpdateResponse(3)) + ); + + Assert.assertEquals( + new SegmentUpdateResponse(3), + overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter).get() + ); + } + + @Test + public void test_markSegmentsAsUnused_byInterval() throws Exception + { + final String url = StringUtils.format( + "/druid/indexer/v1/datasources/%s/markUnused", + TestDataSource.WIKI + ); + + final SegmentsToUpdateFilter segmentFilter + = new SegmentsToUpdateFilter(Intervals.of("2024/P1Y"), null, null); + serviceClient.expectAndRespond( + new RequestBuilder(HttpMethod.POST, url) + .jsonContent(jsonMapper, segmentFilter), + HttpResponseStatus.OK, + Collections.emptyMap(), + DefaultObjectMapper.INSTANCE.writeValueAsBytes(new SegmentUpdateResponse(5)) + ); + + Assert.assertEquals( + new SegmentUpdateResponse(5), + overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter).get() + ); + } + + @Test + public void test_markNonOvershadowedSegmentsAsUsed_byDatasource() throws Exception + { + final String url = StringUtils.format( + "/druid/indexer/v1/datasources/%s", + TestDataSource.WIKI + ); + + serviceClient.expectAndRespond( + new RequestBuilder(HttpMethod.POST, url), + HttpResponseStatus.OK, + Collections.emptyMap(), + DefaultObjectMapper.INSTANCE.writeValueAsBytes(new SegmentUpdateResponse(10)) + ); + + Assert.assertEquals( + new SegmentUpdateResponse(10), + overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI).get() + ); + } + + @Test + public void test_markSegmentsAsUnused_byDatasource() throws Exception + { + final String url = 
StringUtils.format( + "/druid/indexer/v1/datasources/%s", + TestDataSource.WIKI + ); + + serviceClient.expectAndRespond( + new RequestBuilder(HttpMethod.DELETE, url), + HttpResponseStatus.OK, + Collections.emptyMap(), + DefaultObjectMapper.INSTANCE.writeValueAsBytes(new SegmentUpdateResponse(11)) + ); + + Assert.assertEquals( + new SegmentUpdateResponse(11), + overlordClient.markSegmentsAsUnused(TestDataSource.WIKI).get() + ); + } } diff --git a/server/src/test/java/org/apache/druid/server/compaction/CompactionRunSimulatorTest.java b/server/src/test/java/org/apache/druid/server/compaction/CompactionRunSimulatorTest.java index 690a4bcfc7cb..2c65a8f147fb 100644 --- a/server/src/test/java/org/apache/druid/server/compaction/CompactionRunSimulatorTest.java +++ b/server/src/test/java/org/apache/druid/server/compaction/CompactionRunSimulatorTest.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import org.apache.druid.client.indexing.NoopOverlordClient; import org.apache.druid.client.indexing.TaskPayloadResponse; import org.apache.druid.indexer.CompactionEngine; import org.apache.druid.indexer.TaskStatus; @@ -33,6 +32,7 @@ import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.java.util.common.parsers.CloseableIterator; import org.apache.druid.metadata.LockFilterPolicy; +import org.apache.druid.rpc.indexing.NoopOverlordClient; import org.apache.druid.server.coordinator.CreateDataSegments; import org.apache.druid.server.coordinator.DataSourceCompactionConfig; import org.apache.druid.server.coordinator.DruidCompactionConfig; diff --git a/server/src/test/java/org/apache/druid/server/coordinator/BalanceSegmentsProfiler.java b/server/src/test/java/org/apache/druid/server/coordinator/BalanceSegmentsProfiler.java index 5cc36f219784..505d27a55cef 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/BalanceSegmentsProfiler.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/BalanceSegmentsProfiler.java @@ -47,7 +47,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; -import java.util.Set; /** * TODO convert benchmarks to JMH @@ -143,7 +142,7 @@ public void bigProfiler() .build(); BalanceSegments tester = new BalanceSegments(Duration.standardMinutes(1)); - RunRules runner = new RunRules(Set::size, manager::getRulesWithDefault); + RunRules runner = new RunRules((ds, set) -> set.size(), manager::getRulesWithDefault); watch.start(); DruidCoordinatorRuntimeParams balanceParams = tester.run(params); DruidCoordinatorRuntimeParams assignParams = runner.run(params); diff --git a/server/src/test/java/org/apache/druid/server/coordinator/duty/CompactSegmentsTest.java b/server/src/test/java/org/apache/druid/server/coordinator/duty/CompactSegmentsTest.java index 31789dd540ca..0f29ab1547e0 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/duty/CompactSegmentsTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/duty/CompactSegmentsTest.java @@ -39,7 +39,6 @@ import org.apache.druid.client.indexing.ClientMSQContext; import org.apache.druid.client.indexing.ClientTaskQuery; import org.apache.druid.client.indexing.IndexingTotalWorkerCapacityInfo; -import org.apache.druid.client.indexing.NoopOverlordClient; import org.apache.druid.client.indexing.TaskPayloadResponse; import org.apache.druid.common.config.NullHandling; import 
org.apache.druid.data.input.impl.DimensionsSpec; @@ -66,6 +65,7 @@ import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.apache.druid.query.filter.SelectorDimFilter; +import org.apache.druid.rpc.indexing.NoopOverlordClient; import org.apache.druid.rpc.indexing.OverlordClient; import org.apache.druid.segment.incremental.OnheapIncrementalIndex; import org.apache.druid.segment.indexing.BatchIOConfig; @@ -105,7 +105,6 @@ import org.mockito.Mockito; import javax.annotation.Nullable; -import java.net.URI; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -2080,12 +2079,6 @@ private TestOverlordClient(ObjectMapper jsonMapper) this.jsonMapper = jsonMapper; } - @Override - public ListenableFuture findCurrentLeader() - { - throw new UnsupportedOperationException(); - } - @Override public ListenableFuture runTask(String taskId, Object taskObject) { diff --git a/server/src/test/java/org/apache/druid/server/coordinator/duty/KillStalePendingSegmentsTest.java b/server/src/test/java/org/apache/druid/server/coordinator/duty/KillStalePendingSegmentsTest.java index 4833abac41aa..e916244f36b2 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/duty/KillStalePendingSegmentsTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/duty/KillStalePendingSegmentsTest.java @@ -21,7 +21,6 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import org.apache.druid.client.indexing.NoopOverlordClient; import org.apache.druid.indexer.RunnerTaskState; import org.apache.druid.indexer.TaskLocation; import org.apache.druid.indexer.TaskState; @@ -30,6 +29,7 @@ import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.parsers.CloseableIterator; +import org.apache.druid.rpc.indexing.NoopOverlordClient; import org.apache.druid.segment.TestDataSource; import org.apache.druid.server.coordinator.CoordinatorDynamicConfig; import org.apache.druid.server.coordinator.DruidCoordinatorRuntimeParams; diff --git a/server/src/test/java/org/apache/druid/server/coordinator/duty/KillUnusedSegmentsTest.java b/server/src/test/java/org/apache/druid/server/coordinator/duty/KillUnusedSegmentsTest.java index ce65f30ede75..5c18616ab58e 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/duty/KillUnusedSegmentsTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/duty/KillUnusedSegmentsTest.java @@ -27,7 +27,6 @@ import org.apache.druid.client.indexing.IndexingTotalWorkerCapacityInfo; import org.apache.druid.client.indexing.IndexingWorker; import org.apache.druid.client.indexing.IndexingWorkerInfo; -import org.apache.druid.client.indexing.NoopOverlordClient; import org.apache.druid.indexer.RunnerTaskState; import org.apache.druid.indexer.TaskLocation; import org.apache.druid.indexer.TaskState; @@ -42,6 +41,7 @@ import org.apache.druid.metadata.SqlSegmentsMetadataManager; import org.apache.druid.metadata.SqlSegmentsMetadataManagerTestBase; import org.apache.druid.metadata.TestDerbyConnector; +import org.apache.druid.rpc.indexing.NoopOverlordClient; import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.metadata.CentralizedDatasourceSchemaConfig; import org.apache.druid.server.coordinator.CoordinatorDynamicConfig; diff --git 
a/server/src/test/java/org/apache/druid/server/coordinator/duty/MarkEternityTombstonesAsUnusedTest.java b/server/src/test/java/org/apache/druid/server/coordinator/duty/MarkEternityTombstonesAsUnusedTest.java index 76a4bbbedd65..37fbe1447834 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/duty/MarkEternityTombstonesAsUnusedTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/duty/MarkEternityTombstonesAsUnusedTest.java @@ -484,7 +484,9 @@ private void runEternityTombstonesDutyAndVerify( final ImmutableList expectedUsedSegments ) { - params = new MarkEternityTombstonesAsUnused(segmentsMetadataManager::markSegmentsAsUnused).run(params); + params = new MarkEternityTombstonesAsUnused( + (ds, segmentIds) -> segmentsMetadataManager.markSegmentsAsUnused(segmentIds) + ).run(params); final Set actualUsedSegments = Sets.newHashSet(segmentsMetadataManager.iterateAllUsedSegments()); diff --git a/server/src/test/java/org/apache/druid/server/coordinator/duty/MarkOvershadowedSegmentsAsUnusedTest.java b/server/src/test/java/org/apache/druid/server/coordinator/duty/MarkOvershadowedSegmentsAsUnusedTest.java index 43cad2391f25..a06f1d3c7322 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/duty/MarkOvershadowedSegmentsAsUnusedTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/duty/MarkOvershadowedSegmentsAsUnusedTest.java @@ -111,7 +111,9 @@ public void testRun(String serverType) Assert.assertTrue(timeline.isOvershadowed(segmentV1)); // Run the duty and verify that the overshadowed segments are marked unused - params = new MarkOvershadowedSegmentsAsUnused(segmentsMetadataManager::markSegmentsAsUnused).run(params); + params = new MarkOvershadowedSegmentsAsUnused( + (ds, segmentIds) -> segmentsMetadataManager.markSegmentsAsUnused(segmentIds) + ).run(params); Set updatedUsedSegments = Sets.newHashSet(segmentsMetadataManager.iterateAllUsedSegments()); Assert.assertEquals(1, updatedUsedSegments.size()); diff --git a/server/src/test/java/org/apache/druid/server/coordinator/duty/RunRulesTest.java b/server/src/test/java/org/apache/druid/server/coordinator/duty/RunRulesTest.java index 774c0699b07b..f7e9ececc1ac 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/duty/RunRulesTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/duty/RunRulesTest.java @@ -99,7 +99,7 @@ public void setUp() emitter = new StubServiceEmitter("coordinator", "host"); EmittingLogger.registerEmitter(emitter); databaseRuleManager = EasyMock.createMock(MetadataRuleManager.class); - ruleRunner = new RunRules(Set::size, databaseRuleManager::getRulesWithDefault); + ruleRunner = new RunRules((ds, set) -> set.size(), databaseRuleManager::getRulesWithDefault); loadQueueManager = new SegmentLoadQueueManager(null, null); balancerExecutor = MoreExecutors.listeningDecorator(Execs.multiThreaded(1, "RunRulesTest-%d")); } diff --git a/server/src/test/java/org/apache/druid/server/coordinator/simulate/CoordinatorSimulationBuilder.java b/server/src/test/java/org/apache/druid/server/coordinator/simulate/CoordinatorSimulationBuilder.java index f82b89c68593..ddc360aa0124 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/simulate/CoordinatorSimulationBuilder.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/simulate/CoordinatorSimulationBuilder.java @@ -22,6 +22,8 @@ import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import 
com.google.common.base.Preconditions; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; import org.apache.druid.audit.AuditInfo; import org.apache.druid.client.DruidServer; import org.apache.druid.common.config.JacksonConfigManager; @@ -36,6 +38,9 @@ import org.apache.druid.java.util.http.client.HttpClient; import org.apache.druid.java.util.metrics.MetricsVerifier; import org.apache.druid.java.util.metrics.StubServiceEmitter; +import org.apache.druid.metadata.SegmentsMetadataManager; +import org.apache.druid.rpc.indexing.NoopOverlordClient; +import org.apache.druid.rpc.indexing.SegmentUpdateResponse; import org.apache.druid.segment.metadata.CentralizedDatasourceSchemaConfig; import org.apache.druid.server.compaction.CompactionStatusTracker; import org.apache.druid.server.coordinator.CoordinatorConfigManager; @@ -58,8 +63,10 @@ import org.apache.druid.server.coordinator.loading.LoadQueueTaskMaster; import org.apache.druid.server.coordinator.loading.SegmentLoadQueueManager; import org.apache.druid.server.coordinator.rules.Rule; +import org.apache.druid.server.http.SegmentsToUpdateFilter; import org.apache.druid.server.lookup.cache.LookupCoordinatorManager; import org.apache.druid.timeline.DataSegment; +import org.apache.druid.timeline.SegmentId; import org.easymock.EasyMock; import org.joda.time.Duration; @@ -67,8 +74,10 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicBoolean; @@ -201,7 +210,7 @@ public CoordinatorSimulation build() env.coordinatorInventoryView, env.serviceEmitter, env.executorFactory, - null, + new SimOverlordClient(env.metadataManager.segments()), env.loadQueueTaskMaster, env.loadQueueManager, new ServiceAnnouncer.Noop(), @@ -610,4 +619,33 @@ private void tearDown() } } + private static class SimOverlordClient extends NoopOverlordClient + { + private final SegmentsMetadataManager segmentsMetadataManager; + + private SimOverlordClient(SegmentsMetadataManager segmentsMetadataManager) + { + this.segmentsMetadataManager = segmentsMetadataManager; + } + + @Override + public ListenableFuture markSegmentsAsUnused( + String dataSource, + SegmentsToUpdateFilter filter + ) + { + final Set segmentsToUpdate = new HashSet<>(); + if (filter.getSegmentIds() != null) { + for (String idString : filter.getSegmentIds()) { + SegmentId segmentId = SegmentId.tryParse(dataSource, idString); + if (segmentId != null) { + segmentsToUpdate.add(segmentId); + } + } + } + int numUpdatedSegments = segmentsMetadataManager.markSegmentsAsUnused(segmentsToUpdate); + return Futures.immediateFuture(new SegmentUpdateResponse(numUpdatedSegments)); + } + } + } diff --git a/server/src/test/java/org/apache/druid/server/coordinator/simulate/TestSegmentsMetadataManager.java b/server/src/test/java/org/apache/druid/server/coordinator/simulate/TestSegmentsMetadataManager.java index ad8294786e6d..7b32560f9338 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/simulate/TestSegmentsMetadataManager.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/simulate/TestSegmentsMetadataManager.java @@ -20,6 +20,7 @@ package org.apache.druid.server.coordinator.simulate; import com.google.common.base.Optional; +import com.google.common.base.Predicates; import 
org.apache.druid.client.DataSourcesSnapshot; import org.apache.druid.client.ImmutableDruidDataSource; import org.apache.druid.metadata.SegmentsMetadataManager; @@ -28,16 +29,21 @@ import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.Partitions; import org.apache.druid.timeline.SegmentId; +import org.apache.druid.timeline.SegmentTimeline; import org.apache.druid.timeline.VersionedIntervalTimeline; import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nullable; import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.function.Predicate; import java.util.stream.Collectors; public class TestSegmentsMetadataManager implements SegmentsMetadataManager @@ -52,14 +58,14 @@ public void addSegment(DataSegment segment) { allSegments.put(segment.getId().toString(), segment); usedSegments.put(segment.getId().toString(), segment); - snapshot = null; + invalidateDatasourcesSnapshot(); } public void removeSegment(DataSegment segment) { allSegments.remove(segment.getId().toString()); usedSegments.remove(segment.getId().toString()); - snapshot = null; + invalidateDatasourcesSnapshot(); } @Override @@ -83,25 +89,31 @@ public boolean isPollingDatabasePeriodically() @Override public int markAsUsedAllNonOvershadowedSegmentsInDataSource(String dataSource) { - return 0; + return markNonOvershadowedSegmentsAsUsed(dataSource, Predicates.alwaysTrue()); } @Override public int markAsUsedNonOvershadowedSegmentsInInterval(String dataSource, Interval interval, @Nullable List versions) { - return 0; + final Set allowedVersions = versions == null ? null : new HashSet<>(versions); + return markNonOvershadowedSegmentsAsUsed( + dataSource, + segment -> segment.getInterval().overlaps(interval) + && (versions == null || allowedVersions.contains(segment.getVersion())) + ); } @Override public int markAsUsedNonOvershadowedSegments(String dataSource, Set segmentIds) { - return 0; + return markNonOvershadowedSegmentsAsUsed(dataSource, segment -> segmentIds.contains(segment.getId().toString())); } @Override public boolean markSegmentAsUsed(String segmentId) { - if (!allSegments.containsKey(segmentId)) { + if (!allSegments.containsKey(segmentId) + || usedSegments.containsKey(segmentId)) { return false; } @@ -112,31 +124,26 @@ public boolean markSegmentAsUsed(String segmentId) @Override public int markAsUnusedAllSegmentsInDataSource(String dataSource) { - return 0; + return markSegmentsAsUnused(segment -> segment.getDataSource().equals(dataSource)); } @Override public int markAsUnusedSegmentsInInterval(String dataSource, Interval interval, @Nullable List versions) { - return 0; + final Set eligibleVersions = versions == null ? 
null : new HashSet<>(versions); + return markSegmentsAsUnused( + segment -> segment.getDataSource().equals(dataSource) + && segment.getInterval().overlaps(interval) + && (eligibleVersions == null || eligibleVersions.contains(segment.getVersion())) + ); } @Override public int markSegmentsAsUnused(Set segmentIds) { - int numModifiedSegments = 0; - - for (SegmentId segmentId : segmentIds) { - if (allSegments.containsKey(segmentId.toString())) { - usedSegments.remove(segmentId.toString()); - ++numModifiedSegments; - } - } - - if (numModifiedSegments > 0) { - snapshot = null; - } - return numModifiedSegments; + return markSegmentsAsUnused( + segment -> segmentIds.contains(segment.getId()) + ); } @Override @@ -144,7 +151,7 @@ public boolean markSegmentAsUnused(SegmentId segmentId) { boolean updated = usedSegments.remove(segmentId.toString()) != null; if (updated) { - snapshot = null; + invalidateDatasourcesSnapshot(); } return updated; @@ -236,4 +243,55 @@ public void populateUsedFlagLastUpdatedAsync() public void stopAsyncUsedFlagLastUpdatedUpdate() { } + + private int markNonOvershadowedSegmentsAsUsed(String dataSource, Predicate isSegmentEligible) + { + // Build a timeline of all datasource segments + final Set datasourceSegments = allSegments.values().stream().filter( + segment -> segment.getDataSource().equals(dataSource) + ).collect(Collectors.toSet()); + final SegmentTimeline timeline = SegmentTimeline.forSegments(datasourceSegments); + + // Find all unused segments which are not overshadowed + final Map segmentsToUpdate = new HashMap<>(); + datasourceSegments.forEach(segment -> { + final String segmentId = segment.getId().toString(); + if (isSegmentEligible.test(segment) + && !usedSegments.containsKey(segmentId) + && !timeline.isOvershadowed(segment)) { + segmentsToUpdate.put(segmentId, segment); + } + }); + + if (segmentsToUpdate.isEmpty()) { + return 0; + } else { + usedSegments.putAll(segmentsToUpdate); + invalidateDatasourcesSnapshot(); + return segmentsToUpdate.size(); + } + } + + private int markSegmentsAsUnused(Predicate isSegmentEligible) + { + final Set segmentIdsToMarkUnused + = usedSegments.values() + .stream() + .filter(isSegmentEligible::test) + .map(segment -> segment.getId().toString()) + .collect(Collectors.toSet()); + + if (segmentIdsToMarkUnused.isEmpty()) { + return 0; + } else { + segmentIdsToMarkUnused.forEach(usedSegments::remove); + invalidateDatasourcesSnapshot(); + return segmentIdsToMarkUnused.size(); + } + } + + private void invalidateDatasourcesSnapshot() + { + snapshot = null; + } } diff --git a/server/src/test/java/org/apache/druid/server/http/CoordinatorCompactionResourceTest.java b/server/src/test/java/org/apache/druid/server/http/CoordinatorCompactionResourceTest.java index 91b348b72f40..002fba124540 100644 --- a/server/src/test/java/org/apache/druid/server/http/CoordinatorCompactionResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/http/CoordinatorCompactionResourceTest.java @@ -22,9 +22,9 @@ import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import org.apache.druid.client.indexing.NoopOverlordClient; import org.apache.druid.error.DruidExceptionMatcher; import org.apache.druid.error.ErrorResponse; +import org.apache.druid.rpc.indexing.NoopOverlordClient; import org.apache.druid.rpc.indexing.OverlordClient; import org.apache.druid.server.compaction.CompactionStatistics; import 
org.apache.druid.server.compaction.CompactionStatusResponse; diff --git a/server/src/test/java/org/apache/druid/server/http/DataSourcesResourceTest.java b/server/src/test/java/org/apache/druid/server/http/DataSourcesResourceTest.java index f6bccbeb7da4..9ddce6275e21 100644 --- a/server/src/test/java/org/apache/druid/server/http/DataSourcesResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/http/DataSourcesResourceTest.java @@ -30,6 +30,7 @@ import com.google.common.util.concurrent.Futures; import it.unimi.dsi.fastutil.objects.Object2LongMap; import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap; +import org.apache.curator.shaded.com.google.common.base.Charsets; import org.apache.druid.audit.AuditManager; import org.apache.druid.client.CoordinatorServerView; import org.apache.druid.client.DruidDataSource; @@ -40,11 +41,15 @@ import org.apache.druid.error.DruidExceptionMatcher; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Intervals; +import org.apache.druid.java.util.http.client.response.StringFullResponseHolder; import org.apache.druid.metadata.MetadataRuleManager; import org.apache.druid.metadata.SegmentsMetadataManager; import org.apache.druid.query.SegmentDescriptor; import org.apache.druid.query.TableDataSource; +import org.apache.druid.rpc.HttpResponseException; import org.apache.druid.rpc.indexing.OverlordClient; +import org.apache.druid.rpc.indexing.SegmentUpdateResponse; +import org.apache.druid.segment.TestDataSource; import org.apache.druid.server.coordination.DruidServerMetadata; import org.apache.druid.server.coordination.ServerType; import org.apache.druid.server.coordinator.DruidCoordinator; @@ -68,6 +73,9 @@ import org.apache.druid.timeline.partition.NumberedShardSpec; import org.apache.druid.timeline.partition.PartitionHolder; import org.easymock.EasyMock; +import org.jboss.netty.handler.codec.http.DefaultHttpResponse; +import org.jboss.netty.handler.codec.http.HttpResponseStatus; +import org.jboss.netty.handler.codec.http.HttpVersion; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; @@ -93,7 +101,10 @@ public class DataSourcesResourceTest private List<DataSegment> dataSegmentList; private HttpServletRequest request; private SegmentsMetadataManager segmentsMetadataManager; + private OverlordClient overlordClient; private AuditManager auditManager; + + private DataSourcesResource dataSourcesResource; @Before public void setUp() @@ -105,7 +116,7 @@ public void setUp() dataSegmentList = new ArrayList<>(); dataSegmentList.add( new DataSegment( - "datasource1", + TestDataSource.WIKI, Intervals.of("2010-01-01/P1D"), "v0", null, @@ -118,7 +129,7 @@ ); dataSegmentList.add( new DataSegment( - "datasource1", + TestDataSource.WIKI, Intervals.of("2010-01-22/P1D"), "v0", null, @@ -131,7 +142,7 @@ ); dataSegmentList.add( new DataSegment( - "datasource2", + TestDataSource.KOALA, Intervals.of("2010-01-01/P1D"), "v0", null, @@ -144,12 +155,23 @@ ); listDataSources = new ArrayList<>(); listDataSources.add( - new DruidDataSource("datasource1", new HashMap<>()).addSegment(dataSegmentList.get(0)) + new DruidDataSource(TestDataSource.WIKI, new HashMap<>()).addSegment(dataSegmentList.get(0)) ); listDataSources.add( - new DruidDataSource("datasource2", new HashMap<>()).addSegment(dataSegmentList.get(1)) + new DruidDataSource(TestDataSource.KOALA, new HashMap<>()).addSegment(dataSegmentList.get(1)) ); segmentsMetadataManager =
EasyMock.createMock(SegmentsMetadataManager.class); + overlordClient = EasyMock.createStrictMock(OverlordClient.class); + + dataSourcesResource = new DataSourcesResource( + inventoryView, + segmentsMetadataManager, + null, + overlordClient, + AuthTestUtils.TEST_AUTHORIZER_MAPPER, + null, + auditManager + ); } @Test @@ -186,7 +208,6 @@ public void testGetFullQueryableDataSources() EasyMock.expectLastCall().times(1); EasyMock.replay(inventoryView, server, request); - DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getQueryableDataSources("full", null, request); Set<ImmutableDruidDataSource> result = (Set<ImmutableDruidDataSource>) response.getEntity(); Assert.assertEquals(200, response.getStatus()); @@ -200,8 +221,8 @@ public void testGetFullQueryableDataSources() List<String> result1 = (List<String>) response.getEntity(); Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(2, result1.size()); - Assert.assertTrue(result1.contains("datasource1")); - Assert.assertTrue(result1.contains("datasource2")); + Assert.assertTrue(result1.contains(TestDataSource.WIKI)); + Assert.assertTrue(result1.contains(TestDataSource.KOALA)); EasyMock.verify(inventoryView, server); } @@ -249,7 +270,7 @@ public Authorizer getAuthorizer(String name) @Override public Access authorize(AuthenticationResult authenticationResult1, Resource resource, Action action) { - if (resource.getName().equals("datasource1")) { + if (resource.getName().equals(TestDataSource.WIKI)) { return new Access(true); } else { return new Access(false); @@ -261,7 +282,7 @@ public Access authorize(AuthenticationResult authenticationResult1, Resource res }; DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, null, authMapper, null, auditManager); + new DataSourcesResource(inventoryView, null, null, overlordClient, authMapper, null, auditManager); Response response = dataSourcesResource.getQueryableDataSources("full", null, request); Set<ImmutableDruidDataSource> result = (Set<ImmutableDruidDataSource>) response.getEntity(); @@ -277,7 +298,7 @@ public Access authorize(AuthenticationResult authenticationResult1, Resource res Assert.assertEquals(200, response.getStatus()); Assert.assertEquals(1, result1.size()); - Assert.assertTrue(result1.contains("datasource1")); + Assert.assertTrue(result1.contains(TestDataSource.WIKI)); EasyMock.verify(inventoryView, server, request); } @@ -286,9 +307,9 @@ public Access authorize(AuthenticationResult authenticationResult1, Resource res public void testGetSimpleQueryableDataSources() { EasyMock.expect(server.getDataSources()).andReturn(listDataSources).atLeastOnce(); - EasyMock.expect(server.getDataSource("datasource1")).andReturn(listDataSources.get(0)).atLeastOnce(); + EasyMock.expect(server.getDataSource(TestDataSource.WIKI)).andReturn(listDataSources.get(0)).atLeastOnce(); EasyMock.expect(server.getTier()).andReturn(null).atLeastOnce(); - EasyMock.expect(server.getDataSource("datasource2")).andReturn(listDataSources.get(1)).atLeastOnce(); + EasyMock.expect(server.getDataSource(TestDataSource.KOALA)).andReturn(listDataSources.get(1)).atLeastOnce(); EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce(); EasyMock.expect(request.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).once(); EasyMock.expect(request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED)).andReturn(null).once(); @@ -299,17 +320,15 @@ public void testGetSimpleQueryableDataSources() EasyMock.expectLastCall().times(1); EasyMock.replay(inventoryView, server, request); - DataSourcesResource
dataSourcesResource = createResource(); Response response = dataSourcesResource.getQueryableDataSources(null, "simple", request); Assert.assertEquals(200, response.getStatus()); List<Map<String, Object>> results = (List<Map<String, Object>>) response.getEntity(); - int index = 0; + + Assert.assertEquals(2, results.size()); for (Map<String, Object> entry : results) { - Assert.assertEquals(listDataSources.get(index).getName(), entry.get("name").toString()); Assert.assertTrue(((Map) ((Map) entry.get("properties")).get("tiers")).containsKey(null)); Assert.assertNotNull((((Map) entry.get("properties")).get("segments"))); Assert.assertEquals(1, ((Map) ((Map) entry.get("properties")).get("segments")).get("count")); - index++; } EasyMock.verify(inventoryView, server); } @@ -317,13 +336,12 @@ public void testGetSimpleQueryableDataSources() @Test public void testFullGetTheDataSource() { - DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>()); - EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource1).atLeastOnce(); + DruidDataSource dataSource1 = new DruidDataSource(TestDataSource.WIKI, new HashMap<>()); + EasyMock.expect(server.getDataSource(TestDataSource.WIKI)).andReturn(dataSource1).atLeastOnce(); EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce(); EasyMock.replay(inventoryView, server); - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.getQueryableDataSource("datasource1", "full"); + Response response = dataSourcesResource.getQueryableDataSource(TestDataSource.WIKI, "full"); ImmutableDruidDataSource result = (ImmutableDruidDataSource) response.getEntity(); Assert.assertEquals(200, response.getStatus()); ImmutableDruidDataSourceTestUtils.assertEquals(dataSource1.toImmutableDruidDataSource(), result); @@ -337,7 +355,6 @@ public void testNullGetTheDataSource() EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce(); EasyMock.replay(inventoryView, server); - DataSourcesResource dataSourcesResource = createResource(); Assert.assertEquals(204, dataSourcesResource.getQueryableDataSource("none", null).getStatus()); EasyMock.verify(inventoryView, server); } @@ -345,17 +362,16 @@ public void testNullGetTheDataSource() @Test public void testSimpleGetTheDataSource() { - DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>()); + DruidDataSource dataSource1 = new DruidDataSource(TestDataSource.WIKI, new HashMap<>()); dataSource1.addSegment( new DataSegment("datasegment1", Intervals.of("2010-01-01/P1D"), "", null, null, null, null, 0x9, 10) ); - EasyMock.expect(server.getDataSource("datasource1")).andReturn(dataSource1).atLeastOnce(); + EasyMock.expect(server.getDataSource(TestDataSource.WIKI)).andReturn(dataSource1).atLeastOnce(); EasyMock.expect(server.getTier()).andReturn(null).atLeastOnce(); EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce(); EasyMock.replay(inventoryView, server); - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.getQueryableDataSource("datasource1", null); + Response response = dataSourcesResource.getQueryableDataSource(TestDataSource.WIKI, null); Assert.assertEquals(200, response.getStatus()); Map<String, Map<String, Object>> result = (Map<String, Map<String, Object>>) response.getEntity(); Assert.assertEquals(1, ((Map) (result.get("tiers").get(null))).get("segmentCount")); @@ -373,22 +389,21 @@ public void testSimpleGetTheDataSource() @Test public void testSimpleGetTheDataSourceManyTiers()
{ - EasyMock.expect(server.getDataSource("datasource1")).andReturn(listDataSources.get(0)).atLeastOnce(); + EasyMock.expect(server.getDataSource(TestDataSource.WIKI)).andReturn(listDataSources.get(0)).atLeastOnce(); EasyMock.expect(server.getTier()).andReturn("cold").atLeastOnce(); DruidServer server2 = EasyMock.createStrictMock(DruidServer.class); - EasyMock.expect(server2.getDataSource("datasource1")).andReturn(listDataSources.get(1)).atLeastOnce(); + EasyMock.expect(server2.getDataSource(TestDataSource.WIKI)).andReturn(listDataSources.get(1)).atLeastOnce(); EasyMock.expect(server2.getTier()).andReturn("hot").atLeastOnce(); DruidServer server3 = EasyMock.createStrictMock(DruidServer.class); - EasyMock.expect(server3.getDataSource("datasource1")).andReturn(listDataSources.get(1)).atLeastOnce(); + EasyMock.expect(server3.getDataSource(TestDataSource.WIKI)).andReturn(listDataSources.get(1)).atLeastOnce(); EasyMock.expect(server3.getTier()).andReturn("cold").atLeastOnce(); EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server, server2, server3)).atLeastOnce(); EasyMock.replay(inventoryView, server, server2, server3); - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.getQueryableDataSource("datasource1", null); + Response response = dataSourcesResource.getQueryableDataSource(TestDataSource.WIKI, null); Assert.assertEquals(200, response.getStatus()); Map<String, Map<String, Object>> result = (Map<String, Map<String, Object>>) response.getEntity(); Assert.assertEquals(2, ((Map) (result.get("tiers").get("cold"))).get("segmentCount")); @@ -425,8 +440,7 @@ public void testSimpleGetTheDataSourceWithReplicatedSegments() EasyMock.replay(inventoryView); - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.getQueryableDataSource("datasource1", null); + Response response = dataSourcesResource.getQueryableDataSource(TestDataSource.WIKI, null); Assert.assertEquals(200, response.getStatus()); Map<String, Map<String, Object>> result1 = (Map<String, Map<String, Object>>) response.getEntity(); Assert.assertEquals(2, ((Map) (result1.get("tiers").get("tier1"))).get("segmentCount")); @@ -441,7 +455,7 @@ public void testSimpleGetTheDataSourceWithReplicatedSegments() Assert.assertEquals(30L, result1.get("segments").get("size")); Assert.assertEquals(60L, result1.get("segments").get("replicatedSize")); - response = dataSourcesResource.getQueryableDataSource("datasource2", null); + response = dataSourcesResource.getQueryableDataSource(TestDataSource.KOALA, null); Assert.assertEquals(200, response.getStatus()); Map<String, Map<String, Object>> result2 = (Map<String, Map<String, Object>>) response.getEntity(); Assert.assertEquals(1, ((Map) (result2.get("tiers").get("tier1"))).get("segmentCount")); @@ -469,8 +483,6 @@ public void testGetSegmentDataSourceIntervals() List<Interval> expectedIntervals = new ArrayList<>(); expectedIntervals.add(Intervals.of("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.getIntervalsWithServedSegmentsOrAllServedSegmentsPerIntervals( "invalidDataSource", null, @@ -479,7 +491,7 @@ Assert.assertNull(response.getEntity()); response = dataSourcesResource.getIntervalsWithServedSegmentsOrAllServedSegmentsPerIntervals( - "datasource1", + TestDataSource.WIKI, null, null ); @@ -489,7 +501,7 @@ Assert.assertEquals(expectedIntervals.get(1),
actualIntervals.last()); response = dataSourcesResource.getIntervalsWithServedSegmentsOrAllServedSegmentsPerIntervals( - "datasource1", + TestDataSource.WIKI, "simple", null ); @@ -501,7 +513,7 @@ public void testGetSegmentDataSourceIntervals() Assert.assertEquals(1, results.lastEntry().getValue().get(DataSourcesResource.SimpleProperties.count)); response = dataSourcesResource.getIntervalsWithServedSegmentsOrAllServedSegmentsPerIntervals( - "datasource1", + TestDataSource.WIKI, null, "full" ); @@ -528,7 +540,6 @@ public void testGetServedSegmentsInIntervalInDataSource() EasyMock.expect(inventoryView.getInventory()).andReturn(ImmutableList.of(server)).atLeastOnce(); EasyMock.replay(inventoryView); - DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.getServedSegmentsInInterval( "invalidDataSource", "2010-01-01/P1D", @@ -538,24 +549,24 @@ public void testGetServedSegmentsInIntervalInDataSource() Assert.assertNull(response.getEntity()); response = dataSourcesResource.getServedSegmentsInInterval( - "datasource1", + TestDataSource.WIKI, "2010-03-01/P1D", null, null ); // interval not present in the datasource Assert.assertEquals(ImmutableSet.of(), response.getEntity()); - response = dataSourcesResource.getServedSegmentsInInterval("datasource1", "2010-01-01/P1D", null, null); + response = dataSourcesResource.getServedSegmentsInInterval(TestDataSource.WIKI, "2010-01-01/P1D", null, null); Assert.assertEquals(ImmutableSet.of(dataSegmentList.get(0).getId()), response.getEntity()); - response = dataSourcesResource.getServedSegmentsInInterval("datasource1", "2010-01-01/P1M", null, null); + response = dataSourcesResource.getServedSegmentsInInterval(TestDataSource.WIKI, "2010-01-01/P1M", null, null); Assert.assertEquals( ImmutableSet.of(dataSegmentList.get(1).getId(), dataSegmentList.get(0).getId()), response.getEntity() ); response = dataSourcesResource.getServedSegmentsInInterval( - "datasource1", + TestDataSource.WIKI, "2010-01-01/P1M", "simple", null @@ -572,7 +583,7 @@ public void testGetServedSegmentsInIntervalInDataSource() ); } - response = dataSourcesResource.getServedSegmentsInInterval("datasource1", "2010-01-01/P1M", null, "full"); + response = dataSourcesResource.getServedSegmentsInInterval(TestDataSource.WIKI, "2010-01-01/P1M", null, "full"); Map<Interval, Map<SegmentId, Object>> results1 = ((Map<Interval, Map<SegmentId, Object>>) response.getEntity()); i = 1; for (Map.Entry<Interval, Map<SegmentId, Object>> entry : results1.entrySet()) { @@ -592,15 +603,12 @@ public void testKillSegmentsInIntervalInDataSource() String interval = "2010-01-01/P1D"; Interval theInterval = Intervals.of(interval.replace('_', '/')); - OverlordClient overlordClient = EasyMock.createStrictMock(OverlordClient.class); - EasyMock.expect(overlordClient.runKillTask("api-issued", "datasource1", theInterval, null, null, null)) + EasyMock.expect(overlordClient.runKillTask("api-issued", TestDataSource.WIKI, theInterval, null, null, null)) .andReturn(Futures.immediateFuture("kill_task_1")); EasyMock.replay(overlordClient, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, overlordClient, null, null, auditManager); prepareRequestForAudit(); - Response response = dataSourcesResource.killUnusedSegmentsInInterval("datasource1", interval, request); + Response response = dataSourcesResource.killUnusedSegmentsInInterval(TestDataSource.WIKI, interval, request); Assert.assertEquals(200, response.getStatus()); Assert.assertNull(response.getEntity()); @@ -610,12 +618,13 @@
@Test public void testMarkAsUnusedAllSegmentsInDataSourceBadRequest() { - OverlordClient overlordClient = EasyMock.createStrictMock(OverlordClient.class); - EasyMock.replay(overlordClient, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, null, null, overlordClient, null, null, auditManager); DruidExceptionMatcher.invalidInput().assertThrowsAndMatches( - () -> dataSourcesResource.markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval("datasource", "true", "???", request) + () -> dataSourcesResource.markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval( + "datasource", + "true", + "???", + request + ) ); } @@ -624,15 +633,15 @@ public void testMarkAsUnusedAllSegmentsInDataSource() { prepareRequestForAudit(); - OverlordClient overlordClient = EasyMock.createStrictMock(OverlordClient.class); + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient, server); - DataSourcesResource dataSourcesResource = - new DataSourcesResource(inventoryView, segmentsMetadataManager, null, overlordClient, null, null, auditManager); Response response = dataSourcesResource - .markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval("datasource", null, null, request); + .markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval(TestDataSource.WIKI, null, null, request); Assert.assertEquals(200, response.getStatus()); - EasyMock.verify(request); + EasyMock.verify(overlordClient, request); } @Test @@ -645,29 +654,29 @@ public void testIsHandOffComplete() new DataSourcesResource(inventoryView, null, databaseRuleManager, null, null, null, auditManager); // test dropped - EasyMock.expect(databaseRuleManager.getRulesWithDefault("dataSource1")) + EasyMock.expect(databaseRuleManager.getRulesWithDefault(TestDataSource.WIKI)) .andReturn(ImmutableList.of(loadRule, dropRule)) .once(); EasyMock.replay(databaseRuleManager); String interval1 = "2013-01-01T01:00:00Z/2013-01-01T02:00:00Z"; - Response response1 = dataSourcesResource.isHandOffComplete("dataSource1", interval1, 1, "v1"); + Response response1 = dataSourcesResource.isHandOffComplete(TestDataSource.WIKI, interval1, 1, "v1"); Assert.assertTrue((boolean) response1.getEntity()); EasyMock.verify(databaseRuleManager); // test isn't dropped and no timeline found EasyMock.reset(databaseRuleManager); - EasyMock.expect(databaseRuleManager.getRulesWithDefault("dataSource1")) + EasyMock.expect(databaseRuleManager.getRulesWithDefault(TestDataSource.WIKI)) .andReturn(ImmutableList.of(loadRule, dropRule)) .once(); - EasyMock.expect(inventoryView.getTimeline(new TableDataSource("dataSource1"))) + EasyMock.expect(inventoryView.getTimeline(new TableDataSource(TestDataSource.WIKI))) .andReturn(null) .once(); EasyMock.replay(inventoryView, databaseRuleManager); String interval2 = "2013-01-02T01:00:00Z/2013-01-02T02:00:00Z"; - Response response2 = dataSourcesResource.isHandOffComplete("dataSource1", interval2, 1, "v1"); + Response response2 = dataSourcesResource.isHandOffComplete(TestDataSource.WIKI, interval2, 1, "v1"); Assert.assertFalse((boolean) response2.getEntity()); EasyMock.verify(inventoryView, databaseRuleManager); @@ -690,15 +699,15 @@ public List> lookupWithIncompleteP } }; EasyMock.reset(inventoryView, databaseRuleManager); - EasyMock.expect(databaseRuleManager.getRulesWithDefault("dataSource1")) + EasyMock.expect(databaseRuleManager.getRulesWithDefault(TestDataSource.WIKI)) .andReturn(ImmutableList.of(loadRule, 
dropRule)) .once(); - EasyMock.expect(inventoryView.getTimeline(new TableDataSource("dataSource1"))) + EasyMock.expect(inventoryView.getTimeline(new TableDataSource(TestDataSource.WIKI))) .andReturn(timeline) .once(); EasyMock.replay(inventoryView, databaseRuleManager); - Response response3 = dataSourcesResource.isHandOffComplete("dataSource1", interval3, 1, "v1"); + Response response3 = dataSourcesResource.isHandOffComplete(TestDataSource.WIKI, interval3, 1, "v1"); Assert.assertTrue((boolean) response3.getEntity()); EasyMock.verify(inventoryView, databaseRuleManager); @@ -708,179 +717,183 @@ public List> lookupWithIncompleteP public void testMarkSegmentAsUsed() { DataSegment segment = dataSegmentList.get(0); - EasyMock.expect(segmentsMetadataManager.markSegmentAsUsed(segment.getId().toString())).andReturn(true).once(); - EasyMock.replay(segmentsMetadataManager); - - DataSourcesResource dataSourcesResource = createResource(); + EasyMock.expect(overlordClient.markSegmentAsUsed(segment.getId())) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(1))).once(); + EasyMock.replay(overlordClient); Response response = dataSourcesResource.markSegmentAsUsed(segment.getDataSource(), segment.getId().toString()); Assert.assertEquals(200, response.getStatus()); - EasyMock.verify(segmentsMetadataManager); + EasyMock.verify(overlordClient); } @Test - public void testMarkSegmentAsUsedNoChange() + public void testMarkSegmentAsUsedWhenOverlordIsOnOldVersion() { DataSegment segment = dataSegmentList.get(0); - EasyMock.expect(segmentsMetadataManager.markSegmentAsUsed(segment.getId().toString())).andReturn(false).once(); + + final StringFullResponseHolder responseHolder = new StringFullResponseHolder( + new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.NOT_FOUND), + Charsets.UTF_8 + ); + EasyMock.expect(overlordClient.markSegmentAsUsed(segment.getId())) + .andThrow(new RuntimeException(new HttpResponseException(responseHolder))).once(); + EasyMock.replay(overlordClient); + + EasyMock.expect(segmentsMetadataManager.markSegmentAsUsed(segment.getId().toString())) + .andReturn(true).once(); EasyMock.replay(segmentsMetadataManager); - DataSourcesResource dataSourcesResource = createResource(); + Response response = dataSourcesResource.markSegmentAsUsed(segment.getDataSource(), segment.getId().toString()); + Assert.assertEquals(200, response.getStatus()); + Assert.assertEquals(new SegmentUpdateResponse(1), response.getEntity()); + + EasyMock.verify(overlordClient, segmentsMetadataManager); + } + + @Test + public void testMarkSegmentAsUsedNoChange() + { + DataSegment segment = dataSegmentList.get(0); + EasyMock.expect(overlordClient.markSegmentAsUsed(segment.getId())) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient); Response response = dataSourcesResource.markSegmentAsUsed(segment.getDataSource(), segment.getId().toString()); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("segmentStateChanged", false), response.getEntity()); - EasyMock.verify(segmentsMetadataManager); + Assert.assertEquals(new SegmentUpdateResponse(0), response.getEntity()); + EasyMock.verify(overlordClient); } @Test public void testMarkAsUsedNonOvershadowedSegmentsInterval() { Interval interval = Intervals.of("2010-01-22/P1D"); - int numUpdatedSegments = segmentsMetadataManager.markAsUsedNonOvershadowedSegmentsInInterval( - EasyMock.eq("datasource1"), EasyMock.eq(interval), EasyMock.isNull() - ); - 
EasyMock.expect(numUpdatedSegments).andReturn(3).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); + SegmentsToUpdateFilter segmentFilter = new SegmentsToUpdateFilter(interval, null, null); + + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(3))).once(); + EasyMock.replay(overlordClient, inventoryView, server); - DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(interval, null, null) + TestDataSource.WIKI, + segmentFilter ); Assert.assertEquals(200, response.getStatus()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkAsUsedNonOvershadowedSegmentsIntervalWithVersions() { Interval interval = Intervals.of("2010-01-22/P1D"); + SegmentsToUpdateFilter segmentFilter = new SegmentsToUpdateFilter(interval, null, ImmutableList.of("v0")); - int numUpdatedSegments = segmentsMetadataManager.markAsUsedNonOvershadowedSegmentsInInterval( - EasyMock.eq("datasource1"), EasyMock.eq(interval), EasyMock.eq(ImmutableList.of("v0")) - ); - EasyMock.expect(numUpdatedSegments).andReturn(3).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(3))).once(); + EasyMock.replay(overlordClient, inventoryView, server); - DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(interval, null, ImmutableList.of("v0")) + TestDataSource.WIKI, + segmentFilter ); Assert.assertEquals(200, response.getStatus()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkAsUsedNonOvershadowedSegmentsIntervalWithNonExistentVersion() { Interval interval = Intervals.of("2010-01-22/P1D"); + SegmentsToUpdateFilter filter = new SegmentsToUpdateFilter(interval, null, ImmutableList.of("foo")); - int numUpdatedSegments = segmentsMetadataManager.markAsUsedNonOvershadowedSegmentsInInterval( - EasyMock.eq("datasource1"), EasyMock.eq(interval), EasyMock.eq(ImmutableList.of("foo")) - ); - EasyMock.expect(numUpdatedSegments).andReturn(0).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, filter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient, inventoryView, server); - DataSourcesResource dataSourcesResource = createResource(); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(interval, null, ImmutableList.of("foo")) + TestDataSource.WIKI, + filter ); Assert.assertEquals(200, response.getStatus()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkAsUsedNonOvershadowedSegmentsIntervalNoneUpdated() { Interval interval = Intervals.of("2010-01-22/P1D"); - int numUpdatedSegments 
= segmentsMetadataManager.markAsUsedNonOvershadowedSegmentsInInterval( - EasyMock.eq("datasource1"), EasyMock.eq(interval), EasyMock.isNull() - ); - EasyMock.expect(numUpdatedSegments).andReturn(0).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); - - DataSourcesResource dataSourcesResource = createResource(); + final SegmentsToUpdateFilter segmentFilter = new SegmentsToUpdateFilter(interval, null, null); + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient, inventoryView, server); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(interval, null, null) + TestDataSource.WIKI, + segmentFilter ); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + Assert.assertEquals(new SegmentUpdateResponse(0), response.getEntity()); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkAsUsedNonOvershadowedSegmentsSet() { Set<String> segmentIds = ImmutableSet.of(dataSegmentList.get(1).getId().toString()); - int numUpdatedSegments = segmentsMetadataManager.markAsUsedNonOvershadowedSegments( - EasyMock.eq("datasource1"), EasyMock.eq(segmentIds) - ); - EasyMock.expect(numUpdatedSegments).andReturn(3).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); + SegmentsToUpdateFilter segmentFilter = new SegmentsToUpdateFilter(null, segmentIds, null); - DataSourcesResource dataSourcesResource = createResource(); + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(3))).once(); + EasyMock.replay(overlordClient, inventoryView, server); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(null, segmentIds, null) + TestDataSource.WIKI, + segmentFilter ); Assert.assertEquals(200, response.getStatus()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkAsUsedNonOvershadowedSegmentsIntervalException() { Interval interval = Intervals.of("2010-01-22/P1D"); - int numUpdatedSegments = segmentsMetadataManager.markAsUsedNonOvershadowedSegmentsInInterval( - EasyMock.eq("datasource1"), EasyMock.eq(interval), EasyMock.isNull() - ); - EasyMock.expect(numUpdatedSegments).andThrow(new RuntimeException("Error!")).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); - - DataSourcesResource dataSourcesResource = createResource(); + SegmentsToUpdateFilter segmentFilter = new SegmentsToUpdateFilter(interval, null, null); + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter)) + .andThrow(new RuntimeException("Error!")).once(); + EasyMock.replay(overlordClient, inventoryView, server); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(interval, null, null) + TestDataSource.WIKI, + segmentFilter ); Assert.assertEquals(500, response.getStatus()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + EasyMock.verify(overlordClient, inventoryView, server);
} @Test public void testMarkAsUsedNonOvershadowedSegmentsNoDataSource() { - Interval interval = Intervals.of("2010-01-22/P1D"); - int numUpdatedSegments = segmentsMetadataManager.markAsUsedNonOvershadowedSegmentsInInterval( - EasyMock.eq("datasource1"), EasyMock.eq(interval), EasyMock.isNull() - ); - EasyMock.expect(numUpdatedSegments).andReturn(0).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); - - DataSourcesResource dataSourcesResource = createResource(); + SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(Intervals.of("2010-01-22/P1D"), null, null); + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient, inventoryView, server); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(Intervals.of("2010-01-22/P1D"), null, null) + TestDataSource.WIKI, + segmentFilter ); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); - EasyMock.verify(segmentsMetadataManager); + Assert.assertEquals(new SegmentUpdateResponse(0), response.getEntity()); + EasyMock.verify(overlordClient); } @Test public void testMarkAsUsedNonOvershadowedSegmentsWithNullIntervalAndSegmentIdsAndVersions() { - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(null, null, null) + TestDataSource.WIKI, + new SegmentsToUpdateFilter(null, null, null) ); Assert.assertEquals(400, response.getStatus()); } @@ -888,39 +901,48 @@ public void testMarkAsUsedNonOvershadowedSegmentsWithNullIntervalAndSegmentIdsAn @Test public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullIntervalAndEmptySegmentIds() { - DataSourcesResource dataSourcesResource = createResource(); + final SegmentsToUpdateFilter segmentFilter + = new SegmentsToUpdateFilter(Intervals.of("2010-01-22/P1D"), ImmutableSet.of(), null); + + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(2))).once(); + EasyMock.replay(overlordClient); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter( - Intervals.of("2010-01-22/P1D"), ImmutableSet.of(), null - ) + TestDataSource.WIKI, + segmentFilter ); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); + Assert.assertEquals(new SegmentUpdateResponse(2), response.getEntity()); + EasyMock.verify(overlordClient); } @Test public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullInterval() { - DataSourcesResource dataSourcesResource = createResource(); + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(Intervals.of("2010-01-22/P1D"), null, null); + + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new 
DataSourcesResource.SegmentsToUpdateFilter(Intervals.of("2010-01-22/P1D"), null, null) + TestDataSource.WIKI, + segmentFilter ); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); + Assert.assertEquals(new SegmentUpdateResponse(0), response.getEntity()); + + EasyMock.verify(overlordClient); } @Test public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullIntervalAndSegmentIds() { - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(Intervals.of("2010-01-22/P1D"), ImmutableSet.of("segment1"), null) + TestDataSource.WIKI, + new SegmentsToUpdateFilter(Intervals.of("2010-01-22/P1D"), ImmutableSet.of("segment1"), null) ); Assert.assertEquals(400, response.getStatus()); } @@ -928,11 +950,9 @@ public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullIntervalAndSegmentId @Test public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullIntervalAndSegmentIdsAndVersions() { - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter( + TestDataSource.WIKI, + new SegmentsToUpdateFilter( Intervals.of("2020/2030"), ImmutableSet.of("seg1"), ImmutableList.of("v1", "v2") ) ); @@ -942,11 +962,9 @@ public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullIntervalAndSegmentId @Test public void testMarkAsUsedNonOvershadowedSegmentsWithEmptySegmentIds() { - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(null, ImmutableSet.of(), null) + TestDataSource.WIKI, + new SegmentsToUpdateFilter(null, ImmutableSet.of(), null) ); Assert.assertEquals(400, response.getStatus()); } @@ -954,11 +972,9 @@ public void testMarkAsUsedNonOvershadowedSegmentsWithEmptySegmentIds() @Test public void testMarkAsUsedNonOvershadowedSegmentsWithEmptyVersions() { - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(null, null, ImmutableList.of()) + TestDataSource.WIKI, + new SegmentsToUpdateFilter(null, null, ImmutableList.of()) ); Assert.assertEquals(400, response.getStatus()); } @@ -966,11 +982,9 @@ public void testMarkAsUsedNonOvershadowedSegmentsWithEmptyVersions() @Test public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullVersions() { - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(null, null, ImmutableList.of("v1", "v2")) + TestDataSource.WIKI, + new SegmentsToUpdateFilter(null, null, ImmutableList.of("v1", "v2")) ); Assert.assertEquals(400, response.getStatus()); } @@ -978,11 +992,9 @@ public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullVersions() @Test public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullSegmentIdsAndVersions() { - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(null, ImmutableSet.of("segment1"), 
ImmutableList.of("v1", "v2")) + TestDataSource.WIKI, + new SegmentsToUpdateFilter(null, ImmutableSet.of("segment1"), ImmutableList.of("v1", "v2")) ); Assert.assertEquals(400, response.getStatus()); } @@ -990,27 +1002,39 @@ public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullSegmentIdsAndVersion @Test public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullIntervalAndVersions() { - DataSourcesResource dataSourcesResource = createResource(); - + SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(Intervals.ETERNITY, null, ImmutableList.of("v1", "v2")); + + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(2))).once(); + EasyMock.replay(overlordClient); + Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(Intervals.ETERNITY, null, ImmutableList.of("v1", "v2")) + TestDataSource.WIKI, + segmentFilter ); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); + Assert.assertEquals(new SegmentUpdateResponse(2), response.getEntity()); + EasyMock.verify(overlordClient); } @Test public void testMarkAsUsedNonOvershadowedSegmentsWithNonNullIntervalAndEmptyVersions() { - DataSourcesResource dataSourcesResource = createResource(); + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(Intervals.ETERNITY, null, ImmutableList.of()); + EasyMock.expect(overlordClient.markNonOvershadowedSegmentsAsUsed(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(5))).once(); + EasyMock.replay(overlordClient); Response response = dataSourcesResource.markAsUsedNonOvershadowedSegments( - "datasource1", - new DataSourcesResource.SegmentsToUpdateFilter(Intervals.ETERNITY, null, ImmutableList.of()) + TestDataSource.WIKI, + segmentFilter ); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); + Assert.assertEquals(new SegmentUpdateResponse(5), response.getEntity()); + + EasyMock.verify(overlordClient); } @Test @@ -1146,18 +1170,15 @@ public void testSegmentLoadChecksForInterval() @Test public void testMarkSegmentsAsUnused() { - final DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>()); + final DruidDataSource dataSource1 = new DruidDataSource(TestDataSource.WIKI, new HashMap<>()); final Set segmentIds = dataSegmentList.stream() .filter(segment -> segment.getDataSource().equals(dataSource1.getName())) .map(DataSegment::getId) .collect(Collectors.toSet()); - EasyMock.expect(segmentsMetadataManager.markSegmentsAsUnused(segmentIds)).andReturn(1).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); - - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter( + final SegmentsToUpdateFilter payload = + new SegmentsToUpdateFilter( null, segmentIds.stream() .map(SegmentId::toString) @@ -1165,29 +1186,29 @@ public void testMarkSegmentsAsUnused() null ); - DataSourcesResource dataSourcesResource = createResource(); + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, payload)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(1))).once(); + EasyMock.replay(overlordClient, inventoryView, server); + prepareRequestForAudit(); - Response response = 
dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, payload, request); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 1), response.getEntity()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + Assert.assertEquals(new SegmentUpdateResponse(1), response.getEntity()); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkSegmentsAsUnusedNoChanges() { - final DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>()); + final DruidDataSource dataSource1 = new DruidDataSource(TestDataSource.WIKI, new HashMap<>()); final Set segmentIds = dataSegmentList.stream() .filter(segment -> segment.getDataSource().equals(dataSource1.getName())) .map(DataSegment::getId) .collect(Collectors.toSet()); - EasyMock.expect(segmentsMetadataManager.markSegmentsAsUnused(segmentIds)).andReturn(0).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); - - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter( + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter( null, segmentIds.stream() .map(SegmentId::toString) @@ -1195,31 +1216,29 @@ public void testMarkSegmentsAsUnusedNoChanges() null ); - DataSourcesResource dataSourcesResource = createResource(); + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient, inventoryView, server); + prepareRequestForAudit(); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter, request); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + Assert.assertEquals(new SegmentUpdateResponse(0), response.getEntity()); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkSegmentsAsUnusedException() { - final DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap<>()); + final DruidDataSource dataSource1 = new DruidDataSource(TestDataSource.WIKI, new HashMap<>()); final Set segmentIds = dataSegmentList.stream() .filter(segment -> segment.getDataSource().equals(dataSource1.getName())) .map(DataSegment::getId) .collect(Collectors.toSet()); - EasyMock.expect(segmentsMetadataManager.markSegmentsAsUnused(segmentIds)) - .andThrow(new RuntimeException("Exception occurred")) - .once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); - - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter( + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter( null, segmentIds.stream() .map(SegmentId::toString) @@ -1227,127 +1246,123 @@ public void testMarkSegmentsAsUnusedException() null ); - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter)) + .andThrow(new RuntimeException("Exception occurred")) + .once(); + 
EasyMock.replay(overlordClient, inventoryView, server); + + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter, request); Assert.assertEquals(500, response.getStatus()); Assert.assertNotNull(response.getEntity()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkAsUnusedSegmentsInInterval() { final Interval theInterval = Intervals.of("2010-01-01/P1D"); + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(theInterval, null, null); - EasyMock.expect(segmentsMetadataManager.markAsUnusedSegmentsInInterval("datasource1", theInterval, null)).andReturn(1).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); - - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter(theInterval, null, null); + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(1))).once(); + EasyMock.replay(overlordClient, inventoryView, server); - DataSourcesResource dataSourcesResource = createResource(); prepareRequestForAudit(); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter, request); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 1), response.getEntity()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + Assert.assertEquals(new SegmentUpdateResponse(1), response.getEntity()); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkAsUnusedSegmentsInIntervalNoChanges() { final Interval theInterval = Intervals.of("2010-01-01/P1D"); + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(theInterval, null, null); - EasyMock.expect(segmentsMetadataManager.markAsUnusedSegmentsInInterval("datasource1", theInterval, null)).andReturn(0).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); - - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter(theInterval, null, null); + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient, inventoryView, server); - DataSourcesResource dataSourcesResource = createResource(); prepareRequestForAudit(); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter, request); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + Assert.assertEquals(new SegmentUpdateResponse(0), response.getEntity()); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkAsUnusedSegmentsInIntervalException() { final Interval theInterval = Intervals.of("2010-01-01/P1D"); + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(theInterval, null, null); - 
EasyMock.expect(segmentsMetadataManager.markAsUnusedSegmentsInInterval("datasource1", theInterval, null)) + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter)) .andThrow(new RuntimeException("Exception occurred")) .once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); + EasyMock.replay(overlordClient, inventoryView, server); - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter(theInterval, null, null); - - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter, request); Assert.assertEquals(500, response.getStatus()); Assert.assertNotNull(response.getEntity()); - EasyMock.verify(segmentsMetadataManager, inventoryView, server); + EasyMock.verify(overlordClient, inventoryView, server); } @Test public void testMarkAsUnusedSegmentsInIntervalNoDataSource() { final Interval theInterval = Intervals.of("2010-01-01/P1D"); - EasyMock.expect(segmentsMetadataManager.markAsUnusedSegmentsInInterval("datasource1", theInterval, null)) - .andReturn(0).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(theInterval, null, null); + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient, inventoryView, server); - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter(theInterval, null, null); - DataSourcesResource dataSourcesResource = createResource(); prepareRequestForAudit(); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter, request); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); - EasyMock.verify(segmentsMetadataManager); + Assert.assertEquals(new SegmentUpdateResponse(0), response.getEntity()); + EasyMock.verify(overlordClient); } @Test public void testMarkAsUnusedSegmentsInIntervalWithVersions() { final Interval theInterval = Intervals.of("2010-01-01/P1D"); - EasyMock.expect(segmentsMetadataManager.markAsUnusedSegmentsInInterval("datasource1", theInterval, ImmutableList.of("v1"))) - .andReturn(2).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(theInterval, null, ImmutableList.of("v1")); + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(2))).once(); + EasyMock.replay(overlordClient, inventoryView, server); - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter(theInterval, null, ImmutableList.of("v1")); - DataSourcesResource dataSourcesResource = createResource(); prepareRequestForAudit(); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter, request); Assert.assertEquals(200, 
response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 2), response.getEntity()); - EasyMock.verify(segmentsMetadataManager); + Assert.assertEquals(new SegmentUpdateResponse(2), response.getEntity()); + EasyMock.verify(overlordClient); } @Test public void testMarkAsUnusedSegmentsInIntervalWithNonExistentVersion() { final Interval theInterval = Intervals.of("2010-01-01/P1D"); - EasyMock.expect(segmentsMetadataManager.markAsUnusedSegmentsInInterval("datasource1", theInterval, ImmutableList.of("foo"))) - .andReturn(0).once(); - EasyMock.replay(segmentsMetadataManager, inventoryView, server); + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(theInterval, null, ImmutableList.of("foo")); + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient, inventoryView, server); - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter(theInterval, null, ImmutableList.of("foo")); - DataSourcesResource dataSourcesResource = createResource(); prepareRequestForAudit(); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter, request); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); - EasyMock.verify(segmentsMetadataManager); + Assert.assertEquals(new SegmentUpdateResponse(0), response.getEntity()); + EasyMock.verify(overlordClient); } @Test @@ -1356,8 +1371,8 @@ public void testSegmentsToUpdateFilterSerde() throws JsonProcessingException final ObjectMapper mapper = new DefaultObjectMapper(); final String payload = "{\"interval\":\"2023-01-01T00:00:00.000Z/2024-01-01T00:00:00.000Z\",\"segmentIds\":null,\"versions\":[\"v1\"]}"; - final DataSourcesResource.SegmentsToUpdateFilter obj = - mapper.readValue(payload, DataSourcesResource.SegmentsToUpdateFilter.class); + final SegmentsToUpdateFilter obj = + mapper.readValue(payload, SegmentsToUpdateFilter.class); Assert.assertEquals(Intervals.of("2023/2024"), obj.getInterval()); Assert.assertEquals(ImmutableList.of("v1"), obj.getVersions()); Assert.assertNull(obj.getSegmentIds()); @@ -1368,9 +1383,7 @@ public void testSegmentsToUpdateFilterSerde() throws JsonProcessingException @Test public void testMarkSegmentsAsUnusedNullPayload() { - DataSourcesResource dataSourcesResource = createResource(); - - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", null, request); + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, null, request); Assert.assertEquals(400, response.getStatus()); Assert.assertNotNull(response.getEntity()); Assert.assertEquals( @@ -1383,12 +1396,10 @@ public void testMarkSegmentsAsUnusedNullPayload() @Test public void testMarkSegmentsAsUnusedWithNullIntervalAndSegmentIdsAndVersions() { - DataSourcesResource dataSourcesResource = createResource(); + final SegmentsToUpdateFilter payload = + new SegmentsToUpdateFilter(null, null, null); - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter(null, null, null); - - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + Response response = 
dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, payload, request); Assert.assertEquals(400, response.getStatus()); Assert.assertNotNull(response.getEntity()); } @@ -1396,21 +1407,26 @@ public void testMarkSegmentsAsUnusedWithNullIntervalAndSegmentIdsAndVersions() @Test public void testMarkSegmentsAsUnusedWithNonNullIntervalAndEmptySegmentIds() { - DataSourcesResource dataSourcesResource = createResource(); prepareRequestForAudit(); - final DataSourcesResource.SegmentsToUpdateFilter payload = - new DataSourcesResource.SegmentsToUpdateFilter(Intervals.of("2010-01-01/P1D"), ImmutableSet.of(), null); - Response response = dataSourcesResource.markSegmentsAsUnused("datasource1", payload, request); + final SegmentsToUpdateFilter segmentFilter = + new SegmentsToUpdateFilter(Intervals.of("2010-01-01/P1D"), ImmutableSet.of(), null); + + EasyMock.expect(overlordClient.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter)) + .andReturn(Futures.immediateFuture(new SegmentUpdateResponse(0))).once(); + EasyMock.replay(overlordClient); + + Response response = dataSourcesResource.markSegmentsAsUnused(TestDataSource.WIKI, segmentFilter, request); Assert.assertEquals(200, response.getStatus()); - Assert.assertEquals(ImmutableMap.of("numChangedSegments", 0), response.getEntity()); + Assert.assertEquals(new SegmentUpdateResponse(0), response.getEntity()); + + EasyMock.verify(overlordClient); } @Test public void testGetDatasourceLoadstatusForceMetadataRefreshNull() { - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", null, null, null, null, null); + Response response = dataSourcesResource.getDatasourceLoadstatus(TestDataSource.WIKI, null, null, null, null, null); Assert.assertEquals(400, response.getStatus()); } @@ -1419,21 +1435,16 @@ public void testGetDatasourceLoadstatusNoSegmentForInterval() { List<DataSegment> segments = ImmutableList.of(); // Test when datasource fully loaded - EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq( - "datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) - .andReturn(Optional.of(segments)).once(); + EasyMock.expect( + segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval( + EasyMock.eq(TestDataSource.WIKI), + EasyMock.anyObject(Interval.class), + EasyMock.anyBoolean() + ) + ).andReturn(Optional.of(segments)).once(); EasyMock.replay(segmentsMetadataManager); - DataSourcesResource dataSourcesResource = new DataSourcesResource( - inventoryView, - segmentsMetadataManager, - null, - null, - null, - null, - null - ); - Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, null, null); + Response response = dataSourcesResource.getDatasourceLoadstatus(TestDataSource.WIKI, true, null, null, null, null); Assert.assertEquals(204, response.getStatus()); } @@ -1441,7 +1452,7 @@ public void testGetDatasourceLoadstatusDefault() { DataSegment datasource1Segment1 = new DataSegment( - "datasource1", + TestDataSource.WIKI, Intervals.of("2010-01-01/P1D"), "", null, @@ -1453,7 +1464,7 @@ public void testGetDatasourceLoadstatusDefault() ); DataSegment datasource1Segment2 = new DataSegment( - "datasource1", + TestDataSource.WIKI, Intervals.of("2010-01-22/P1D"), "", null, @@ -1464,7 +1475,7 @@ public void testGetDatasourceLoadstatusDefault() 20 ); DataSegment datasource2Segment1 = new
DataSegment( - "datasource2", + TestDataSource.KOALA, Intervals.of("2010-01-01/P1D"), "", null, @@ -1485,34 +1496,32 @@ public void testGetDatasourceLoadstatusDefault() ); // Test when datasource fully loaded - EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) + EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq(TestDataSource.WIKI), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) .andReturn(Optional.of(segments)).once(); EasyMock.expect(inventoryView.getLoadInfoForAllSegments()).andReturn(completedLoadInfoMap).once(); EasyMock.replay(segmentsMetadataManager, inventoryView); - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, null, null); + Response response = dataSourcesResource.getDatasourceLoadstatus(TestDataSource.WIKI, true, null, null, null, null); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); Assert.assertEquals(1, ((Map) response.getEntity()).size()); - Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1")); - Assert.assertEquals(100.0, ((Map) response.getEntity()).get("datasource1")); + Assert.assertTrue(((Map) response.getEntity()).containsKey(TestDataSource.WIKI)); + Assert.assertEquals(100.0, ((Map) response.getEntity()).get(TestDataSource.WIKI)); EasyMock.verify(segmentsMetadataManager, inventoryView); EasyMock.reset(segmentsMetadataManager, inventoryView); // Test when datasource half loaded - EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) + EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq(TestDataSource.WIKI), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) .andReturn(Optional.of(segments)).once(); EasyMock.expect(inventoryView.getLoadInfoForAllSegments()).andReturn(halfLoadedInfoMap).once(); EasyMock.replay(segmentsMetadataManager, inventoryView); - dataSourcesResource = createResource(); - response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, null, null); + response = dataSourcesResource.getDatasourceLoadstatus(TestDataSource.WIKI, true, null, null, null, null); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); Assert.assertEquals(1, ((Map) response.getEntity()).size()); - Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1")); - Assert.assertEquals(50.0, ((Map) response.getEntity()).get("datasource1")); + Assert.assertTrue(((Map) response.getEntity()).containsKey(TestDataSource.WIKI)); + Assert.assertEquals(50.0, ((Map) response.getEntity()).get(TestDataSource.WIKI)); EasyMock.verify(segmentsMetadataManager, inventoryView); } @@ -1520,7 +1529,7 @@ public void testGetDatasourceLoadstatusDefault() public void testGetDatasourceLoadstatusSimple() { DataSegment datasource1Segment1 = new DataSegment( - "datasource1", + TestDataSource.WIKI, Intervals.of("2010-01-01/P1D"), "", null, @@ -1532,7 +1541,7 @@ public void testGetDatasourceLoadstatusSimple() ); DataSegment datasource1Segment2 = new DataSegment( - "datasource1", + TestDataSource.WIKI, Intervals.of("2010-01-22/P1D"), "", null, @@ -1543,7 
+1552,7 @@ public void testGetDatasourceLoadstatusSimple() 20 ); DataSegment datasource2Segment1 = new DataSegment( - "datasource2", + TestDataSource.KOALA, Intervals.of("2010-01-01/P1D"), "", null, @@ -1564,34 +1573,32 @@ public void testGetDatasourceLoadstatusSimple() ); // Test when datasource fully loaded - EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) + EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq(TestDataSource.WIKI), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) .andReturn(Optional.of(segments)).once(); EasyMock.expect(inventoryView.getLoadInfoForAllSegments()).andReturn(completedLoadInfoMap).once(); EasyMock.replay(segmentsMetadataManager, inventoryView); - DataSourcesResource dataSourcesResource = createResource(); - Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null); + Response response = dataSourcesResource.getDatasourceLoadstatus(TestDataSource.WIKI, true, null, "simple", null, null); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); Assert.assertEquals(1, ((Map) response.getEntity()).size()); - Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1")); - Assert.assertEquals(0, ((Map) response.getEntity()).get("datasource1")); + Assert.assertTrue(((Map) response.getEntity()).containsKey(TestDataSource.WIKI)); + Assert.assertEquals(0, ((Map) response.getEntity()).get(TestDataSource.WIKI)); EasyMock.verify(segmentsMetadataManager, inventoryView); EasyMock.reset(segmentsMetadataManager, inventoryView); // Test when datasource half loaded - EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) + EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq(TestDataSource.WIKI), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) .andReturn(Optional.of(segments)).once(); EasyMock.expect(inventoryView.getLoadInfoForAllSegments()).andReturn(halfLoadedInfoMap).once(); EasyMock.replay(segmentsMetadataManager, inventoryView); - dataSourcesResource = createResource(); - response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, "simple", null, null); + response = dataSourcesResource.getDatasourceLoadstatus(TestDataSource.WIKI, true, null, "simple", null, null); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); Assert.assertEquals(1, ((Map) response.getEntity()).size()); - Assert.assertTrue(((Map) response.getEntity()).containsKey("datasource1")); - Assert.assertEquals(1, ((Map) response.getEntity()).get("datasource1")); + Assert.assertTrue(((Map) response.getEntity()).containsKey(TestDataSource.WIKI)); + Assert.assertEquals(1, ((Map) response.getEntity()).get(TestDataSource.WIKI)); EasyMock.verify(segmentsMetadataManager, inventoryView); } @@ -1599,7 +1606,7 @@ public void testGetDatasourceLoadstatusSimple() public void testGetDatasourceLoadstatusFull() { DataSegment datasource1Segment1 = new DataSegment( - "datasource1", + TestDataSource.WIKI, Intervals.of("2010-01-01/P1D"), "", null, @@ -1611,7 +1618,7 @@ public void testGetDatasourceLoadstatusFull() ); DataSegment datasource1Segment2 = new 
DataSegment( - "datasource1", + TestDataSource.WIKI, Intervals.of("2010-01-22/P1D"), "", null, @@ -1625,14 +1632,14 @@ public void testGetDatasourceLoadstatusFull() final Map> underReplicationCountsPerDataSourcePerTier = new HashMap<>(); Object2LongMap tier1 = new Object2LongOpenHashMap<>(); - tier1.put("datasource1", 0L); + tier1.put(TestDataSource.WIKI, 0L); Object2LongMap tier2 = new Object2LongOpenHashMap<>(); - tier2.put("datasource1", 3L); + tier2.put(TestDataSource.WIKI, 3L); underReplicationCountsPerDataSourcePerTier.put("tier1", tier1); underReplicationCountsPerDataSourcePerTier.put("tier2", tier2); // Test when datasource fully loaded - EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) + EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq(TestDataSource.WIKI), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) .andReturn(Optional.of(segments)).once(); DruidCoordinator druidCoordinator = EasyMock.createMock(DruidCoordinator.class); EasyMock.expect(druidCoordinator.getTierToDatasourceToUnderReplicatedCount(segments, false)) @@ -1642,14 +1649,14 @@ public void testGetDatasourceLoadstatusFull() DataSourcesResource dataSourcesResource = new DataSourcesResource(null, segmentsMetadataManager, null, null, null, druidCoordinator, auditManager); - Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, "full", null); + Response response = dataSourcesResource.getDatasourceLoadstatus(TestDataSource.WIKI, true, null, null, "full", null); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); Assert.assertEquals(2, ((Map) response.getEntity()).size()); Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier1")).size()); Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier2")).size()); - Assert.assertEquals(0L, ((Map) ((Map) response.getEntity()).get("tier1")).get("datasource1")); - Assert.assertEquals(3L, ((Map) ((Map) response.getEntity()).get("tier2")).get("datasource1")); + Assert.assertEquals(0L, ((Map) ((Map) response.getEntity()).get("tier1")).get(TestDataSource.WIKI)); + Assert.assertEquals(3L, ((Map) ((Map) response.getEntity()).get("tier2")).get(TestDataSource.WIKI)); EasyMock.verify(segmentsMetadataManager); } @@ -1657,7 +1664,7 @@ public void testGetDatasourceLoadstatusFull() public void testGetDatasourceLoadstatusFullAndComputeUsingClusterView() { DataSegment datasource1Segment1 = new DataSegment( - "datasource1", + TestDataSource.WIKI, Intervals.of("2010-01-01/P1D"), "", null, @@ -1669,7 +1676,7 @@ public void testGetDatasourceLoadstatusFullAndComputeUsingClusterView() ); DataSegment datasource1Segment2 = new DataSegment( - "datasource1", + TestDataSource.WIKI, Intervals.of("2010-01-22/P1D"), "", null, @@ -1683,14 +1690,14 @@ public void testGetDatasourceLoadstatusFullAndComputeUsingClusterView() final Map> underReplicationCountsPerDataSourcePerTier = new HashMap<>(); Object2LongMap tier1 = new Object2LongOpenHashMap<>(); - tier1.put("datasource1", 0L); + tier1.put(TestDataSource.WIKI, 0L); Object2LongMap tier2 = new Object2LongOpenHashMap<>(); - tier2.put("datasource1", 3L); + tier2.put(TestDataSource.WIKI, 3L); underReplicationCountsPerDataSourcePerTier.put("tier1", tier1); underReplicationCountsPerDataSourcePerTier.put("tier2", tier2); // Test when datasource fully loaded 
- EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq("datasource1"), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) + EasyMock.expect(segmentsMetadataManager.iterateAllUsedNonOvershadowedSegmentsForDatasourceInterval(EasyMock.eq(TestDataSource.WIKI), EasyMock.anyObject(Interval.class), EasyMock.anyBoolean())) .andReturn(Optional.of(segments)).once(); DruidCoordinator druidCoordinator = EasyMock.createMock(DruidCoordinator.class); EasyMock.expect(druidCoordinator.getTierToDatasourceToUnderReplicatedCount(segments, true)) @@ -1700,14 +1707,14 @@ public void testGetDatasourceLoadstatusFullAndComputeUsingClusterView() DataSourcesResource dataSourcesResource = new DataSourcesResource(null, segmentsMetadataManager, null, null, null, druidCoordinator, auditManager); - Response response = dataSourcesResource.getDatasourceLoadstatus("datasource1", true, null, null, "full", "computeUsingClusterView"); + Response response = dataSourcesResource.getDatasourceLoadstatus(TestDataSource.WIKI, true, null, null, "full", "computeUsingClusterView"); Assert.assertEquals(200, response.getStatus()); Assert.assertNotNull(response.getEntity()); Assert.assertEquals(2, ((Map) response.getEntity()).size()); Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier1")).size()); Assert.assertEquals(1, ((Map) ((Map) response.getEntity()).get("tier2")).size()); - Assert.assertEquals(0L, ((Map) ((Map) response.getEntity()).get("tier1")).get("datasource1")); - Assert.assertEquals(3L, ((Map) ((Map) response.getEntity()).get("tier2")).get("datasource1")); + Assert.assertEquals(0L, ((Map) ((Map) response.getEntity()).get("tier1")).get(TestDataSource.WIKI)); + Assert.assertEquals(3L, ((Map) ((Map) response.getEntity()).get("tier2")).get(TestDataSource.WIKI)); EasyMock.verify(segmentsMetadataManager); } @@ -1753,17 +1760,4 @@ private void prepareRequestForAudit() EasyMock.replay(request); } - - private DataSourcesResource createResource() - { - return new DataSourcesResource( - inventoryView, - segmentsMetadataManager, - null, - null, - AuthTestUtils.TEST_AUTHORIZER_MAPPER, - null, - auditManager - ); - } } diff --git a/services/src/main/java/org/apache/druid/cli/CliOverlord.java b/services/src/main/java/org/apache/druid/cli/CliOverlord.java index 52962ddf4c1e..67a36ed1ff54 100644 --- a/services/src/main/java/org/apache/druid/cli/CliOverlord.java +++ b/services/src/main/java/org/apache/druid/cli/CliOverlord.java @@ -95,6 +95,7 @@ import org.apache.druid.indexing.overlord.hrtr.HttpRemoteTaskRunnerFactory; import org.apache.druid.indexing.overlord.hrtr.HttpRemoteTaskRunnerResource; import org.apache.druid.indexing.overlord.http.OverlordCompactionResource; +import org.apache.druid.indexing.overlord.http.OverlordDataSourcesResource; import org.apache.druid.indexing.overlord.http.OverlordRedirectInfo; import org.apache.druid.indexing.overlord.http.OverlordResource; import org.apache.druid.indexing.overlord.sampler.SamplerModule; @@ -299,6 +300,7 @@ public void configure(Binder binder) Jerseys.addResource(binder, SupervisorResource.class); Jerseys.addResource(binder, HttpRemoteTaskRunnerResource.class); Jerseys.addResource(binder, OverlordCompactionResource.class); + Jerseys.addResource(binder, OverlordDataSourcesResource.class); binder.bind(AppenderatorsManager.class) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidCalciteSchemaModuleTest.java 
b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidCalciteSchemaModuleTest.java index 0616dd2a3a34..df9e4e12027e 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidCalciteSchemaModuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidCalciteSchemaModuleTest.java @@ -34,7 +34,6 @@ import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.client.coordinator.NoopCoordinatorClient; import org.apache.druid.client.indexing.IndexingService; -import org.apache.druid.client.indexing.NoopOverlordClient; import org.apache.druid.discovery.DruidLeaderClient; import org.apache.druid.discovery.DruidNodeDiscoveryProvider; import org.apache.druid.guice.LazySingleton; @@ -43,6 +42,7 @@ import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider; import org.apache.druid.query.lookup.LookupReferencesManager; +import org.apache.druid.rpc.indexing.NoopOverlordClient; import org.apache.druid.rpc.indexing.OverlordClient; import org.apache.druid.segment.join.JoinableFactory; import org.apache.druid.segment.join.MapJoinableFactory; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java index 05c267a00bbe..de8aec59fd13 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java @@ -32,7 +32,6 @@ import org.apache.druid.client.FilteredServerInventoryView; import org.apache.druid.client.ServerInventoryView; import org.apache.druid.client.ServerView; -import org.apache.druid.client.indexing.NoopOverlordClient; import org.apache.druid.discovery.DiscoveryDruidNode; import org.apache.druid.discovery.DruidLeaderClient; import org.apache.druid.discovery.DruidNodeDiscovery; @@ -53,6 +52,7 @@ import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.QuerySegmentWalker; +import org.apache.druid.rpc.indexing.NoopOverlordClient; import org.apache.druid.rpc.indexing.OverlordClient; import org.apache.druid.segment.join.JoinableFactory; import org.apache.druid.segment.join.JoinableFactoryWrapper; diff --git a/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java b/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java index 3801ef2888b5..b4437045fe1a 100644 --- a/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/guice/SqlModuleTest.java @@ -33,7 +33,6 @@ import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.client.coordinator.NoopCoordinatorClient; import org.apache.druid.client.indexing.IndexingService; -import org.apache.druid.client.indexing.NoopOverlordClient; import org.apache.druid.discovery.DruidLeaderClient; import org.apache.druid.discovery.DruidNodeDiscoveryProvider; import org.apache.druid.guice.DruidGuiceExtensions; @@ -52,6 +51,7 @@ import org.apache.druid.query.QuerySegmentWalker; import org.apache.druid.query.QueryToolChestWarehouse; import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider; +import org.apache.druid.rpc.indexing.NoopOverlordClient; import org.apache.druid.rpc.indexing.OverlordClient; import org.apache.druid.segment.join.JoinableFactory; import org.apache.druid.segment.loading.SegmentCacheManager;