From b175ec37d220b94fbdab1bc12b9e573025627981 Mon Sep 17 00:00:00 2001 From: Max Thonagel <12283268+thoniTUB@users.noreply.github.com> Date: Thu, 20 Feb 2025 14:27:49 +0100 Subject: [PATCH] refine logs Signed-off-by: Max Thonagel <12283268+thoniTUB@users.noreply.github.com> --- .../commands/PreprocessorCommand.java | 2 +- .../io/storage/xodus/stores/CachedStore.java | 2 +- .../conquery/models/auth/web/AuthFilter.java | 4 +- .../models/jobs/CalculateCBlocksJob.java | 6 +- .../specific/CollectColumnValuesJob.java | 66 +++++++++---------- .../models/query/ExecutionManager.java | 2 +- .../conquery/models/query/QueryExecutor.java | 3 +- .../bakdata/conquery/util/io/ProgressBar.java | 17 ++--- 8 files changed, 47 insertions(+), 55 deletions(-) diff --git a/backend/src/main/java/com/bakdata/conquery/commands/PreprocessorCommand.java b/backend/src/main/java/com/bakdata/conquery/commands/PreprocessorCommand.java index 686e8200eb..193293a641 100644 --- a/backend/src/main/java/com/bakdata/conquery/commands/PreprocessorCommand.java +++ b/backend/src/main/java/com/bakdata/conquery/commands/PreprocessorCommand.java @@ -262,7 +262,7 @@ private void preprocessJobs(Collection jobs, int buckets, Conq log.info("Required to preprocess {} in total", BinaryByteUnit.format(totalSize)); - final ProgressBar totalProgress = new ProgressBar(totalSize, System.out); + final ProgressBar totalProgress = new ProgressBar(totalSize); for (PreprocessingJob job : jobs) { pool.submit(() -> { diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java index 043058bfaf..5cb496e32b 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java @@ -18,7 +18,7 @@ @Slf4j public class CachedStore implements Store { - private static final ProgressBar PROGRESS_BAR = 
new ProgressBar(0, System.out); + private static final ProgressBar PROGRESS_BAR = new ProgressBar(0); private ConcurrentHashMap cache = new ConcurrentHashMap<>(); private final Store store; diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/web/AuthFilter.java b/backend/src/main/java/com/bakdata/conquery/models/auth/web/AuthFilter.java index 24a0273966..78a13f765b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/web/AuthFilter.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/web/AuthFilter.java @@ -60,7 +60,7 @@ public void filter(final ContainerRequestContext requestContext) throws IOExcept } if (tokens.isEmpty()) { - log.warn("No tokens could be parsed from the request"); + log.trace("No tokens could be parsed from the request"); } int failedTokens = 0; @@ -87,7 +87,7 @@ public void filter(final ContainerRequestContext requestContext) throws IOExcept } } - log.warn("Non of the configured realms was able to successfully authenticate the extracted token(s)."); + log.trace("None of the configured realms was able to successfully authenticate the extracted token(s)."); log.trace("The {} tokens failed.", failedTokens); throw new NotAuthorizedException("Failed to authenticate request. 
The cause has been logged."); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/jobs/CalculateCBlocksJob.java b/backend/src/main/java/com/bakdata/conquery/models/jobs/CalculateCBlocksJob.java index 270230a145..b02b18437b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/jobs/CalculateCBlocksJob.java +++ b/backend/src/main/java/com/bakdata/conquery/models/jobs/CalculateCBlocksJob.java @@ -41,11 +41,7 @@ public class CalculateCBlocksJob extends Job { private final ExecutorService executorService; public void addCBlock(Bucket bucket, ConceptTreeConnector connector) { - tasks.add(createInformationProcessor(connector, bucket)); - } - - private CalculationInformationProcessor createInformationProcessor(ConceptTreeConnector connector, Bucket bucket) { - return new CalculationInformationProcessor(connector, bucket, bucketManager, storage); + tasks.add(new CalculationInformationProcessor(connector, bucket, bucketManager, storage)); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java index 526be8e7b5..22da38eeae 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java @@ -73,42 +73,42 @@ public void react(Worker context) throws Exception { final AtomicInteger done = new AtomicInteger(); + // Might be too much because they get filtered, but no problem + getProgressReporter().setMax(columns.size()); + final List> futures = columns.stream() .filter(column -> table2Buckets.get(column.getTable()) != null) .map(ColumnId::resolve) - .map(column -> { - // Acquire before submitting, so we don't spam the executor with waiting threads - return jobsExecutorService.submit(() -> { - final List buckets = 
table2Buckets.get(column.getTable().getId()); - - final Set values = buckets.stream() - .flatMap(bucket -> ((StringStore) bucket.getStore(column)).streamValues()) - .collect(Collectors.toSet()); - - log.trace("Finished collecting {} values for column {}", values.size(), column); - - // Chunk values, to produce smaller messages - Iterable> partition = Iterables.partition(values, columValueChunkSize); - - log.trace("BEGIN Sending column values for {}. {} total values in {} sized batches", - column.getId(), values.size(), columValueChunkSize - ); - - int i = 0; - for (List chunk : partition) { - // Send values to manager - RegisterColumnValues message = - new RegisterColumnValues(getMessageId(), context.getInfo().getId(), column.getId(), chunk); - WriteFuture send = context.send(message); - - log.trace("Finished sending chunk {} for column '{}'", i++, column.getId()); - } - - getProgressReporter().report(1); - log.trace("Finished collections values for column {} as number {}", column, done.incrementAndGet()); - }); - } + .map(column -> jobsExecutorService.submit(() -> { + final List buckets = table2Buckets.get(column.getTable().getId()); + + final Set values = buckets.stream() + .flatMap(bucket -> ((StringStore) bucket.getStore(column)).streamValues()) + .collect(Collectors.toSet()); + + log.trace("Finished collecting {} values for column {}", values.size(), column); + + // Chunk values, to produce smaller messages + Iterable> partition = Iterables.partition(values, columValueChunkSize); + + log.trace("BEGIN Sending column values for {}. 
{} total values in {} sized batches", + column.getId(), values.size(), columValueChunkSize + ); + + int i = 0; + for (List chunk : partition) { + // Send values to manager + RegisterColumnValues message = + new RegisterColumnValues(getMessageId(), context.getInfo().getId(), column.getId(), chunk); + WriteFuture send = context.send(message); + + log.trace("Finished sending chunk {} for column '{}'", i++, column.getId()); + } + + getProgressReporter().report(1); + log.trace("Finished collecting values for column {} as number {}", column, done.incrementAndGet()); + }) ) .collect(Collectors.toList()); @@ -130,7 +130,6 @@ public void react(Worker context) throws Exception { // We may do this, because we own this specific ExecutorService. jobsExecutorService.shutdown(); - getProgressReporter().done(); log.info("Finished collecting values from these columns: {}", Arrays.toString(columns.toArray())); context.send(new FinalizeReactionMessage(getMessageId(), context.getInfo().getId())); @@ -184,7 +183,6 @@ public void execute() { } } - getProgressReporter().done(); log.debug("FINISHED counting search totals on {}", namespace.getDataset().getId()); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java b/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java index 663bf5be6c..0fa8d28397 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java @@ -50,7 +50,7 @@ private void executionRemoved(RemovalNotification rem final ManagedExecutionId executionId = removalNotification.getKey(); - log.warn("Evicted Results for Query[{}] (Reason: {})", executionId, removalNotification.getCause()); + log.trace("Evicted Results for Query[{}] (Reason: {})", executionId, removalNotification.getCause()); final ManagedExecution execution = getExecution(executionId); diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutor.java b/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutor.java index ddee9379a4..cf8dcb38c4 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutor.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutor.java @@ -57,7 +57,8 @@ public boolean execute(Query query, QueryExecutionContext executionContext, Shar final ThreadLocal> plan = ThreadLocal.withInitial(() -> query.createQueryPlan(new QueryPlanContext(worker, secondaryIdSubPlanLimit))); if (entities.isEmpty()) { - log.warn("Entities for query are empty"); + // This is quite common for the entity preview, as only single entities are requested + log.trace("Entities for query are empty"); } try { diff --git a/backend/src/main/java/com/bakdata/conquery/util/io/ProgressBar.java b/backend/src/main/java/com/bakdata/conquery/util/io/ProgressBar.java index 7db32d7124..fa6e292dd0 100644 --- a/backend/src/main/java/com/bakdata/conquery/util/io/ProgressBar.java +++ b/backend/src/main/java/com/bakdata/conquery/util/io/ProgressBar.java @@ -1,12 +1,13 @@ package com.bakdata.conquery.util.io; -import java.io.PrintStream; import java.time.Duration; import java.util.concurrent.atomic.AtomicLong; import lombok.Getter; +import lombok.extern.slf4j.Slf4j; //TODO replace with https://github.com/vdurmont/etaprinter +@Slf4j public class ProgressBar { private static final int CHARACTERS = 50; @@ -21,12 +22,10 @@ public class ProgressBar { @Getter private final AtomicLong maxValue; private final AtomicLong lastPercentage = new AtomicLong(0); - private final PrintStream out; private final long startTime; - - public ProgressBar(long maxValue, PrintStream out) { + + public ProgressBar(long maxValue) { this.maxValue = new AtomicLong(Math.max(1, maxValue)); - this.out = out; this.startTime = System.nanoTime(); } @@ -45,11 +44,9 @@ public void addMaxValue(long add) { public void done() { StringBuilder sb 
= new StringBuilder(); - for(int i=0;i