
Commit

refine logs
Signed-off-by: Max Thonagel <12283268+thoniTUB@users.noreply.github.com>
thoniTUB committed Feb 20, 2025
1 parent 3e0aeae commit b175ec3
Showing 8 changed files with 47 additions and 55 deletions.

@@ -262,7 +262,7 @@ private void preprocessJobs(Collection<PreprocessingJob> jobs, int buckets, Conq
 
         log.info("Required to preprocess {} in total", BinaryByteUnit.format(totalSize));
 
-        final ProgressBar totalProgress = new ProgressBar(totalSize, System.out);
+        final ProgressBar totalProgress = new ProgressBar(totalSize);
 
         for (PreprocessingJob job : jobs) {
             pool.submit(() -> {

@@ -18,7 +18,7 @@
 @Slf4j
 public class CachedStore<KEY, VALUE> implements Store<KEY, VALUE> {
 
-    private static final ProgressBar PROGRESS_BAR = new ProgressBar(0, System.out);
+    private static final ProgressBar PROGRESS_BAR = new ProgressBar(0);
 
     private ConcurrentHashMap<KEY, VALUE> cache = new ConcurrentHashMap<>();
     private final Store<KEY, VALUE> store;

@@ -60,7 +60,7 @@ public void filter(final ContainerRequestContext requestContext) throws IOExcept
         }
 
         if (tokens.isEmpty()) {
-            log.warn("No tokens could be parsed from the request");
+            log.trace("No tokens could be parsed from the request");
         }
 
         int failedTokens = 0;
@@ -87,7 +87,7 @@ public void filter(final ContainerRequestContext requestContext) throws IOExcept
 
             }
         }
-        log.warn("Non of the configured realms was able to successfully authenticate the extracted token(s).");
+        log.trace("Non of the configured realms was able to successfully authenticate the extracted token(s).");
         log.trace("The {} tokens failed.", failedTokens);
         throw new NotAuthorizedException("Failed to authenticate request. The cause has been logged.");
     }
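
Note: both messages above are now emitted at TRACE, so they stay invisible with typical INFO/WARN defaults. Below is a minimal sketch of raising the threshold programmatically when debugging authentication problems, assuming a Logback backend (the usual choice for Dropwizard-based services); the logger name is an assumption and should be replaced with the package that actually contains the authentication filter. In a deployed service the same effect is usually achieved through the logging section of the application configuration instead.

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;

import org.slf4j.LoggerFactory;

public class AuthTraceLoggingSketch {

    public static void main(String[] args) {
        // Hypothetical logger name; point it at the real auth package.
        // The cast only works when Logback is the SLF4J backend.
        Logger authLogger = (Logger) LoggerFactory.getLogger("com.bakdata.conquery.models.auth");
        authLogger.setLevel(Level.TRACE);
    }
}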

@@ -41,11 +41,7 @@ public class CalculateCBlocksJob extends Job {
     private final ExecutorService executorService;
 
     public void addCBlock(Bucket bucket, ConceptTreeConnector connector) {
-        tasks.add(createInformationProcessor(connector, bucket));
-    }
-
-    private CalculationInformationProcessor createInformationProcessor(ConceptTreeConnector connector, Bucket bucket) {
-        return new CalculationInformationProcessor(connector, bucket, bucketManager, storage);
+        tasks.add(new CalculationInformationProcessor(connector, bucket, bucketManager, storage));
     }
 
     @Override

@@ -73,42 +73,42 @@ public void react(Worker context) throws Exception {
 
         final AtomicInteger done = new AtomicInteger();
 
         // Might be too much because they get filtered, but no problem
         getProgressReporter().setMax(columns.size());
 
         final List<? extends ListenableFuture<?>> futures =
                 columns.stream()
                        .filter(column -> table2Buckets.get(column.getTable()) != null)
                        .map(ColumnId::resolve)
-                       .map(column -> {
-                           // Acquire before submitting, so we don't spam the executor with waiting threads
-                           return jobsExecutorService.submit(() -> {
-                               final List<Bucket> buckets = table2Buckets.get(column.getTable().getId());
-
-                               final Set<String> values = buckets.stream()
-                                                                 .flatMap(bucket -> ((StringStore) bucket.getStore(column)).streamValues())
-                                                                 .collect(Collectors.toSet());
-
-                               log.trace("Finished collecting {} values for column {}", values.size(), column);
-
-                               // Chunk values, to produce smaller messages
-                               Iterable<List<String>> partition = Iterables.partition(values, columValueChunkSize);
-
-                               log.trace("BEGIN Sending column values for {}. {} total values in {} sized batches",
-                                         column.getId(), values.size(), columValueChunkSize
-                               );
-
-                               int i = 0;
-                               for (List<String> chunk : partition) {
-                                   // Send values to manager
-                                   RegisterColumnValues message =
-                                           new RegisterColumnValues(getMessageId(), context.getInfo().getId(), column.getId(), chunk);
-                                   WriteFuture send = context.send(message);
-
-                                   log.trace("Finished sending chunk {} for column '{}'", i++, column.getId());
-                               }
-
-                               getProgressReporter().report(1);
-                               log.trace("Finished collections values for column {} as number {}", column, done.incrementAndGet());
-                           });
-                       })
+                       .map(column -> jobsExecutorService.submit(() -> {
+                           final List<Bucket> buckets = table2Buckets.get(column.getTable().getId());
+
+                           final Set<String> values = buckets.stream()
+                                                             .flatMap(bucket -> ((StringStore) bucket.getStore(column)).streamValues())
+                                                             .collect(Collectors.toSet());
+
+                           log.trace("Finished collecting {} values for column {}", values.size(), column);
+
+                           // Chunk values, to produce smaller messages
+                           Iterable<List<String>> partition = Iterables.partition(values, columValueChunkSize);
+
+                           log.trace("BEGIN Sending column values for {}. {} total values in {} sized batches",
+                                     column.getId(), values.size(), columValueChunkSize
+                           );
+
+                           int i = 0;
+                           for (List<String> chunk : partition) {
+                               // Send values to manager
+                               RegisterColumnValues message =
+                                       new RegisterColumnValues(getMessageId(), context.getInfo().getId(), column.getId(), chunk);
+                               WriteFuture send = context.send(message);
+
+                               log.trace("Finished sending chunk {} for column '{}'", i++, column.getId());
+                           }
+
+                           getProgressReporter().report(1);
+                           log.trace("Finished collections values for column {} as number {}", column, done.incrementAndGet());
+                       })
+                )
                 .collect(Collectors.toList());
 
@@ -130,7 +130,6 @@ public void react(Worker context) throws Exception {
 
         // We may do this, because we own this specific ExecutorService.
        jobsExecutorService.shutdown();
-        getProgressReporter().done();
 
         log.info("Finished collecting values from these columns: {}", Arrays.toString(columns.toArray()));
         context.send(new FinalizeReactionMessage(getMessageId(), context.getInfo().getId()));
@@ -184,7 +183,6 @@ public void execute() {
             }
         }
 
-        getProgressReporter().done();
         log.debug("FINISHED counting search totals on {}", namespace.getDataset().getId());
     }
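
Note: most of the react() hunk above is re-indentation; the only structural change is that the block lambda wrapping jobsExecutorService.submit(...) is collapsed into an expression lambda. A small self-contained sketch of that before/after shape, using hypothetical names, to show the two forms are equivalent:

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class LambdaFlatteningSketch {

    public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(2);

        // Before: a block lambda whose only statement returns the submitted future.
        List<? extends Future<?>> before = Stream.of("a", "b")
                .map(value -> {
                    return pool.submit(() -> System.out.println(value));
                })
                .collect(Collectors.toList());

        // After: the wrapper block is removed, as in the diff above.
        List<? extends Future<?>> after = Stream.of("a", "b")
                .map(value -> pool.submit(() -> System.out.println(value)))
                .collect(Collectors.toList());

        pool.shutdown();
    }
}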

@@ -50,7 +50,7 @@ private void executionRemoved(RemovalNotification<ManagedExecutionId, State> rem
 
         final ManagedExecutionId executionId = removalNotification.getKey();
 
-        log.warn("Evicted Results for Query[{}] (Reason: {})", executionId, removalNotification.getCause());
+        log.trace("Evicted Results for Query[{}] (Reason: {})", executionId, removalNotification.getCause());
 
         final ManagedExecution execution = getExecution(executionId);
 

@@ -57,7 +57,8 @@ public boolean execute(Query query, QueryExecutionContext executionContext, Shar
         final ThreadLocal<QueryPlan<?>> plan = ThreadLocal.withInitial(() -> query.createQueryPlan(new QueryPlanContext(worker, secondaryIdSubPlanLimit)));
 
         if (entities.isEmpty()) {
-            log.warn("Entities for query are empty");
+            // This is quite common for the entity preview, as only single entities are requested
+            log.trace("Entities for query are empty");
         }
 
         try {

@@ -1,12 +1,13 @@
 package com.bakdata.conquery.util.io;
 
-import java.io.PrintStream;
 import java.time.Duration;
 import java.util.concurrent.atomic.AtomicLong;
 
 import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
 
 //TODO replace with https://github.com/vdurmont/etaprinter
+@Slf4j
 public class ProgressBar {
 
     private static final int CHARACTERS = 50;
@@ -21,12 +22,10 @@ public class ProgressBar {
     @Getter
     private final AtomicLong maxValue;
     private final AtomicLong lastPercentage = new AtomicLong(0);
-    private final PrintStream out;
     private final long startTime;
-    public ProgressBar(long maxValue, PrintStream out) {
 
+    public ProgressBar(long maxValue) {
         this.maxValue = new AtomicLong(Math.max(1, maxValue));
-        this.out = out;
         this.startTime = System.nanoTime();
     }
 
@@ -45,11 +44,9 @@ public void addMaxValue(long add) {
 
     public void done() {
         StringBuilder sb = new StringBuilder();
-        for(int i=0;i<CHARACTERS;i++) {
-            sb.append(BAR_CHARACTERS[2]);
-        }
+        sb.append(String.valueOf(BAR_CHARACTERS[2]).repeat(CHARACTERS));
         sb.append(" 100% DONE");
-        out.println(sb.toString());
+        log.info(sb.toString());
     }
 
     private void print() {
@@ -80,6 +77,6 @@ private void print() {
                       .toLowerCase()
         );
         sb.append('\r');
-        out.print(sb.toString());
+        log.info(sb.toString());
     }
 }
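
Note: taken together, the ProgressBar hunks remove the injected PrintStream and emit the rendered bar through the class's own SLF4J logger instead. A minimal usage sketch of the refactored class; the call site is hypothetical and only uses the constructor, addMaxValue(long) and done() shown above.

import com.bakdata.conquery.util.io.ProgressBar;

public class ProgressBarUsageSketch {

    public static void main(String[] args) {
        // No PrintStream is passed any more; output goes to whatever SLF4J backend is configured.
        ProgressBar bar = new ProgressBar(1_000L); // the maximum is clamped to at least 1 internally
        bar.addMaxValue(500L);                     // the total can still grow while work is being discovered
        // ... perform the actual work, reporting progress elsewhere ...
        bar.done();                                // logs a fully filled bar plus " 100% DONE" at INFO level
    }
}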
