added other logging filters, including a custom one for suppressing duplicate messages within a time period

This commit is contained in:
Jason_DiDonato@yahoo.com 2021-03-25 15:15:51 -04:00
parent 2fbe926725
commit eea549707a
24 changed files with 353 additions and 237 deletions

View file

@ -0,0 +1,77 @@
// Copyright (c) 2021 PSForever
package net.psforever.filters;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.filter.Filter;
import ch.qos.logback.core.spi.FilterReply;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
* Disrupts a variety of logging messages that would otherwise repeat within a certain frame of time.
* Until there is a significant break in time between the logging of the duplicated messages,
* those messages are denied logging.
* Only exact matches via hash are denied.
* Be aware of the pitfalls of default `String` hash code.
*/
public class ApplyCooldownToDuplicateLoggingFilter extends Filter<ILoggingEvent> {
    /** Minimum quiet period (ms) a message must observe before it may be logged again. */
    private long cooldown;
    /** Raw message pattern -> timestamp (ms) of the most recent logging attempt. */
    private ConcurrentHashMap<String, Long> messageMap;
    /** Interval (ms) between housekeeping sweeps that evict stale entries. Default: 15 min. */
    private long cleaning = 900000L;
    /** Single-threaded scheduler driving the housekeeping sweeps; created in start(). */
    private ScheduledExecutorService housecleaning;

    /**
     * Denies an event when an identical message was seen within the cooldown window.
     * Note: the timestamp is refreshed on every attempt, so a message that keeps
     * repeating must stay quiet for a full cooldown before it is logged again.
     *
     * @param event the logging event under consideration
     * @return DENY for a duplicate inside the cooldown window, NEUTRAL otherwise
     */
    @Override
    public FilterReply decide(ILoggingEvent event) {
        // Guard: logback may invoke decide() before start() succeeded (or when
        // cooldown == 0, in which case messageMap is never created). Without this
        // check the filter would throw NPE and break the whole filter chain.
        if (!isStarted()) {
            return FilterReply.NEUTRAL;
        }
        String msg = event.getMessage();
        long currTime = System.currentTimeMillis();
        // put() returns the previous timestamp (or null on first sighting).
        Long previousTime = messageMap.put(msg, currTime);
        if (previousTime != null && previousTime + cooldown > currTime) {
            return FilterReply.DENY;
        }
        return FilterReply.NEUTRAL;
    }

    /**
     * Sets the duplicate-suppression window in milliseconds (Joran config setter).
     * A value of 0 leaves the filter unstarted, i.e. disabled.
     */
    public void setCooldown(Long duration) {
        this.cooldown = duration;
    }

    /** Sets the housekeeping sweep interval in milliseconds (Joran config setter). */
    public void setCleaning(Long duration) {
        this.cleaning = duration;
    }

    /**
     * Starts the filter if a non-zero cooldown was configured: allocates the
     * message map and schedules the periodic eviction of stale entries.
     */
    @Override
    public void start() {
        if (this.cooldown != 0L) {
            messageMap = new ConcurrentHashMap<>(1000);
            housecleaning = Executors.newScheduledThreadPool(1);
            Runnable task = () -> {
                // Being "concurrent" should be enough; the worst that can happen is
                // two of the same message back-to-back in the log once in a while.
                long currTime = System.currentTimeMillis();
                // removeIf on a ConcurrentHashMap entry set is weakly consistent and
                // safe to run while decide() keeps inserting.
                messageMap.entrySet().removeIf(entry -> entry.getValue() + cooldown < currTime);
            };
            housecleaning.scheduleWithFixedDelay(task, cleaning, cleaning, TimeUnit.MILLISECONDS);
            super.start();
        }
    }

    /**
     * Stops the filter and releases its resources. Null-guarded so that calling
     * stop() on a filter that never started (cooldown == 0) does not throw NPE.
     */
    @Override
    public void stop() {
        if (housecleaning != null) {
            housecleaning.shutdown();
            housecleaning = null;
        }
        if (messageMap != null) {
            messageMap.clear();
            messageMap = null;
        }
        super.stop();
    }
}

View file

@ -9,7 +9,9 @@ import ch.qos.logback.core.spi.FilterReply;
* Disrupts a variety of logging messages that originate from specific loggers.
* A comparison of the prefix text of the logger handling the event is performed,
* with a positive match denying that event being appended.
* The full prefix must be provided, as the name occasionally appears in an abbreviated form.
* The full prefix must be provided, as the filter uses the fully authenticated name
* and the logger occasionally displays an abbreviated form for longer names,
* e.g., "i.g.context.jasync ..." instead of "io.getquill.context.jasync ...".
*/
public class LoggerPrefixFilter extends Filter<ILoggingEvent> {
private String prefix;
@ -27,6 +29,7 @@ public class LoggerPrefixFilter extends Filter<ILoggingEvent> {
this.prefix = name;
}
@Override
public void start() {
if (this.prefix != null) {
super.start();