Added a 10s caching layer between database and network page creation

Rsl1122 2018-08-07 17:15:57 +03:00
parent efa4b36b6c
commit a8f57c8edd
7 changed files with 110 additions and 9 deletions
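
In short: expensive data suppliers are now wrapped in a CachingSupplier with a configurable time-to-live, and the Bungee network page is built from a 10-second DataContainerCache instead of querying the database on every update. A minimal standalone sketch of the time-to-live caching idea (class and method names here are illustrative, not part of the commit):

import java.util.function.Supplier;

// Illustrative sketch only: the wrapped supplier is re-invoked once the cached
// value is older than the given time-to-live (10 seconds for the network page cache).
public class TtlCacheSketch {

    static class TtlCachingSupplier<T> implements Supplier<T> {
        private final Supplier<T> original;
        private final long timeToLive;
        private T cachedValue;
        private long cacheTime;

        TtlCachingSupplier(Supplier<T> original, long timeToLive) {
            this.original = original;
            this.timeToLive = timeToLive;
        }

        @Override
        public T get() {
            if (cachedValue == null || System.currentTimeMillis() - cacheTime > timeToLive) {
                cachedValue = original.get();      // refresh from the expensive source
                cacheTime = System.currentTimeMillis();
            }
            return cachedValue;                    // otherwise serve the cached value
        }
    }

    public static void main(String[] args) {
        Supplier<String> expensiveFetch = () -> {
            System.out.println("Hitting the database...");
            return "network page data";
        };
        Supplier<String> cached = new TtlCachingSupplier<>(expensiveFetch, 10_000L);

        cached.get(); // first call invokes the fetch
        cached.get(); // within 10 seconds: no second fetch
    }
}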

CachingSupplier.java

@@ -16,15 +16,22 @@ public class CachingSupplier<T> implements Supplier<T> {
private final Supplier<T> original;
private T cachedValue;
private long cacheTime;
private long timeToLive;
public CachingSupplier(Supplier<T> original) {
this(original, TimeAmount.SECOND.ms() * 30L);
}
public CachingSupplier(Supplier<T> original, long timeToLive) {
this.original = original;
this.timeToLive = timeToLive;
cacheTime = 0L;
}
@Override
public T get() {
- if (cachedValue == null || System.currentTimeMillis() - cacheTime > TimeAmount.SECOND.ms() * 30L) {
+ if (cachedValue == null || System.currentTimeMillis() - cacheTime > timeToLive) {
cachedValue = original.get();
cacheTime = System.currentTimeMillis();
}

DataContainer.java

@@ -3,6 +3,7 @@ package com.djrapitops.plan.data.store.containers;
import com.djrapitops.plan.data.store.CachingSupplier;
import com.djrapitops.plan.data.store.Key;
import com.djrapitops.plan.data.store.mutators.formatting.Formatter;
import com.djrapitops.plugin.api.TimeAmount;
import java.util.HashMap;
import java.util.Optional;
@@ -18,6 +19,16 @@ import java.util.function.Supplier;
*/
public class DataContainer extends HashMap<Key, Supplier> {
private long timeToLive;
public DataContainer() {
timeToLive = TimeAmount.SECOND.ms() * 30L;
}
public DataContainer(long timeToLive) {
this.timeToLive = timeToLive;
}
/**
* Place your data inside the container.
*
@@ -33,7 +44,7 @@ public class DataContainer extends HashMap<Key, Supplier> {
if (supplier == null) {
return;
}
- super.put(key, new CachingSupplier<>(supplier));
+ super.put(key, new CachingSupplier<>(supplier, timeToLive));
}
public <T> Supplier<T> getSupplier(Key<T> key) {

CacheSystem.java

@@ -18,6 +18,7 @@ public class CacheSystem implements SubSystem {
private final DataCache dataCache;
private final GeolocationCache geolocationCache;
private final DataContainerCache dataContainerCache;
public CacheSystem(PlanSystem system) {
this(new DataCache(system), system);
@@ -26,6 +27,7 @@ public class CacheSystem implements SubSystem {
protected CacheSystem(DataCache dataCache, PlanSystem system) {
this.dataCache = dataCache;
geolocationCache = new GeolocationCache(() -> system.getLocaleSystem().getLocale());
dataContainerCache = new DataContainerCache();
}
public static CacheSystem getInstance() {
@@ -43,6 +45,7 @@
@Override
public void disable() {
geolocationCache.clearCache();
dataContainerCache.clear();
}
public DataCache getDataCache() {
@@ -52,4 +55,8 @@
public GeolocationCache getGeolocationCache() {
return geolocationCache;
}
public DataContainerCache getDataContainerCache() {
return dataContainerCache;
}
}

DataContainerCache.java

@@ -0,0 +1,71 @@
package com.djrapitops.plan.system.cache;
import com.djrapitops.plan.data.store.Key;
import com.djrapitops.plan.data.store.containers.AnalysisContainer;
import com.djrapitops.plan.data.store.containers.DataContainer;
import com.djrapitops.plan.data.store.containers.NetworkContainer;
import com.djrapitops.plan.data.store.containers.PlayerContainer;
import com.djrapitops.plan.system.database.databases.Database;
import com.djrapitops.plugin.api.TimeAmount;
import java.util.UUID;
import java.util.function.Supplier;
/**
* Cache to aid Bungee in case SQL is causing CPU thread starvation, leading to MySQL connection drops.
*
* @author Rsl1122
*/
public class DataContainerCache extends DataContainer {
public DataContainerCache() {
super(TimeAmount.SECOND.ms() * 10L);
putSupplier(Keys.NETWORK_CONTAINER, Suppliers.NETWORK_CONTAINER);
}
public PlayerContainer getPlayerContainer(UUID uuid) {
return getAndCacheSupplier(Keys.playerContainer(uuid), Suppliers.playerContainer(uuid));
}
public AnalysisContainer getAnalysisContainer(UUID serverUUID) {
return getAndCacheSupplier(Keys.analysisContainer(serverUUID), Suppliers.analysisContainer(serverUUID));
}
public <T> T getAndCacheSupplier(Key<T> key, Supplier<T> ifNotPresent) {
if (!supports(key)) {
putSupplier(key, ifNotPresent);
}
return getUnsafe(key);
}
public NetworkContainer getNetworkContainer() {
return getAndCacheSupplier(Keys.NETWORK_CONTAINER, Suppliers.NETWORK_CONTAINER);
}
public static class Keys {
static final Key<NetworkContainer> NETWORK_CONTAINER = new Key<>(NetworkContainer.class, "NETWORK_CONTAINER");
static Key<AnalysisContainer> analysisContainer(UUID serverUUID) {
return new Key<>(AnalysisContainer.class, "ANALYSIS_CONTAINER:" + serverUUID);
}
static Key<PlayerContainer> playerContainer(UUID uuid) {
return new Key<>(PlayerContainer.class, "PLAYER_CONTAINER:" + uuid);
}
}
public static class Suppliers {
static final Supplier<NetworkContainer> NETWORK_CONTAINER = () -> Database.getActive().fetch().getNetworkContainer();
static Supplier<AnalysisContainer> analysisContainer(UUID serverUUID) {
return () -> new AnalysisContainer(Database.getActive().fetch().getServerContainer(serverUUID));
}
static Supplier<PlayerContainer> playerContainer(UUID uuid) {
return () -> Database.getActive().fetch().getPlayerContainer(uuid);
}
}
}
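
A hedged usage sketch of the cache above, mirroring the BungeeInfoSystem change later in this diff (the wrapping method is illustrative and assumes the plugin's CacheSystem has been enabled):

// Within the 10 second TTL, getNetworkContainer() returns the cached container;
// after that, the supplier fetches a fresh one from the active database.
public String buildNetworkPageHtml() throws ParseException {
    NetworkContainer networkContainer = CacheSystem.getInstance()
            .getDataContainerCache()
            .getNetworkContainer();
    return new NetworkPage(networkContainer).toHtml();
}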

MySQLDB.java

@@ -66,12 +66,12 @@ public class MySQLDB extends SQLDB {
config.setPassword(password);
config.setPoolName("Plan Connection Pool-" + increment);
config.setDriverClassName("com.mysql.jdbc.Driver");
increment();
config.setAutoCommit(true);
config.setMaximumPoolSize(8);
- config.setLeakDetectionThreshold(TimeAmount.MINUTE.ms() * 10L);
+ config.setMaxLifetime(25L * TimeAmount.MINUTE.ms());
+ config.setLeakDetectionThreshold(10L * TimeAmount.MINUTE.ms());
this.dataSource = new HikariDataSource(config);

BungeeInfoSystem.java

@@ -8,6 +8,8 @@ import com.djrapitops.plan.api.exceptions.ParseException;
import com.djrapitops.plan.api.exceptions.connection.NoServersException;
import com.djrapitops.plan.api.exceptions.connection.WebException;
import com.djrapitops.plan.api.exceptions.connection.WebFailException;
import com.djrapitops.plan.data.store.containers.NetworkContainer;
import com.djrapitops.plan.system.cache.CacheSystem;
import com.djrapitops.plan.system.info.connection.BungeeConnectionSystem;
import com.djrapitops.plan.system.info.request.CacheRequest;
import com.djrapitops.plan.system.info.request.GenerateInspectPageRequest;
@@ -43,7 +45,8 @@ public class BungeeInfoSystem extends InfoSystem {
@Override
public void updateNetworkPage() throws WebException {
try {
- String html = new NetworkPage().toHtml();
+ NetworkContainer networkContainer = CacheSystem.getInstance().getDataContainerCache().getNetworkContainer();
+ String html = new NetworkPage(networkContainer).toHtml();
ResponseCache.cacheResponse(PageId.SERVER.of(ServerInfo.getServerUUID()), () -> new AnalysisPageResponse(html));
} catch (ParseException e) {
throw new WebFailException("Exception during Network Page Parsing", e);

NetworkPage.java

@@ -7,7 +7,6 @@ package com.djrapitops.plan.system.webserver.pages.parsing;
import com.djrapitops.plan.api.exceptions.ParseException;
import com.djrapitops.plan.data.store.containers.NetworkContainer;
import com.djrapitops.plan.data.store.mutators.formatting.PlaceholderReplacer;
- import com.djrapitops.plan.system.database.databases.Database;
import com.djrapitops.plan.system.webserver.response.cache.PageId;
import com.djrapitops.plan.system.webserver.response.cache.ResponseCache;
import com.djrapitops.plan.system.webserver.response.pages.parts.NetworkPageContent;
@@ -22,12 +21,15 @@ import static com.djrapitops.plan.data.store.keys.NetworkKeys.*;
*/
public class NetworkPage implements Page {
private final NetworkContainer networkContainer;
public NetworkPage(NetworkContainer networkContainer) {
this.networkContainer = networkContainer;
}
@Override
public String toHtml() throws ParseException {
try {
- Database database = Database.getActive();
- NetworkContainer networkContainer = database.fetch().getNetworkContainer();
PlaceholderReplacer placeholderReplacer = new PlaceholderReplacer();
placeholderReplacer.addAllPlaceholdersFrom(networkContainer,
VERSION, NETWORK_NAME, TIME_ZONE,