Skip to content
Open
4 changes: 4 additions & 0 deletions modules/cells/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,10 @@
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
</dependency>

</dependencies>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,7 @@

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
Expand Down Expand Up @@ -329,7 +327,7 @@ public void messageArrived(CellMessage msg) {
.map(CellDomainInfo::getCellDomainName)
.forEach(domain -> domains.put(domain, new ArrayList<>()));
queueRoutes.asMap().forEach(
(domain, cells) -> domains.put(domain, Lists.newArrayList(cells)));
(domain, cells) -> domains.put(domain, new ArrayList<>(cells)));
Comment thread
LukasMansour marked this conversation as resolved.
}
msg.revertDirection();
msg.setMessageObject(new GetAllDomainsReply(domains));
Expand Down Expand Up @@ -504,9 +502,9 @@ private Optional<CellTunnelInfo> getTunnelInfo(CellAddressCore tunnel) {
public synchronized Object ac_ls_$_0(Args args) {
return new Object[]{
getCellDomainName(),
Sets.newHashSet(localConsumers.values()),
new HashSet<>(localConsumers.values()),
Comment thread
LukasMansour marked this conversation as resolved.
queueRoutes.asMap().entrySet().stream().collect(
toMap(Map.Entry::getKey, e -> Sets.newHashSet(e.getValue())))
toMap(Map.Entry::getKey, e -> new HashSet<>(e.getValue())))
};
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@
*/
package dmg.cells.zookeeper;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.github.benmanes.caffeine.cache.CacheLoader;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
import dmg.cells.nucleus.CDC;
import java.util.Collection;
import java.util.List;
Expand Down Expand Up @@ -115,9 +115,9 @@ public class CellCuratorFramework implements CuratorFramework {
private final BoundedExecutor executor;

private final LoadingCache<Watcher, Watcher> watchers =
CacheBuilder.newBuilder().build(new CacheLoader<Watcher, Watcher>() {
Caffeine.newBuilder().build(new CacheLoader<>() {
@Override
public Watcher load(Watcher watcher) throws Exception {
public Watcher load(Watcher watcher) {
CDC cdc = new CDC();
return event -> executor.execute(() -> {
try (CDC ignore = cdc.restore()) {
Expand All @@ -128,9 +128,9 @@ public Watcher load(Watcher watcher) throws Exception {
});

private final LoadingCache<CuratorWatcher, CuratorWatcher> curatorWatchers =
CacheBuilder.newBuilder().build(new CacheLoader<CuratorWatcher, CuratorWatcher>() {
Caffeine.newBuilder().build(new CacheLoader<>() {
@Override
public CuratorWatcher load(CuratorWatcher watcher) throws Exception {
public CuratorWatcher load(CuratorWatcher watcher) {
CDC cdc = new CDC();
return event -> executor.execute(() -> {
try (CDC ignore = cdc.restore()) {
Expand Down Expand Up @@ -175,11 +175,11 @@ protected static BackgroundCallback wrap(BackgroundCallback callback) {
}

/**
 * Wraps a ZooKeeper {@link Watcher} so its callback runs on the cell
 * executor with the capturing CDC restored. The wrapper is obtained from a
 * loading cache, so the same watcher instance always maps to the same
 * wrapper instance.
 */
protected Watcher wrap(Watcher watcher) {
    // Caffeine's LoadingCache.get replaces Guava's getUnchecked; the loader
    // here cannot fail, so no exception translation is needed.
    return watchers.get(watcher);
}

/**
 * Wraps a {@link CuratorWatcher} so its callback runs on the cell executor
 * with the capturing CDC restored. The wrapper is obtained from a loading
 * cache, so the same watcher instance always maps to the same wrapper
 * instance.
 */
protected CuratorWatcher wrap(CuratorWatcher watcher) {
    // Caffeine's LoadingCache.get replaces Guava's getUnchecked; the loader
    // here cannot fail, so no exception translation is needed.
    return curatorWatchers.get(watcher);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,23 +4,24 @@
import static java.util.Objects.requireNonNull;

import ch.qos.logback.classic.Level;
import com.github.benmanes.caffeine.cache.CacheLoader;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.collect.Table;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.CompletionException;
import java.util.function.Function;
import org.slf4j.Logger;

/**
Expand All @@ -41,7 +42,7 @@ public class FilterThresholdSet {

private final FilterThresholdSet _parent;

private final Set<String> _appenders = Sets.newHashSet();
private final Set<String> _appenders = new HashSet<>();

private final Set<LoggerName> _roots = new HashSet<>();

Expand All @@ -50,26 +51,25 @@ public class FilterThresholdSet {

/* Logger -> (Appender -> Level) */
private final LoadingCache<String, Map<String, Level>> _effectiveMaps =
CacheBuilder.newBuilder().build(CacheLoader.from(
Caffeine.newBuilder().build(new FunctionToCacheLoader<>(
logger -> computeEffectiveMap(LoggerName.getInstance(logger))));

/* Logger -> Level */
private final LoadingCache<Logger, Optional<Level>> _effectiveLevels =
CacheBuilder.newBuilder().build(CacheLoader.from(
Caffeine.newBuilder().build(new FunctionToCacheLoader<>(
logger -> {
try {
Map<String, Level> map = _effectiveMaps.get(logger.getName());
return map.isEmpty()
? Optional.empty()
: Optional.of(Collections.min(map.values(), LEVEL_ORDER));
} catch (ExecutionException e) {
} catch (CompletionException e) {
Throwables.throwIfUnchecked(e.getCause());
throw new RuntimeException(e.getCause());
}
}));

private static final Comparator<Level> LEVEL_ORDER =
(o1, o2) -> Integer.compare(o1.toInt(), o2.toInt());
private static final Comparator<Level> LEVEL_ORDER = Comparator.comparingInt(Level::toInt);

public FilterThresholdSet() {
this(null);
Expand All @@ -93,7 +93,7 @@ public synchronized void addAppender(String name) {
*/
public synchronized Collection<String> getAppenders() {
if (_parent == null) {
return Lists.newArrayList(_appenders);
return new ArrayList<>(_appenders);
Comment thread
LukasMansour marked this conversation as resolved.
} else {
Collection<String> appenders = _parent.getAppenders();
appenders.addAll(_appenders);
Expand Down Expand Up @@ -182,7 +182,7 @@ private void clearCache() {
*/
public synchronized Map<String, Level> getInheritedMap(LoggerName logger) {
if (_parent == null) {
return Maps.newHashMap(_rules.row(logger));
return new HashMap<>(_rules.row(logger));
} else {
Map<String, Level> map = _parent.getInheritedMap(logger);
map.putAll(_rules.row(logger));
Expand Down Expand Up @@ -222,7 +222,7 @@ public Level getThreshold(LoggerName logger, String appender) {
public Level getThreshold(String logger, String appender) {
try {
return _effectiveMaps.get(logger).get(appender);
} catch (ExecutionException e) {
} catch (CompletionException e) {
Throwables.throwIfUnchecked(e.getCause());
throw new RuntimeException(e.getCause());
}
Expand All @@ -234,9 +234,24 @@ public Level getThreshold(String logger, String appender) {
/**
 * Returns the effective log threshold for the given logger, or {@code null}
 * if no threshold applies.
 */
public Level getThreshold(Logger logger) {
    try {
        return _effectiveLevels.get(logger).orElse(null);
    } catch (CompletionException e) {
        // Caffeine wraps checked loader failures in CompletionException.
        // Guard against a null cause before unwrapping; throwIfUnchecked
        // would otherwise throw NullPointerException and mask the failure.
        Throwable cause = e.getCause();
        if (cause == null) {
            throw e;
        }
        Throwables.throwIfUnchecked(cause);
        throw new RuntimeException(cause);
    }
}

private static final class FunctionToCacheLoader<K, V> implements
CacheLoader<K, V>, Serializable {

private final Function<K, V> computingFunction;
private static final long serialVersionUID = 0L;

public FunctionToCacheLoader(Function<K, V> computingFunction) {
this.computingFunction = requireNonNull(computingFunction);
}

public V load(K key) {
return this.computingFunction.apply(requireNonNull(key));
}
}
}
4 changes: 4 additions & 0 deletions modules/chimera/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,10 @@
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
</dependency>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
Expand Down
68 changes: 33 additions & 35 deletions modules/chimera/src/main/java/org/dcache/chimera/JdbcFs.java
Original file line number Diff line number Diff line change
Expand Up @@ -26,11 +26,11 @@
import static org.dcache.util.ByteUnit.EiB;
import static org.dcache.util.SqlHelper.tryToClose;

import com.github.benmanes.caffeine.cache.AsyncLoadingCache;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.CacheLoader;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.google.common.base.Throwables;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.io.ByteSource;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import diskCacheV111.util.RetentionPolicy;
Expand All @@ -45,7 +45,8 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
Expand Down Expand Up @@ -127,30 +128,23 @@ public class JdbcFs implements FileSystemProvider, LeaderLatchListener {
.build()
);

private final LoadingCache<Object, FsStat> _fsStatCache
= CacheBuilder.newBuilder()
private final AsyncLoadingCache<Object, FsStat> _fsStatCache
= Caffeine.newBuilder()
.refreshAfterWrite(100, TimeUnit.MILLISECONDS)
.build(
CacheLoader.asyncReloading(new CacheLoader<Object, FsStat>() {

@Override
public FsStat load(Object k) throws Exception {
return JdbcFs.this.getFsStat0();
}
}
, _fsStatUpdateExecutor));
.executor(_fsStatUpdateExecutor)
.buildAsync(key -> JdbcFs.this.getFsStat0());

/* The PNFS ID to inode number mapping will never change while dCache is running.
*/
protected final Cache<String, Long> _inoCache =
CacheBuilder.newBuilder()
Caffeine.newBuilder()
.maximumSize(100000)
.build();

/* The inode number to PNFS ID mapping will never change while dCache is running.
*/
protected final Cache<Long, String> _idCache =
CacheBuilder.newBuilder()
Caffeine.newBuilder()
.maximumSize(100000)
.build();

Expand Down Expand Up @@ -725,36 +719,40 @@ public FsInode path2inode(String path, FsInode startFrom) throws ChimeraFsExcept
@Override
public String inode2id(FsInode inode) throws ChimeraFsException {
try {
return _idCache.get(inode.ino(), () -> {
return _idCache.get(inode.ino(), (key) -> {
String id = _sqlDriver.getId(inode);
if (id == null) {
throw FileNotFoundChimeraFsException.of(inode);
throw new CompletionException(FileNotFoundChimeraFsException.of(inode));
}
return id;
});
} catch (ExecutionException e) {
Throwables.throwIfInstanceOf(e.getCause(), ChimeraFsException.class);
Throwables.throwIfInstanceOf(e.getCause(), DataAccessException.class);
Throwables.throwIfUnchecked(e.getCause());
throw new RuntimeException(e.getCause());
} catch (CompletionException e) {
if (e.getCause() != null) {
Comment thread
LukasMansour marked this conversation as resolved.
Outdated
Throwables.throwIfInstanceOf(e.getCause(), ChimeraFsException.class);
Throwables.throwIfInstanceOf(e.getCause(), DataAccessException.class);
Throwables.throwIfUnchecked(e.getCause());
}
throw e;
}
}

@Override
public FsInode id2inode(String id, StatCacheOption option) throws ChimeraFsException {
if (option == NO_STAT) {
try {
return new FsInode(this, _inoCache.get(id, () -> {
return new FsInode(this, _inoCache.get(id, (key) -> {
Long ino = _sqlDriver.getInumber(id);
if (ino == null) {
throw FileNotFoundChimeraFsException.ofPnfsId(id);
throw new CompletionException(FileNotFoundChimeraFsException.ofPnfsId(id));
}
return ino;
}));
} catch (ExecutionException e) {
Throwables.throwIfInstanceOf(e.getCause(), ChimeraFsException.class);
Throwables.throwIfInstanceOf(e.getCause(), DataAccessException.class);
Throwables.throwIfUnchecked(e.getCause());
} catch (CompletionException e) {
if (e.getCause() != null) {
Comment thread
LukasMansour marked this conversation as resolved.
Outdated
Throwables.throwIfInstanceOf(e.getCause(), ChimeraFsException.class);
Throwables.throwIfInstanceOf(e.getCause(), DataAccessException.class);
Throwables.throwIfUnchecked(e.getCause());
}
throw new RuntimeException(e.getCause());
}
} else {
Expand Down Expand Up @@ -1403,19 +1401,19 @@ private static void checkNameLength(String name) throws InvalidNameChimeraExcept
}

@Override
public void updateFsStat() throws ChimeraFsException {
public void updateFsStat() {
_sqlDriver.updateFsStat();
}

public FsStat getFsStat0() throws ChimeraFsException {
public FsStat getFsStat0() {
return _sqlDriver.getFsStat();
}

@Override
public FsStat getFsStat() throws ChimeraFsException {
try {
return _fsStatCache.get(DUMMY_KEY);
} catch (ExecutionException e) {
return _fsStatCache.synchronous().get(DUMMY_KEY);
} catch (CompletionException e) {
Throwable t = e.getCause();
Throwables.propagateIfPossible(t, ChimeraFsException.class);
throw new ChimeraFsException(t.getMessage(), t);
Expand Down
Loading