You can extend Java's LinkedHashMap and override removeEldestEntry to implement an LRU cache. One detail matters: LinkedHashMap keeps entries in insertion order by default, so you need to pass accessOrder = true to the constructor; otherwise get() does not refresh an entry's position and you end up with a FIFO cache rather than an LRU one.
import java.util.LinkedHashMap;
import java.util.Map;

public class SimpleLru<K, V> extends LinkedHashMap<K, V> {

    final int cacheSize;

    public SimpleLru(int cacheSize) {
        // accessOrder = true: get() moves an entry to the most-recently-used
        // position, which is what makes eviction least-recently-used
        super(cacheSize, 0.75f, true);
        this.cacheSize = cacheSize;
    }
    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        // Called by LinkedHashMap after each put; returning true evicts the eldest entry
        return this.size() > this.cacheSize;
    }
    }
    public static void main(String[] args) {
        SimpleLru<String, Integer> m = new SimpleLru<>(2); // Max 2
        m.put("k1", 1); // k1:1
        m.put("k2", 2); // k1:1, k2:2
        m.put("k3", 3); // k2:2, k3:3
    }
}
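With accessOrder = true, reading an entry also refreshes it. A minimal sketch (reusing the SimpleLru class above) of how a get changes which key gets evicted:

SimpleLru<String, Integer> m = new SimpleLru<>(2); // Max 2
m.put("k1", 1); // k1:1
m.put("k2", 2); // k1:1, k2:2
m.get("k1");    // touch k1, so k2 becomes the eldest entry
m.put("k3", 3); // k1:1, k3:3 -- k2 was evicted instead of k1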
If you want a thread-safe version, you can wrap the same map behind a mutex:
import java.util.LinkedHashMap;
import java.util.Map;

public class ConcurrentLru<K, V> {

    final Object mutex = new Object();
    final Map<K, V> cache;

    public ConcurrentLru(final int cacheSize) {
        // Same access-ordered LinkedHashMap as above, guarded by a single mutex
        this.cache = new LinkedHashMap<K, V>(cacheSize, 0.75f, true) {
            @Override
            protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
                return this.size() > cacheSize;
            }
        };
    }
    public void put(K k, V v) {
        synchronized (this.mutex) { this.cache.put(k, v); }
    }
    public boolean contains(K k) {
        synchronized (this.mutex) { return this.cache.containsKey(k); }
    }
    public V remove(K k) {
        synchronized (this.mutex) { return this.cache.remove(k); }
    }
    public V get(K k) {
        synchronized (this.mutex) { return this.cache.get(k); }
    }
}
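A minimal usage sketch (the key/value types and sizes here are just illustrative):

ConcurrentLru<String, Integer> cache = new ConcurrentLru<>(100);
cache.put("k1", 1);
if (cache.contains("k1")) {
    Integer v = cache.get("k1"); // also marks k1 as recently used
}
cache.remove("k1");

Since every operation takes the same mutex, all access is serialized; that is usually fine for a small cache, but it is coarse-grained locking rather than true concurrency.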