6 changes: 3 additions & 3 deletions davinci/src/main/java/cn/hadcn/davinci/base/ImageLoader.java
@@ -54,11 +54,11 @@ public class ImageLoader {
* that we can coalesce multiple requests to the same URL into a single network request.
*/
private final HashMap<String, BatchedImageRequest> mInFlightRequests =
-            new HashMap<String, BatchedImageRequest>();
+            new HashMap<>();

/** HashMap of the currently pending responses (waiting to be delivered). */
private final HashMap<String, BatchedImageRequest> mBatchedResponses =
-            new HashMap<String, BatchedImageRequest>();
+            new HashMap<>();

/** Handler to the main thread. */
private final Handler mHandler = new Handler(Looper.getMainLooper());
@@ -367,7 +367,7 @@ private class BatchedImageRequest {
private VolleyError mError;

/** List of all of the active ImageContainers that are interested in the request */
-        private final LinkedList<ImageContainer> mContainers = new LinkedList<ImageContainer>();
+        private final LinkedList<ImageContainer> mContainers = new LinkedList<>();

/**
* Constructs a new BatchedImageRequest object
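Note on the pattern in this PR: every hunk replaces an explicit generic instantiation with the Java 7 diamond operator, so the compiler infers the type arguments from the declared type on the left-hand side. A minimal standalone sketch of the before/after (the field names here are illustrative, not taken from ImageLoader):

import java.util.HashMap;
import java.util.Map;

class DiamondOperatorSketch {
    // Pre-Java-7 style: type arguments spelled out on both sides.
    private final Map<String, Integer> before = new HashMap<String, Integer>();

    // Java 7 diamond operator: <String, Integer> is inferred from the field type.
    private final Map<String, Integer> after = new HashMap<>();
}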
@@ -52,7 +52,7 @@ public PersistentCookieStore(Context context) {
}

private void loadAllFromPersistence() {
-        allCookies = new HashMap<URI, Set<HttpCookie>>();
+        allCookies = new HashMap<>();

Map<String, ?> allPairs = sharedPreferences.getAll();
for (Map.Entry<String, ?> entry : allPairs.entrySet()) {
@@ -66,7 +66,7 @@ private void loadAllFromPersistence() {

Set<HttpCookie> targetCookies = allCookies.get(uri);
if (targetCookies == null) {
-                targetCookies = new HashSet<HttpCookie>();
+                targetCookies = new HashSet<>();
allCookies.put(uri, targetCookies);
}
// Repeated cookies cannot exist in persistence
@@ -84,7 +84,7 @@ public synchronized void add(URI uri, HttpCookie cookie) {

Set<HttpCookie> targetCookies = allCookies.get(uri);
if (targetCookies == null) {
-            targetCookies = new HashSet<HttpCookie>();
+            targetCookies = new HashSet<>();
allCookies.put(uri, targetCookies);
}
targetCookies.remove(cookie);
@@ -136,7 +136,7 @@ public synchronized List<HttpCookie> get(URI uri) {

@Override
public synchronized List<HttpCookie> getCookies() {
-        List<HttpCookie> allValidCookies = new ArrayList<HttpCookie>();
+        List<HttpCookie> allValidCookies = new ArrayList<>();
for (URI storedUri : allCookies.keySet()) {
allValidCookies.addAll(getValidCookies(storedUri));
}
@@ -145,7 +145,7 @@ public synchronized List<HttpCookie> getCookies() {
}

private List<HttpCookie> getValidCookies(URI uri) {
-        List<HttpCookie> targetCookies = new ArrayList<HttpCookie>();
+        List<HttpCookie> targetCookies = new ArrayList<>();
// If the stored URI does not have a path then it must match any URI in
// the same domain
for (URI storedUri : allCookies.keySet()) {
@@ -160,7 +160,7 @@ private List<HttpCookie> getValidCookies(URI uri) {

// Check if there are expired cookies and remove them
if (!targetCookies.isEmpty()) {
-            List<HttpCookie> cookiesToRemoveFromPersistence = new ArrayList<HttpCookie>();
+            List<HttpCookie> cookiesToRemoveFromPersistence = new ArrayList<>();
for (Iterator<HttpCookie> it = targetCookies.iterator(); it
.hasNext(); ) {
HttpCookie currentCookie = it.next();
@@ -230,7 +230,7 @@ private void removeFromPersistence(URI uri, List<HttpCookie> cookiesToRemove) {

@Override
public synchronized List<URI> getURIs() {
-        return new ArrayList<URI>(allCookies.keySet());
+        return new ArrayList<>(allCookies.keySet());
}

@Override
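Side note on the structure these hunks touch: the store indexes cookies as a HashMap<URI, Set<HttpCookie>> and lazily creates the per-URI set before inserting. A self-contained sketch of that shape is below; the computeIfAbsent variant is only an illustrative Java 8+ alternative, not something this PR changes:

import java.net.HttpCookie;
import java.net.URI;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class CookieIndexSketch {
    private final Map<URI, Set<HttpCookie>> allCookies = new HashMap<>();

    // Same lazy-initialization shape as in the diff above.
    void add(URI uri, HttpCookie cookie) {
        Set<HttpCookie> targetCookies = allCookies.get(uri);
        if (targetCookies == null) {
            targetCookies = new HashSet<>();
            allCookies.put(uri, targetCookies);
        }
        targetCookies.remove(cookie); // drop any equal cookie so the new one wins
        targetCookies.add(cookie);
    }

    // Equivalent on Java 8+, shown only for comparison.
    void addWithComputeIfAbsent(URI uri, HttpCookie cookie) {
        Set<HttpCookie> targetCookies = allCookies.computeIfAbsent(uri, k -> new HashSet<>());
        targetCookies.remove(cookie);
        targetCookies.add(cookie);
    }
}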
@@ -149,7 +149,7 @@ public final class DiskLruCache implements Closeable {
private long size = 0;
private Writer journalWriter;
private final LinkedHashMap<String, Entry> lruEntries =
-            new LinkedHashMap<String, Entry>(0, 0.75f, true);
+            new LinkedHashMap<>(0, 0.75f, true);
private int redundantOpCount;

/**
@@ -634,7 +634,7 @@ public synchronized void close() throws IOException {
if (journalWriter == null) {
return; // Already closed.
}
-        for (Entry entry : new ArrayList<Entry>(lruEntries.values())) {
+        for (Entry entry : new ArrayList<>(lruEntries.values())) {
if (entry.currentEditor != null) {
entry.currentEditor.abort();
}
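The LinkedHashMap constructor in this hunk is the three-argument form (initialCapacity, loadFactor, accessOrder); passing true for accessOrder makes iteration order track access recency, which is what lets DiskLruCache (and LruCache below) use the map as an LRU index. A small sketch of that behaviour:

import java.util.LinkedHashMap;
import java.util.Map;

class AccessOrderSketch {
    public static void main(String[] args) {
        // accessOrder = true: get() moves an entry to the end (most recently used).
        Map<String, String> lru = new LinkedHashMap<>(0, 0.75f, true);
        lru.put("a", "1");
        lru.put("b", "2");
        lru.put("c", "3");
        lru.get("a"); // "a" becomes the most recently used entry
        System.out.println(lru.keySet()); // prints [b, c, a]
    }
}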
4 changes: 2 additions & 2 deletions davinci/src/main/java/cn/hadcn/davinci/image/LruCache.java
@@ -44,7 +44,7 @@ public LruCache(int maxSize) {
throw new IllegalArgumentException("maxSize <= 0");
}
this.maxSize = maxSize;
-        this.map = new LinkedHashMap<K, V>(0, 0.75f, true);
+        this.map = new LinkedHashMap<>(0, 0.75f, true);
}
/**
* Returns the value for {@code key} if it exists in the cache or can be
@@ -215,7 +215,7 @@ public synchronized final int evictionCount() {
* recently accessed to most recently accessed.
*/
public synchronized final Map<K, V> snapshot() {
-        return new LinkedHashMap<K, V>(map);
+        return new LinkedHashMap<>(map);
}
@Override public synchronized final String toString() {
int accesses = hitCount + missCount;
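snapshot() copies the backing map with the LinkedHashMap copy constructor, so the diamond operator here is inferred from the Map<K, V> return type; the copy preserves the least-to-most-recently-accessed ordering but shares the value objects. A hypothetical usage sketch, assuming this LruCache mirrors the standard android.util.LruCache API (put/get as usual):

import java.util.Map;
import cn.hadcn.davinci.image.LruCache;

class SnapshotUsageSketch {
    public static void main(String[] args) {
        LruCache<String, String> cache = new LruCache<>(4); // maxSize of 4 with the default sizeOf()
        cache.put("k1", "v1");
        cache.put("k2", "v2");
        cache.get("k1"); // touching "k1" moves it toward the most-recently-used end

        // snapshot() returns an independent copy; mutating it does not affect the cache.
        for (Map.Entry<String, String> entry : cache.snapshot().entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}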