
Commit cca6406

Remove unused or commented code

1 parent 297b5a9 commit cca6406

1 file changed (+1, -42 lines)

1 file changed

+1
-42
lines changed

firebase-firestore/src/main/java/com/google/firebase/firestore/local/SQLiteRemoteDocumentCache.java

Lines changed: 1 addition & 42 deletions
@@ -32,8 +32,6 @@
 import com.google.firebase.firestore.model.MutableDocument;
 import com.google.firebase.firestore.model.ResourcePath;
 import com.google.firebase.firestore.model.SnapshotVersion;
-import com.google.firebase.firestore.util.BackgroundQueue;
-import com.google.firebase.firestore.util.Executors;
 import com.google.firebase.firestore.util.Function;
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.MessageLite;
@@ -47,7 +45,6 @@
 import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.Executor;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
@@ -170,15 +167,9 @@ public Map<DocumentKey, MutableDocument> getAll(Iterable<DocumentKey> documentKe
             bindVars,
             ") ORDER BY path");
 
-    // BackgroundQueue backgroundQueue = new BackgroundQueue();
     while (longQuery.hasMoreSubqueries()) {
-      longQuery
-          .performNextSubquery()
-          // .forEach(row -> processRowInBackground(backgroundQueue, results, row, /*filter*/
-          // null));
-          .forEach(row -> processRow(results, row, /*filter*/ null));
+      longQuery.performNextSubquery().forEach(row -> processRow(results, row, /*filter*/ null));
     }
-    // backgroundQueue.drain();
 
     // Backfill any rows with null "document_type" discovered by processRowInBackground().
     documentTypeBackfiller.backfill(db);
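
Aside: the longQuery helper in the hunk above splits a getAll() over many document keys into several "... WHERE path IN (?, ?, ...)" subqueries, presumably so that no single SQLite statement exceeds the per-statement bind-argument limit, and merges the rows into one result map. The sketch below only illustrates that chunking idea; ChunkedInQuery, the batch size, and the table/column names are assumptions for illustration, not the SDK's LongQuery API.

import java.util.ArrayList;
import java.util.List;

final class ChunkedInQuery {
  // Assumed ceiling; SQLite builds commonly cap bind arguments per statement (often 999).
  private static final int MAX_BIND_ARGS = 900;

  // Splits the keys into chunks so each "IN (...)" list stays under the cap.
  static List<List<String>> chunk(List<String> paths) {
    List<List<String>> chunks = new ArrayList<>();
    for (int i = 0; i < paths.size(); i += MAX_BIND_ARGS) {
      chunks.add(paths.subList(i, Math.min(i + MAX_BIND_ARGS, paths.size())));
    }
    return chunks;
  }

  // Builds one subquery with a "?" placeholder per key in the chunk.
  static String sqlFor(int keyCount) {
    StringBuilder sql =
        new StringBuilder("SELECT contents, path FROM remote_documents WHERE path IN (");
    for (int i = 0; i < keyCount; i++) {
      sql.append(i == 0 ? "?" : ", ?");
    }
    return sql.append(") ORDER BY path").toString();
  }
}

Each chunk would then be bound and executed in turn, mirroring the hasMoreSubqueries()/performNextSubquery() loop above.
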
@@ -268,19 +259,16 @@ private Map<DocumentKey, MutableDocument> getAll(
     }
     bindVars[i] = count;
 
-    // BackgroundQueue backgroundQueue = new BackgroundQueue();
     Map<DocumentKey, MutableDocument> results = new HashMap<>();
     db.query(sql.toString())
         .binding(bindVars)
         .forEach(
             row -> {
-              // processRowInBackground(backgroundQueue, results, row, filter);
               processRow(results, row, filter);
               if (context != null) {
                 context.incrementDocumentReadCount();
               }
             });
-    // backgroundQueue.drain();
 
     // Backfill any null "document_type" columns discovered by processRowInBackground().
     documentTypeBackfiller.backfill(db);
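
The retained "Backfill any null document_type columns" comments describe a defer-then-write pattern: rows whose document_type is missing are remembered while the cursor is being read (the removed code did this via documentTypeBackfiller.enqueue(path, readTimeSeconds, readTimeNanos, document)) and are patched in one pass afterwards via documentTypeBackfiller.backfill(db). Below is a rough, self-contained sketch of that idea in plain JDBC; NullColumnBackfiller, its fields, and the UPDATE statement are assumptions for illustration, not the SDK's DocumentTypeBackfiller.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

final class NullColumnBackfiller {
  private static final class Entry {
    final String path;
    final int documentType;

    Entry(String path, int documentType) {
      this.path = path;
      this.documentType = documentType;
    }
  }

  private final List<Entry> pending = new ArrayList<>();

  // Called while scanning rows: defer the write so the read cursor is not disturbed.
  void enqueue(String path, int documentType) {
    pending.add(new Entry(path, documentType));
  }

  // Called after the scan: apply all deferred updates in one batch.
  void backfill(Connection db) throws SQLException {
    try (PreparedStatement stmt =
        db.prepareStatement("UPDATE remote_documents SET document_type = ? WHERE path = ?")) {
      for (Entry entry : pending) {
        stmt.setInt(1, entry.documentType);
        stmt.setString(2, entry.path);
        stmt.addBatch();
      }
      stmt.executeBatch();
    }
    pending.clear();
  }
}
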
@@ -300,35 +288,6 @@ private Map<DocumentKey, MutableDocument> getAll(
         collections, offset, count, /*tryFilterDocumentType*/ null, filter, /*context*/ null);
   }
 
-  private void processRowInBackground(
-      BackgroundQueue backgroundQueue,
-      Map<DocumentKey, MutableDocument> results,
-      Cursor row,
-      @Nullable Function<MutableDocument, Boolean> filter) {
-    byte[] rawDocument = row.getBlob(0);
-    int readTimeSeconds = row.getInt(1);
-    int readTimeNanos = row.getInt(2);
-    boolean documentTypeIsNull = row.isNull(3);
-    String path = row.getString(4);
-
-    // Since scheduling background tasks incurs overhead, we only dispatch to a
-    // background thread if there are still some documents remaining.
-    Executor executor = row.isLast() ? Executors.DIRECT_EXECUTOR : backgroundQueue;
-    executor.execute(
-        () -> {
-          MutableDocument document =
-              decodeMaybeDocument(rawDocument, readTimeSeconds, readTimeNanos);
-          if (documentTypeIsNull) {
-            documentTypeBackfiller.enqueue(path, readTimeSeconds, readTimeNanos, document);
-          }
-          if (filter == null || filter.apply(document)) {
-            synchronized (results) {
-              results.put(document.getKey(), document);
-            }
-          }
-        });
-  }
-
   private void processRow(
       Map<DocumentKey, MutableDocument> results,
       Cursor row,
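
The removed processRowInBackground() is a compact fan-out/join pattern: the expensive per-row proto decode is handed to a background queue, the last row is decoded inline because scheduling it would only add overhead once nothing else remains, writes to the shared results map are synchronized, and the caller drains the queue before the map is returned. Below is a standalone sketch of that pattern using java.util.concurrent; the class name, thread-pool size, and decode/key helpers are placeholders, not the SDK's BackgroundQueue or Executors.DIRECT_EXECUTOR.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

final class BackgroundRowProcessing {
  static Map<String, String> decodeAll(List<byte[]> rawRows) throws InterruptedException {
    Map<String, String> results = new HashMap<>();
    ExecutorService backgroundQueue = Executors.newFixedThreadPool(4);

    for (int i = 0; i < rawRows.size(); i++) {
      byte[] raw = rawRows.get(i);
      boolean isLastRow = i == rawRows.size() - 1;
      Runnable task =
          () -> {
            String document = decode(raw); // stand-in for the real proto decode
            synchronized (results) { // several worker threads write to the same map
              results.put(keyOf(document), document);
            }
          };
      if (isLastRow) {
        task.run(); // run the final row directly; dispatching it would only add overhead
      } else {
        backgroundQueue.execute(task);
      }
    }

    // "Drain" the queue: wait for every scheduled decode before handing back the map.
    backgroundQueue.shutdown();
    backgroundQueue.awaitTermination(1, TimeUnit.MINUTES);
    return results;
  }

  private static String decode(byte[] raw) {
    return new String(raw); // placeholder for decodeMaybeDocument(...)
  }

  private static String keyOf(String document) {
    return document; // placeholder for document.getKey()
  }
}
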
