mBuffers;
+ private final AtomicBoolean mIsCoalesced = new AtomicBoolean(false);
+
  /** Returns a new {@link Builder} used to accumulate streamed response buffers. */
  public static Builder builder() {
    return new Builder();
  }
+
+ /**
+ * Use this class during a request, to combine streamed buffers of a response into a single final
+ * buffer.
+ *
+ * For example: {@code @Override public void onResponseStarted(UrlRequest request,
+ * UrlResponseInfo info) { request.read(builder.getFirstBuffer(info)); } @Override public void
+ * onReadCompleted(UrlRequest request, UrlResponseInfo info, ByteBuffer buffer) {
+ * request.read(builder.getNextBuffer(buffer)); } }
+ */
+ public static final class Builder {
+ private ArrayDeque mBuffers = new ArrayDeque<>();
+ private RuntimeException whenClosed;
+
+ private Builder() {}
+
+ /** Returns the next buffer to write data into. */
+ public ByteBuffer getNextBuffer(ByteBuffer lastBuffer) {
+ if (mBuffers == null) {
+ throw new RuntimeException(whenClosed);
+ }
+ if (lastBuffer != mBuffers.peekLast()) {
+ mBuffers.addLast(lastBuffer);
+ }
+ if (lastBuffer.hasRemaining()) {
+ return lastBuffer;
+ } else {
+ return ByteBuffer.allocateDirect(8096);
+ }
+ }
+
+ /** Returns a ByteBuffer heuristically sized to hold the whole response body. */
+ public ByteBuffer getFirstBuffer(UrlResponseInfo info) {
+ // Security note - a malicious server could attempt to exhaust client memory by sending
+ // down a Content-Length of a very large size, which we would eagerly allocate without
+ // the server having to actually send those bytes. This isn't considered to be an
+ // issue, because that same malicious server could use our transparent gzip to force us
+ // to allocate 1032 bytes per byte sent by the server.
+ return ByteBuffer.allocateDirect((int) Math.min(bufferSizeHeuristic(info), 524288));
+ }
+
+ private static long bufferSizeHeuristic(UrlResponseInfo info) {
+ final Map> headers = info.getAllHeaders();
+ if (headers.containsKey(CONTENT_LENGTH)) {
+ long contentLength = Long.parseLong(headers.get(CONTENT_LENGTH).get(0));
+ boolean isCompressed =
+ !headers.containsKey(CONTENT_ENCODING)
+ || (headers.get(CONTENT_ENCODING).size() == 1
+ && "identity".equals(headers.get(CONTENT_ENCODING).get(0)));
+ if (isCompressed) {
+ // We have to guess at the uncompressed size. In the future, consider guessing a
+ // compression ratio based on the content-type and content-encoding. For now,
+ // assume 2.
+ return 2 * contentLength;
+ } else {
+ // In this case, we know exactly how many bytes we're going to get, so we can
+ // size our buffer perfectly. However, we still have to call read() for the last time,
+ // even when we know there shouldn't be any more bytes coming. To avoid allocating another
+ // buffer for that case, add one more byte than we really need.
+ return contentLength + 1;
+ }
+ } else {
+ // No content-length. This means we're either being sent a chunked response, or the
+ // java stack stripped content length because of transparent gzip. In either case we really
+ // have no idea, and so we fall back to a reasonable guess.
+ return 8192;
+ }
+ }
+
+ public BufferQueue build() {
+ whenClosed = new RuntimeException();
+ final ArrayDeque buffers = mBuffers;
+ mBuffers = null;
+ return new BufferQueue(buffers);
+ }
+ }
+
+ private BufferQueue(Queue buffers) {
+ mBuffers = buffers;
+ for (ByteBuffer buffer : mBuffers) {
+ buffer.flip();
+ }
+ }
+
+ /** Returns the response body as a single contiguous buffer. */
+ public ByteBuffer coalesceToBuffer() {
+ markCoalesced();
+ if (mBuffers.size() == 0) {
+ return ByteBuffer.allocateDirect(0);
+ } else if (mBuffers.size() == 1) {
+ return mBuffers.remove();
+ } else {
+ int size = 0;
+ for (ByteBuffer buffer : mBuffers) {
+ size += buffer.remaining();
+ }
+ ByteBuffer result = ByteBuffer.allocateDirect(size);
+ while (!mBuffers.isEmpty()) {
+ result.put(mBuffers.remove());
+ }
+ result.flip();
+ return result;
+ }
+ }
+
+ private void markCoalesced() {
+ if (!mIsCoalesced.compareAndSet(false, true)) {
+ throw new IllegalStateException("This BufferQueue has already been consumed");
+ }
+ }
+}
diff --git a/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ByteBufferParser.java b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ByteBufferParser.java
new file mode 100644
index 0000000000..fef4319a03
--- /dev/null
+++ b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ByteBufferParser.java
@@ -0,0 +1,15 @@
+package com.bumptech.glide.integration.cronet;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Parses a {@link java.nio.ByteBuffer} to a particular data type.
+ *
+ * @param The type of data to parse the buffer to.
+ */
+interface ByteBufferParser {
+ /** Returns the required type of data parsed from the given {@link ByteBuffer}. */
+ T parse(ByteBuffer byteBuffer);
+ /** Returns the {@link Class} of the data that will be parsed from {@link ByteBuffer}s. */
+ Class getDataClass();
+}
diff --git a/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ChromiumRequestSerializer.java b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ChromiumRequestSerializer.java
new file mode 100644
index 0000000000..455935e328
--- /dev/null
+++ b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ChromiumRequestSerializer.java
@@ -0,0 +1,389 @@
+package com.bumptech.glide.integration.cronet;
+
+import android.util.Log;
+import androidx.annotation.Nullable;
+import com.bumptech.glide.Priority;
+import com.bumptech.glide.load.HttpException;
+import com.bumptech.glide.load.engine.executor.GlideExecutor;
+import com.bumptech.glide.load.engine.executor.GlideExecutor.UncaughtThrowableStrategy;
+import com.bumptech.glide.load.model.GlideUrl;
+import com.google.common.base.Supplier;
+import com.google.common.base.Suppliers;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Executor;
+import org.chromium.net.CronetException;
+import org.chromium.net.UrlRequest;
+import org.chromium.net.UrlRequest.Callback;
+import org.chromium.net.UrlResponseInfo;
+
+/**
+ * Ensures that two simultaneous requests for exactly the same url make only a single http request.
+ *
+ * Requests are started by Glide on multiple threads in a thread pool. An arbitrary number of
+ * threads may attempt to start or cancel requests for one or more urls at once. Our goal is to
+ * ensure:
+ *
+ *
+ *
+ * A new request is made to cronet if a url is requested and no cronet request for that url is
+ * in progress
+ *
+ * Subsequent requests for in progress urls do not make new requests to cronet, but are
+ * notified when the existing cronet request completes.
+ *
+ * Cancelling a single request does not cancel the cronet request if multiple requests for
+ * the url have been made, but cancelling all requests for a url does cancel the cronet
+ * request.
+ */
+final class ChromiumRequestSerializer {
+ private static final String TAG = "ChromiumSerializer";
+
+ private static final Map GLIDE_TO_CHROMIUM_PRIORITY =
+ new EnumMap<>(Priority.class);
+ // Memoized so that all callers can share an instance.
+ // Suppliers.memoize() is thread safe. See google3/java/com/google/common/base/Suppliers.java
+ private static final Supplier glideExecutorSupplier =
+ Suppliers.memoize(
+ new Supplier() {
+ @Override
+ public GlideExecutor get() {
+ // Allow network operations, but use a single thread. See b/37684357.
+ return GlideExecutor.newSourceExecutor(
+ 1 /*threadCount*/, "chromium-serializer", UncaughtThrowableStrategy.DEFAULT);
+ }
+ });
+
+ private abstract static class PriorityRunnable implements Runnable, Comparable {
+
+ private final int priority;
+
+ private PriorityRunnable(Priority priority) {
+ this.priority = priority.ordinal();
+ }
+
+ @Override
+ public final int compareTo(PriorityRunnable another) {
+ if (another.priority > this.priority) {
+ return -1;
+ } else if (another.priority < this.priority) {
+ return 1;
+ }
+ return 0;
+ }
+ }
+
+ static {
+ GLIDE_TO_CHROMIUM_PRIORITY.put(Priority.IMMEDIATE, UrlRequest.Builder.REQUEST_PRIORITY_HIGHEST);
+ GLIDE_TO_CHROMIUM_PRIORITY.put(Priority.HIGH, UrlRequest.Builder.REQUEST_PRIORITY_MEDIUM);
+ GLIDE_TO_CHROMIUM_PRIORITY.put(Priority.NORMAL, UrlRequest.Builder.REQUEST_PRIORITY_LOW);
+ GLIDE_TO_CHROMIUM_PRIORITY.put(Priority.LOW, UrlRequest.Builder.REQUEST_PRIORITY_LOWEST);
+ }
+
+ private final JobPool jobPool = new JobPool();
+ private final Map jobs = new HashMap<>();
+ private final CronetRequestFactory requestFactory;
+ @Nullable private final DataLogger dataLogger;
+
+ ChromiumRequestSerializer(CronetRequestFactory requestFactory, @Nullable DataLogger dataLogger) {
+ this.requestFactory = requestFactory;
+ this.dataLogger = dataLogger;
+ }
+
+ void startRequest(Priority priority, GlideUrl glideUrl, Listener listener) {
+ boolean startNewRequest = false;
+ Job job;
+ synchronized (this) {
+ job = jobs.get(glideUrl);
+ if (job == null) {
+ startNewRequest = true;
+ job = jobPool.get(glideUrl);
+ jobs.put(glideUrl, job);
+ }
+ job.addListener(listener);
+ }
+
+ if (startNewRequest) {
+ if (Log.isLoggable(TAG, Log.VERBOSE)) {
+ Log.v(TAG, "Fetching image url using cronet" + " url: " + glideUrl);
+ }
+ job.priority = priority;
+ job.request =
+ requestFactory
+ .newRequest(
+ glideUrl.toStringUrl(),
+ GLIDE_TO_CHROMIUM_PRIORITY.get(priority),
+ glideUrl.getHeaders(),
+ job)
+ .build();
+ job.request.start();
+
+ // It's possible we will be cancelled between adding the job to the job list and starting the
+ // corresponding request. We don't want to hold a lock while starting the request, because
+ // starting the request may block for a while and we need cancellation to happen quickly (it
+ // happens on the main thread).
+ if (job.isCancelled) {
+ job.request.cancel();
+ }
+ }
+ }
+
+ void cancelRequest(GlideUrl glideUrl, Listener listener) {
+ final Job job;
+ synchronized (this) {
+ job = jobs.get(glideUrl);
+ }
+ // Jobs may be cancelled before they are started.
+ if (job != null) {
+ job.removeListener(listener);
+ }
+ }
+
+ private static IOException getExceptionIfFailed(
+ UrlResponseInfo info, IOException e, boolean wasCancelled) {
+ if (wasCancelled) {
+ return null;
+ } else if (e != null) {
+ return e;
+ } else if (info.getHttpStatusCode() != HttpURLConnection.HTTP_OK) {
+ return new HttpException(info.getHttpStatusCode());
+ }
+ return null;
+ }
+
+ /**
+ * Manages a single cronet request for a single url with one or more active listeners.
+ *
+ * Cronet requests are cancelled when all listeners are removed.
+ */
+ private class Job extends Callback {
+ private final List listeners = new ArrayList<>(2);
+
+ private GlideUrl glideUrl;
+ private Priority priority;
+ private long startTime;
+ private UrlRequest request;
+ private long endTimeMs;
+ private long responseStartTimeMs;
+ private volatile boolean isCancelled;
+ private BufferQueue.Builder builder;
+
+ void init(GlideUrl glideUrl) {
+ startTime = System.currentTimeMillis();
+ this.glideUrl = glideUrl;
+ }
+
+ void addListener(Listener listener) {
+ synchronized (ChromiumRequestSerializer.this) {
+ listeners.add(listener);
+ }
+ }
+
+ void removeListener(Listener listener) {
+ synchronized (ChromiumRequestSerializer.this) {
+ // Note: multiple cancellation calls + a subsequent request for a url may mean we fail to
+ // remove the listener here because that listener is actually for a previous request. Since
+ // that race is harmless, we simply ignore it.
+ listeners.remove(listener);
+ if (listeners.isEmpty()) {
+ isCancelled = true;
+ jobs.remove(glideUrl);
+ }
+ }
+
+ // The request may not have started yet, so request may be null.
+ if (isCancelled) {
+ UrlRequest localRequest = request;
+ if (localRequest != null) {
+ localRequest.cancel();
+ }
+ }
+ }
+
+ @Override
+ public void onRedirectReceived(UrlRequest urlRequest, UrlResponseInfo urlResponseInfo, String s)
+ throws Exception {
+ urlRequest.followRedirect();
+ }
+
+ @Override
+ public void onResponseStarted(UrlRequest request, UrlResponseInfo info) {
+ responseStartTimeMs = System.currentTimeMillis();
+ builder = BufferQueue.builder();
+ request.read(builder.getFirstBuffer(info));
+ }
+
+ @Override
+ public void onReadCompleted(
+ UrlRequest urlRequest, UrlResponseInfo urlResponseInfo, ByteBuffer byteBuffer)
+ throws Exception {
+ request.read(builder.getNextBuffer(byteBuffer));
+ }
+
+ @Override
+ public void onSucceeded(UrlRequest request, final UrlResponseInfo info) {
+ glideExecutorSupplier
+ .get()
+ .execute(
+ new PriorityRunnable(priority) {
+ @Override
+ public void run() {
+ onRequestFinished(
+ info,
+ null /*exception*/,
+ false /*wasCancelled*/,
+ builder.build().coalesceToBuffer());
+ }
+ });
+ }
+
+ @Override
+ public void onFailed(
+ UrlRequest urlRequest, final UrlResponseInfo urlResponseInfo, final CronetException e) {
+ glideExecutorSupplier
+ .get()
+ .execute(
+ new PriorityRunnable(priority) {
+ @Override
+ public void run() {
+ onRequestFinished(urlResponseInfo, e, false /*wasCancelled*/, null /*buffer*/);
+ }
+ });
+ }
+
+ @Override
+ public void onCanceled(UrlRequest urlRequest, @Nullable final UrlResponseInfo urlResponseInfo) {
+ glideExecutorSupplier
+ .get()
+ .execute(
+ new PriorityRunnable(priority) {
+ @Override
+ public void run() {
+ onRequestFinished(
+ urlResponseInfo, null /*exception*/, true /*wasCancelled*/, null /*buffer*/);
+ }
+ });
+ }
+
+ private void onRequestFinished(
+ UrlResponseInfo info,
+ @Nullable CronetException e,
+ boolean wasCancelled,
+ ByteBuffer buffer) {
+ synchronized (ChromiumRequestSerializer.this) {
+ jobs.remove(glideUrl);
+ }
+
+ Exception exception = getExceptionIfFailed(info, e, wasCancelled);
+ boolean isSuccess = exception == null && !wasCancelled;
+
+ endTimeMs = System.currentTimeMillis();
+
+ maybeLogResult(isSuccess, exception, wasCancelled, buffer);
+ if (isSuccess) {
+ notifySuccess(buffer);
+ } else {
+ notifyFailure(exception);
+ }
+
+ if (dataLogger != null) {
+ dataLogger.logNetworkData(info, startTime, responseStartTimeMs, endTimeMs);
+ }
+ builder = null;
+
+ jobPool.put(this);
+ }
+
+ private void notifySuccess(ByteBuffer buffer) {
+ ByteBuffer toNotify = buffer;
+ /* Locking here isn't necessary and is potentially dangerous. There's an optimization in
+ * Glide that avoids re-posting results if the callback onRequestComplete triggers is called
+ * on the calling thread. If that were ever to happen here (the request is cached in memory?),
+ * this might block all requests for a while. Locking isn't necessary because the Job is
+ * removed from the serializer's job set at the beginning of onRequestFinished. After that
+ * point, whatever thread we're on is the only one that has access to the Job. Subsequent
+ * requests for the same image would trigger an additional RPC/Job. */
+ for (int i = 0, size = listeners.size(); i < size; i++) {
+ Listener listener = listeners.get(i);
+ listener.onRequestComplete(toNotify);
+ toNotify = (ByteBuffer) toNotify.asReadOnlyBuffer().position(0);
+ }
+ }
+
+ private void notifyFailure(Exception exception) {
+ /* Locking here isn't necessary and is potentially dangerous. There's an optimization in
+ * Glide that avoids re-posting results if the callback onRequestComplete triggers is called
+ * on the calling thread. If that were ever to happen here (the request is cached in memory?),
+ * this might block all requests for a while. Locking isn't necessary because the Job is
+ * removed from the serializer's job set at the beginning of onRequestFinished. After that
+ * point, whatever thread we're on is the only one that has access to the Job. Subsequent
+ * requests for the same image would trigger an additional RPC/Job. */
+ for (int i = 0, size = listeners.size(); i < size; i++) {
+ Listener listener = listeners.get(i);
+ listener.onRequestFailed(exception);
+ }
+ }
+
+ private void maybeLogResult(
+ boolean isSuccess, Exception exception, boolean wasCancelled, ByteBuffer buffer) {
+ if (isSuccess && Log.isLoggable(TAG, Log.VERBOSE)) {
+ Log.v(
+ TAG,
+ "Successfully completed request"
+ + ", url: "
+ + glideUrl
+ + ", duration: "
+ + (System.currentTimeMillis() - startTime)
+ + ", file size: "
+ + (buffer.limit() / 1024)
+ + "kb");
+ } else if (!isSuccess && Log.isLoggable(TAG, Log.ERROR) && !wasCancelled) {
+ Log.e(TAG, "Request failed", exception);
+ }
+ }
+
+ private void clearListeners() {
+ synchronized (ChromiumRequestSerializer.this) {
+ listeners.clear();
+ request = null;
+ isCancelled = false;
+ }
+ }
+ }
+
+ private class JobPool {
+ private static final int MAX_POOL_SIZE = 50;
+ private final ArrayDeque pool = new ArrayDeque<>();
+
+ public synchronized Job get(GlideUrl glideUrl) {
+ Job job = pool.poll();
+ if (job == null) {
+ job = new Job();
+ }
+ job.init(glideUrl);
+ return job;
+ }
+
+ public void put(Job job) {
+ job.clearListeners();
+ synchronized (this) {
+ if (pool.size() < MAX_POOL_SIZE) {
+ pool.offer(job);
+ }
+ }
+ }
+ }
+
+ interface Listener {
+ void onRequestComplete(ByteBuffer byteBuffer);
+
+ void onRequestFailed(@Nullable Exception e);
+ }
+}
diff --git a/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ChromiumUrlFetcher.java b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ChromiumUrlFetcher.java
new file mode 100644
index 0000000000..7c4154e43b
--- /dev/null
+++ b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ChromiumUrlFetcher.java
@@ -0,0 +1,61 @@
+package com.bumptech.glide.integration.cronet;
+
+import androidx.annotation.Nullable;
+import com.bumptech.glide.Priority;
+import com.bumptech.glide.load.DataSource;
+import com.bumptech.glide.load.data.DataFetcher;
+import com.bumptech.glide.load.model.GlideUrl;
+import java.nio.ByteBuffer;
+
+/** An {@link DataFetcher} for fetching {@link GlideUrl} using cronet. */
+final class ChromiumUrlFetcher implements DataFetcher, ChromiumRequestSerializer.Listener {
+
+ private final ChromiumRequestSerializer serializer;
+ private final ByteBufferParser parser;
+ private final GlideUrl url;
+
+ private DataCallback super T> callback;
+
+ public ChromiumUrlFetcher(
+ ChromiumRequestSerializer serializer, ByteBufferParser parser, GlideUrl url) {
+ this.serializer = serializer;
+ this.parser = parser;
+ this.url = url;
+ }
+
+ @Override
+ public void loadData(Priority priority, DataCallback super T> callback) {
+ this.callback = callback;
+ serializer.startRequest(priority, url, this);
+ }
+
+ @Override
+ public void cleanup() {
+ // Nothing to cleanup.
+ }
+
+ @Override
+ public void cancel() {
+ serializer.cancelRequest(url, this);
+ }
+
+ @Override
+ public Class getDataClass() {
+ return parser.getDataClass();
+ }
+
+ @Override
+ public DataSource getDataSource() {
+ return DataSource.REMOTE;
+ }
+
+ @Override
+ public void onRequestComplete(ByteBuffer byteBuffer) {
+ callback.onDataReady(parser.parse(byteBuffer));
+ }
+
+ @Override
+ public void onRequestFailed(@Nullable Exception e) {
+ callback.onLoadFailed(e);
+ }
+}
diff --git a/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ChromiumUrlLoader.java b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ChromiumUrlLoader.java
new file mode 100644
index 0000000000..acae780028
--- /dev/null
+++ b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/ChromiumUrlLoader.java
@@ -0,0 +1,109 @@
+package com.bumptech.glide.integration.cronet;
+
+import androidx.annotation.Nullable;
+import com.bumptech.glide.load.Options;
+import com.bumptech.glide.load.data.DataFetcher;
+import com.bumptech.glide.load.model.GlideUrl;
+import com.bumptech.glide.load.model.ModelLoader;
+import com.bumptech.glide.load.model.ModelLoaderFactory;
+import com.bumptech.glide.load.model.MultiModelLoaderFactory;
+import com.bumptech.glide.util.ByteBufferUtil;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+
+/**
+ * An {@link com.bumptech.glide.load.model.ModelLoader} for loading urls using cronet.
+ *
+ * @param The type of data this loader will load.
+ */
+public final class ChromiumUrlLoader implements ModelLoader {
+ private final ChromiumRequestSerializer requestSerializer;
+ private final ByteBufferParser parser;
+
+ ChromiumUrlLoader(CronetRequestFactory requestFactory, ByteBufferParser parser) {
+ this(parser, requestFactory, null /*dataLogger*/);
+ }
+
+ ChromiumUrlLoader(
+ ByteBufferParser parser,
+ CronetRequestFactory requestFactory,
+ @Nullable DataLogger dataLogger) {
+ this.parser = parser;
+ requestSerializer = new ChromiumRequestSerializer(requestFactory, dataLogger);
+ }
+
+ @Override
+ public LoadData buildLoadData(GlideUrl glideUrl, int width, int height, Options options) {
+ DataFetcher fetcher = new ChromiumUrlFetcher<>(requestSerializer, parser, glideUrl);
+ return new LoadData<>(glideUrl, fetcher);
+ }
+
+ @Override
+ public boolean handles(GlideUrl glideUrl) {
+ return true;
+ }
+
+ /** Loads {@link InputStream}s for {@link GlideUrl}s using cronet. */
+ public static final class StreamFactory
+ implements ModelLoaderFactory, ByteBufferParser {
+
+ private CronetRequestFactory requestFactory;
+ @Nullable private final DataLogger dataLogger;
+
+ public StreamFactory(CronetRequestFactory requestFactory, @Nullable DataLogger dataLogger) {
+ this.requestFactory = requestFactory;
+ this.dataLogger = dataLogger;
+ }
+
+ @Override
+ public ModelLoader build(MultiModelLoaderFactory multiFactory) {
+ return new ChromiumUrlLoader<>(this /*parser*/, requestFactory, dataLogger);
+ }
+
+ @Override
+ public void teardown() {}
+
+ @Override
+ public InputStream parse(ByteBuffer byteBuffer) {
+ return ByteBufferUtil.toStream(byteBuffer);
+ }
+
+ @Override
+ public Class getDataClass() {
+ return InputStream.class;
+ }
+ }
+
+ /** Loads {@link ByteBuffer}s for {@link GlideUrl}s using cronet. */
+ public static final class ByteBufferFactory
+ implements ModelLoaderFactory, ByteBufferParser {
+
+ private CronetRequestFactory requestFactory;
+ @Nullable private final DataLogger dataLogger;
+
+ public ByteBufferFactory(CronetRequestFactory requestFactory, @Nullable DataLogger dataLogger) {
+ this.requestFactory = requestFactory;
+ this.dataLogger = dataLogger;
+ }
+
+ @Override
+ public ModelLoader build(MultiModelLoaderFactory multiFactory) {
+ return new ChromiumUrlLoader<>(this /*parser*/, requestFactory, dataLogger);
+ }
+
+ @Override
+ public void teardown() {
+ // Do nothing.
+ }
+
+ @Override
+ public ByteBuffer parse(ByteBuffer byteBuffer) {
+ return byteBuffer;
+ }
+
+ @Override
+ public Class getDataClass() {
+ return ByteBuffer.class;
+ }
+ }
+}
diff --git a/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/CronetRequestFactory.java b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/CronetRequestFactory.java
new file mode 100644
index 0000000000..b4d2f227f9
--- /dev/null
+++ b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/CronetRequestFactory.java
@@ -0,0 +1,11 @@
+package com.bumptech.glide.integration.cronet;
+
+import java.util.Map;
+import org.chromium.net.UrlRequest;
+
+/** Factory to build custom cronet requests. */
+public interface CronetRequestFactory {
+
+ UrlRequest.Builder newRequest(
+ String url, int requestPriority, Map headers, UrlRequest.Callback listener);
+}
diff --git a/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/CronetRequestFactoryImpl.java b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/CronetRequestFactoryImpl.java
new file mode 100644
index 0000000000..a2bebc2c0c
--- /dev/null
+++ b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/CronetRequestFactoryImpl.java
@@ -0,0 +1,44 @@
+package com.bumptech.glide.integration.cronet;
+
+import com.google.common.base.Supplier;
+import java.util.Map;
+import java.util.concurrent.Executor;
+import org.chromium.net.CronetEngine;
+import org.chromium.net.UrlRequest;
+
+/** Default implementation for building cronet requests. */
+public final class CronetRequestFactoryImpl implements CronetRequestFactory {
+
+ private final Supplier cronetEngineGetter;
+
+ public CronetRequestFactoryImpl(Supplier cronetEngineGetter) {
+ this.cronetEngineGetter = cronetEngineGetter;
+ }
+
+ @Override
+ public UrlRequest.Builder newRequest(
+ String url, int requestPriority, Map headers, UrlRequest.Callback listener) {
+ CronetEngine engine = cronetEngineGetter.get();
+ UrlRequest.Builder builder =
+ engine.newUrlRequestBuilder(
+ url,
+ listener,
+ new Executor() {
+ @Override
+ public void execute(Runnable runnable) {
+ runnable.run();
+ }
+ });
+ builder.allowDirectExecutor();
+ builder.setPriority(requestPriority);
+ for (Map.Entry header : headers.entrySet()) {
+ // Cronet owns the Accept-Encoding header and user agent
+ String key = header.getKey();
+ if ("Accept-Encoding".equalsIgnoreCase(key) || "User-Agent".equalsIgnoreCase(key)) {
+ continue;
+ }
+ builder.addHeader(key, header.getValue());
+ }
+ return builder;
+ }
+}
diff --git a/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/DataLogger.java b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/DataLogger.java
new file mode 100644
index 0000000000..6af58b78ad
--- /dev/null
+++ b/integration/cronet/src/main/java/com/bumptech/glide/integration/cronet/DataLogger.java
@@ -0,0 +1,23 @@
+package com.bumptech.glide.integration.cronet;
+
+import androidx.annotation.Nullable;
+import org.chromium.net.UrlResponseInfo;
+
/** An interface for logging information about network loads performed by this integration. */
public interface DataLogger {

  /**
   * Logs the related network information for one completed request.
   *
   * @param httpUrlRequest Response info for the request. May be {@code null} if the request was
   *     cancelled before a response was received.
   * @param startTimeMs Timestamp (ms) that the request started.
   * @param responseStartTimeMs Timestamp (ms) when the first header byte was received.
   * @param endTimeMs Timestamp (ms) that the request ended.
   */
  void logNetworkData(
      @Nullable UrlResponseInfo httpUrlRequest,
      long startTimeMs,
      long responseStartTimeMs,
      long endTimeMs);
}
diff --git a/integration/cronet/src/test/AndroidManifest.xml b/integration/cronet/src/test/AndroidManifest.xml
new file mode 100644
index 0000000000..0f9d016ef7
--- /dev/null
+++ b/integration/cronet/src/test/AndroidManifest.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
diff --git a/integration/cronet/src/test/java/com/bumptech/glide/integration/cronet/ChromiumUrlFetcherTest.java b/integration/cronet/src/test/java/com/bumptech/glide/integration/cronet/ChromiumUrlFetcherTest.java
new file mode 100644
index 0000000000..090f8d6a85
--- /dev/null
+++ b/integration/cronet/src/test/java/com/bumptech/glide/integration/cronet/ChromiumUrlFetcherTest.java
@@ -0,0 +1,328 @@
+package com.bumptech.glide.integration.cronet;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyMap;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.isA;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.isNull;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.timeout;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import androidx.annotation.NonNull;
+import com.bumptech.glide.Priority;
+import com.bumptech.glide.load.HttpException;
+import com.bumptech.glide.load.data.DataFetcher.DataCallback;
+import com.bumptech.glide.load.model.GlideUrl;
+import com.bumptech.glide.load.model.LazyHeaders;
+import com.bumptech.glide.load.model.LazyHeaders.Builder;
+import com.google.common.collect.ImmutableList;
+import com.google.common.util.concurrent.MoreExecutors;
+import java.net.HttpURLConnection;
+import java.nio.ByteBuffer;
+import java.util.AbstractMap.SimpleImmutableEntry;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.Executor;
+import org.chromium.net.CronetEngine;
+import org.chromium.net.CronetException;
+import org.chromium.net.UrlRequest;
+import org.chromium.net.UrlRequest.Callback;
+import org.chromium.net.UrlResponseInfo;
+import org.chromium.net.impl.UrlResponseInfoImpl;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Matchers;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+import org.robolectric.RobolectricTestRunner;
+
+/** Tests for {@link ChromiumUrlFetcher}. */
+@RunWith(RobolectricTestRunner.class)
+public class ChromiumUrlFetcherTest {
+  // NOTE(review): the generic type arguments below were lost in the patch (angle brackets
+  // stripped, e.g. "ImmutableList.>of"); they are restored here from surrounding usage.
+  @Mock private DataCallback<ByteBuffer> callback;
+  @Mock private CronetEngine cronetEngine;
+  @Mock private UrlRequest request;
+  @Mock private UrlRequest.Builder mockUrlRequestBuilder;
+  @Mock private ByteBufferParser<ByteBuffer> parser;
+  @Mock private CronetRequestFactory cronetRequestFactory;
+
+  private UrlRequest.Builder builder;
+  private GlideUrl glideUrl;
+  private ChromiumUrlFetcher<ByteBuffer> fetcher;
+  private ChromiumRequestSerializer serializer;
+  private ArgumentCaptor<UrlRequest.Callback> urlRequestListenerCaptor;
+
+  @Before
+  public void setUp() {
+    MockitoAnnotations.initMocks(this);
+    when(parser.getDataClass()).thenReturn(ByteBuffer.class);
+    // Identity parse: hand the raw buffer straight back so tests can assert on its contents.
+    when(parser.parse(any(ByteBuffer.class)))
+        .thenAnswer(
+            new Answer<ByteBuffer>() {
+              @Override
+              public ByteBuffer answer(InvocationOnMock invocation) throws Throwable {
+                return (ByteBuffer) invocation.getArguments()[0];
+              }
+            });
+    when(cronetEngine.newUrlRequestBuilder(
+            anyString(), any(UrlRequest.Callback.class), any(Executor.class)))
+        .thenReturn(mockUrlRequestBuilder);
+    when(mockUrlRequestBuilder.build()).thenReturn(request);
+
+    glideUrl = new GlideUrl("http://www.google.com");
+
+    urlRequestListenerCaptor = ArgumentCaptor.forClass(UrlRequest.Callback.class);
+    serializer = new ChromiumRequestSerializer(cronetRequestFactory, null /*dataLogger*/);
+    fetcher = new ChromiumUrlFetcher<>(serializer, parser, glideUrl);
+    builder =
+        cronetEngine.newUrlRequestBuilder(
+            glideUrl.toStringUrl(),
+            mock(UrlRequest.Callback.class),
+            MoreExecutors.directExecutor());
+    when(cronetRequestFactory.newRequest(
+            anyString(), anyInt(), anyHeaders(), urlRequestListenerCaptor.capture()))
+        .thenReturn(builder);
+    when(builder.build()).thenReturn(request);
+  }
+
+  @Test
+  public void testLoadData_createsAndStartsRequest() {
+    when(cronetRequestFactory.newRequest(
+            eq(glideUrl.toStringUrl()),
+            eq(UrlRequest.Builder.REQUEST_PRIORITY_LOWEST),
+            anyHeaders(),
+            any(UrlRequest.Callback.class)))
+        .thenReturn(builder);
+
+    fetcher.loadData(Priority.LOW, callback);
+
+    verify(request).start();
+  }
+
+  @Test
+  public void testLoadData_providesHeadersFromGlideUrl() {
+    LazyHeaders.Builder headersBuilder = new Builder();
+    headersBuilder.addHeader("key", "value");
+    LazyHeaders headers = headersBuilder.build();
+
+    glideUrl = new GlideUrl("http://www.google.com", headers);
+    fetcher = new ChromiumUrlFetcher<>(serializer, parser, glideUrl);
+    fetcher.loadData(Priority.LOW, callback);
+
+    verify(cronetRequestFactory)
+        .newRequest(
+            Matchers.eq(glideUrl.toStringUrl()),
+            anyInt(),
+            Matchers.eq(headers.getHeaders()),
+            any(UrlRequest.Callback.class));
+
+    verify(request).start();
+  }
+
+  @Test
+  public void testLoadData_withInProgressRequest_doesNotStartNewRequest() {
+    ChromiumUrlFetcher<ByteBuffer> firstFetcher =
+        new ChromiumUrlFetcher<>(serializer, parser, glideUrl);
+    ChromiumUrlFetcher<ByteBuffer> secondFetcher =
+        new ChromiumUrlFetcher<>(serializer, parser, glideUrl);
+
+    firstFetcher.loadData(Priority.LOW, callback);
+    secondFetcher.loadData(Priority.HIGH, callback);
+
+    // Identical URLs must be coalesced into a single Cronet request.
+    verify(cronetRequestFactory, times(1))
+        .newRequest(
+            Matchers.eq(glideUrl.toStringUrl()),
+            anyInt(),
+            anyMap(),
+            any(UrlRequest.Callback.class));
+  }
+
+  @Test
+  public void testLoadData_withInProgressRequest_isNotifiedWhenRequestCompletes() throws Exception {
+    ChromiumUrlFetcher<ByteBuffer> firstFetcher =
+        new ChromiumUrlFetcher<>(serializer, parser, glideUrl);
+    ChromiumUrlFetcher<ByteBuffer> secondFetcher =
+        new ChromiumUrlFetcher<>(serializer, parser, glideUrl);
+
+    DataCallback<ByteBuffer> firstCb = mock(DataCallback.class);
+    DataCallback<ByteBuffer> secondCb = mock(DataCallback.class);
+    firstFetcher.loadData(Priority.LOW, firstCb);
+    secondFetcher.loadData(Priority.HIGH, secondCb);
+
+    succeed(getInfo(10, 200), urlRequestListenerCaptor.getValue(), ByteBuffer.allocateDirect(10));
+
+    verify(firstCb, timeout(1000)).onDataReady(isA(ByteBuffer.class));
+    verify(secondCb, timeout(1000)).onDataReady(isA(ByteBuffer.class));
+  }
+
+  /** Builds a response-info stub carrying only a Content-Length header and a status code. */
+  @NonNull
+  private UrlResponseInfo getInfo(int contentLength, int statusCode) {
+    return new UrlResponseInfoImpl(
+        ImmutableList.of(glideUrl.toStringUrl()),
+        statusCode,
+        "OK",
+        ImmutableList.<Entry<String, String>>of(
+            new SimpleImmutableEntry<>("Content-Length", Integer.toString(contentLength))),
+        false,
+        "",
+        "");
+  }
+
+  @Test
+  public void testCancel_withMultipleInProgressRequests_doesNotCancelChromiumRequest() {
+    ChromiumUrlFetcher<ByteBuffer> firstFetcher =
+        new ChromiumUrlFetcher<>(serializer, parser, glideUrl);
+    ChromiumUrlFetcher<ByteBuffer> secondFetcher =
+        new ChromiumUrlFetcher<>(serializer, parser, glideUrl);
+
+    firstFetcher.loadData(Priority.LOW, callback);
+    secondFetcher.loadData(Priority.HIGH, callback);
+
+    firstFetcher.cancel();
+
+    verify(request, never()).cancel();
+  }
+
+  @Test
+  public void testCancel_afterCancellingAllInProgressRequests_cancelsChromiumRequest() {
+    ChromiumUrlFetcher<ByteBuffer> firstFetcher =
+        new ChromiumUrlFetcher<>(serializer, parser, glideUrl);
+    ChromiumUrlFetcher<ByteBuffer> secondFetcher =
+        new ChromiumUrlFetcher<>(serializer, parser, glideUrl);
+
+    firstFetcher.loadData(Priority.LOW, callback);
+    secondFetcher.loadData(Priority.HIGH, callback);
+
+    firstFetcher.cancel();
+    secondFetcher.cancel();
+
+    verify(request).cancel();
+  }
+
+  @Test
+  public void testCancel_withNoStartedRequest_doesNothing() {
+    fetcher.cancel();
+  }
+
+  @Test
+  public void testCancel_withStartedRequest_cancelsRequest() {
+    fetcher.loadData(Priority.LOW, callback);
+
+    fetcher.cancel();
+
+    verify(request).cancel();
+  }
+
+  @Test
+  public void testRequestComplete_withNonNullException_callsCallbackWithException() {
+    CronetException expected = new CronetException("test", /*cause=*/ null) {};
+    fetcher.loadData(Priority.LOW, callback);
+    urlRequestListenerCaptor.getValue().onFailed(request, null, expected);
+
+    verify(callback, timeout(1000)).onLoadFailed(eq(expected));
+  }
+
+  @Test
+  public void testRequestComplete_withNon200StatusCode_callsCallbackWithException()
+      throws Exception {
+    UrlResponseInfo info = getInfo(0, HttpURLConnection.HTTP_INTERNAL_ERROR);
+    fetcher.loadData(Priority.LOW, callback);
+    UrlRequest.Callback urlCallback = urlRequestListenerCaptor.getValue();
+    succeed(info, urlCallback, ByteBuffer.allocateDirect(0));
+    ArgumentCaptor<HttpException> captor = ArgumentCaptor.forClass(HttpException.class);
+    verify(callback, timeout(1000)).onLoadFailed(captor.capture());
+    assertThat(captor.getValue())
+        .hasMessageThat()
+        .isEqualTo("Http request failed with status code: 500");
+  }
+
+  /** Drives the captured Cronet callback through a full successful response lifecycle. */
+  private void succeed(UrlResponseInfo info, Callback urlCallback, ByteBuffer byteBuffer)
+      throws Exception {
+    byteBuffer.position(byteBuffer.limit());
+    urlCallback.onResponseStarted(request, info);
+    urlCallback.onReadCompleted(request, info, byteBuffer);
+    urlCallback.onSucceeded(request, info);
+  }
+
+  @Test
+  public void testRequestComplete_withUnauthorizedStatusCode_callsCallbackWithAuthError()
+      throws Exception {
+    UrlResponseInfo info = getInfo(0, HttpURLConnection.HTTP_FORBIDDEN);
+    fetcher.loadData(Priority.LOW, callback);
+    UrlRequest.Callback urlCallback = urlRequestListenerCaptor.getValue();
+    succeed(info, urlCallback, ByteBuffer.allocateDirect(0));
+
+    verifyAuthError();
+  }
+
+  @Test
+  public void testRequestComplete_whenCancelledAndUnauthorized_callsCallbackWithNullError()
+      throws Exception {
+    UrlResponseInfo info = getInfo(0, HttpURLConnection.HTTP_FORBIDDEN);
+    fetcher.loadData(Priority.HIGH, callback);
+    Callback urlCallback = urlRequestListenerCaptor.getValue();
+    urlCallback.onResponseStarted(request, info);
+    urlCallback.onCanceled(request, info);
+
+    verify(callback, timeout(1000)).onLoadFailed(isNull(Exception.class));
+  }
+
+  private void verifyAuthError() {
+    ArgumentCaptor<Exception> exceptionArgumentCaptor = ArgumentCaptor.forClass(Exception.class);
+    verify(callback, timeout(1000)).onLoadFailed(exceptionArgumentCaptor.capture());
+    HttpException exception = (HttpException) exceptionArgumentCaptor.getValue();
+    assertThat(exception.getStatusCode()).isEqualTo(HttpURLConnection.HTTP_FORBIDDEN);
+  }
+
+  @Test
+  public void testRequestComplete_with200AndCancelled_callsCallbackWithNullException()
+      throws Exception {
+    UrlResponseInfo info = getInfo(0, 200);
+    fetcher.loadData(Priority.LOW, callback);
+    Callback urlCallback = urlRequestListenerCaptor.getValue();
+    urlCallback.onResponseStarted(request, info);
+    urlCallback.onCanceled(request, info);
+
+    verify(callback, timeout(1000)).onLoadFailed(isNull(Exception.class));
+  }
+
+  @Test
+  public void testRequestComplete_with200NotCancelledMatchingLength_callsCallbackWithValidData()
+      throws Exception {
+    String data = "data";
+    ByteBuffer expected = ByteBuffer.wrap(data.getBytes());
+    ArgumentCaptor<ByteBuffer> captor = ArgumentCaptor.forClass(ByteBuffer.class);
+
+    fetcher.loadData(Priority.LOW, callback);
+    succeed(
+        getInfo(expected.remaining(), 200),
+        urlRequestListenerCaptor.getValue(),
+        expected.duplicate());
+
+    verify(callback, timeout(1000)).onDataReady(captor.capture());
+
+    ByteBuffer received = captor.getValue();
+
+    assertThat(
+            new String(
+                received.array(),
+                received.arrayOffset() + received.position(),
+                received.remaining()))
+        .isEqualTo(data);
+  }
+
+  private static Map<String, String> anyHeaders() {
+    return anyMap();
+  }
+}