Migrate components/update_client/url_fetcher_downloader.cc

URLFetcher will stop working with advent of Network Service, and
SimpleURLLoader is the replacement API for most clients.
This CL migrates UrlFetcherDownloader away from URLFetcher.

Note that UrlFetcherDownloader makes use of URLFetcher's "progress
update" hook, in order to track fetch progresses. It turns out the
progress values were not being used in production at all:

UrlFetcherDownloader::OnURLFetchDownloadProgress used to instantiate
a CrxDownloader::Result object, and pass it all the way to
Component::StateDownloadingDiff::DownloadComplete and
Component::StateDownloading::DownloadComplete. There, |result|
parameter was being ignored in both methods.

Given SimpleURLLoader's lack of ability to hook to a progress update
callback at the time of doing this migration, and that the progress values
were not being used in production code anyway, this CL replaces
URLFetcher's progress update hook with SimpleURLLoader's "response started"
hook, to indicate that the load has been triggered, and update
Component's status accordingly.

Hence, references to |CrxDownloader::Result::downloaded_bytes| and
|CrxDownloader::Result::total_bytes| were removed from CrxDownloaderTest.*
(components/update_client/crx_downloader_unittest.cc).

On the other hand, references to |CrxDownloader::DownloadMetrics::downloaded_bytes|
and |CrxDownloader::DownloadMetrics::total_bytes| remain unchanged
(update_client::BuildDownloadCompleteEventElement uses them).

Bug:844972,871211

Cq-Include-Trybots: luci.chromium.try:linux_mojo
Change-Id: I50410b46576263aec9a9a4b648a8a627f4832702
Reviewed-on: https://chromium-review.googlesource.com/1160725
Reviewed-by: Matt Menke <[email protected]>
Reviewed-by: Joshua Pawlicki <[email protected]>
Commit-Queue: Antonio Gomes <[email protected]>
Cr-Commit-Position: refs/heads/master@{#582150}
diff --git a/components/update_client/crx_downloader_unittest.cc b/components/update_client/crx_downloader_unittest.cc
index cb5c808..cf195323 100644
--- a/components/update_client/crx_downloader_unittest.cc
+++ b/components/update_client/crx_downloader_unittest.cc
@@ -13,14 +13,15 @@
 #include "base/memory/ref_counted.h"
 #include "base/path_service.h"
 #include "base/run_loop.h"
+#include "base/test/bind_test_util.h"
 #include "base/test/scoped_task_environment.h"
 #include "base/threading/thread_task_runner_handle.h"
 #include "build/build_config.h"
 #include "components/update_client/update_client_errors.h"
 #include "components/update_client/utils.h"
 #include "net/base/net_errors.h"
-#include "net/url_request/test_url_request_interceptor.h"
-#include "net/url_request/url_request_test_util.h"
+#include "services/network/public/cpp/weak_wrapper_shared_url_loader_factory.h"
+#include "services/network/test/test_url_loader_factory.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 using base::ContentsEqual;
@@ -29,9 +30,6 @@
 
 namespace {
 
-// Intercepts HTTP GET requests sent to "localhost".
-typedef net::LocalHostTestURLRequestInterceptor GetInterceptor;
-
 const char kTestFileName[] = "jebgalgnebhfojomionfpkfelancnnkf.crx";
 
 const char hash_jebg[] =
@@ -63,10 +61,16 @@
 
   void DownloadProgress(int crx_context, const CrxDownloader::Result& result);
 
+  int GetInterceptorCount() { return interceptor_count_; }
+
+  void AddResponse(const GURL& url,
+                   const base::FilePath& file_path,
+                   int net_error);
+
  protected:
   std::unique_ptr<CrxDownloader> crx_downloader_;
 
-  std::unique_ptr<GetInterceptor> get_interceptor_;
+  network::TestURLLoaderFactory test_url_loader_factory_;
 
   CrxDownloader::DownloadCallback callback_;
   CrxDownloader::ProgressCallback progress_callback_;
@@ -80,12 +84,16 @@
   int num_progress_calls_;
   CrxDownloader::Result download_progress_result_;
 
+  // Accumulates the number of loads triggered.
+  int interceptor_count_ = 0;
+
   // A magic value for the context to be used in the tests.
   static const int kExpectedContext = 0xaabb;
 
  private:
   base::test::ScopedTaskEnvironment scoped_task_environment_;
-  scoped_refptr<net::TestURLRequestContextGetter> context_;
+  scoped_refptr<network::SharedURLLoaderFactory>
+      test_shared_url_loader_factory_;
   base::OnceClosure quit_closure_;
 };
 
@@ -103,16 +111,11 @@
       num_progress_calls_(0),
       scoped_task_environment_(
           base::test::ScopedTaskEnvironment::MainThreadType::IO),
-      context_(base::MakeRefCounted<net::TestURLRequestContextGetter>(
-          base::ThreadTaskRunnerHandle::Get())) {}
+      test_shared_url_loader_factory_(
+          base::MakeRefCounted<network::WeakWrapperSharedURLLoaderFactory>(
+              &test_url_loader_factory_)) {}
 
-CrxDownloaderTest::~CrxDownloaderTest() {
-  context_ = nullptr;
-
-  // The GetInterceptor requires the message loop to run to destruct correctly.
-  get_interceptor_.reset();
-  RunThreadsUntilIdle();
-}
+CrxDownloaderTest::~CrxDownloaderTest() {}
 
 void CrxDownloaderTest::SetUp() {
   num_download_complete_calls_ = 0;
@@ -121,11 +124,12 @@
   download_progress_result_ = CrxDownloader::Result();
 
   // Do not use the background downloader in these tests.
-  crx_downloader_ = CrxDownloader::Create(false, context_.get());
+  crx_downloader_ =
+      CrxDownloader::Create(false, test_shared_url_loader_factory_);
   crx_downloader_->set_progress_callback(progress_callback_);
 
-  get_interceptor_ = std::make_unique<GetInterceptor>(
-      base::ThreadTaskRunnerHandle::Get(), base::ThreadTaskRunnerHandle::Get());
+  test_url_loader_factory_.SetInterceptor(base::BindLambdaForTesting(
+      [&](const network::ResourceRequest& request) { interceptor_count_++; }));
 }
 
 void CrxDownloaderTest::TearDown() {
@@ -151,6 +155,26 @@
   download_progress_result_ = result;
 }
 
+void CrxDownloaderTest::AddResponse(const GURL& url,
+                                    const base::FilePath& file_path,
+                                    int net_error) {
+  if (net_error == net::OK) {
+    std::string data;
+    EXPECT_TRUE(base::ReadFileToString(file_path, &data));
+    network::ResourceResponseHead head;
+    head.content_length = data.size();
+    network::URLLoaderCompletionStatus status(net_error);
+    status.decoded_body_length = data.size();
+    test_url_loader_factory_.AddResponse(url, head, data, status);
+    return;
+  }
+
+  EXPECT_NE(net_error, net::OK);
+  test_url_loader_factory_.AddResponse(
+      url, network::ResourceResponseHead(), std::string(),
+      network::URLLoaderCompletionStatus(net_error));
+}
+
 void CrxDownloaderTest::RunThreads() {
   base::RunLoop runloop;
   quit_closure_ = runloop.QuitClosure();
@@ -208,27 +232,23 @@
       GURL("http://localhost/download/jebgalgnebhfojomionfpkfelancnnkf.crx");
 
   const base::FilePath test_file(MakeTestFilePath(kTestFileName));
-  get_interceptor_->SetResponse(expected_crx_url, test_file);
+  AddResponse(expected_crx_url, test_file, net::OK);
 
   crx_downloader_->StartDownloadFromUrl(
       expected_crx_url, std::string(hash_jebg), std::move(callback_));
   RunThreads();
 
-  EXPECT_EQ(1, get_interceptor_->GetHitCount());
+  EXPECT_EQ(1, GetInterceptorCount());
 
   EXPECT_EQ(1, num_download_complete_calls_);
   EXPECT_EQ(kExpectedContext, crx_context_);
   EXPECT_EQ(0, download_complete_result_.error);
-  EXPECT_EQ(1843, download_complete_result_.downloaded_bytes);
-  EXPECT_EQ(1843, download_complete_result_.total_bytes);
   EXPECT_TRUE(ContentsEqual(download_complete_result_.response, test_file));
 
   EXPECT_TRUE(
       DeleteFileAndEmptyParentDirectory(download_complete_result_.response));
 
   EXPECT_LE(1, num_progress_calls_);
-  EXPECT_EQ(1843, download_progress_result_.downloaded_bytes);
-  EXPECT_EQ(1843, download_progress_result_.total_bytes);
 }
 
 // Tests that downloading from one url fails if the actual hash of the file
@@ -238,7 +258,7 @@
       GURL("http://localhost/download/jebgalgnebhfojomionfpkfelancnnkf.crx");
 
   const base::FilePath test_file(MakeTestFilePath(kTestFileName));
-  get_interceptor_->SetResponse(expected_crx_url, test_file);
+  AddResponse(expected_crx_url, test_file, net::OK);
 
   crx_downloader_->StartDownloadFromUrl(
       expected_crx_url,
@@ -247,19 +267,15 @@
       std::move(callback_));
   RunThreads();
 
-  EXPECT_EQ(1, get_interceptor_->GetHitCount());
+  EXPECT_EQ(1, GetInterceptorCount());
 
   EXPECT_EQ(1, num_download_complete_calls_);
   EXPECT_EQ(kExpectedContext, crx_context_);
   EXPECT_EQ(static_cast<int>(CrxDownloaderError::BAD_HASH),
             download_complete_result_.error);
-  EXPECT_EQ(1843, download_complete_result_.downloaded_bytes);
-  EXPECT_EQ(1843, download_complete_result_.total_bytes);
   EXPECT_TRUE(download_complete_result_.response.empty());
 
   EXPECT_LE(1, num_progress_calls_);
-  EXPECT_EQ(1843, download_progress_result_.downloaded_bytes);
-  EXPECT_EQ(1843, download_progress_result_.total_bytes);
 }
 
 // Tests that specifying two urls has no side effects. Expect a successful
@@ -269,7 +285,7 @@
       GURL("http://localhost/download/jebgalgnebhfojomionfpkfelancnnkf.crx");
 
   const base::FilePath test_file(MakeTestFilePath(kTestFileName));
-  get_interceptor_->SetResponse(expected_crx_url, test_file);
+  AddResponse(expected_crx_url, test_file, net::OK);
 
   std::vector<GURL> urls;
   urls.push_back(expected_crx_url);
@@ -279,21 +295,17 @@
                                  std::move(callback_));
   RunThreads();
 
-  EXPECT_EQ(1, get_interceptor_->GetHitCount());
+  EXPECT_EQ(1, GetInterceptorCount());
 
   EXPECT_EQ(1, num_download_complete_calls_);
   EXPECT_EQ(kExpectedContext, crx_context_);
   EXPECT_EQ(0, download_complete_result_.error);
-  EXPECT_EQ(1843, download_complete_result_.downloaded_bytes);
-  EXPECT_EQ(1843, download_complete_result_.total_bytes);
   EXPECT_TRUE(ContentsEqual(download_complete_result_.response, test_file));
 
   EXPECT_TRUE(
       DeleteFileAndEmptyParentDirectory(download_complete_result_.response));
 
   EXPECT_LE(1, num_progress_calls_);
-  EXPECT_EQ(1843, download_progress_result_.downloaded_bytes);
-  EXPECT_EQ(1843, download_progress_result_.total_bytes);
 }
 
 // Tests that the fallback to a valid url is successful.
@@ -304,9 +316,8 @@
       GURL("http://localhost/download/ihfokbkgjpifnbbojhneepfflplebdkc.crx");
 
   const base::FilePath test_file(MakeTestFilePath(kTestFileName));
-  get_interceptor_->SetResponse(expected_crx_url, test_file);
-  get_interceptor_->SetResponse(no_file_url,
-                                base::FilePath(FILE_PATH_LITERAL("no-file")));
+  AddResponse(expected_crx_url, test_file, net::OK);
+  AddResponse(no_file_url, base::FilePath(), net::ERR_FILE_NOT_FOUND);
 
   std::vector<GURL> urls;
   urls.push_back(no_file_url);
@@ -316,22 +327,18 @@
                                  std::move(callback_));
   RunThreads();
 
-  EXPECT_EQ(2, get_interceptor_->GetHitCount());
+  EXPECT_EQ(2, GetInterceptorCount());
 
   EXPECT_EQ(1, num_download_complete_calls_);
   EXPECT_EQ(kExpectedContext, crx_context_);
   EXPECT_EQ(0, download_complete_result_.error);
-  EXPECT_EQ(1843, download_complete_result_.downloaded_bytes);
-  EXPECT_EQ(1843, download_complete_result_.total_bytes);
   EXPECT_TRUE(ContentsEqual(download_complete_result_.response, test_file));
 
   EXPECT_TRUE(
       DeleteFileAndEmptyParentDirectory(download_complete_result_.response));
 
-  // Expect at least some progress reported by the fetcher.
+  // Expect at least some progress reported by the loader.
   EXPECT_LE(1, num_progress_calls_);
-  EXPECT_EQ(1843, download_progress_result_.downloaded_bytes);
-  EXPECT_EQ(1843, download_progress_result_.total_bytes);
 
   const auto download_metrics = crx_downloader_->download_metrics();
   ASSERT_EQ(2u, download_metrics.size());
@@ -354,9 +361,8 @@
       GURL("http://localhost/download/ihfokbkgjpifnbbojhneepfflplebdkc.crx");
 
   const base::FilePath test_file(MakeTestFilePath(kTestFileName));
-  get_interceptor_->SetResponse(expected_crx_url, test_file);
-  get_interceptor_->SetResponse(no_file_url,
-                                base::FilePath(FILE_PATH_LITERAL("no-file")));
+  AddResponse(expected_crx_url, test_file, net::OK);
+  AddResponse(no_file_url, base::FilePath(), net::ERR_FILE_NOT_FOUND);
 
   std::vector<GURL> urls;
   urls.push_back(expected_crx_url);
@@ -366,21 +372,17 @@
                                  std::move(callback_));
   RunThreads();
 
-  EXPECT_EQ(1, get_interceptor_->GetHitCount());
+  EXPECT_EQ(1, GetInterceptorCount());
 
   EXPECT_EQ(1, num_download_complete_calls_);
   EXPECT_EQ(kExpectedContext, crx_context_);
   EXPECT_EQ(0, download_complete_result_.error);
-  EXPECT_EQ(1843, download_complete_result_.downloaded_bytes);
-  EXPECT_EQ(1843, download_complete_result_.total_bytes);
   EXPECT_TRUE(ContentsEqual(download_complete_result_.response, test_file));
 
   EXPECT_TRUE(
       DeleteFileAndEmptyParentDirectory(download_complete_result_.response));
 
   EXPECT_LE(1, num_progress_calls_);
-  EXPECT_EQ(1843, download_progress_result_.downloaded_bytes);
-  EXPECT_EQ(1843, download_progress_result_.total_bytes);
 
   EXPECT_EQ(1u, crx_downloader_->download_metrics().size());
 }
@@ -390,8 +392,7 @@
   const GURL expected_crx_url =
       GURL("http://localhost/download/jebgalgnebhfojomionfpkfelancnnkf.crx");
 
-  get_interceptor_->SetResponse(expected_crx_url,
-                                base::FilePath(FILE_PATH_LITERAL("no-file")));
+  AddResponse(expected_crx_url, base::FilePath(), net::ERR_FILE_NOT_FOUND);
 
   std::vector<GURL> urls;
   urls.push_back(expected_crx_url);
@@ -401,7 +402,7 @@
                                  std::move(callback_));
   RunThreads();
 
-  EXPECT_EQ(2, get_interceptor_->GetHitCount());
+  EXPECT_EQ(2, GetInterceptorCount());
 
   EXPECT_EQ(1, num_download_complete_calls_);
   EXPECT_EQ(kExpectedContext, crx_context_);