Replace wstring with string16 in history. The only time wstring is used now in history is for bookmark-related stuff (the bookmarks system is still wstring-based).

The substantial change here is in RTL to make a string16 variant of the functions and in changing the WordIterator to use string16 (this cleaned up some weird utf-32 code).

TEST=none
BUG=none
Review URL: http://codereview.chromium.org/2808017

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@50597 0039d316-1c4b-4281-b951-d872f2087c98
diff --git a/chrome/browser/autocomplete/autocomplete.cc b/chrome/browser/autocomplete/autocomplete.cc
index cdaa56f..1846e2d1 100644
--- a/chrome/browser/autocomplete/autocomplete.cc
+++ b/chrome/browser/autocomplete/autocomplete.cc
@@ -1010,7 +1010,7 @@
                               ACMatchClassification::NONE));
   }
   match.destination_url =
-      HistoryUI::GetHistoryURLWithSearchText(input_.text());
+      HistoryUI::GetHistoryURLWithSearchText(WideToUTF16(input_.text()));
   match.transition = PageTransition::AUTO_BOOKMARK;
   match.provider = history_contents_provider_;
   latest_result_.AddMatch(match);
diff --git a/chrome/browser/autocomplete/autocomplete_edit_view_browsertest.cc b/chrome/browser/autocomplete/autocomplete_edit_view_browsertest.cc
index 951a9b0..35dbff5 100644
--- a/chrome/browser/autocomplete/autocomplete_edit_view_browsertest.cc
+++ b/chrome/browser/autocomplete/autocomplete_edit_view_browsertest.cc
@@ -41,18 +41,18 @@
 
 namespace {
 
-const wchar_t kSearchKeyword[] = L"foo";
+const char kSearchKeyword[] = "foo";
 const wchar_t kSearchKeywordKeys[] = {
   base::VKEY_F, base::VKEY_O, base::VKEY_O, 0
 };
-const wchar_t kSearchURL[] = L"http://www.foo.com/search?q={searchTerms}";
-const wchar_t kSearchShortName[] = L"foo";
-const wchar_t kSearchText[] = L"abc";
+const char kSearchURL[] = "http://www.foo.com/search?q={searchTerms}";
+const char kSearchShortName[] = "foo";
+const char kSearchText[] = "abc";
 const wchar_t kSearchTextKeys[] = {
   base::VKEY_A, base::VKEY_B, base::VKEY_C, 0
 };
 const char kSearchTextURL[] = "http://www.foo.com/search?q=abc";
-const wchar_t kSearchSingleChar[] = L"z";
+const char kSearchSingleChar[] = "z";
 const wchar_t kSearchSingleCharKeys[] = { base::VKEY_Z, 0 };
 const char kSearchSingleCharURL[] = "http://www.foo.com/search?q=z";
 
@@ -77,24 +77,24 @@
 
 const struct TestHistoryEntry {
   const char* url;
-  const wchar_t* title;
-  const wchar_t* body;
+  const char* title;
+  const char* body;
   int visit_count;
   int typed_count;
   bool starred;
 } kHistoryEntries[] = {
-  {"http://www.bar.com/1", L"Page 1", kSearchText, 1, 1, false },
-  {"http://www.bar.com/2", L"Page 2", kSearchText, 1, 1, false },
-  {"http://www.bar.com/3", L"Page 3", kSearchText, 1, 1, false },
-  {"http://www.bar.com/4", L"Page 4", kSearchText, 1, 1, false },
-  {"http://www.bar.com/5", L"Page 5", kSearchText, 1, 1, false },
-  {"http://www.bar.com/6", L"Page 6", kSearchText, 1, 1, false },
-  {"http://www.bar.com/7", L"Page 7", kSearchText, 1, 1, false },
-  {"http://www.bar.com/8", L"Page 8", kSearchText, 1, 1, false },
-  {"http://www.bar.com/9", L"Page 9", kSearchText, 1, 1, false },
+  {"http://www.bar.com/1", "Page 1", kSearchText, 1, 1, false },
+  {"http://www.bar.com/2", "Page 2", kSearchText, 1, 1, false },
+  {"http://www.bar.com/3", "Page 3", kSearchText, 1, 1, false },
+  {"http://www.bar.com/4", "Page 4", kSearchText, 1, 1, false },
+  {"http://www.bar.com/5", "Page 5", kSearchText, 1, 1, false },
+  {"http://www.bar.com/6", "Page 6", kSearchText, 1, 1, false },
+  {"http://www.bar.com/7", "Page 7", kSearchText, 1, 1, false },
+  {"http://www.bar.com/8", "Page 8", kSearchText, 1, 1, false },
+  {"http://www.bar.com/9", "Page 9", kSearchText, 1, 1, false },
 
   // To trigger inline autocomplete.
-  {"http://www.abc.com", L"Page abc", kSearchText, 10000, 10000, true },
+  {"http://www.abc.com", "Page abc", kSearchText, 10000, 10000, true },
 };
 
 }  // namespace
@@ -192,9 +192,9 @@
     ASSERT_TRUE(model->loaded());
 
     TemplateURL* template_url = new TemplateURL();
-    template_url->SetURL(kSearchURL, 0, 0);
-    template_url->set_keyword(kSearchKeyword);
-    template_url->set_short_name(kSearchShortName);
+    template_url->SetURL(UTF8ToWide(kSearchURL), 0, 0);
+    template_url->set_keyword(UTF8ToWide(kSearchKeyword));
+    template_url->set_short_name(UTF8ToWide(kSearchShortName));
 
     model->Add(template_url);
     model->SetDefaultSearchProvider(template_url);
@@ -231,9 +231,10 @@
       // Add everything in order of time. We don't want to have a time that
       // is "right now" or it will nondeterministically appear in the results.
       Time t = Time::Now() - TimeDelta::FromHours(i + 1);
-      history_service->AddPageWithDetails(url, cur.title, cur.visit_count,
+      history_service->AddPageWithDetails(url, UTF8ToUTF16(cur.title),
+                                          cur.visit_count,
                                           cur.typed_count, t, false);
-      history_service->SetPageContents(url, WideToUTF16(cur.body));
+      history_service->SetPageContents(url, UTF8ToUTF16(cur.body));
       if (cur.starred) {
         bookmark_model->SetURLStarred(url, std::wstring(), true);
       }
@@ -330,22 +331,22 @@
   // Trigger keyword hint mode.
   ASSERT_NO_FATAL_FAILURE(SendKeySequence(kSearchKeywordKeys));
   ASSERT_TRUE(edit_view->model()->is_keyword_hint());
-  ASSERT_EQ(std::wstring(kSearchKeyword), edit_view->model()->keyword());
+  ASSERT_EQ(kSearchKeyword, WideToUTF8(edit_view->model()->keyword()));
 
   // Trigger keyword mode.
   ASSERT_NO_FATAL_FAILURE(SendKey(base::VKEY_TAB, false, false, false));
   ASSERT_FALSE(edit_view->model()->is_keyword_hint());
-  ASSERT_EQ(std::wstring(kSearchKeyword), edit_view->model()->keyword());
+  ASSERT_EQ(kSearchKeyword, WideToUTF8(edit_view->model()->keyword()));
 
   // Backspace without search text should bring back keyword hint mode.
   ASSERT_NO_FATAL_FAILURE(SendKey(base::VKEY_BACK, false, false, false));
   ASSERT_TRUE(edit_view->model()->is_keyword_hint());
-  ASSERT_EQ(std::wstring(kSearchKeyword), edit_view->model()->keyword());
+  ASSERT_EQ(kSearchKeyword, WideToUTF8(edit_view->model()->keyword()));
 
   // Trigger keyword mode again.
   ASSERT_NO_FATAL_FAILURE(SendKey(base::VKEY_TAB, false, false, false));
   ASSERT_FALSE(edit_view->model()->is_keyword_hint());
-  ASSERT_EQ(std::wstring(kSearchKeyword), edit_view->model()->keyword());
+  ASSERT_EQ(kSearchKeyword, WideToUTF8(edit_view->model()->keyword()));
 
   // Input something as search text.
   ASSERT_NO_FATAL_FAILURE(SendKeySequence(kSearchTextKeys));
@@ -355,7 +356,7 @@
   for (size_t i = 0; i < arraysize(kSearchText) - 1; ++i) {
     ASSERT_NO_FATAL_FAILURE(SendKey(base::VKEY_BACK, false, false, false));
     ASSERT_FALSE(edit_view->model()->is_keyword_hint());
-    ASSERT_EQ(std::wstring(kSearchKeyword), edit_view->model()->keyword());
+    ASSERT_EQ(kSearchKeyword, WideToUTF8(edit_view->model()->keyword()));
   }
 
   // Input something as search text.
@@ -367,9 +368,9 @@
   // the keyword mode.
   ASSERT_NO_FATAL_FAILURE(SendKey(base::VKEY_BACK, false, false, false));
   ASSERT_FALSE(edit_view->model()->is_keyword_hint());
-  ASSERT_EQ(std::wstring(), edit_view->model()->keyword());
-  ASSERT_EQ(std::wstring(kSearchKeyword) + std::wstring(kSearchText),
-            edit_view->GetText());
+  ASSERT_EQ(std::string(), WideToUTF8(edit_view->model()->keyword()));
+  ASSERT_EQ(std::string(kSearchKeyword) + kSearchText,
+            WideToUTF8(edit_view->GetText()));
 }
 
 IN_PROC_BROWSER_TEST_F(AutocompleteEditViewTest, Escape) {
@@ -459,7 +460,7 @@
   ASSERT_NO_FATAL_FAILURE(SendKeySequence(kSearchSingleCharKeys));
   ASSERT_NO_FATAL_FAILURE(WaitForAutocompleteControllerDone());
   ASSERT_TRUE(popup_model->IsOpen());
-  EXPECT_EQ(std::wstring(kSearchSingleChar), edit_view->GetText());
+  EXPECT_EQ(kSearchSingleChar, WideToUTF8(edit_view->GetText()));
 
   // Check if the default match result is Search Primary Provider.
   ASSERT_EQ(AutocompleteMatch::SEARCH_WHAT_YOU_TYPED,
diff --git a/chrome/browser/autocomplete/history_contents_provider.cc b/chrome/browser/autocomplete/history_contents_provider.cc
index c05fbdc..947f0be 100644
--- a/chrome/browser/autocomplete/history_contents_provider.cc
+++ b/chrome/browser/autocomplete/history_contents_provider.cc
@@ -120,7 +120,8 @@
       history::QueryOptions options;
       options.SetRecentDayRange(kDaysToSearch);
       options.max_count = kMaxMatchCount;
-      history->QueryHistory(input.text(), options, &request_consumer_,
+      history->QueryHistory(WideToUTF16(input.text()), options,
+          &request_consumer_,
           NewCallback(this, &HistoryContentsProvider::QueryComplete));
     }
   }
@@ -213,7 +214,7 @@
   match.destination_url = result.url();
   match.contents_class.push_back(
       ACMatchClassification(0, ACMatchClassification::URL));
-  match.description = result.title();
+  match.description = UTF16ToWide(result.title());
   match.starred =
       (profile_->GetBookmarkModel() &&
        profile_->GetBookmarkModel()->IsBookmarked(result.url()));
@@ -277,6 +278,6 @@
 void HistoryContentsProvider::AddBookmarkTitleMatchToResults(
     const bookmark_utils::TitleMatch& match) {
   history::URLResult url_result(match.node->GetURL(), match.match_positions);
-  url_result.set_title(match.node->GetTitle());
+  url_result.set_title(WideToUTF16(match.node->GetTitle()));
   results_.AppendURLBySwapping(&url_result);
 }
diff --git a/chrome/browser/autocomplete/history_contents_provider_unittest.cc b/chrome/browser/autocomplete/history_contents_provider_unittest.cc
index ff8fa09..2fd6e232 100644
--- a/chrome/browser/autocomplete/history_contents_provider_unittest.cc
+++ b/chrome/browser/autocomplete/history_contents_provider_unittest.cc
@@ -19,12 +19,12 @@
 
 struct TestEntry {
   const char* url;
-  const wchar_t* title;
+  const char* title;
   const char* body;
 } test_entries[] = {
-  {"http://www.google.com/1", L"PAGEONE 1",   "FOO some body text"},
-  {"http://www.google.com/2", L"PAGEONE 2",   "FOO some more blah blah"},
-  {"http://www.google.com/3", L"PAGETHREE 3", "BAR some hello world for you"},
+  {"http://www.google.com/1", "PAGEONE 1",   "FOO some body text"},
+  {"http://www.google.com/2", "PAGEONE 2",   "FOO some more blah blah"},
+  {"http://www.google.com/3", "PAGETHREE 3", "BAR some hello world for you"},
 };
 
 class HistoryContentsProviderTest : public testing::Test,
@@ -72,7 +72,7 @@
 
       history_service->AddPage(url, t, id_scope, i, GURL(),
           PageTransition::LINK, history::RedirectList(), false);
-      history_service->SetPageTitle(url, test_entries[i].title);
+      history_service->SetPageTitle(url, UTF8ToUTF16(test_entries[i].title));
       history_service->SetPageContents(url, UTF8ToUTF16(test_entries[i].body));
     }
 
@@ -108,9 +108,9 @@
   const ACMatches& m = matches();
   ASSERT_EQ(2U, m.size());
   EXPECT_EQ(test_entries[0].url, m[0].destination_url.spec());
-  EXPECT_STREQ(test_entries[0].title, m[0].description.c_str());
+  EXPECT_STREQ(test_entries[0].title, WideToUTF8(m[0].description).c_str());
   EXPECT_EQ(test_entries[1].url, m[1].destination_url.spec());
-  EXPECT_STREQ(test_entries[1].title, m[1].description.c_str());
+  EXPECT_STREQ(test_entries[1].title, WideToUTF8(m[1].description).c_str());
 }
 
 TEST_F(HistoryContentsProviderTest, Title) {
@@ -121,9 +121,9 @@
   const ACMatches& m = matches();
   ASSERT_EQ(2U, m.size());
   EXPECT_EQ(test_entries[0].url, m[0].destination_url.spec());
-  EXPECT_STREQ(test_entries[0].title, m[0].description.c_str());
+  EXPECT_STREQ(test_entries[0].title, WideToUTF8(m[0].description).c_str());
   EXPECT_EQ(test_entries[1].url, m[1].destination_url.spec());
-  EXPECT_STREQ(test_entries[1].title, m[1].description.c_str());
+  EXPECT_STREQ(test_entries[1].title, WideToUTF8(m[1].description).c_str());
 }
 
 // The "minimal changes" flag should mean that we don't re-query the DB.
diff --git a/chrome/browser/autocomplete/history_url_provider.cc b/chrome/browser/autocomplete/history_url_provider.cc
index 1952a659..a4cbd28 100644
--- a/chrome/browser/autocomplete/history_url_provider.cc
+++ b/chrome/browser/autocomplete/history_url_provider.cc
@@ -10,6 +10,7 @@
 #include "base/histogram.h"
 #include "base/message_loop.h"
 #include "base/string_util.h"
+#include "base/utf_string_conversions.h"
 #include "chrome/browser/history/history.h"
 #include "chrome/browser/history/history_backend.h"
 #include "chrome/browser/history/history_database.h"
@@ -158,7 +159,7 @@
     // for more results than we need, of every prefix type, in hopes this will
     // give us far more than enough to work with.  CullRedirects() will then
     // reduce the list to the best kMaxMatches results.
-    db->AutocompleteForPrefix(i->prefix + params->input.text(),
+    db->AutocompleteForPrefix(WideToUTF16(i->prefix + params->input.text()),
                               kMaxMatches * 2, &url_matches);
     for (URLRowVector::const_iterator j(url_matches.begin());
          j != url_matches.end(); ++j) {
@@ -327,8 +328,9 @@
   } else {
     // We have data for this match, use it.
     match->deletable = true;
-    match->description = info.title();
-    AutocompleteMatch::ClassifyMatchInString(input.text(), info.title(),
+    match->description = UTF16ToWide(info.title());
+    AutocompleteMatch::ClassifyMatchInString(input.text(),
+        UTF16ToWide(info.title()),
         ACMatchClassification::NONE, &match->description_class);
   }
 
@@ -882,8 +884,9 @@
         match.contents.length(), ACMatchClassification::URL,
         &match.contents_class);
   }
-  match.description = info.title();
-  AutocompleteMatch::ClassifyMatchInString(params->input.text(), info.title(),
+  match.description = UTF16ToWide(info.title());
+  AutocompleteMatch::ClassifyMatchInString(params->input.text(),
+                                           UTF16ToWide(info.title()),
                                            ACMatchClassification::NONE,
                                            &match.description_class);
 
diff --git a/chrome/browser/autocomplete/history_url_provider_unittest.cc b/chrome/browser/autocomplete/history_url_provider_unittest.cc
index 9e2f159b..845d2e7 100644
--- a/chrome/browser/autocomplete/history_url_provider_unittest.cc
+++ b/chrome/browser/autocomplete/history_url_provider_unittest.cc
@@ -20,73 +20,73 @@
 
 struct TestURLInfo {
   std::string url;
-  std::wstring title;
+  std::string title;
   int visit_count;
   int typed_count;
 };
 
 // Contents of the test database.
 static TestURLInfo test_db[] = {
-  {"http://www.google.com/", L"Google", 3, 3},
+  {"http://www.google.com/", "Google", 3, 3},
 
   // High-quality pages should get a host synthesized as a lower-quality match.
-  {"http://slashdot.org/favorite_page.html", L"Favorite page", 200, 100},
+  {"http://slashdot.org/favorite_page.html", "Favorite page", 200, 100},
 
   // Less popular pages should have hosts synthesized as higher-quality
   // matches.
-  {"http://kerneltrap.org/not_very_popular.html", L"Less popular", 4, 0},
+  {"http://kerneltrap.org/not_very_popular.html", "Less popular", 4, 0},
 
   // Unpopular pages should not appear in the results at all.
-  {"http://freshmeat.net/unpopular.html", L"Unpopular", 1, 1},
+  {"http://freshmeat.net/unpopular.html", "Unpopular", 1, 1},
 
   // If a host has a match, we should pick it up during host synthesis.
-  {"http://news.google.com/?ned=us&topic=n", L"Google News - U.S.", 2, 2},
-  {"http://news.google.com/", L"Google News", 1, 1},
+  {"http://news.google.com/?ned=us&topic=n", "Google News - U.S.", 2, 2},
+  {"http://news.google.com/", "Google News", 1, 1},
 
   // Suggested short URLs must be "good enough" and must match user input.
-  {"http://foo.com/", L"Dir", 5, 5},
-  {"http://foo.com/dir/", L"Dir", 2, 2},
-  {"http://foo.com/dir/another/", L"Dir", 5, 1},
-  {"http://foo.com/dir/another/again/", L"Dir", 10, 0},
-  {"http://foo.com/dir/another/again/myfile.html", L"File", 10, 2},
+  {"http://foo.com/", "Dir", 5, 5},
+  {"http://foo.com/dir/", "Dir", 2, 2},
+  {"http://foo.com/dir/another/", "Dir", 5, 1},
+  {"http://foo.com/dir/another/again/", "Dir", 10, 0},
+  {"http://foo.com/dir/another/again/myfile.html", "File", 10, 2},
 
   // We throw in a lot of extra URLs here to make sure we're testing the
   // history database's query, not just the autocomplete provider.
-  {"http://startest.com/y/a", L"A", 2, 2},
-  {"http://startest.com/y/b", L"B", 5, 2},
-  {"http://startest.com/x/c", L"C", 5, 2},
-  {"http://startest.com/x/d", L"D", 5, 5},
-  {"http://startest.com/y/e", L"E", 4, 2},
-  {"http://startest.com/y/f", L"F", 3, 2},
-  {"http://startest.com/y/g", L"G", 3, 2},
-  {"http://startest.com/y/h", L"H", 3, 2},
-  {"http://startest.com/y/i", L"I", 3, 2},
-  {"http://startest.com/y/j", L"J", 3, 2},
-  {"http://startest.com/y/k", L"K", 3, 2},
-  {"http://startest.com/y/l", L"L", 3, 2},
-  {"http://startest.com/y/m", L"M", 3, 2},
+  {"http://startest.com/y/a", "A", 2, 2},
+  {"http://startest.com/y/b", "B", 5, 2},
+  {"http://startest.com/x/c", "C", 5, 2},
+  {"http://startest.com/x/d", "D", 5, 5},
+  {"http://startest.com/y/e", "E", 4, 2},
+  {"http://startest.com/y/f", "F", 3, 2},
+  {"http://startest.com/y/g", "G", 3, 2},
+  {"http://startest.com/y/h", "H", 3, 2},
+  {"http://startest.com/y/i", "I", 3, 2},
+  {"http://startest.com/y/j", "J", 3, 2},
+  {"http://startest.com/y/k", "K", 3, 2},
+  {"http://startest.com/y/l", "L", 3, 2},
+  {"http://startest.com/y/m", "M", 3, 2},
 
   // A file: URL is useful for testing that fixup does the right thing w.r.t.
   // the number of trailing slashes on the user's input.
-  {"file:///C:/foo.txt", L"", 2, 2},
+  {"file:///C:/foo.txt", "", 2, 2},
 
   // Results with absurdly high typed_counts so that very generic queries like
   // "http" will give consistent results even if more data is added above.
-  {"http://bogussite.com/a", L"Bogus A", 10002, 10000},
-  {"http://bogussite.com/b", L"Bogus B", 10001, 10000},
-  {"http://bogussite.com/c", L"Bogus C", 10000, 10000},
+  {"http://bogussite.com/a", "Bogus A", 10002, 10000},
+  {"http://bogussite.com/b", "Bogus B", 10001, 10000},
+  {"http://bogussite.com/c", "Bogus C", 10000, 10000},
 
   // Domain name with number.
-  {"http://www.17173.com/", L"Domain with number", 3, 3},
+  {"http://www.17173.com/", "Domain with number", 3, 3},
 
   // URLs to test exact-matching behavior.
-  {"http://go/", L"Intranet URL", 1, 1},
-  {"http://gooey/", L"Intranet URL 2", 5, 5},
+  {"http://go/", "Intranet URL", 1, 1},
+  {"http://gooey/", "Intranet URL 2", 5, 5},
 
   // URLs for testing offset adjustment.
-  {"http://www.\xEA\xB5\x90\xEC\x9C\xA1.kr/", L"Korean", 2, 2},
-  {"http://spaces.com/path%20with%20spaces/foo.html", L"Spaces", 2, 2},
-  {"http://ms/c++%20style%20guide", L"Style guide", 2, 2},
+  {"http://www.\xEA\xB5\x90\xEC\x9C\xA1.kr/", "Korean", 2, 2},
+  {"http://spaces.com/path%20with%20spaces/foo.html", "Spaces", 2, 2},
+  {"http://ms/c++%20style%20guide", "Style guide", 2, 2},
 };
 
 class HistoryURLProviderTest : public testing::Test,
@@ -172,7 +172,7 @@
   for (size_t i = 0; i < arraysize(test_db); ++i) {
     const TestURLInfo& cur = test_db[i];
     const GURL current_url(cur.url);
-    history_service_->AddPageWithDetails(current_url, cur.title,
+    history_service_->AddPageWithDetails(current_url, UTF8ToUTF16(cur.title),
                                          cur.visit_count, cur.typed_count,
                                          visit_time, false);
   }
@@ -311,7 +311,8 @@
     {"http://redirects/C", 10}
   };
   for (size_t i = 0; i < ARRAYSIZE_UNSAFE(redirect); i++) {
-    history_service_->AddPageWithDetails(GURL(redirect[i].url), L"Title",
+    history_service_->AddPageWithDetails(GURL(redirect[i].url),
+                                         UTF8ToUTF16("Title"),
                                          redirect[i].count, redirect[i].count,
                                          Time::Now(), false);
   }
diff --git a/chrome/browser/autocomplete/search_provider.cc b/chrome/browser/autocomplete/search_provider.cc
index 3da49df..87d0a68 100644
--- a/chrome/browser/autocomplete/search_provider.cc
+++ b/chrome/browser/autocomplete/search_provider.cc
@@ -328,7 +328,7 @@
       profile_->GetHistoryService(Profile::EXPLICIT_ACCESS);
   HistoryService::Handle request_handle =
       history_service->GetMostRecentKeywordSearchTerms(
-          search_id, text, static_cast<int>(kMaxMatches),
+          search_id, WideToUTF16(text), static_cast<int>(kMaxMatches),
           &history_request_consumer_,
           NewCallback(this,
                       &SearchProvider::OnGotMostRecentKeywordSearchTerms));
@@ -537,7 +537,8 @@
                                             MatchMap* map) {
   for (HistoryResults::const_iterator i(results.begin()); i != results.end();
        ++i) {
-    AddMatchToMap(i->term, CalculateRelevanceForHistory(i->time, is_keyword),
+    AddMatchToMap(UTF16ToWide(i->term),
+                  CalculateRelevanceForHistory(i->time, is_keyword),
                   AutocompleteMatch::SEARCH_HISTORY, did_not_accept_suggestion,
                   is_keyword, map);
   }
diff --git a/chrome/browser/autocomplete/search_provider_unittest.cc b/chrome/browser/autocomplete/search_provider_unittest.cc
index 26c5843..1e489f2 100644
--- a/chrome/browser/autocomplete/search_provider_unittest.cc
+++ b/chrome/browser/autocomplete/search_provider_unittest.cc
@@ -4,6 +4,7 @@
 
 #include "base/string_util.h"
 #include "base/time.h"
+#include "base/utf_string_conversions.h"
 #include "build/build_config.h"
 #include "chrome/browser/autocomplete/search_provider.h"
 #include "chrome/browser/chrome_thread.h"
@@ -29,9 +30,9 @@
  public:
   SearchProviderTest()
       : default_t_url_(NULL),
-        term1_(L"term1"),
+        term1_(UTF8ToUTF16("term1")),
         keyword_t_url_(NULL),
-        keyword_term_(L"keyword"),
+        keyword_term_(UTF8ToUTF16("keyword")),
         io_thread_(ChromeThread::IO),
         quit_when_done_(false) {
     io_thread_.Start();
@@ -56,14 +57,14 @@
   void RunTillProviderDone();
 
   // Invokes Start on provider_, then runs all pending tasks.
-  void QueryForInput(const std::wstring& text);
+  void QueryForInput(const string16& text);
 
   // See description above class for details of these fields.
   TemplateURL* default_t_url_;
-  const std::wstring term1_;
+  const string16 term1_;
   GURL term1_url_;
   TemplateURL* keyword_t_url_;
-  const std::wstring keyword_term_;
+  const string16 keyword_term_;
   GURL keyword_url_;
 
   MessageLoopForUI message_loop_;
@@ -106,8 +107,8 @@
   HistoryService* history =
       profile_.GetHistoryService(Profile::EXPLICIT_ACCESS);
   term1_url_ = GURL(WideToUTF8(default_t_url_->url()->ReplaceSearchTerms(
-      *default_t_url_, term1_, 0, std::wstring())));
-  history->AddPageWithDetails(term1_url_, std::wstring(), 1, 1,
+      *default_t_url_, UTF16ToWide(term1_), 0, std::wstring())));
+  history->AddPageWithDetails(term1_url_, string16(), 1, 1,
                               base::Time::Now(), false);
   history->SetKeywordSearchTermsForURL(term1_url_, default_t_url_->id(),
                                        term1_);
@@ -123,8 +124,8 @@
 
   // Add a page and search term for keyword_t_url_.
   keyword_url_ = GURL(WideToUTF8(keyword_t_url_->url()->ReplaceSearchTerms(
-      *keyword_t_url_, keyword_term_, 0, std::wstring())));
-  history->AddPageWithDetails(keyword_url_, std::wstring(), 1, 1,
+      *keyword_t_url_, UTF16ToWide(keyword_term_), 0, std::wstring())));
+  history->AddPageWithDetails(keyword_url_, string16(), 1, 1,
                               base::Time::Now(), false);
   history->SetKeywordSearchTermsForURL(keyword_url_, keyword_t_url_->id(),
                                        keyword_term_);
@@ -154,9 +155,10 @@
 #endif
 }
 
-void SearchProviderTest::QueryForInput(const std::wstring& text) {
+void SearchProviderTest::QueryForInput(const string16& text) {
   // Start a query.
-  AutocompleteInput input(text, std::wstring(), false, false, false);
+  AutocompleteInput input(UTF16ToWide(text), std::wstring(),
+                          false, false, false);
   provider_->Start(input, false);
 
   // RunAllPending so that the task scheduled by SearchProvider to create the
@@ -188,7 +190,7 @@
 // Make sure we query history for the default provider and a URLFetcher is
 // created for the default provider suggest results.
 TEST_F(SearchProviderTest, QueryDefaultProvider) {
-  std::wstring term = term1_.substr(0, term1_.size() - 1);
+  string16 term = term1_.substr(0, term1_.size() - 1);
   QueryForInput(term);
 
   // Make sure the default providers suggest service was queried.
@@ -198,7 +200,8 @@
 
   // And the URL matches what we expected.
   GURL expected_url = GURL(WideToUTF8(default_t_url_->suggestions_url()->
-      ReplaceSearchTerms(*default_t_url_, term, 0, std::wstring())));
+      ReplaceSearchTerms(*default_t_url_, UTF16ToWide(term),
+      0, std::wstring())));
   ASSERT_TRUE(fetcher->original_url() == expected_url);
 
   // Tell the SearchProvider the suggest query is done.
@@ -219,8 +222,9 @@
 // Issues a query that matches the registered keyword and makes sure history
 // is queried as well as URLFetchers getting created.
 TEST_F(SearchProviderTest, QueryKeywordProvider) {
-  std::wstring term = keyword_term_.substr(0, keyword_term_.size() - 1);
-  QueryForInput(keyword_t_url_->keyword() + L" " + term);
+  string16 term = keyword_term_.substr(0, keyword_term_.size() - 1);
+  QueryForInput(WideToUTF16(keyword_t_url_->keyword()) +
+                UTF8ToUTF16(" ") + term);
 
   // Make sure the default providers suggest service was queried.
   TestURLFetcher* default_fetcher = test_factory_.GetFetcherByID(
@@ -240,7 +244,8 @@
 
   // And the URL matches what we expected.
   GURL expected_url = GURL(WideToUTF8(keyword_t_url_->suggestions_url()->
-      ReplaceSearchTerms(*keyword_t_url_, term, 0, std::wstring())));
+      ReplaceSearchTerms(*keyword_t_url_, UTF16ToWide(term), 0,
+      std::wstring())));
   ASSERT_TRUE(keyword_fetcher->original_url() == expected_url);
 
   // Tell the SearchProvider the keyword suggest query is done.
@@ -261,6 +266,6 @@
   EXPECT_TRUE(match.template_url);
 
   // The fill into edit should contain the keyword.
-  EXPECT_EQ(keyword_t_url_->keyword() + L" " + keyword_term_,
+  EXPECT_EQ(keyword_t_url_->keyword() + L" " + UTF16ToWide(keyword_term_),
             match.fill_into_edit);
 }
diff --git a/chrome/browser/automation/automation_provider.cc b/chrome/browser/automation/automation_provider.cc
index 3dccaaa..8c4b5489 100644
--- a/chrome/browser/automation/automation_provider.cc
+++ b/chrome/browser/automation/automation_provider.cc
@@ -1737,8 +1737,8 @@
                                         IPC::Message* reply_message) {
   consumer_.CancelAllRequests();
 
-  std::wstring search_text;
-  args->GetString(L"search_text", &search_text);
+  string16 search_text;
+  args->GetString("search_text", &search_text);
 
   // Fetch history.
   HistoryService* hs = profile_->GetHistoryService(Profile::EXPLICIT_ACCESS);
@@ -1769,12 +1769,12 @@
   DictionaryValue* item = NULL;
   args->GetDictionary(L"item", &item);
   string16 url_text;
-  std::wstring title;
+  string16 title;
   base::Time time = base::Time::Now();
 
   if (item->GetString("url", &url_text)) {
     GURL gurl(url_text);
-    item->GetString(L"title", &title);  // Don't care if it fails.
+    item->GetString("title", &title);  // Don't care if it fails.
     int it;
     double dt;
     if (item->GetInteger(L"time", &it))
@@ -1793,11 +1793,8 @@
                 PageTransition::LINK,
                 history::RedirectList(),
                 false);
-    if (title.length()) {
-      // TODO(jrg): add a string16 interface for
-      // HistoryService::SetPageTitle(), then use it.
+    if (title.length())
       hs->SetPageTitle(gurl, title);
-    }
   } else {
     json_return = "{\"error\": \"bad args (no URL in dict?).\"}";
     reply_return = false;
diff --git a/chrome/browser/automation/automation_provider_observers.cc b/chrome/browser/automation/automation_provider_observers.cc
index 8b09fe5..0aa6488 100644
--- a/chrome/browser/automation/automation_provider_observers.cc
+++ b/chrome/browser/automation/automation_provider_observers.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -990,11 +990,11 @@
   for (size_t i = 0; i < results->size(); ++i) {
     DictionaryValue* page_value = new DictionaryValue;
     history::URLResult const &page = (*results)[i];
-    page_value->SetString(L"title", page.title());
+    page_value->SetStringFromUTF16(L"title", page.title());
     page_value->SetString(L"url", page.url().spec());
     page_value->SetReal(L"time",
                         static_cast<double>(page.visit_time().ToDoubleT()));
-    page_value->SetString(L"snippet", page.snippet().text());
+    page_value->SetStringFromUTF16(L"snippet", page.snippet().text());
     page_value->SetBoolean(
         L"starred",
         provider_->profile()->GetBookmarkModel()->IsBookmarked(page.url()));
diff --git a/chrome/browser/bookmarks/bookmark_index.cc b/chrome/browser/bookmarks/bookmark_index.cc
index 18c3134..161058e 100644
--- a/chrome/browser/bookmarks/bookmark_index.cc
+++ b/chrome/browser/bookmarks/bookmark_index.cc
@@ -62,7 +62,7 @@
   // matches and so this shouldn't be performance critical.
   QueryParser parser;
   ScopedVector<QueryNode> query_nodes;
-  parser.ParseQuery(query, &query_nodes.get());
+  parser.ParseQuery(WideToUTF16(query), &query_nodes.get());
 
   // The highest typed counts should be at the beginning of the results vector
   // so that the best matches will always be included in the results. The loop
@@ -115,7 +115,7 @@
   // of QueryParser may filter it out.  For example, the query
   // ["thi"] will match the bookmark titled [Thinking], but since
   // ["thi"] is quoted we don't want to do a prefix match.
-  if (parser->DoesQueryMatch(node->GetTitle(), query_nodes,
+  if (parser->DoesQueryMatch(WideToUTF16(node->GetTitle()), query_nodes,
                              &(title_match.match_positions))) {
     title_match.node = node;
     results->push_back(title_match);
@@ -129,7 +129,7 @@
   if (i == index_.end())
     return false;
 
-  if (!QueryParser::IsWordLongEnoughForPrefixSearch(term)) {
+  if (!QueryParser::IsWordLongEnoughForPrefixSearch(WideToUTF16(term))) {
     // Term is too short for prefix match, compare using exact match.
     if (i->first != term)
       return false;  // No bookmarks with this term.
@@ -206,13 +206,23 @@
 
 std::vector<std::wstring> BookmarkIndex::ExtractQueryWords(
     const std::wstring& query) {
-  std::vector<std::wstring> terms;
+  std::vector<string16> terms;
   if (query.empty())
-    return terms;
+    return std::vector<std::wstring>();
   QueryParser parser;
   // TODO: use ICU normalization.
-  parser.ExtractQueryWords(l10n_util::ToLower(query), &terms);
+  parser.ExtractQueryWords(l10n_util::ToLower(WideToUTF16(query)), &terms);
+
+  // TODO(brettw) just remove this and return |terms| when this is converted
+  // to string16.
+#if defined(WCHAR_T_IS_UTF32)
+  std::vector<std::wstring> wterms;
+  for (size_t i = 0; i < terms.size(); i++)
+    wterms.push_back(UTF16ToWide(terms[i]));
+  return wterms;
+#else
   return terms;
+#endif
 }
 
 void BookmarkIndex::RegisterNode(const std::wstring& term,
diff --git a/chrome/browser/bookmarks/bookmark_index_unittest.cc b/chrome/browser/bookmarks/bookmark_index_unittest.cc
index cfc1972..6225bc7f 100644
--- a/chrome/browser/bookmarks/bookmark_index_unittest.cc
+++ b/chrome/browser/bookmarks/bookmark_index_unittest.cc
@@ -232,41 +232,42 @@
 
   struct TestData {
     const GURL url;
-    const std::wstring title;
+    const char* title;
     const int typed_count;
   } data[] = {
-    { GURL("http://www.google.com/"),      L"Google",           100 },
-    { GURL("http://maps.google.com/"),     L"Google Maps",       40 },
-    { GURL("http://docs.google.com/"),     L"Google Docs",       50 },
-    { GURL("http://reader.google.com/"),   L"Google Reader",     80 },
+    { GURL("http://www.google.com/"),      "Google",           100 },
+    { GURL("http://maps.google.com/"),     "Google Maps",       40 },
+    { GURL("http://docs.google.com/"),     "Google Docs",       50 },
+    { GURL("http://reader.google.com/"),   "Google Reader",     80 },
   };
 
   for (size_t i = 0; i < ARRAYSIZE_UNSAFE(data); ++i) {
     history::URLRow info(data[i].url);
-    info.set_title(data[i].title);
+    info.set_title(UTF8ToUTF16(data[i].title));
     info.set_typed_count(data[i].typed_count);
     // Populate the InMemoryDatabase....
     url_db->AddURL(info);
     // Populate the BookmarkIndex.
-    model->AddURL(model->other_node(), i, data[i].title, data[i].url);
+    model->AddURL(model->other_node(), i, UTF8ToWide(data[i].title),
+                  data[i].url);
   }
 
   // Check that the InMemoryDatabase stored the URLs properly.
   history::URLRow result1;
   url_db->GetRowForURL(data[0].url, &result1);
-  EXPECT_EQ(data[0].title, result1.title());
+  EXPECT_EQ(data[0].title, UTF16ToUTF8(result1.title()));
 
   history::URLRow result2;
   url_db->GetRowForURL(data[1].url, &result2);
-  EXPECT_EQ(data[1].title, result2.title());
+  EXPECT_EQ(data[1].title, UTF16ToUTF8(result2.title()));
 
   history::URLRow result3;
   url_db->GetRowForURL(data[2].url, &result3);
-  EXPECT_EQ(data[2].title, result3.title());
+  EXPECT_EQ(data[2].title, UTF16ToUTF8(result3.title()));
 
   history::URLRow result4;
   url_db->GetRowForURL(data[3].url, &result4);
-  EXPECT_EQ(data[3].title, result4.title());
+  EXPECT_EQ(data[3].title, UTF16ToUTF8(result4.title()));
 
   // Populate match nodes.
   std::vector<bookmark_utils::TitleMatch> matches;
diff --git a/chrome/browser/bookmarks/bookmark_utils.cc b/chrome/browser/bookmarks/bookmark_utils.cc
index 2f4bc182..63cf4296 100644
--- a/chrome/browser/bookmarks/bookmark_utils.cc
+++ b/chrome/browser/bookmarks/bookmark_utils.cc
@@ -204,8 +204,8 @@
 
 // Returns true if |text| contains each string in |words|. This is used when
 // searching for bookmarks.
-bool DoesBookmarkTextContainWords(const std::wstring& text,
-                                  const std::vector<std::wstring>& words) {
+bool DoesBookmarkTextContainWords(const string16& text,
+                                  const std::vector<string16>& words) {
   for (size_t i = 0; i < words.size(); ++i) {
     if (text.find(words[i]) == std::wstring::npos)
       return false;
@@ -216,16 +216,17 @@
 // Returns true if |node|s title or url contains the strings in |words|.
 // |languages| argument is user's accept-language setting to decode IDN.
 bool DoesBookmarkContainWords(const BookmarkNode* node,
-                              const std::vector<std::wstring>& words,
+                              const std::vector<string16>& words,
                               const std::wstring& languages) {
   return
       DoesBookmarkTextContainWords(
-          l10n_util::ToLower(node->GetTitle()), words) ||
+          l10n_util::ToLower(WideToUTF16(node->GetTitle())), words) ||
       DoesBookmarkTextContainWords(
-          l10n_util::ToLower(UTF8ToWide(node->GetURL().spec())), words) ||
-      DoesBookmarkTextContainWords(l10n_util::ToLower(net::FormatUrl(
-          node->GetURL(), languages, net::kFormatUrlOmitNothing,
-          UnescapeRule::NORMAL, NULL, NULL, NULL)), words);
+          l10n_util::ToLower(UTF8ToUTF16(node->GetURL().spec())), words) ||
+      DoesBookmarkTextContainWords(l10n_util::ToLower(WideToUTF16(
+          net::FormatUrl(
+              node->GetURL(), languages, net::kFormatUrlOmitNothing,
+              UnescapeRule::NORMAL, NULL, NULL, NULL))), words);
 }
 
 }  // namespace
@@ -530,9 +531,9 @@
                                 size_t max_count,
                                 const std::wstring& languages,
                                 std::vector<const BookmarkNode*>* nodes) {
-  std::vector<std::wstring> words;
+  std::vector<string16> words;
   QueryParser parser;
-  parser.ExtractQueryWords(l10n_util::ToLower(text), &words);
+  parser.ExtractQueryWords(l10n_util::ToLower(WideToUTF16(text)), &words);
   if (words.empty())
     return;
 
@@ -550,9 +551,9 @@
 bool DoesBookmarkContainText(const BookmarkNode* node,
                              const std::wstring& text,
                              const std::wstring& languages) {
-  std::vector<std::wstring> words;
+  std::vector<string16> words;
   QueryParser parser;
-  parser.ExtractQueryWords(l10n_util::ToLower(text), &words);
+  parser.ExtractQueryWords(l10n_util::ToLower(WideToUTF16(text)), &words);
   if (words.empty())
     return false;
 
diff --git a/chrome/browser/custom_home_pages_table_model.cc b/chrome/browser/custom_home_pages_table_model.cc
index 9f4aa95..aa3937f 100644
--- a/chrome/browser/custom_home_pages_table_model.cc
+++ b/chrome/browser/custom_home_pages_table_model.cc
@@ -161,7 +161,7 @@
   }
   entry->title_handle = 0;
   if (found_url && !row->title().empty()) {
-    entry->title = row->title();
+    entry->title = UTF16ToWide(row->title());
     if (observer_)
       observer_->OnItemsChanged(static_cast<int>(entry_index), 1);
   }
diff --git a/chrome/browser/dom_ui/dom_ui.cc b/chrome/browser/dom_ui/dom_ui.cc
index 4cb12027c..8b9c45f 100644
--- a/chrome/browser/dom_ui/dom_ui.cc
+++ b/chrome/browser/dom_ui/dom_ui.cc
@@ -115,22 +115,22 @@
 // DOMMessageHandler, protected: ----------------------------------------------
 
 void DOMMessageHandler::SetURLAndTitle(DictionaryValue* dictionary,
-                                       std::wstring title,
+                                       string16 title,
                                        const GURL& gurl) {
-  std::wstring wstring_url = UTF8ToWide(gurl.spec());
-  dictionary->SetString(L"url", wstring_url);
+  string16 url16 = UTF8ToUTF16(gurl.spec());
+  dictionary->SetStringFromUTF16(L"url", url16);
 
   bool using_url_as_the_title = false;
   if (title.empty()) {
     using_url_as_the_title = true;
-    title = wstring_url;
+    title = url16;
   }
 
   // Since the title can contain BiDi text, we need to mark the text as either
   // RTL or LTR, depending on the characters in the string. If we use the URL
   // as the title, we mark the title as LTR since URLs are always treated as
   // left to right strings.
-  std::wstring title_to_set(title);
+  string16 title_to_set(title);
   if (base::i18n::IsRTL()) {
     if (using_url_as_the_title) {
       base::i18n::WrapStringWithLTRFormatting(&title_to_set);
@@ -140,7 +140,7 @@
       DCHECK(success ? (title != title_to_set) : (title == title_to_set));
     }
   }
-  dictionary->SetString(L"title", title_to_set);
+  dictionary->SetStringFromUTF16(L"title", title_to_set);
 }
 
 bool DOMMessageHandler::ExtractIntegerValue(const Value* value, int* out_int) {
diff --git a/chrome/browser/dom_ui/dom_ui.h b/chrome/browser/dom_ui/dom_ui.h
index 838cbcf4..b1e22c7 100644
--- a/chrome/browser/dom_ui/dom_ui.h
+++ b/chrome/browser/dom_ui/dom_ui.h
@@ -165,7 +165,7 @@
   // Adds "url" and "title" keys on incoming dictionary, setting title
   // as the url as a fallback on empty title.
   static void SetURLAndTitle(DictionaryValue* dictionary,
-                             std::wstring title,
+                             string16 title,
                              const GURL& gurl);
 
   // This is where subclasses specify which messages they'd like to handle.
diff --git a/chrome/browser/dom_ui/history2_ui.cc b/chrome/browser/dom_ui/history2_ui.cc
index 3fd433d..151186f4f 100644
--- a/chrome/browser/dom_ui/history2_ui.cc
+++ b/chrome/browser/dom_ui/history2_ui.cc
@@ -160,7 +160,7 @@
   options.end_time -= base::TimeDelta::FromDays(day - 1);
 
   // Need to remember the query string for our results.
-  search_text_ = std::wstring();
+  search_text_ = string16();
 
   HistoryService* hs =
       dom_ui_->GetProfile()->GetHistoryService(Profile::EXPLICIT_ACCESS);
@@ -176,7 +176,7 @@
 
   // Get arguments (if any).
   int month = 0;
-  std::wstring query;
+  string16 query;
   ExtractSearchHistoryArguments(value, &month, &query);
 
   // Set the query ranges for the given month.
@@ -278,7 +278,7 @@
     } else {
       page_value->SetString(L"dateShort",
           base::TimeFormatShortDate(page.visit_time()));
-      page_value->SetString(L"snippet", page.snippet().text());
+      page_value->SetStringFromUTF16(L"snippet", page.snippet().text());
     }
     page_value->SetBoolean(L"starred",
         dom_ui_->GetProfile()->GetBookmarkModel()->IsBookmarked(page.url()));
@@ -286,7 +286,7 @@
   }
 
   DictionaryValue info_value;
-  info_value.SetString(L"term", search_text_);
+  info_value.SetStringFromUTF16(L"term", search_text_);
   info_value.SetBoolean(L"finished", results->reached_beginning());
 
   dom_ui_->CallJavascriptFunction(L"historyResult", info_value, results_value);
@@ -298,7 +298,8 @@
 }
 
 void BrowsingHistoryHandler2::ExtractSearchHistoryArguments(const Value* value,
-    int* month, std::wstring* query) {
+                                                           int* month,
+                                                           string16* query) {
   *month = 0;
 
   if (value && value->GetType() == Value::TYPE_LIST) {
@@ -310,7 +311,7 @@
         list_member->GetType() == Value::TYPE_STRING) {
       const StringValue* string_value =
         static_cast<const StringValue*>(list_member);
-      string_value->GetAsString(query);
+      string_value->GetAsUTF16(query);
     }
 
     // Get search month.
@@ -401,9 +402,9 @@
 }
 
 // static
-const GURL HistoryUI2::GetHistoryURLWithSearchText(const std::wstring& text) {
+const GURL HistoryUI2::GetHistoryURLWithSearchText(const string16& text) {
   return GURL(std::string(chrome::kChromeUIHistory2URL) + "#q=" +
-              EscapeQueryParamValue(WideToUTF8(text), true));
+              EscapeQueryParamValue(UTF16ToUTF8(text), true));
 }
 
 // static
diff --git a/chrome/browser/dom_ui/history2_ui.h b/chrome/browser/dom_ui/history2_ui.h
index 042e0c4..0ad8d2ff 100644
--- a/chrome/browser/dom_ui/history2_ui.h
+++ b/chrome/browser/dom_ui/history2_ui.h
@@ -8,6 +8,7 @@
 #include <string>
 #include <vector>
 
+#include "base/string16.h"
 #include "chrome/browser/dom_ui/chrome_url_data_manager.h"
 #include "chrome/browser/dom_ui/dom_ui.h"
 #include "chrome/browser/cancelable_request.h"
@@ -78,7 +79,7 @@
   // Extract the arguments from the call to HandleSearchHistory.
   void ExtractSearchHistoryArguments(const Value* value,
                                      int* month,
-                                     std::wstring* query);
+                                     string16* query);
 
   // Figure out the query options for a month-wide query.
   history::QueryOptions CreateMonthQueryOptions(int month);
@@ -86,7 +87,7 @@
   NotificationRegistrar registrar_;
 
   // Current search text.
-  std::wstring search_text_;
+  string16 search_text_;
 
   // Our consumer for search requests to the history service.
   CancelableRequestConsumerT<int, 0> cancelable_search_consumer_;
@@ -102,7 +103,7 @@
   explicit HistoryUI2(TabContents* contents);
 
   // Return the URL for a given search term.
-  static const GURL GetHistoryURLWithSearchText(const std::wstring& text);
+  static const GURL GetHistoryURLWithSearchText(const string16& text);
 
   static RefCountedMemory* GetFaviconResourceBytes();
 
diff --git a/chrome/browser/dom_ui/history_ui.cc b/chrome/browser/dom_ui/history_ui.cc
index c107227..cb3b4cc2 100644
--- a/chrome/browser/dom_ui/history_ui.cc
+++ b/chrome/browser/dom_ui/history_ui.cc
@@ -160,7 +160,7 @@
   options.end_time -= base::TimeDelta::FromDays(day - 1);
 
   // Need to remember the query string for our results.
-  search_text_ = std::wstring();
+  search_text_ = string16();
 
   HistoryService* hs =
       dom_ui_->GetProfile()->GetHistoryService(Profile::EXPLICIT_ACCESS);
@@ -176,7 +176,7 @@
 
   // Get arguments (if any).
   int month = 0;
-  std::wstring query;
+  string16 query;
   ExtractSearchHistoryArguments(value, &month, &query);
 
   // Set the query ranges for the given month.
@@ -278,7 +278,7 @@
     } else {
       page_value->SetString(L"dateShort",
           base::TimeFormatShortDate(page.visit_time()));
-      page_value->SetString(L"snippet", page.snippet().text());
+      page_value->SetStringFromUTF16(L"snippet", page.snippet().text());
     }
     page_value->SetBoolean(L"starred",
         dom_ui_->GetProfile()->GetBookmarkModel()->IsBookmarked(page.url()));
@@ -286,7 +286,7 @@
   }
 
   DictionaryValue info_value;
-  info_value.SetString(L"term", search_text_);
+  info_value.SetStringFromUTF16(L"term", search_text_);
   info_value.SetBoolean(L"finished", results->reached_beginning());
 
   dom_ui_->CallJavascriptFunction(L"historyResult", info_value, results_value);
@@ -298,7 +298,8 @@
 }
 
 void BrowsingHistoryHandler::ExtractSearchHistoryArguments(const Value* value,
-    int* month, std::wstring* query) {
+                                                           int* month,
+                                                           string16* query) {
   *month = 0;
 
   if (value && value->GetType() == Value::TYPE_LIST) {
@@ -310,7 +311,7 @@
         list_member->GetType() == Value::TYPE_STRING) {
       const StringValue* string_value =
         static_cast<const StringValue*>(list_member);
-      string_value->GetAsString(query);
+      string_value->GetAsUTF16(query);
     }
 
     // Get search month.
@@ -401,9 +402,9 @@
 }
 
 // static
-const GURL HistoryUI::GetHistoryURLWithSearchText(const std::wstring& text) {
+const GURL HistoryUI::GetHistoryURLWithSearchText(const string16& text) {
   return GURL(std::string(chrome::kChromeUIHistoryURL) + "#q=" +
-              EscapeQueryParamValue(WideToUTF8(text), true));
+              EscapeQueryParamValue(UTF16ToUTF8(text), true));
 }
 
 // static
diff --git a/chrome/browser/dom_ui/history_ui.h b/chrome/browser/dom_ui/history_ui.h
index 8adb691..ff3849a 100644
--- a/chrome/browser/dom_ui/history_ui.h
+++ b/chrome/browser/dom_ui/history_ui.h
@@ -8,6 +8,7 @@
 #include <string>
 #include <vector>
 
+#include "base/string16.h"
 #include "chrome/browser/dom_ui/chrome_url_data_manager.h"
 #include "chrome/browser/dom_ui/dom_ui.h"
 #include "chrome/browser/cancelable_request.h"
@@ -74,7 +75,7 @@
   // Extract the arguments from the call to HandleSearchHistory.
   void ExtractSearchHistoryArguments(const Value* value,
                                      int* month,
-                                     std::wstring* query);
+                                     string16* query);
 
   // Figure out the query options for a month-wide query.
   history::QueryOptions CreateMonthQueryOptions(int month);
@@ -82,7 +83,7 @@
   NotificationRegistrar registrar_;
 
   // Current search text.
-  std::wstring search_text_;
+  string16 search_text_;
 
   // Our consumer for search requests to the history service.
   CancelableRequestConsumerT<int, 0> cancelable_search_consumer_;
@@ -98,7 +99,7 @@
   explicit HistoryUI(TabContents* contents);
 
   // Return the URL for a given search term.
-  static const GURL GetHistoryURLWithSearchText(const std::wstring& text);
+  static const GURL GetHistoryURLWithSearchText(const string16& text);
 
   static RefCountedMemory* GetFaviconResourceBytes();
 
diff --git a/chrome/browser/download/download_manager.cc b/chrome/browser/download/download_manager.cc
index 81ac6cb..9639a44 100644
--- a/chrome/browser/download/download_manager.cc
+++ b/chrome/browser/download/download_manager.cc
@@ -482,7 +482,7 @@
       profile_->GetHistoryService(Profile::EXPLICIT_ACCESS);
   if (hs) {
     HistoryService::Handle h =
-        hs->SearchDownloads(search_text,
+        hs->SearchDownloads(WideToUTF16(search_text),
                             &cancelable_consumer_,
                             NewCallback(this,
                                         &DownloadManager::OnSearchComplete));
@@ -1182,7 +1182,7 @@
   // FIXME(paulg) see bug 958058. EXPLICIT_ACCESS below is wrong.
   HistoryService* hs = profile_->GetHistoryService(Profile::EXPLICIT_ACCESS);
   if (hs)
-    hs->UpdateDownloadPath(new_path.ToWStringHack(), download->db_handle());
+    hs->UpdateDownloadPath(new_path, download->db_handle());
 }
 
 void DownloadManager::RemoveDownload(int64 download_handle) {
diff --git a/chrome/browser/extensions/extension_history_api.cc b/chrome/browser/extensions/extension_history_api.cc
index db4ec492..23f861f1 100644
--- a/chrome/browser/extensions/extension_history_api.cc
+++ b/chrome/browser/extensions/extension_history_api.cc
@@ -30,7 +30,7 @@
                               DictionaryValue* value) {
   value->SetString(keys::kIdKey, Int64ToString(row.id()));
   value->SetString(keys::kUrlKey, row.url().spec());
-  value->SetString(keys::kTitleKey, row.title());
+  value->SetStringFromUTF16(keys::kTitleKey, row.title());
   value->SetReal(keys::kLastVisitdKey, MilliSecondsFromTime(row.last_visit()));
   value->SetInteger(keys::kTypedCountKey, row.typed_count());
   value->SetInteger(keys::kVisitCountKey, row.visit_count());
@@ -247,8 +247,9 @@
   EXTENSION_FUNCTION_VALIDATE(args_->GetDictionary(0, &json));
 
   // Initialize the HistoryQuery
-  std::wstring search_text;
-  EXTENSION_FUNCTION_VALIDATE(json->GetString(keys::kTextKey, &search_text));
+  string16 search_text;
+  EXTENSION_FUNCTION_VALIDATE(json->GetStringAsUTF16(keys::kTextKey,
+                                                     &search_text));
 
   history::QueryOptions options;
   options.SetRecentDayRange(1);
diff --git a/chrome/browser/history/download_database.cc b/chrome/browser/history/download_database.cc
index ea0649d..26780ad 100644
--- a/chrome/browser/history/download_database.cc
+++ b/chrome/browser/history/download_database.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -9,6 +9,7 @@
 
 #include "app/sql/connection.h"
 #include "app/sql/statement.h"
+#include "base/file_path.h"
 #include "base/utf_string_conversions.h"
 #include "build/build_config.h"
 #include "chrome/browser/download/download_manager.h"
@@ -122,7 +123,7 @@
   return statement.Run();
 }
 
-bool DownloadDatabase::UpdateDownloadPath(const std::wstring& path,
+bool DownloadDatabase::UpdateDownloadPath(const FilePath& path,
                                           DownloadID db_handle) {
   DCHECK(db_handle > 0);
   sql::Statement statement(GetDB().GetCachedStatement(SQL_FROM_HERE,
@@ -130,7 +131,7 @@
   if (!statement)
     return false;
 
-  statement.BindString(0, WideToUTF8(path));
+  BindFilePath(statement, path, 0);
   statement.BindInt64(1, db_handle);
   return statement.Run();
 }
@@ -197,7 +198,7 @@
 }
 
 void DownloadDatabase::SearchDownloads(std::vector<int64>* results,
-                                       const std::wstring& search_text) {
+                                       const string16& search_text) {
   sql::Statement statement(GetDB().GetCachedStatement(SQL_FROM_HERE,
       "SELECT id FROM downloads WHERE url LIKE ? "
       "OR full_path LIKE ? ORDER BY id"));
@@ -205,7 +206,7 @@
     return;
 
   std::string text("%");
-  text.append(WideToUTF8(search_text));
+  text.append(UTF16ToUTF8(search_text));
   text.push_back('%');
   statement.BindString(0, text);
   statement.BindString(1, text);
diff --git a/chrome/browser/history/download_database.h b/chrome/browser/history/download_database.h
index 8ba2e7a..11adf31 100644
--- a/chrome/browser/history/download_database.h
+++ b/chrome/browser/history/download_database.h
@@ -1,4 +1,4 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -8,6 +8,7 @@
 #include "chrome/browser/history/history_types.h"
 
 struct DownloadCreateInfo;
+class FilePath;
 
 namespace sql {
 class Connection;
@@ -29,7 +30,7 @@
   bool UpdateDownload(int64 received_bytes, int32 state, DownloadID db_handle);
 
   // Update the path of one download. Returns true if successful.
-  bool UpdateDownloadPath(const std::wstring& path, DownloadID db_handle);
+  bool UpdateDownloadPath(const FilePath& path, DownloadID db_handle);
 
   // Fixes state of the download entries. Sometimes entries with IN_PROGRESS
   // state are not updated during browser shutdown (particularly when crashing).
@@ -51,7 +52,7 @@
 
   // Search for downloads matching the search text.
   void SearchDownloads(std::vector<int64>* results,
-                       const std::wstring& search_text);
+                       const string16& search_text);
 
  protected:
   // Returns the database for the functions in this interface.
diff --git a/chrome/browser/history/expire_history_backend_unittest.cc b/chrome/browser/history/expire_history_backend_unittest.cc
index 927ba5c..ca822bc2 100644
--- a/chrome/browser/history/expire_history_backend_unittest.cc
+++ b/chrome/browser/history/expire_history_backend_unittest.cc
@@ -243,17 +243,20 @@
 
   // Full text index for each visit.
   text_db_->AddPageData(url_row1.url(), visit_row1.url_id, visit_row1.visit_id,
-                        visit_row1.visit_time, L"title", UTF8ToUTF16("body"));
+                        visit_row1.visit_time, UTF8ToUTF16("title"),
+                        UTF8ToUTF16("body"));
 
   text_db_->AddPageData(url_row2.url(), visit_row2.url_id, visit_row2.visit_id,
-                        visit_row2.visit_time, L"title", UTF8ToUTF16("body"));
+                        visit_row2.visit_time, UTF8ToUTF16("title"),
+                        UTF8ToUTF16("body"));
   text_db_->AddPageData(url_row2.url(), visit_row3.url_id, visit_row3.visit_id,
-                        visit_row3.visit_time, L"title", UTF8ToUTF16("body"));
+                        visit_row3.visit_time, UTF8ToUTF16("title"),
+                        UTF8ToUTF16("body"));
 
   // Note the special text in this URL. We'll search the file for this string
   // to make sure it doesn't hang around after the delete.
   text_db_->AddPageData(url_row3.url(), visit_row4.url_id, visit_row4.visit_id,
-                        visit_row4.visit_time, L"title",
+                        visit_row4.visit_time, UTF8ToUTF16("title"),
                         UTF8ToUTF16("goats body"));
 }
 
@@ -280,7 +283,8 @@
   std::vector<TextDatabase::Match> results;
   QueryOptions options;
   Time first_time;
-  text_db_->GetTextMatches(L"body", options, &results, &first_time);
+  text_db_->GetTextMatches(UTF8ToUTF16("body"), options,
+                           &results, &first_time);
 
   int count = 0;
   for (size_t i = 0; i < results.size(); i++) {
diff --git a/chrome/browser/history/history.cc b/chrome/browser/history/history.cc
index d115795..c692963 100644
--- a/chrome/browser/history/history.cc
+++ b/chrome/browser/history/history.cc
@@ -224,7 +224,7 @@
 
 void HistoryService::SetKeywordSearchTermsForURL(const GURL& url,
                                                  TemplateURL::IDType keyword_id,
-                                                 const std::wstring& term) {
+                                                 const string16& term) {
   ScheduleAndForget(PRIORITY_UI,
                     &HistoryBackend::SetKeywordSearchTermsForURL,
                     url, keyword_id, term);
@@ -239,7 +239,7 @@
 
 HistoryService::Handle HistoryService::GetMostRecentKeywordSearchTerms(
     TemplateURL::IDType keyword_id,
-    const std::wstring& prefix,
+    const string16& prefix,
     int max_count,
     CancelableRequestConsumerBase* consumer,
     GetMostRecentKeywordSearchTermsCallback* callback) {
@@ -331,12 +331,12 @@
 }
 
 void HistoryService::SetPageTitle(const GURL& url,
-                                  const std::wstring& title) {
+                                  const string16& title) {
   ScheduleAndForget(PRIORITY_NORMAL, &HistoryBackend::SetPageTitle, url, title);
 }
 
 void HistoryService::AddPageWithDetails(const GURL& url,
-                                        const std::wstring& title,
+                                        const string16& title,
                                         int visit_count,
                                         int typed_count,
                                         Time last_visit,
@@ -504,7 +504,7 @@
                     received_bytes, state, db_handle);
 }
 
-void HistoryService::UpdateDownloadPath(const std::wstring& path,
+void HistoryService::UpdateDownloadPath(const FilePath& path,
                                         int64 db_handle) {
   ScheduleAndForget(PRIORITY_NORMAL, &HistoryBackend::UpdateDownloadPath,
                     path, db_handle);
@@ -524,7 +524,7 @@
 }
 
 HistoryService::Handle HistoryService::SearchDownloads(
-    const std::wstring& search_text,
+    const string16& search_text,
     CancelableRequestConsumerBase* consumer,
     DownloadSearchCallback* callback) {
   return Schedule(PRIORITY_NORMAL, &HistoryBackend::SearchDownloads, consumer,
@@ -532,7 +532,7 @@
 }
 
 HistoryService::Handle HistoryService::QueryHistory(
-    const std::wstring& text_query,
+    const string16& text_query,
     const history::QueryOptions& options,
     CancelableRequestConsumerBase* consumer,
     QueryHistoryCallback* callback) {
diff --git a/chrome/browser/history/history.h b/chrome/browser/history/history.h
index 8e68a86a..e9abbacc 100644
--- a/chrome/browser/history/history.h
+++ b/chrome/browser/history/history.h
@@ -2,8 +2,8 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#ifndef CHROME_BROWSER_HISTORY_HISTORY_H__
-#define CHROME_BROWSER_HISTORY_HISTORY_H__
+#ifndef CHROME_BROWSER_HISTORY_HISTORY_H_
+#define CHROME_BROWSER_HISTORY_HISTORY_H_
 
 #include <string>
 #include <vector>
@@ -13,6 +13,7 @@
 #include "base/file_path.h"
 #include "base/ref_counted.h"
 #include "base/scoped_ptr.h"
+#include "base/string16.h"
 #include "base/task.h"
 #include "chrome/browser/cancelable_request.h"
 #include "chrome/browser/favicon_service.h"
@@ -204,7 +205,7 @@
   // is not, this operation is ignored. This call will not update the full
   // text index. The last title set when the page is indexed will be the
   // title in the full text index.
-  void SetPageTitle(const GURL& url, const std::wstring& title);
+  void SetPageTitle(const GURL& url, const string16& title);
 
   // Indexing ------------------------------------------------------------------
 
@@ -274,7 +275,7 @@
   //
   // This isn't totally hooked up yet, this will query the "new" full text
   // database (see SetPageContents) which won't generally be set yet.
-  Handle QueryHistory(const std::wstring& text_query,
+  Handle QueryHistory(const string16& text_query,
                       const history::QueryOptions& options,
                       CancelableRequestConsumerBase* consumer,
                       QueryHistoryCallback* callback);
@@ -428,7 +429,7 @@
 
   // Called to update the history service about the path of a download.
   // This is a 'fire and forget' query.
-  void UpdateDownloadPath(const std::wstring& path, int64 db_handle);
+  void UpdateDownloadPath(const FilePath& path, int64 db_handle);
 
   // Permanently remove a download from the history system. This is a 'fire and
   // forget' operation.
@@ -446,7 +447,7 @@
   typedef Callback2<Handle, std::vector<int64>*>::Type DownloadSearchCallback;
 
   // Search for downloads that match the search text.
-  Handle SearchDownloads(const std::wstring& search_text,
+  Handle SearchDownloads(const string16& search_text,
                          CancelableRequestConsumerBase* consumer,
                          DownloadSearchCallback* callback);
 
@@ -482,7 +483,7 @@
   // id of the url, keyword_id the id of the keyword and term the search term.
   void SetKeywordSearchTermsForURL(const GURL& url,
                                    TemplateURL::IDType keyword_id,
-                                   const std::wstring& term);
+                                   const string16& term);
 
   // Deletes all search terms for the specified keyword.
   void DeleteAllSearchTermsForKeyword(TemplateURL::IDType keyword_id);
@@ -496,7 +497,7 @@
   // first.
   Handle GetMostRecentKeywordSearchTerms(
       TemplateURL::IDType keyword_id,
-      const std::wstring& prefix,
+      const string16& prefix,
       int max_count,
       CancelableRequestConsumerBase* consumer,
       GetMostRecentKeywordSearchTermsCallback* callback);
@@ -536,7 +537,7 @@
   // transaction. If this functionality is needed for importing many URLs, a
   // version that takes an array should probably be added.
   void AddPageWithDetails(const GURL& url,
-                          const std::wstring& title,
+                          const string16& title,
                           int visit_count,
                           int typed_count,
                           base::Time last_visit,
@@ -840,4 +841,4 @@
   DISALLOW_COPY_AND_ASSIGN(HistoryService);
 };
 
-#endif  // CHROME_BROWSER_HISTORY_HISTORY_H__
+#endif  // CHROME_BROWSER_HISTORY_HISTORY_H_
diff --git a/chrome/browser/history/history_backend.cc b/chrome/browser/history/history_backend.cc
index a001233..2e57b46 100644
--- a/chrome/browser/history/history_backend.cc
+++ b/chrome/browser/history/history_backend.cc
@@ -792,7 +792,7 @@
 }
 
 void HistoryBackend::SetPageTitle(const GURL& url,
-                                  const std::wstring& title) {
+                                  const string16& title) {
   if (!db_.get())
     return;
 
@@ -1005,7 +1005,7 @@
 
 void HistoryBackend::SetKeywordSearchTermsForURL(const GURL& url,
                                                  TemplateURL::IDType keyword_id,
-                                                 const std::wstring& term) {
+                                                 const string16& term) {
   if (!db_.get())
     return;
 
@@ -1034,7 +1034,7 @@
 void HistoryBackend::GetMostRecentKeywordSearchTerms(
     scoped_refptr<GetMostRecentKeywordSearchTermsRequest> request,
     TemplateURL::IDType keyword_id,
-    const std::wstring& prefix,
+    const string16& prefix,
     int max_count) {
   if (request->canceled())
     return;
@@ -1078,7 +1078,7 @@
 }
 
 // Update the path of a particular download entry.
-void HistoryBackend::UpdateDownloadPath(const std::wstring& path,
+void HistoryBackend::UpdateDownloadPath(const FilePath& path,
                                         int64 db_handle) {
   if (db_.get())
     db_->UpdateDownloadPath(path, db_handle);
@@ -1110,7 +1110,7 @@
 
 void HistoryBackend::SearchDownloads(
     scoped_refptr<DownloadSearchRequest> request,
-    const std::wstring& search_text) {
+    const string16& search_text) {
   if (request->canceled())
     return;
   if (db_.get())
@@ -1120,7 +1120,7 @@
 }
 
 void HistoryBackend::QueryHistory(scoped_refptr<QueryHistoryRequest> request,
-                                  const std::wstring& text_query,
+                                  const string16& text_query,
                                   const QueryOptions& options) {
   if (request->canceled())
     return;
@@ -1196,7 +1196,7 @@
     result->set_reached_beginning(true);
 }
 
-void HistoryBackend::QueryHistoryFTS(const std::wstring& text_query,
+void HistoryBackend::QueryHistoryFTS(const string16& text_query,
                                      const QueryOptions& options,
                                      QueryResults* result) {
   if (!text_database_.get())
diff --git a/chrome/browser/history/history_backend.h b/chrome/browser/history/history_backend.h
index 386f393..37bc80e 100644
--- a/chrome/browser/history/history_backend.h
+++ b/chrome/browser/history/history_backend.h
@@ -112,7 +112,7 @@
   // Navigation ----------------------------------------------------------------
 
   void AddPage(scoped_refptr<HistoryAddPageArgs> request);
-  virtual void SetPageTitle(const GURL& url, const std::wstring& title);
+  virtual void SetPageTitle(const GURL& url, const string16& title);
 
   // Indexing ------------------------------------------------------------------
 
@@ -131,7 +131,7 @@
                 const GURL& url,
                 bool want_visits);
   void QueryHistory(scoped_refptr<QueryHistoryRequest> request,
-                    const std::wstring& text_query,
+                    const string16& text_query,
                     const QueryOptions& options);
   void QueryRedirectsFrom(scoped_refptr<QueryRedirectsRequest> request,
                           const GURL& url);
@@ -212,7 +212,7 @@
   void QueryDownloads(scoped_refptr<DownloadQueryRequest> request);
   void CleanUpInProgressEntries();
   void UpdateDownload(int64 received_bytes, int32 state, int64 db_handle);
-  void UpdateDownloadPath(const std::wstring& path, int64 db_handle);
+  void UpdateDownloadPath(const FilePath& path, int64 db_handle);
   void CreateDownload(scoped_refptr<DownloadCreateRequest> request,
                       const DownloadCreateInfo& info);
   void RemoveDownload(int64 db_handle);
@@ -220,7 +220,7 @@
                               const base::Time remove_end);
   void RemoveDownloads(const base::Time remove_end);
   void SearchDownloads(scoped_refptr<DownloadSearchRequest>,
-                       const std::wstring& search_text);
+                       const string16& search_text);
 
   // Segment usage -------------------------------------------------------------
 
@@ -234,14 +234,14 @@
 
   void SetKeywordSearchTermsForURL(const GURL& url,
                                    TemplateURL::IDType keyword_id,
-                                   const std::wstring& term);
+                                   const string16& term);
 
   void DeleteAllSearchTermsForKeyword(TemplateURL::IDType keyword_id);
 
   void GetMostRecentKeywordSearchTerms(
       scoped_refptr<GetMostRecentKeywordSearchTermsRequest> request,
       TemplateURL::IDType keyword_id,
-      const std::wstring& prefix,
+      const string16& prefix,
       int max_count);
 
   // Generic operations --------------------------------------------------------
@@ -360,7 +360,7 @@
   // Both functions assume QueryHistory already checked the DB for validity.
   void QueryHistoryBasic(URLDatabase* url_db, VisitDatabase* visit_db,
                          const QueryOptions& options, QueryResults* result);
-  void QueryHistoryFTS(const std::wstring& text_query,
+  void QueryHistoryFTS(const string16& text_query,
                        const QueryOptions& options,
                        QueryResults* result);
 
diff --git a/chrome/browser/history/history_backend_unittest.cc b/chrome/browser/history/history_backend_unittest.cc
index 1628d76..4d1dca6 100644
--- a/chrome/browser/history/history_backend_unittest.cc
+++ b/chrome/browser/history/history_backend_unittest.cc
@@ -250,10 +250,12 @@
   // Set full text index for each one.
   backend_->text_database_->AddPageData(row1.url(), row1_id, visit1_id,
                                         row1.last_visit(),
-                                        L"Title 1", UTF8ToUTF16("Body 1"));
+                                        UTF8ToUTF16("Title 1"),
+                                        UTF8ToUTF16("Body 1"));
   backend_->text_database_->AddPageData(row2.url(), row2_id, visit2_id,
                                         row2.last_visit(),
-                                        L"Title 2", UTF8ToUTF16("Body 2"));
+                                        UTF8ToUTF16("Title 2"),
+                                        UTF8ToUTF16("Body 2"));
 
   // Now finally clear all history.
   backend_->DeleteAllHistory();
@@ -298,7 +300,8 @@
   // The full text database should have no data.
   std::vector<TextDatabase::Match> text_matches;
   Time first_time_searched;
-  backend_->text_database_->GetTextMatches(L"Body", QueryOptions(),
+  backend_->text_database_->GetTextMatches(UTF8ToUTF16("Body"),
+                                           QueryOptions(),
                                            &text_matches,
                                            &first_time_searched);
   EXPECT_EQ(0U, text_matches.size());
diff --git a/chrome/browser/history/history_publisher.cc b/chrome/browser/history/history_publisher.cc
index 2c70a31..0392632f 100644
--- a/chrome/browser/history/history_publisher.cc
+++ b/chrome/browser/history/history_publisher.cc
@@ -27,14 +27,15 @@
 
 void HistoryPublisher::PublishPageContent(const base::Time& time,
                                           const GURL& url,
-                                          const std::wstring& title,
+                                          const string16& title,
                                           const string16& contents) const {
+  std::wstring wide_title = UTF16ToWide(title);
   std::wstring wide_contents = UTF16ToWide(contents);
   PageData page_data = {
     time,
     url,
     wide_contents.c_str(),
-    title.c_str(),
+    wide_title.c_str(),
     NULL,
     NULL,
   };
diff --git a/chrome/browser/history/history_publisher.h b/chrome/browser/history/history_publisher.h
index c7a3530..5fafc3e 100644
--- a/chrome/browser/history/history_publisher.h
+++ b/chrome/browser/history/history_publisher.h
@@ -37,7 +37,7 @@
   void PublishPageThumbnail(const std::vector<unsigned char>& thumbnail,
                             const GURL& url, const base::Time& time) const;
   void PublishPageContent(const base::Time& time, const GURL& url,
-                          const std::wstring& title,
+                          const string16& title,
                           const string16& contents) const;
   void DeleteUserHistoryBetween(const base::Time& begin_time,
                                 const base::Time& end_time) const;
diff --git a/chrome/browser/history/history_querying_unittest.cc b/chrome/browser/history/history_querying_unittest.cc
index 1265eef..7512786 100644
--- a/chrome/browser/history/history_querying_unittest.cc
+++ b/chrome/browser/history/history_querying_unittest.cc
@@ -22,27 +22,27 @@
 
 struct TestEntry {
   const char* url;
-  const wchar_t* title;
+  const char* title;
   const int days_ago;
   const char* body;
   Time time;  // Filled by SetUp.
 } test_entries[] = {
   // This one is visited super long ago so it will be in a different database
   // from the next appearance of it at the end.
-  {"http://example.com/", L"Other", 180, "Other"},
+  {"http://example.com/", "Other", 180, "Other"},
 
   // These are deliberately added out of chronological order. The history
   // service should sort them by visit time when returning query results.
   // The correct index sort order is 4 2 3 1 0.
-  {"http://www.google.com/1", L"Title 1", 10,
+  {"http://www.google.com/1", "Title 1", 10,
    "PAGEONE FOO some body text"},
-  {"http://www.google.com/3", L"Title 3", 8,
+  {"http://www.google.com/3", "Title 3", 8,
    "PAGETHREE BAR some hello world for you"},
-  {"http://www.google.com/2", L"Title 2", 9,
+  {"http://www.google.com/2", "Title 2", 9,
    "PAGETWO FOO some more blah blah blah"},
 
   // A more recent visit of the first one.
-  {"http://example.com/", L"Other", 6, "Other"},
+  {"http://example.com/", "Other", 6, "Other"},
 };
 
 // Returns true if the nth result in the given results set matches. It will
@@ -61,7 +61,7 @@
 
   // Now check the URL & title.
   return result.url() == GURL(test_entries[test_entry_index].url) &&
-         result.title() == std::wstring(test_entries[test_entry_index].title);
+         result.title() == UTF8ToUTF16(test_entries[test_entry_index].title);
 }
 
 }  // namespace
@@ -72,10 +72,10 @@
   }
 
   // Acts like a synchronous call to history's QueryHistory.
-  void QueryHistory(const std::wstring& text_query,
+  void QueryHistory(const std::string& text_query,
                     const QueryOptions& options,
                     QueryResults* results) {
-    history_->QueryHistory(text_query, options, &consumer_,
+    history_->QueryHistory(UTF8ToUTF16(text_query), options, &consumer_,
         NewCallback(this, &HistoryQueryTest::QueryHistoryComplete));
     MessageLoop::current()->Run();  // Will go until ...Complete calls Quit.
     results->Swap(&last_query_results_);
@@ -112,7 +112,7 @@
       history_->AddPage(url, test_entries[i].time, id_scope, page_id, GURL(),
                         PageTransition::LINK, history::RedirectList(),
                         false);
-      history_->SetPageTitle(url, test_entries[i].title);
+      history_->SetPageTitle(url, UTF8ToUTF16(test_entries[i].title));
       history_->SetPageContents(url, UTF8ToUTF16(test_entries[i].body));
     }
   }
@@ -152,7 +152,7 @@
   QueryResults results;
 
   // Test duplicate collapsing.
-  QueryHistory(std::wstring(), options, &results);
+  QueryHistory(std::string(), options, &results);
   EXPECT_EQ(4U, results.size());
   EXPECT_TRUE(NthResultIs(results, 0, 4));
   EXPECT_TRUE(NthResultIs(results, 1, 2));
@@ -163,7 +163,7 @@
   // should be exclusive.
   options.begin_time = test_entries[3].time;
   options.end_time = test_entries[2].time;
-  QueryHistory(std::wstring(), options, &results);
+  QueryHistory(std::string(), options, &results);
   EXPECT_EQ(1U, results.size());
   EXPECT_TRUE(NthResultIs(results, 0, 3));
 }
@@ -178,7 +178,7 @@
   // Query all time but with a limit on the number of entries. We should
   // get the N most recent entries.
   options.max_count = 2;
-  QueryHistory(std::wstring(), options, &results);
+  QueryHistory(std::string(), options, &results);
   EXPECT_EQ(2U, results.size());
   EXPECT_TRUE(NthResultIs(results, 0, 4));
   EXPECT_TRUE(NthResultIs(results, 1, 2));
@@ -190,23 +190,23 @@
   QueryOptions options;
   QueryResults results;
 
-  QueryHistory(std::wstring(), options, &results);
+  QueryHistory(std::string(), options, &results);
   EXPECT_TRUE(results.reached_beginning());
 
   options.begin_time = test_entries[1].time;
-  QueryHistory(std::wstring(), options, &results);
+  QueryHistory(std::string(), options, &results);
   EXPECT_FALSE(results.reached_beginning());
 
   options.begin_time = test_entries[0].time + TimeDelta::FromMicroseconds(1);
-  QueryHistory(std::wstring(), options, &results);
+  QueryHistory(std::string(), options, &results);
   EXPECT_FALSE(results.reached_beginning());
 
   options.begin_time = test_entries[0].time;
-  QueryHistory(std::wstring(), options, &results);
+  QueryHistory(std::string(), options, &results);
   EXPECT_TRUE(results.reached_beginning());
 
   options.begin_time = test_entries[0].time - TimeDelta::FromMicroseconds(1);
-  QueryHistory(std::wstring(), options, &results);
+  QueryHistory(std::string(), options, &results);
   EXPECT_TRUE(results.reached_beginning());
 }
 
@@ -221,14 +221,14 @@
   // Query all of them to make sure they are there and in order. Note that
   // this query will return the starred item twice since we requested all
   // starred entries and no de-duping.
-  QueryHistory(std::wstring(L"some"), options, &results);
+  QueryHistory("some", options, &results);
   EXPECT_EQ(3U, results.size());
   EXPECT_TRUE(NthResultIs(results, 0, 2));
   EXPECT_TRUE(NthResultIs(results, 1, 3));
   EXPECT_TRUE(NthResultIs(results, 2, 1));
 
   // Do a query that should only match one of them.
-  QueryHistory(std::wstring(L"PAGETWO"), options, &results);
+  QueryHistory("PAGETWO", options, &results);
   EXPECT_EQ(1U, results.size());
   EXPECT_TRUE(NthResultIs(results, 0, 3));
 
@@ -236,7 +236,7 @@
   // should be exclusive.
   options.begin_time = test_entries[1].time;
   options.end_time = test_entries[3].time;
-  QueryHistory(std::wstring(L"some"), options, &results);
+  QueryHistory("some", options, &results);
   EXPECT_EQ(1U, results.size());
   EXPECT_TRUE(NthResultIs(results, 0, 1));
 }
@@ -250,7 +250,7 @@
 
   // Query all time but with a limit on the number of entries. We should
   // get the N most recent entries.
-  QueryHistory(std::wstring(L"title"), options, &results);
+  QueryHistory("title", options, &results);
   EXPECT_EQ(3U, results.size());
   EXPECT_TRUE(NthResultIs(results, 0, 2));
   EXPECT_TRUE(NthResultIs(results, 1, 3));
@@ -266,7 +266,7 @@
 
   // Query with a prefix search.  Should return matches for "PAGETWO" and
   // "PAGETHREE".
-  QueryHistory(std::wstring(L"PAGET"), options, &results);
+  QueryHistory("PAGET", options, &results);
   EXPECT_EQ(2U, results.size());
   EXPECT_TRUE(NthResultIs(results, 0, 2));
   EXPECT_TRUE(NthResultIs(results, 1, 3));
@@ -282,7 +282,7 @@
   // Query all time but with a limit on the number of entries. We should
   // get the N most recent entries.
   options.max_count = 2;
-  QueryHistory(std::wstring(L"some"), options, &results);
+  QueryHistory("some", options, &results);
   EXPECT_EQ(2U, results.size());
   EXPECT_TRUE(NthResultIs(results, 0, 2));
   EXPECT_TRUE(NthResultIs(results, 1, 3));
@@ -291,7 +291,7 @@
   // the 2nd & 3rd pages, but we should only get the 3rd one because of the one
   // page max restriction.
   options.max_count = 1;
-  QueryHistory(std::wstring(L"FOO"), options, &results);
+  QueryHistory("FOO", options, &results);
   EXPECT_EQ(1U, results.size());
   EXPECT_TRUE(NthResultIs(results, 0, 3));
 }
@@ -305,12 +305,12 @@
   std::vector<URLRow> urls_to_add;
 
   URLRow row1(GURL("http://foo.bar/"));
-  row1.set_title(L"archived title");
+  row1.set_title(UTF8ToUTF16("archived title"));
   row1.set_last_visit(Time::Now() - TimeDelta::FromDays(365));
   urls_to_add.push_back(row1);
 
   URLRow row2(GURL("http://foo.bar/"));
-  row2.set_title(L"nonarchived title");
+  row2.set_title(UTF8ToUTF16("nonarchived title"));
   row2.set_last_visit(Time::Now());
   urls_to_add.push_back(row2);
 
@@ -322,7 +322,7 @@
   // Query all time. The title we get should be the one in the full text
   // database and not the most current title (since otherwise highlighting in
   // the title might be wrong).
-  QueryHistory(std::wstring(L"archived"), options, &results);
+  QueryHistory("archived", options, &results);
   ASSERT_EQ(1U, results.size());
   EXPECT_TRUE(row1.url() == results[0].url());
   EXPECT_TRUE(row1.title() == results[0].title());
@@ -341,7 +341,7 @@
   QueryOptions options;
   QueryResults results;
 
-  QueryHistory(std::wstring(L"Other"), options, &results);
+  QueryHistory("Other", options, &results);
   EXPECT_EQ(1, results.urls().size());
   EXPECT_TRUE(NthResultIs(results, 0, 4));
 }
diff --git a/chrome/browser/history/history_types.h b/chrome/browser/history/history_types.h
index 1d45f6cc..f7bc7fb 100644
--- a/chrome/browser/history/history_types.h
+++ b/chrome/browser/history/history_types.h
@@ -12,6 +12,7 @@
 
 #include "base/basictypes.h"
 #include "base/stack_container.h"
+#include "base/string16.h"
 #include "base/time.h"
 #include "chrome/browser/history/snippet.h"
 #include "chrome/common/page_transition_types.h"
@@ -78,10 +79,10 @@
   URLID id() const { return id_; }
   const GURL& url() const { return url_; }
 
-  const std::wstring& title() const {
+  const string16& title() const {
     return title_;
   }
-  void set_title(const std::wstring& title) {
+  void set_title(const string16& title) {
     // The title is frequently set to the same thing, so we don't bother
     // updating unless the string has changed.
     if (title != title_) {
@@ -149,7 +150,7 @@
   // the constructor to make a new one.
   GURL url_;
 
-  std::wstring title_;
+  string16 title_;
 
   // Total number of times this URL has been visited.
   int visit_count_;
@@ -283,7 +284,7 @@
   StarID id;
 
   // Title.
-  std::wstring title;
+  string16 title;
 
   // When this was added.
   base::Time date_added;
@@ -506,7 +507,7 @@
   base::Time time;
 
   // The search term that was used.
-  std::wstring term;
+  string16 term;
 };
 
 // MostVisitedURL --------------------------------------------------------------
diff --git a/chrome/browser/history/history_unittest.cc b/chrome/browser/history/history_unittest.cc
index be0cd0a3..a0e6e21 100644
--- a/chrome/browser/history/history_unittest.cc
+++ b/chrome/browser/history/history_unittest.cc
@@ -594,7 +594,7 @@
   history->AddPage(existing_url);
 
   // Set some title.
-  const std::wstring existing_title(L"Google");
+  const string16 existing_title = UTF8ToUTF16("Google");
   history->SetPageTitle(existing_url, existing_title);
 
   // Make sure the title got set.
@@ -603,12 +603,12 @@
 
   // set a title on a nonexistent page
   const GURL nonexistent_url("http://news.google.com/");
-  const std::wstring nonexistent_title(L"Google News");
+  const string16 nonexistent_title = UTF8ToUTF16("Google News");
   history->SetPageTitle(nonexistent_url, nonexistent_title);
 
   // Make sure nothing got written.
   EXPECT_FALSE(QueryURL(history, nonexistent_url));
-  EXPECT_EQ(std::wstring(), query_url_row_.title());
+  EXPECT_EQ(string16(), query_url_row_.title());
 
   // TODO(brettw) this should also test redirects, which get the title of the
   // destination page.
diff --git a/chrome/browser/history/query_parser.cc b/chrome/browser/history/query_parser.cc
index 6bfaa7f..e1afb86 100644
--- a/chrome/browser/history/query_parser.cc
+++ b/chrome/browser/history/query_parser.cc
@@ -1,16 +1,17 @@
-// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include <algorithm>
-
 #include "chrome/browser/history/query_parser.h"
 
+#include <algorithm>
+
 #include "app/l10n_util.h"
 #include "base/i18n/word_iterator.h"
 #include "base/logging.h"
 #include "base/scoped_vector.h"
 #include "base/string_util.h"
+#include "base/utf_string_conversions.h"
 #include "unicode/uscript.h"
 
 namespace {
@@ -56,7 +57,7 @@
     CoalesceMatchesFrom(i, matches);
 }
 
-} // namespace
+}  // namespace
 
 // Inheritance structure:
 // Queries are represented as trees of QueryNodes.
@@ -66,23 +67,23 @@
 // A QueryNodeWord is a single word in the query.
 class QueryNodeWord : public QueryNode {
  public:
-  explicit QueryNodeWord(const std::wstring& word)
+  explicit QueryNodeWord(const string16& word)
       : word_(word), literal_(false) {}
   virtual ~QueryNodeWord() {}
-  virtual int AppendToSQLiteQuery(std::wstring* query) const;
+  virtual int AppendToSQLiteQuery(string16* query) const;
   virtual bool IsWord() const { return true; }
 
-  const std::wstring& word() const { return word_; }
+  const string16& word() const { return word_; }
   void set_literal(bool literal) { literal_ = literal; }
 
   virtual bool HasMatchIn(const std::vector<QueryWord>& words,
                           Snippet::MatchPositions* match_positions) const;
 
-  virtual bool Matches(const std::wstring& word, bool exact) const;
-  virtual void AppendWords(std::vector<std::wstring>* words) const;
+  virtual bool Matches(const string16& word, bool exact) const;
+  virtual void AppendWords(std::vector<string16>* words) const;
 
  private:
-  std::wstring word_;
+  string16 word_;
   bool literal_;
 };
 
@@ -100,18 +101,18 @@
   return false;
 }
 
-bool QueryNodeWord::Matches(const std::wstring& word, bool exact) const {
+bool QueryNodeWord::Matches(const string16& word, bool exact) const {
   if (exact || !QueryParser::IsWordLongEnoughForPrefixSearch(word_))
     return word == word_;
   return word.size() >= word_.size() &&
          (word_.compare(0, word_.size(), word, 0, word_.size()) == 0);
 }
 
-void QueryNodeWord::AppendWords(std::vector<std::wstring>* words) const {
+void QueryNodeWord::AppendWords(std::vector<string16>* words) const {
   words->push_back(word_);
 }
 
-int QueryNodeWord::AppendToSQLiteQuery(std::wstring* query) const {
+int QueryNodeWord::AppendToSQLiteQuery(string16* query) const {
   query->append(word_);
 
   // Use prefix search if we're not literal and long enough.
@@ -126,7 +127,7 @@
  public:
   virtual ~QueryNodeList();
 
-  virtual int AppendToSQLiteQuery(std::wstring* query) const {
+  virtual int AppendToSQLiteQuery(string16* query) const {
     return AppendChildrenToString(query);
   }
   virtual bool IsWord() const { return false; }
@@ -140,7 +141,7 @@
   void RemoveEmptySubnodes();
 
   // QueryNodeList is never used with Matches or HasMatchIn.
-  virtual bool Matches(const std::wstring& word, bool exact) const {
+  virtual bool Matches(const string16& word, bool exact) const {
     NOTREACHED();
     return false;
   }
@@ -149,10 +150,10 @@
     NOTREACHED();
     return false;
   }
-  virtual void AppendWords(std::vector<std::wstring>* words) const;
+  virtual void AppendWords(std::vector<string16>* words) const;
 
  protected:
-  int AppendChildrenToString(std::wstring* query) const;
+  int AppendChildrenToString(string16* query) const;
 
   QueryNodeVector children_;
 };
@@ -178,12 +179,12 @@
   }
 }
 
-void QueryNodeList::AppendWords(std::vector<std::wstring>* words) const {
+void QueryNodeList::AppendWords(std::vector<string16>* words) const {
   for (size_t i = 0; i < children_.size(); ++i)
     children_[i]->AppendWords(words);
 }
 
-int QueryNodeList::AppendChildrenToString(std::wstring* query) const {
+int QueryNodeList::AppendChildrenToString(string16* query) const {
   int num_words = 0;
   for (QueryNodeVector::const_iterator node = children_.begin();
        node != children_.end(); ++node) {
@@ -197,19 +198,19 @@
 // A QueryNodePhrase is a phrase query ("quoted").
 class QueryNodePhrase : public QueryNodeList {
  public:
-  virtual int AppendToSQLiteQuery(std::wstring* query) const {
+  virtual int AppendToSQLiteQuery(string16* query) const {
     query->push_back(L'"');
     int num_words = AppendChildrenToString(query);
     query->push_back(L'"');
     return num_words;
   }
 
-  virtual bool Matches(const std::wstring& word, bool exact) const;
+  virtual bool Matches(const string16& word, bool exact) const;
   virtual bool HasMatchIn(const std::vector<QueryWord>& words,
                           Snippet::MatchPositions* match_positions) const;
 };
 
-bool QueryNodePhrase::Matches(const std::wstring& word, bool exact) const {
+bool QueryNodePhrase::Matches(const string16& word, bool exact) const {
   NOTREACHED();
   return false;
 }
@@ -243,7 +244,7 @@
 }
 
 // static
-bool QueryParser::IsWordLongEnoughForPrefixSearch(const std::wstring& word) {
+bool QueryParser::IsWordLongEnoughForPrefixSearch(const string16& word) {
   DCHECK(word.size() > 0);
   size_t minimum_length = 3;
   // We intentionally exclude Hangul Jamos (both Conjoining and compatibility)
@@ -264,15 +265,15 @@
          ch == 0x201e;    // double low-9 quotation mark
 }
 
-int QueryParser::ParseQuery(const std::wstring& query,
-                            std::wstring* sqlite_query) {
+int QueryParser::ParseQuery(const string16& query,
+                            string16* sqlite_query) {
   QueryNodeList root;
   if (!ParseQueryImpl(query, &root))
     return 0;
   return root.AppendToSQLiteQuery(sqlite_query);
 }
 
-void QueryParser::ParseQuery(const std::wstring& query,
+void QueryParser::ParseQuery(const string16& query,
                              std::vector<QueryNode*>* nodes) {
   QueryNodeList root;
   if (ParseQueryImpl(l10n_util::ToLower(query), &root))
@@ -280,22 +281,22 @@
 }
 
 
-void QueryParser::ExtractQueryWords(const std::wstring& query,
-                                    std::vector<std::wstring>* words) {
+void QueryParser::ExtractQueryWords(const string16& query,
+                                    std::vector<string16>* words) {
   QueryNodeList root;
   if (!ParseQueryImpl(query, &root))
     return;
   root.AppendWords(words);
 }
 
-bool QueryParser::DoesQueryMatch(const std::wstring& text,
+bool QueryParser::DoesQueryMatch(const string16& text,
                                  const std::vector<QueryNode*>& query_nodes,
                                  Snippet::MatchPositions* match_positions) {
   if (query_nodes.empty())
     return false;
 
   std::vector<QueryWord> query_words;
-  std::wstring lower_text = l10n_util::ToLower(text);
+  string16 lower_text = l10n_util::ToLower(text);
   ExtractQueryWords(lower_text, &query_words);
 
   if (query_words.empty())
@@ -319,9 +320,9 @@
   return true;
 }
 
-bool QueryParser::ParseQueryImpl(const std::wstring& query,
+bool QueryParser::ParseQueryImpl(const string16& query,
                                 QueryNodeList* root) {
-  WordIterator iter(query, WordIterator::BREAK_WORD);
+  WordIterator iter(&query, WordIterator::BREAK_WORD);
   // TODO(evanm): support a locale here
   if (!iter.Init())
     return false;
@@ -337,7 +338,7 @@
     // is not necessarily a word, but could also be a sequence of punctuation
     // or whitespace.
     if (iter.IsWord()) {
-      std::wstring word = iter.GetWord();
+      string16 word = iter.GetWord();
 
       QueryNodeWord* word_node = new QueryNodeWord(word);
       if (in_quotes)
@@ -351,7 +352,7 @@
           query_stack.push_back(quotes_node);
           in_quotes = true;
         } else {
-          query_stack.pop_back();  // stop adding to the quoted phrase
+          query_stack.pop_back();  // Stop adding to the quoted phrase.
           in_quotes = false;
         }
       }
@@ -362,9 +363,9 @@
   return true;
 }
 
-void QueryParser::ExtractQueryWords(const std::wstring& text,
+void QueryParser::ExtractQueryWords(const string16& text,
                                     std::vector<QueryWord>* words) {
-  WordIterator iter(text, WordIterator::BREAK_WORD);
+  WordIterator iter(&text, WordIterator::BREAK_WORD);
   // TODO(evanm): support a locale here
   if (!iter.Init())
     return;
@@ -374,7 +375,7 @@
     // is not necessarily a word, but could also be a sequence of punctuation
     // or whitespace.
     if (iter.IsWord()) {
-      std::wstring word = iter.GetWord();
+      string16 word = iter.GetWord();
       if (!word.empty()) {
         words->push_back(QueryWord());
         words->back().word = word;
diff --git a/chrome/browser/history/query_parser.h b/chrome/browser/history/query_parser.h
index 8f6c86dc..8399abf 100644
--- a/chrome/browser/history/query_parser.h
+++ b/chrome/browser/history/query_parser.h
@@ -1,16 +1,16 @@
-// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 // The query parser is used to parse queries entered into the history
-// search into more normalized queries can be passed to the SQLite backend.
+// search into normalized queries that can be passed to the SQLite backend.
 
-#ifndef CHROME_BROWSER_HISTORY_QUERY_PARSER_H__
-#define CHROME_BROWSER_HISTORY_QUERY_PARSER_H__
+#ifndef CHROME_BROWSER_HISTORY_QUERY_PARSER_H_
+#define CHROME_BROWSER_HISTORY_QUERY_PARSER_H_
 
-#include <string>
 #include <vector>
 
+#include "base/string16.h"
 #include "chrome/browser/history/snippet.h"
 
 class QueryNodeList;
@@ -18,7 +18,7 @@
 // Used by HasMatchIn.
 struct QueryWord {
-  // The work to match against.
+  // The word to match against.
-  std::wstring word;
+  string16 word;
 
   // The starting position of the word in the original text.
   size_t position;
@@ -33,7 +33,7 @@
 
   // Serialize ourselves out to a string that can be passed to SQLite. Returns
   // the number of words in this node.
-  virtual int AppendToSQLiteQuery(std::wstring* query) const = 0;
+  virtual int AppendToSQLiteQuery(string16* query) const = 0;
 
   // Return true if this is a word node, false if it's a QueryNodeList.
   virtual bool IsWord() const = 0;
@@ -41,7 +41,7 @@
   // Returns true if this node matches the specified text. If exact is true,
   // the string must exactly match. Otherwise, this uses a starts with
   // comparison.
-  virtual bool Matches(const std::wstring& word, bool exact) const = 0;
+  virtual bool Matches(const string16& word, bool exact) const = 0;
 
   // Returns true if this node matches at least one of the words in words. If
   // the node matches at least one word, an entry is added to match_positions
@@ -50,7 +50,7 @@
                           Snippet::MatchPositions* match_positions) const = 0;
 
   // Appends the words that make up this node in |words|.
-  virtual void AppendWords(std::vector<std::wstring>* words) const = 0;
+  virtual void AppendWords(std::vector<string16>* words) const = 0;
 };
 
 
@@ -65,43 +65,43 @@
   // point doing anything for them and we only adjust the minimum length
   // to 2 for Korean Hangul while using 3 for others. This is a temporary
   // hack until we have a segmentation support.
-  static bool IsWordLongEnoughForPrefixSearch(const std::wstring& word);
+  static bool IsWordLongEnoughForPrefixSearch(const string16& word);
 
   // Parse a query into a SQLite query. The resulting query is placed in
   // sqlite_query and the number of words is returned.
-  int ParseQuery(const std::wstring& query,
-                 std::wstring* sqlite_query);
+  int ParseQuery(const string16& query,
+                 string16* sqlite_query);
 
   // Parses the query words in query, returning the nodes that constitute the
   // valid words in the query. This is intended for later usage with
   // DoesQueryMatch.
   // Ownership of the nodes passes to the caller.
-  void ParseQuery(const std::wstring& query,
+  void ParseQuery(const string16& query,
                   std::vector<QueryNode*>* nodes);
 
   // Parses a query returning the words that make up the query. Any words in
   // quotes are put in |words| without the quotes. For example, the query text
   // "foo bar" results in two entries being added to words, one for foo and one
   // for bar.
-  void ExtractQueryWords(const std::wstring& query,
-                         std::vector<std::wstring>* words);
+  void ExtractQueryWords(const string16& query,
+                         std::vector<string16>* words);
 
   // Returns true if the string text matches the query nodes created by a call
   // to ParseQuery. If the query does match each of the matching positions in
   // the text is added to |match_positions|.
-  bool DoesQueryMatch(const std::wstring& text,
+  bool DoesQueryMatch(const string16& text,
                       const std::vector<QueryNode*>& nodes,
                       Snippet::MatchPositions* match_positions);
 
  private:
   // Does the work of parsing a query; creates nodes in QueryNodeList as
   // appropriate. This is invoked from both of the ParseQuery methods.
-  bool ParseQueryImpl(const std::wstring& query,
+  bool ParseQueryImpl(const string16& query,
                       QueryNodeList* root);
 
   // Extracts the words from text, placing each word into words.
-  void ExtractQueryWords(const std::wstring& text,
+  void ExtractQueryWords(const string16& text,
                          std::vector<QueryWord>* words);
 };
 
-#endif  // CHROME_BROWSER_HISTORY_QUERY_PARSER_H__
+#endif  // CHROME_BROWSER_HISTORY_QUERY_PARSER_H_
diff --git a/chrome/browser/history/query_parser_unittest.cc b/chrome/browser/history/query_parser_unittest.cc
index 9245cc9d..f8b41d9 100644
--- a/chrome/browser/history/query_parser_unittest.cc
+++ b/chrome/browser/history/query_parser_unittest.cc
@@ -1,121 +1,125 @@
-// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "base/basictypes.h"
 #include "base/scoped_vector.h"
+#include "base/utf_string_conversions.h"
 #include "chrome/browser/history/query_parser.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
 class QueryParserTest : public testing::Test {
  public:
   struct TestData {
-    const std::wstring input;
+    const char* input;
     const int expected_word_count;
   };
 
-  std::wstring QueryToString(const std::wstring& query);
+  std::string QueryToString(const std::string& query);
 
  protected:
   QueryParser query_parser_;
 };
 
-// Test helper: Convert a user query string to a SQLite query string.
-std::wstring QueryParserTest::QueryToString(const std::wstring& query) {
-  std::wstring sqlite_query;
-  query_parser_.ParseQuery(query, &sqlite_query);
-  return sqlite_query;
+// Test helper: Convert a user query string in 8-bit (for hardcoding
+// convenience) to a SQLite query string.
+std::string QueryParserTest::QueryToString(const std::string& query) {
+  string16 sqlite_query;
+  query_parser_.ParseQuery(UTF8ToUTF16(query), &sqlite_query);
+  return UTF16ToUTF8(sqlite_query);
 }
 
 // Basic multi-word queries, including prefix matching.
 TEST_F(QueryParserTest, SimpleQueries) {
-  EXPECT_EQ(L"", QueryToString(L" "));
-  EXPECT_EQ(L"singleword*", QueryToString(L"singleword"));
-  EXPECT_EQ(L"spacedout*", QueryToString(L"  spacedout "));
-  EXPECT_EQ(L"foo* bar*", QueryToString(L"foo bar"));
+  EXPECT_EQ("", QueryToString(" "));
+  EXPECT_EQ("singleword*", QueryToString("singleword"));
+  EXPECT_EQ("spacedout*", QueryToString("  spacedout "));
+  EXPECT_EQ("foo* bar*", QueryToString("foo bar"));
   // Short words aren't prefix matches. For Korean Hangul
   // the minimum is 2 while for other scripts, it's 3.
-  EXPECT_EQ(L"f b", QueryToString(L" f b"));
+  EXPECT_EQ("f b", QueryToString(" f b"));
   // KA JANG
-  EXPECT_EQ(L"\xAC00 \xC7A5", QueryToString(L" \xAC00 \xC7A5"));
-  EXPECT_EQ(L"foo* bar*", QueryToString(L" foo   bar "));
+  EXPECT_EQ(WideToUTF8(L"\xAC00 \xC7A5"),
+            QueryToString(WideToUTF8(L" \xAC00 \xC7A5")));
+  EXPECT_EQ("foo* bar*", QueryToString(" foo   bar "));
   // KA-JANG BICH-GO
-  EXPECT_EQ(L"\xAC00\xC7A5* \xBE5B\xACE0*",
-            QueryToString(L"\xAC00\xC7A5 \xBE5B\xACE0"));
+  EXPECT_EQ(WideToUTF8(L"\xAC00\xC7A5* \xBE5B\xACE0*"),
+            QueryToString(WideToUTF8(L"\xAC00\xC7A5 \xBE5B\xACE0")));
 }
 
 // Quoted substring parsing.
 TEST_F(QueryParserTest, Quoted) {
   // ASCII quotes
-  EXPECT_EQ(L"\"Quoted\"", QueryToString(L"\"Quoted\""));
+  EXPECT_EQ("\"Quoted\"", QueryToString("\"Quoted\""));
   // Missing end quotes
-  EXPECT_EQ(L"\"miss end\"", QueryToString(L"\"miss end"));
+  EXPECT_EQ("\"miss end\"", QueryToString("\"miss end"));
   // Missing begin quotes
-  EXPECT_EQ(L"miss* beg*", QueryToString(L"miss beg\""));
+  EXPECT_EQ("miss* beg*", QueryToString("miss beg\""));
   // Weird formatting
-  EXPECT_EQ(L"\"Many\" \"quotes\"", QueryToString(L"\"Many   \"\"quotes"));
+  EXPECT_EQ("\"Many\" \"quotes\"", QueryToString("\"Many   \"\"quotes"));
 }
 
 // Apostrophes within words should be preserved, but otherwise stripped.
 TEST_F(QueryParserTest, Apostrophes) {
-  EXPECT_EQ(L"foo* bar's*", QueryToString(L"foo bar's"));
-  EXPECT_EQ(L"l'foo*", QueryToString(L"l'foo"));
-  EXPECT_EQ(L"foo*", QueryToString(L"'foo"));
+  EXPECT_EQ("foo* bar's*", QueryToString("foo bar's"));
+  EXPECT_EQ("l'foo*", QueryToString("l'foo"));
+  EXPECT_EQ("foo*", QueryToString("'foo"));
 }
 
 // Special characters.
 TEST_F(QueryParserTest, SpecialChars) {
-  EXPECT_EQ(L"foo* the* bar*", QueryToString(L"!#:/*foo#$*;'* the!#:/*bar"));
+  EXPECT_EQ("foo* the* bar*", QueryToString("!#:/*foo#$*;'* the!#:/*bar"));
 }
 
 TEST_F(QueryParserTest, NumWords) {
   TestData data[] = {
-    { L"blah",                  1 },
-    { L"foo \"bar baz\"",       3 },
-    { L"foo \"baz\"",           2 },
-    { L"foo \"bar baz\"  blah", 4 },
+    { "blah",                  1 },
+    { "foo \"bar baz\"",       3 },
+    { "foo \"baz\"",           2 },
+    { "foo \"bar baz\"  blah", 4 },
   };
 
   for (size_t i = 0; i < arraysize(data); ++i) {
-    std::wstring query_string;
+    string16 query_string;
     EXPECT_EQ(data[i].expected_word_count,
-              query_parser_.ParseQuery(data[i].input, &query_string));
+              query_parser_.ParseQuery(UTF8ToUTF16(data[i].input),
+                                       &query_string));
   }
 }
 
 TEST_F(QueryParserTest, ParseQueryNodesAndMatch) {
   struct TestData2 {
-    const std::wstring query;
-    const std::wstring text;
+    const std::string query;
+    const std::string text;
     const bool matches;
     const size_t m1_start;
     const size_t m1_end;
     const size_t m2_start;
     const size_t m2_end;
   } data[] = {
-    { L"foo foo",       L"foo",              true,  0, 3, 0, 0 },
-    { L"foo fooey",     L"fooey",            true,  0, 5, 0, 0 },
-    { L"foo fooey bar", L"bar fooey",        true,  0, 3, 4, 9 },
-    { L"blah",          L"blah",             true,  0, 4, 0, 0 },
-    { L"blah",          L"foo",              false, 0, 0, 0, 0 },
-    { L"blah",          L"blahblah",         true,  0, 4, 0, 0 },
-    { L"blah",          L"foo blah",         true,  4, 8, 0, 0 },
-    { L"foo blah",      L"blah",             false, 0, 0, 0, 0 },
-    { L"foo blah",      L"blahx foobar",     true,  0, 4, 6, 9 },
-    { L"\"foo blah\"",  L"foo blah",         true,  0, 8, 0, 0 },
-    { L"\"foo blah\"",  L"foox blahx",       false, 0, 0, 0, 0 },
-    { L"\"foo blah\"",  L"foo blah",         true,  0, 8, 0, 0 },
-    { L"\"foo blah\"",  L"\"foo blah\"",     true,  1, 9, 0, 0 },
-    { L"foo blah",      L"\"foo bar blah\"", true,  1, 4, 9, 13 },
+    { "foo foo",       "foo",              true,  0, 3, 0, 0 },
+    { "foo fooey",     "fooey",            true,  0, 5, 0, 0 },
+    { "foo fooey bar", "bar fooey",        true,  0, 3, 4, 9 },
+    { "blah",          "blah",             true,  0, 4, 0, 0 },
+    { "blah",          "foo",              false, 0, 0, 0, 0 },
+    { "blah",          "blahblah",         true,  0, 4, 0, 0 },
+    { "blah",          "foo blah",         true,  4, 8, 0, 0 },
+    { "foo blah",      "blah",             false, 0, 0, 0, 0 },
+    { "foo blah",      "blahx foobar",     true,  0, 4, 6, 9 },
+    { "\"foo blah\"",  "foo blah",         true,  0, 8, 0, 0 },
+    { "\"foo blah\"",  "foox blahx",       false, 0, 0, 0, 0 },
+    { "\"foo blah\"",  "foo blah",         true,  0, 8, 0, 0 },
+    { "\"foo blah\"",  "\"foo blah\"",     true,  1, 9, 0, 0 },
+    { "foo blah",      "\"foo bar blah\"", true,  1, 4, 9, 13 },
   };
   for (size_t i = 0; i < ARRAYSIZE_UNSAFE(data); ++i) {
-    std::vector<std::wstring> results;
     QueryParser parser;
     ScopedVector<QueryNode> query_nodes;
-    parser.ParseQuery(data[i].query, &query_nodes.get());
+    parser.ParseQuery(UTF8ToUTF16(data[i].query), &query_nodes.get());
     Snippet::MatchPositions match_positions;
     ASSERT_EQ(data[i].matches,
-              parser.DoesQueryMatch(data[i].text, query_nodes.get(),
+              parser.DoesQueryMatch(UTF8ToUTF16(data[i].text),
+                                    query_nodes.get(),
                                     &match_positions));
     size_t offset = 0;
     if (data[i].m1_start != 0 || data[i].m1_end != 0) {
@@ -134,26 +138,26 @@
 
 TEST_F(QueryParserTest, ExtractQueryWords) {
   struct TestData2 {
-    const std::wstring text;
-    const std::wstring w1;
-    const std::wstring w2;
-    const std::wstring w3;
+    const std::string text;
+    const std::string w1;
+    const std::string w2;
+    const std::string w3;
     const size_t word_count;
   } data[] = {
-    { L"foo",           L"foo", L"",    L"",  1 },
-    { L"foo bar",       L"foo", L"bar", L"",  2 },
-    { L"\"foo bar\"",   L"foo", L"bar", L"",  2 },
-    { L"\"foo bar\" a", L"foo", L"bar", L"a", 3 },
+    { "foo",           "foo", "",    "",  1 },
+    { "foo bar",       "foo", "bar", "",  2 },
+    { "\"foo bar\"",   "foo", "bar", "",  2 },
+    { "\"foo bar\" a", "foo", "bar", "a", 3 },
   };
   for (size_t i = 0; i < ARRAYSIZE_UNSAFE(data); ++i) {
-    std::vector<std::wstring> results;
+    std::vector<string16> results;
     QueryParser parser;
-    parser.ExtractQueryWords(data[i].text, &results);
+    parser.ExtractQueryWords(UTF8ToUTF16(data[i].text), &results);
     ASSERT_EQ(data[i].word_count, results.size());
-    EXPECT_EQ(data[i].w1, results[0]);
+    EXPECT_EQ(data[i].w1, UTF16ToUTF8(results[0]));
     if (results.size() == 2)
-      EXPECT_EQ(data[i].w2, results[1]);
+      EXPECT_EQ(data[i].w2, UTF16ToUTF8(results[1]));
     if (results.size() == 3)
-      EXPECT_EQ(data[i].w3, results[2]);
+      EXPECT_EQ(data[i].w3, UTF16ToUTF8(results[2]));
   }
 }
diff --git a/chrome/browser/history/redirect_uitest.cc b/chrome/browser/history/redirect_uitest.cc
index 49a2bb9..f7a1669 100644
--- a/chrome/browser/history/redirect_uitest.cc
+++ b/chrome/browser/history/redirect_uitest.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -11,6 +11,7 @@
 #include "base/platform_thread.h"
 #include "base/scoped_ptr.h"
+#include "base/string16.h"
 #include "base/string_util.h"
 #include "chrome/test/automation/tab_proxy.h"
 #include "chrome/test/ui/ui_test.h"
 #include "net/base/net_util.h"
@@ -195,14 +196,14 @@
 // B) does not take place.
 TEST_F(RedirectTest, NoHttpToFile) {
   scoped_refptr<HTTPTestServer> server =
-    HTTPTestServer::CreateServer(kDocRoot, NULL);
+      HTTPTestServer::CreateServer(kDocRoot, NULL);
   ASSERT_TRUE(NULL != server.get());
   FilePath test_file(test_data_directory_);
   test_file = test_file.AppendASCII("http_to_file.html");
   GURL file_url = net::FilePathToFileURL(test_file);
 
   GURL initial_url = server->TestServerPage(
-    "client-redirect?" + file_url.spec());
+      "client-redirect?" + file_url.spec());
 
   NavigateToURL(initial_url);
   // UITest will check for crashes. We make sure the title doesn't match the
@@ -211,7 +212,7 @@
   ASSERT_TRUE(tab_proxy.get());
   std::wstring actual_title;
   ASSERT_TRUE(tab_proxy->GetTabTitle(&actual_title));
-  EXPECT_NE(L"File!", actual_title);
+  EXPECT_NE("File!", WideToUTF8(actual_title));
 }
 
 // Ensures that non-user initiated location changes (within page) are
@@ -269,7 +270,7 @@
   NavigateToURL(final_url);
 
   std::wstring tab_title;
-  std::wstring final_url_title = L"Title Of Awesomeness";
+  std::wstring final_url_title = UTF8ToWide("Title Of Awesomeness");
   // Wait till the final page has been loaded.
   for (int i = 0; i < 10; ++i) {
     PlatformThread::Sleep(sleep_timeout_ms());
diff --git a/chrome/browser/history/snippet.cc b/chrome/browser/history/snippet.cc
index 95b66cd..cb96e16 100644
--- a/chrome/browser/history/snippet.cc
+++ b/chrome/browser/history/snippet.cc
@@ -90,8 +90,8 @@
   }
 }
 
-// Converts an index in a utf8 string into the index in the corresponding wide
-// string and returns the wide index. This is intended to be called in a loop
+// Converts an index in a utf8 string into the index in the corresponding utf16
+// string and returns the utf16 index. This is intended to be called in a loop
 // iterating through a utf8 string.
 //
 // utf8_string: the utf8 string.
@@ -101,19 +101,19 @@
 //           matches offset.
-// wide_pos: current index in the wide string. This is the same as the return
-//           value.
+// utf16_pos: current index in the utf16 string. This is the same as the
+//            return value.
-size_t AdvanceAndReturnWidePos(const char* utf8_string,
-                               int32_t utf8_length,
-                               int32_t offset,
-                               int32_t* utf8_pos,
-                               size_t* wide_pos) {
+size_t AdvanceAndReturnUTF16Pos(const char* utf8_string,
+                                int32_t utf8_length,
+                                int32_t offset,
+                                int32_t* utf8_pos,
+                                size_t* utf16_pos) {
   DCHECK(offset >= *utf8_pos && offset <= utf8_length);
 
   UChar32 wide_char;
   while (*utf8_pos < offset) {
     U8_NEXT(utf8_string, *utf8_pos, utf8_length, wide_char);
-    *wide_pos += (wide_char <= 0xFFFF) ? 1 : 2;
+    *utf16_pos += (wide_char <= 0xFFFF) ? 1 : 2;
   }
-  return *wide_pos;
+  return *utf16_pos;
 }
 
 // Given a character break iterator over a UTF-8 string, set the iterator
@@ -187,15 +187,15 @@
     Snippet::MatchPositions* match_positions) {
   DCHECK(match_positions);
   int32_t utf8_pos = 0;
-  size_t wide_pos = 0;
+  size_t utf16_pos = 0;
   const char* utf8_cstring = utf8_string.c_str();
   const int32_t utf8_length = static_cast<int32_t>(utf8_string.size());
   for (Snippet::MatchPositions::iterator i = match_positions->begin();
        i != match_positions->end(); ++i) {
-    i->first = AdvanceAndReturnWidePos(utf8_cstring, utf8_length,
-                                       i->first, &utf8_pos, &wide_pos);
-    i->second = AdvanceAndReturnWidePos(utf8_cstring, utf8_length,
-                                        i->second, &utf8_pos, &wide_pos);
+    i->first = AdvanceAndReturnUTF16Pos(utf8_cstring, utf8_length,
+                                        i->first, &utf8_pos, &utf16_pos);
+    i->second = AdvanceAndReturnUTF16Pos(utf8_cstring, utf8_length,
+                                         i->second, &utf8_pos, &utf16_pos);
   }
 }
 
@@ -204,7 +204,7 @@
   // The length of snippets we try to produce.
   // We can generate longer snippets but stop once we cross kSnippetMaxLength.
   const size_t kSnippetMaxLength = 200;
-  const std::wstring kEllipsis = L" ... ";
+  const string16 kEllipsis = ASCIIToUTF16(" ... ");
 
   UText* document_utext = NULL;
   UErrorCode status = U_ZERO_ERROR;
@@ -220,7 +220,7 @@
   // We build the snippet by iterating through the matches and then grabbing
   // context around each match.  If matches are near enough each other (within
   // kSnippetContext), we skip the "..." between them.
-  std::wstring snippet;
+  string16 snippet;
   size_t start = 0;
   for (size_t i = 0; i < match_positions.size(); ++i) {
     // Some shorter names for the current match.
@@ -240,12 +240,12 @@
         snippet += kEllipsis;
       // Switch to DCHECK after debugging http://crbug.com/15261.
       CHECK(start < document.size());
-      snippet += UTF8ToWide(document.substr(start, match_start - start));
+      snippet += UTF8ToUTF16(document.substr(start, match_start - start));
     }
 
     // Add the match.
     const size_t first = snippet.size();
-    snippet += UTF8ToWide(document.substr(match_start,
+    snippet += UTF8ToUTF16(document.substr(match_start,
-                                          match_end - match_start));
+                                           match_end - match_start));
     matches_.push_back(std::make_pair(first, snippet.size()));
 
@@ -261,7 +261,7 @@
       // Switch to DCHECK after debugging http://crbug.com/15261.
       CHECK(end >= match_end);
       CHECK(end <= document.size());
-      snippet += UTF8ToWide(document.substr(match_end, end - match_end));
+      snippet += UTF8ToUTF16(document.substr(match_end, end - match_end));
     } else {
       // No, there's either no next match or the next match is too far away.
       end = match_end;
@@ -269,7 +269,7 @@
       // Switch to DCHECK after debugging http://crbug.com/15261.
       CHECK(end >= match_end);
       CHECK(end <= document.size());
-      snippet += UTF8ToWide(document.substr(match_end, end - match_end));
+      snippet += UTF8ToUTF16(document.substr(match_end, end - match_end));
       if (end < document.size())
         snippet += kEllipsis;
     }
diff --git a/chrome/browser/history/snippet.h b/chrome/browser/history/snippet.h
index 2c911ba..9e92893 100644
--- a/chrome/browser/history/snippet.h
+++ b/chrome/browser/history/snippet.h
@@ -8,9 +8,10 @@
 #ifndef CHROME_BROWSER_HISTORY_SNIPPET_H__
 #define CHROME_BROWSER_HISTORY_SNIPPET_H__
 
-#include <string>
 #include <vector>
 
+#include "base/string16.h"
+
 class Snippet {
  public:
   // Each MatchPosition is the [begin, end) positions of a match within a
@@ -48,7 +49,7 @@
   void ComputeSnippet(const MatchPositions& matches,
                       const std::string& document);
 
-  const std::wstring& text() const { return text_; }
+  const string16& text() const { return text_; }
   const MatchPositions& matches() const { return matches_; }
 
   // Efficiently swaps the contents of this snippet with the other.
@@ -59,7 +60,7 @@
 
  private:
   // The text of the snippet.
-  std::wstring text_;
+  string16 text_;
 
   // The matches within text_.
   MatchPositions matches_;
diff --git a/chrome/browser/history/snippet_unittest.cc b/chrome/browser/history/snippet_unittest.cc
index 517e2c0..5bc8a3b4 100644
--- a/chrome/browser/history/snippet_unittest.cc
+++ b/chrome/browser/history/snippet_unittest.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -86,8 +86,8 @@
 // For testing, we'll compute the match positions manually instead of using
 // sqlite's FTS matching.  BuildSnippet returns the snippet for matching
 // |query| against |document|.  Matches are surrounded by "**".
-std::wstring BuildSnippet(const std::string& document,
-                          const std::string& query) {
+string16 BuildSnippet(const std::string& document,
+                      const std::string& query) {
   // This function assumes that |document| does not contain
   // any character for which lowercasing changes its length. Further,
   // it's assumed that lowercasing only the ASCII-portion works for
@@ -119,16 +119,16 @@
   snippet.ComputeSnippet(match_positions, document);
 
   // Now "highlight" all matches in the snippet with **.
-  std::wstring star_snippet;
+  string16 star_snippet;
   Snippet::MatchPositions::const_iterator match;
   size_t pos = 0;
   for (match = snippet.matches().begin();
        match != snippet.matches().end(); ++match) {
     star_snippet += snippet.text().substr(pos, match->first - pos);
-    star_snippet += L"**";
+    star_snippet += UTF8ToUTF16("**");
     star_snippet += snippet.text().substr(match->first,
                                           match->second - match->first);
-    star_snippet += L"**";
+    star_snippet += UTF8ToUTF16("**");
     pos = match->second;
   }
   star_snippet += snippet.text().substr(pos);
@@ -137,20 +137,20 @@
 }
 
 TEST(Snippets, SimpleQuery) {
-  ASSERT_EQ(L" ... eferred to collectively as the \"Services\" in this "
-            L"**document** and excluding any services provided to you by "
-            L"Goo ...  ... way, Mountain View, CA 94043, United States. This "
-            L"**document** explains how the agreement is made up, and sets "
-            L"o ... ",
-            BuildSnippet(kSampleDocument, "document"));
+  ASSERT_EQ(" ... eferred to collectively as the \"Services\" in this "
+            "**document** and excluding any services provided to you by "
+            "Goo ...  ... way, Mountain View, CA 94043, United States. This "
+            "**document** explains how the agreement is made up, and sets "
+            "o ... ",
+            UTF16ToUTF8(BuildSnippet(kSampleDocument, "document")));
 }
 
 // Test that two words that are near each other don't produce two elided bits.
 TEST(Snippets, NearbyWords) {
-  ASSERT_EQ(L" ... lace of business is at 1600 Amphitheatre Parkway, "
-            L"**Mountain** **View**, CA 94043, United States. This "
-            L"document explains  ... ",
-            BuildSnippet(kSampleDocument, "mountain view"));
+  ASSERT_EQ(" ... lace of business is at 1600 Amphitheatre Parkway, "
+            "**Mountain** **View**, CA 94043, United States. This "
+            "document explains  ... ",
+            UTF16ToUTF8(BuildSnippet(kSampleDocument, "mountain view")));
 }
 
 // The above tests already test that we get byte offsets correct, but here's
@@ -159,7 +159,7 @@
   ASSERT_EQ(" ... ogle\xe2\x84\xa2 Terms of Service Welcome to Google! "
             "1. Your **relationship** with Google 1.1 Your use of Google's "
             "products, so ... ",
-            WideToUTF8(BuildSnippet(kSampleDocument, "relationship")));
+            UTF16ToUTF8(BuildSnippet(kSampleDocument, "relationship")));
 }
 
 // Bug: 1274923
@@ -221,8 +221,8 @@
             "\xE0\xB8\x9A\xE0\xB9\x81\xE0\xB8\x95\xE0\xB9\x88\xE0\xB8\x87"
             "\xE0\xB9\x80\xE0\xB8\x99\xE0\xB8\xB7\xE0\xB9\x89\xE0\xB8\xAD"
             "\xE0\xB8\xAB\xE0\xB8\xB2",
-            WideToUTF8(BuildSnippet(kThaiSample,
-                                    "\xE0\xB9\x83\xE0\xB8\xAB\xE0\xB9\x89")));
+            UTF16ToUTF8(BuildSnippet(kThaiSample,
+                                     "\xE0\xB9\x83\xE0\xB8\xAB\xE0\xB9\x89")));
 }
 
 TEST(Snippets, ExtractMatchPositions) {
diff --git a/chrome/browser/history/starred_url_database.cc b/chrome/browser/history/starred_url_database.cc
index 23692eb..cf2a306 100644
--- a/chrome/browser/history/starred_url_database.cc
+++ b/chrome/browser/history/starred_url_database.cc
@@ -71,7 +71,7 @@
       NOTREACHED();
       break;
   }
-  entry->title = UTF8ToWide(s.ColumnString(2));
+  entry->title = s.ColumnString16(2);
   entry->date_added = base::Time::FromInternalValue(s.ColumnInt64(3));
   entry->visual_order = s.ColumnInt(4);
   entry->parent_group_id = s.ColumnInt64(5);
@@ -150,7 +150,7 @@
 }
 
 bool StarredURLDatabase::UpdateStarredEntryRow(StarID star_id,
-                                               const std::wstring& title,
+                                               const string16& title,
                                                UIStarID parent_group_id,
                                                int visual_order,
                                                base::Time date_modified) {
@@ -161,7 +161,7 @@
   if (!statement)
     return 0;
 
-  statement.BindString(0, WideToUTF8(title));
+  statement.BindString16(0, title);
   statement.BindInt64(1, parent_group_id);
   statement.BindInt(2, visual_order);
   statement.BindInt64(3, date_modified.ToInternalValue());
@@ -188,7 +188,7 @@
 StarID StarredURLDatabase::CreateStarredEntryRow(URLID url_id,
                                                  UIStarID group_id,
                                                  UIStarID parent_group_id,
-                                                 const std::wstring& title,
+                                                 const string16& title,
                                                  const base::Time& date_added,
                                                  int visual_order,
                                                  StarredEntry::Type type) {
@@ -219,7 +219,7 @@
   }
   statement.BindInt64(1, url_id);
   statement.BindInt64(2, group_id);
-  statement.BindString(3, WideToUTF8(title));
+  statement.BindString16(3, title);
   statement.BindInt64(4, date_added.ToInternalValue());
   statement.BindInt(5, visual_order);
   statement.BindInt64(6, parent_group_id);
@@ -447,7 +447,7 @@
       return false;
     }
     entry.id = CreateStarredEntryRow(
-        0, entry.group_id, 0, L"other", base::Time::Now(), 0,
+        0, entry.group_id, 0, UTF8ToUTF16("other"), base::Time::Now(), 0,
         history::StarredEntry::OTHER);
     if (!entry.id) {
       NOTREACHED() << "Unable to create other bookmarks folder";
diff --git a/chrome/browser/history/starred_url_database.h b/chrome/browser/history/starred_url_database.h
index c11db84..8d327d8 100644
--- a/chrome/browser/history/starred_url_database.h
+++ b/chrome/browser/history/starred_url_database.h
@@ -1,4 +1,4 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -10,6 +10,7 @@
 #include "app/tree_node_model.h"
 #include "base/basictypes.h"
 #include "base/gtest_prod_util.h"
+#include "base/string16.h"
 #include "chrome/browser/history/history_types.h"
 #include "chrome/browser/history/url_database.h"
 
@@ -69,7 +70,7 @@
   //
   // WARNING: Does not update the visual order.
   bool UpdateStarredEntryRow(StarID star_id,
-                             const std::wstring& title,
+                             const string16& title,
                              UIStarID parent_group_id,
                              int visual_order,
                              base::Time date_modified);
@@ -89,7 +90,7 @@
   StarID CreateStarredEntryRow(URLID url_id,
                                UIStarID group_id,
                                UIStarID parent_group_id,
-                               const std::wstring& title,
+                               const string16& title,
                                const base::Time& date_added,
                                int visual_order,
                                StarredEntry::Type type);
diff --git a/chrome/browser/history/starred_url_database_unittest.cc b/chrome/browser/history/starred_url_database_unittest.cc
index 961b240a..02c6a64 100644
--- a/chrome/browser/history/starred_url_database_unittest.cc
+++ b/chrome/browser/history/starred_url_database_unittest.cc
@@ -267,7 +267,7 @@
 
   StarredEntry entry;
   entry.url = GURL("http://google.com");
-  entry.title = L"FOO";
+  entry.title = UTF8ToUTF16("FOO");
   entry.parent_group_id = HistoryService::kBookmarkBarID;
 
   ASSERT_NE(0, CreateStarredEntry(&entry));
diff --git a/chrome/browser/history/text_database.cc b/chrome/browser/history/text_database.cc
index dc8ff8b..f44207a 100644
--- a/chrome/browser/history/text_database.cc
+++ b/chrome/browser/history/text_database.cc
@@ -337,7 +337,7 @@
     Match& match = results->at(results->size() - 1);
     match.url.Swap(&url);
 
-    match.title = UTF8ToWide(statement.ColumnString(1));
+    match.title = statement.ColumnString16(1);
     match.time = base::Time::FromInternalValue(statement.ColumnInt64(2));
 
     // Extract any matches in the title.
diff --git a/chrome/browser/history/text_database.h b/chrome/browser/history/text_database.h
index d1cb843..e34c071 100644
--- a/chrome/browser/history/text_database.h
+++ b/chrome/browser/history/text_database.h
@@ -12,6 +12,7 @@
 #include "app/sql/meta_table.h"
 #include "base/basictypes.h"
 #include "base/file_path.h"
+#include "base/string16.h"
 #include "chrome/browser/history/history_types.h"
 #include "googleurl/src/gurl.h"
 
@@ -32,7 +33,7 @@
     // The title is returned because the title in the text database and the URL
     // database may differ. This happens because we capture the title when the
     // body is captured, and don't update it later.
-    std::wstring title;
+    string16 title;
 
     // Time the page that was returned was visited.
     base::Time time;
diff --git a/chrome/browser/history/text_database_manager.cc b/chrome/browser/history/text_database_manager.cc
index 13d681ea..ff1ae38 100644
--- a/chrome/browser/history/text_database_manager.cc
+++ b/chrome/browser/history/text_database_manager.cc
@@ -4,8 +4,6 @@
 
 #include "chrome/browser/history/text_database_manager.h"
 
-#include <string>
-
 #include "base/compiler_specific.h"
 #include "base/file_util.h"
 #include "base/histogram.h"
@@ -28,17 +26,10 @@
 // The number of database files we will be attached to at once.
 const int kCacheDBSize = 5;
 
-std::string ConvertStringForIndexer(const std::wstring& input) {
-  // TODO(evanm): other transformations here?
-  return WideToUTF8(CollapseWhitespace(input, false));
-}
-
-#if !defined(OS_WIN)  // string16 == wstring on Windows.
 std::string ConvertStringForIndexer(const string16& input) {
   // TODO(evanm): other transformations here?
   return UTF16ToUTF8(CollapseWhitespace(input, false));
 }
-#endif
 
 // Data older than this will be committed to the full text index even if we
 // haven't gotten a title and/or body.
@@ -57,9 +48,9 @@
   added_time_ = TimeTicks::Now();
 }
 
-void TextDatabaseManager::PageInfo::set_title(const std::wstring& ttl) {
+void TextDatabaseManager::PageInfo::set_title(const string16& ttl) {
   if (ttl.empty())  // Make the title nonempty when we set it for EverybodySet.
-    title_ = L" ";
+    title_ = ASCIIToUTF16(" ");
   else
     title_ = ttl;
 }
@@ -184,7 +175,7 @@
 }
 
 void TextDatabaseManager::AddPageTitle(const GURL& url,
-                                       const std::wstring& title) {
+                                       const string16& title) {
   RecentChangeList::iterator found = recent_changes_.Peek(url);
   if (found == recent_changes_.end()) {
     // This page is not in our cache of recent pages. This is very much an edge
@@ -271,7 +262,7 @@
                                       URLID url_id,
                                       VisitID visit_id,
                                       Time visit_time,
-                                      const std::wstring& title,
+                                      const string16& title,
                                       const string16& body) {
   TextDatabase* db = GetDBForTime(visit_time, true);
   if (!db)
@@ -411,7 +402,7 @@
 }
 
 void TextDatabaseManager::GetTextMatches(
-    const std::wstring& query,
+    const string16& query,
     const QueryOptions& options,
     std::vector<TextDatabase::Match>* results,
     Time* first_time_searched) {
@@ -425,9 +416,9 @@
   }
 
   // Get the query into the proper format for the individual DBs.
-  std::wstring fts_query_wide;
-  query_parser_.ParseQuery(query, &fts_query_wide);
-  std::string fts_query = WideToUTF8(fts_query_wide);
+  string16 fts_query16;
+  query_parser_.ParseQuery(query, &fts_query16);
+  std::string fts_query = UTF16ToUTF8(fts_query16);
 
   // Need a copy of the options so we can modify the max count for each call
   // to the individual databases.
diff --git a/chrome/browser/history/text_database_manager.h b/chrome/browser/history/text_database_manager.h
index c84d423..7f25bf7 100644
--- a/chrome/browser/history/text_database_manager.h
+++ b/chrome/browser/history/text_database_manager.h
@@ -11,6 +11,7 @@
 #include "base/basictypes.h"
 #include "base/file_path.h"
 #include "base/gtest_prod_util.h"
+#include "base/string16.h"
 #include "base/task.h"
 #include "chrome/browser/history/history_types.h"
 #include "chrome/browser/history/text_database.h"
@@ -103,7 +104,7 @@
   // should be the time corresponding to that visit in the database.
   void AddPageURL(const GURL& url, URLID url_id, VisitID visit_id,
                   base::Time visit_time);
-  void AddPageTitle(const GURL& url, const std::wstring& title);
+  void AddPageTitle(const GURL& url, const string16& title);
   void AddPageContents(const GURL& url, const string16& body);
 
   // Adds the given data to the appropriate database file, returning true on
@@ -114,7 +115,7 @@
                    URLID url_id,
                    VisitID visit_id,
                    base::Time visit_time,
-                   const std::wstring& title,
+                   const string16& title,
                    const string16& body);
 
   // Deletes the instance of indexed data identified by the given time and URL.
@@ -152,7 +153,7 @@
   //
   // This function will return more than one match per URL if there is more than
   // one entry for that URL in the database.
-  void GetTextMatches(const std::wstring& query,
+  void GetTextMatches(const string16& query,
                       const QueryOptions& options,
                       std::vector<TextDatabase::Match>* results,
                       base::Time* first_time_searched);
@@ -176,11 +177,11 @@
     URLID url_id() const { return url_id_; }
     VisitID visit_id() const { return visit_id_; }
     base::Time visit_time() const { return visit_time_; }
-    const std::wstring& title() const { return title_; }
+    const string16& title() const { return title_; }
     const string16& body() const { return body_; }
 
     // Setters, we can only update the title and body.
-    void set_title(const std::wstring& ttl);
+    void set_title(const string16& ttl);
     void set_body(const string16& bdy);
 
     // Returns true if both the title or body of the entry has been set. Since
@@ -207,7 +208,7 @@
     base::TimeTicks added_time_;
 
     // Will be the string " " when they are set to distinguish set and unset.
-    std::wstring title_;
+    string16 title_;
     string16 body_;
   };
 
diff --git a/chrome/browser/history/text_database_manager_unittest.cc b/chrome/browser/history/text_database_manager_unittest.cc
index aa33af3..8e7f27e 100644
--- a/chrome/browser/history/text_database_manager_unittest.cc
+++ b/chrome/browser/history/text_database_manager_unittest.cc
@@ -20,23 +20,23 @@
 namespace {
 
 const char* kURL1 = "http://www.google.com/asdf";
-const wchar_t* kTitle1 = L"Google A";
+const char* kTitle1 = "Google A";
 const char* kBody1 = "FOO page one.";
 
 const char* kURL2 = "http://www.google.com/qwer";
-const wchar_t* kTitle2 = L"Google B";
+const char* kTitle2 = "Google B";
 const char* kBody2 = "FOO two.";
 
 const char* kURL3 = "http://www.google.com/zxcv";
-const wchar_t* kTitle3 = L"Google C";
+const char* kTitle3 = "Google C";
 const char* kBody3 = "FOO drei";
 
 const char* kURL4 = "http://www.google.com/hjkl";
-const wchar_t* kTitle4 = L"Google D";
+const char* kTitle4 = "Google D";
 const char* kBody4 = "FOO lalala four.";
 
 const char* kURL5 = "http://www.google.com/uiop";
-const wchar_t* kTitle5 = L"Google cinq";
+const char* kTitle5 = "Google cinq";
 const char* kBody5 = "FOO page one.";
 
 // This provides a simple implementation of a URL+VisitDatabase using an
@@ -83,7 +83,8 @@
 
   times->push_back(visit_row.visit_time);
   manager.AddPageData(GURL(kURL1), visit_row.url_id, visit_row.visit_id,
-                      visit_row.visit_time, kTitle1, UTF8ToUTF16(kBody1));
+                      visit_row.visit_time, UTF8ToUTF16(kTitle1),
+                      UTF8ToUTF16(kBody1));
 
   exploded.day_of_month++;
   visit_row.url_id = 2;
@@ -91,7 +92,8 @@
   visit_id = visit_db->AddVisit(&visit_row);
   times->push_back(visit_row.visit_time);
   manager.AddPageData(GURL(kURL2), visit_row.url_id, visit_row.visit_id,
-                      visit_row.visit_time, kTitle2, UTF8ToUTF16(kBody2));
+                      visit_row.visit_time, UTF8ToUTF16(kTitle2),
+                      UTF8ToUTF16(kBody2));
 
   exploded.day_of_month++;
   visit_row.url_id = 2;
@@ -99,7 +101,8 @@
   visit_id = visit_db->AddVisit(&visit_row);
   times->push_back(visit_row.visit_time);
   manager.AddPageData(GURL(kURL3), visit_row.url_id, visit_row.visit_id,
-                      visit_row.visit_time, kTitle3, UTF8ToUTF16(kBody3));
+                      visit_row.visit_time, UTF8ToUTF16(kTitle3),
+                      UTF8ToUTF16(kBody3));
 
   // Put the next ones in the next month.
   exploded.month++;
@@ -108,7 +111,8 @@
   visit_id = visit_db->AddVisit(&visit_row);
   times->push_back(visit_row.visit_time);
   manager.AddPageData(GURL(kURL4), visit_row.url_id, visit_row.visit_id,
-                      visit_row.visit_time, kTitle4, UTF8ToUTF16(kBody4));
+                      visit_row.visit_time, UTF8ToUTF16(kTitle4),
+                      UTF8ToUTF16(kBody4));
 
   exploded.day_of_month++;
   visit_row.url_id = 2;
@@ -116,7 +120,8 @@
   visit_id = visit_db->AddVisit(&visit_row);
   times->push_back(visit_row.visit_time);
   manager.AddPageData(GURL(kURL5), visit_row.url_id, visit_row.visit_id,
-                      visit_row.visit_time, kTitle5, UTF8ToUTF16(kBody5));
+                      visit_row.visit_time, UTF8ToUTF16(kTitle5),
+                      UTF8ToUTF16(kBody5));
 
   // Put the first one in again in the second month.
   exploded.day_of_month++;
@@ -125,7 +130,8 @@
   visit_id = visit_db->AddVisit(&visit_row);
   times->push_back(visit_row.visit_time);
   manager.AddPageData(GURL(kURL1), visit_row.url_id, visit_row.visit_id,
-                      visit_row.visit_time, kTitle1, UTF8ToUTF16(kBody1));
+                      visit_row.visit_time, UTF8ToUTF16(kTitle1),
+                      UTF8ToUTF16(kBody1));
 }
 
 bool ResultsHaveURL(const std::vector<TextDatabase::Match>& results,
@@ -177,7 +183,8 @@
   options.end_time = times[times.size() - 1] + TimeDelta::FromDays(100);
   std::vector<TextDatabase::Match> results;
   Time first_time_searched;
-  manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+  manager.GetTextMatches(UTF8ToUTF16("FOO"), options,
+                         &results, &first_time_searched);
 
   // We should have matched every page.
   EXPECT_EQ(6U, results.size());
@@ -204,7 +211,7 @@
   // First add one without a visit.
   const GURL url(kURL1);
   manager.AddPageURL(url, 0, 0, Time::Now());
-  manager.AddPageTitle(url, kTitle1);
+  manager.AddPageTitle(url, UTF8ToUTF16(kTitle1));
   manager.AddPageContents(url, UTF8ToUTF16(kBody1));
 
   // Check that the page got added.
@@ -212,9 +219,10 @@
   std::vector<TextDatabase::Match> results;
   Time first_time_searched;
 
-  manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+  manager.GetTextMatches(UTF8ToUTF16("FOO"), options,
+                         &results, &first_time_searched);
   ASSERT_EQ(1U, results.size());
-  EXPECT_EQ(kTitle1, results[0].title);
+  EXPECT_EQ(kTitle1, UTF16ToUTF8(results[0].title));
 }
 
 // Like InsertCompleteNoVisit but specifies a visit to update. We check that the
@@ -240,16 +248,17 @@
   const GURL url(kURL2);
   manager.AddPageURL(url, visit.url_id, visit.visit_id, visit.visit_time);
   manager.AddPageContents(url, UTF8ToUTF16(kBody2));
-  manager.AddPageTitle(url, kTitle2);
+  manager.AddPageTitle(url, UTF8ToUTF16(kTitle2));
 
   // Check that the page got added.
   QueryOptions options;
   std::vector<TextDatabase::Match> results;
   Time first_time_searched;
 
-  manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+  manager.GetTextMatches(UTF8ToUTF16("FOO"), options,
+                         &results, &first_time_searched);
   ASSERT_EQ(1U, results.size());
-  EXPECT_EQ(kTitle2, results[0].title);
+  EXPECT_EQ(kTitle2, UTF16ToUTF8(results[0].title));
 
   // Check that the visit got updated for its new indexed state.
   VisitRow out_visit;
@@ -271,7 +280,7 @@
   // Now add a second one with a URL and title.
   GURL url2(kURL2);
   manager.AddPageURL(url2, 0, 0, Time::Now());
-  manager.AddPageTitle(url2, kTitle2);
+  manager.AddPageTitle(url2, UTF8ToUTF16(kTitle2));
 
   // The third one has a URL and body.
   GURL url3(kURL3);
@@ -288,7 +297,8 @@
   QueryOptions options;
   std::vector<TextDatabase::Match> results;
   Time first_time_searched;
-  manager.GetTextMatches(L"google", options, &results, &first_time_searched);
+  manager.GetTextMatches(UTF8ToUTF16("google"), options,
+                         &results, &first_time_searched);
   ASSERT_EQ(0U, results.size());
 
   // Compute a time threshold that will cause everything to be flushed, and
@@ -297,7 +307,8 @@
   manager.FlushOldChangesForTime(expire_time);
 
   // Now we should have all 3 URLs added.
-  manager.GetTextMatches(L"google", options, &results, &first_time_searched);
+  manager.GetTextMatches(UTF8ToUTF16("google"), options,
+                         &results, &first_time_searched);
   ASSERT_EQ(3U, results.size());
   EXPECT_TRUE(ResultsHaveURL(results, kURL1));
   EXPECT_TRUE(ResultsHaveURL(results, kURL2));
@@ -318,7 +329,7 @@
   // We have to have the URL in the URL and visit databases for this test to
   // work.
   URLRow url_row(url);
-  url_row.set_title(L"chocolate");
+  url_row.set_title(UTF8ToUTF16("chocolate"));
   URLID url_id = visit_db.AddURL(url_row);
   ASSERT_TRUE(url_id);
   VisitRow visit_row;
@@ -333,25 +344,27 @@
 
   // Add the title. We should be able to query based on that. The title in the
   // URL row we set above should not come into the picture.
-  manager.AddPageTitle(url, L"Some unique title");
+  manager.AddPageTitle(url, UTF8ToUTF16("Some unique title"));
   Time first_time_searched;
   QueryOptions options;
   std::vector<TextDatabase::Match> results;
-  manager.GetTextMatches(L"unique", options, &results, &first_time_searched);
+  manager.GetTextMatches(UTF8ToUTF16("unique"), options,
+                         &results, &first_time_searched);
   EXPECT_EQ(1U, results.size());
-  manager.GetTextMatches(L"chocolate", options, &results, &first_time_searched);
+  manager.GetTextMatches(UTF8ToUTF16("chocolate"), options,
+                         &results, &first_time_searched);
   EXPECT_EQ(0U, results.size());
 
   // Now add the body, which should be queryable.
   manager.AddPageContents(url, UTF8ToUTF16("Very awesome body"));
-  manager.GetTextMatches(L"awesome", options, &results, &first_time_searched);
+  manager.GetTextMatches(UTF8ToUTF16("awesome"), options, &results, &first_time_searched);
   EXPECT_EQ(1U, results.size());
 
   // Adding the body will actually copy the title from the URL table rather
   // than the previously indexed row (we made them not match above). This isn't
   // necessarily what we want, but it's how it's implemented, and we don't want
   // to regress it.
-  manager.GetTextMatches(L"chocolate", options, &results, &first_time_searched);
+  manager.GetTextMatches(UTF8ToUTF16("chocolate"), options, &results, &first_time_searched);
   EXPECT_EQ(1U, results.size());
 }
 
@@ -374,7 +387,7 @@
     AddAllPages(manager, &visit_db, &times);
 
     // We should have matched every page.
-    manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+    manager.GetTextMatches(UTF8ToUTF16("FOO"), options, &results, &first_time_searched);
     EXPECT_EQ(6U, results.size());
   }
   results.clear();
@@ -385,7 +398,8 @@
     ASSERT_TRUE(manager.Init(NULL));
 
     // We should have matched every page again.
-    manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+    manager.GetTextMatches(UTF8ToUTF16("FOO"), options,
+                           &results, &first_time_searched);
     EXPECT_EQ(6U, results.size());
   }
 }
@@ -412,7 +426,8 @@
     // "Forget" to commit, it should be autocommittedd for us.
 
     // We should have matched every page.
-    manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+    manager.GetTextMatches(UTF8ToUTF16("FOO"), options,
+                           &results, &first_time_searched);
     EXPECT_EQ(6U, results.size());
   }
   results.clear();
@@ -423,7 +438,8 @@
     ASSERT_TRUE(manager.Init(NULL));
 
     // We should have matched every page again.
-    manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+    manager.GetTextMatches(UTF8ToUTF16("FOO"), options,
+                           &results, &first_time_searched);
     EXPECT_EQ(6U, results.size());
   }
 }
@@ -438,13 +454,15 @@
   std::vector<Time> times;
   AddAllPages(manager, &visit_db, &times);
 
+  string16 foo = UTF8ToUTF16("FOO");
+
   QueryOptions options;
   options.begin_time = times[0] - TimeDelta::FromDays(100);
   options.end_time = times[times.size() - 1] + TimeDelta::FromDays(100);
   options.max_count = 2;
   std::vector<TextDatabase::Match> results;
   Time first_time_searched;
-  manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+  manager.GetTextMatches(foo, options, &results, &first_time_searched);
 
   // We should have gotten the last two pages as results (the first page is
   // also the last).
@@ -455,7 +473,7 @@
 
   // Asking for 4 pages, the first one should be in another DB.
   options.max_count = 4;
-  manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+  manager.GetTextMatches(foo, options, &results, &first_time_searched);
 
   EXPECT_EQ(4U, results.size());
   EXPECT_TRUE(first_time_searched <= times[4]);
@@ -475,6 +493,8 @@
   std::vector<Time> times;
   AddAllPages(manager, &visit_db, &times);
 
+  string16 foo = UTF8ToUTF16("FOO");
+
   // First do a query for all time, but with a max of 2. This will give us the
   // last two results and will tell us where to start searching when we want
   // to go back in time.
@@ -484,7 +504,7 @@
   options.max_count = 2;
   std::vector<TextDatabase::Match> results;
   Time first_time_searched;
-  manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+  manager.GetTextMatches(foo, options, &results, &first_time_searched);
 
   // Check that we got the last two results.
   EXPECT_EQ(2U, results.size());
@@ -494,7 +514,7 @@
 
   // Query the previous two URLs and make sure we got the correct ones.
   options.end_time = first_time_searched;
-  manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+  manager.GetTextMatches(foo, options, &results, &first_time_searched);
   EXPECT_EQ(2U, results.size());
   EXPECT_TRUE(first_time_searched <= times[2]);
   EXPECT_TRUE(ResultsHaveURL(results, kURL3));
@@ -502,7 +522,7 @@
 
   // Query the previous two URLs...
   options.end_time = first_time_searched;
-  manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+  manager.GetTextMatches(foo, options, &results, &first_time_searched);
   EXPECT_EQ(2U, results.size());
   EXPECT_TRUE(first_time_searched <= times[0]);
   EXPECT_TRUE(ResultsHaveURL(results, kURL2));
@@ -510,7 +530,7 @@
 
   // Try to query some more, there should be no results.
   options.end_time = first_time_searched;
-  manager.GetTextMatches(L"FOO", options, &results, &first_time_searched);
+  manager.GetTextMatches(foo, options, &results, &first_time_searched);
   EXPECT_EQ(0U, results.size());
 }
 
diff --git a/chrome/browser/history/text_database_unittest.cc b/chrome/browser/history/text_database_unittest.cc
index c94de8a0..f604301d 100644
--- a/chrome/browser/history/text_database_unittest.cc
+++ b/chrome/browser/history/text_database_unittest.cc
@@ -223,9 +223,9 @@
   EXPECT_TRUE(Time::FromInternalValue(kTime2) == results[1].time);
   EXPECT_TRUE(Time::FromInternalValue(kTime3) == results[0].time);
 
-  EXPECT_EQ(UTF8ToWide(std::string(kTitle1)), results[2].title);
-  EXPECT_EQ(UTF8ToWide(std::string(kTitle2)), results[1].title);
-  EXPECT_EQ(UTF8ToWide(std::string(kTitle3)), results[0].title);
+  EXPECT_EQ(std::string(kTitle1), UTF16ToUTF8(results[2].title));
+  EXPECT_EQ(std::string(kTitle2), UTF16ToUTF8(results[1].title));
+  EXPECT_EQ(std::string(kTitle3), UTF16ToUTF8(results[0].title));
 
   // Should have no matches in the title.
   EXPECT_EQ(0U, results[0].title_match_positions.size());
@@ -235,11 +235,11 @@
   // We don't want to be dependent on the exact snippet algorithm, but we know
   // since we searched for "COUNTTAG" which occurs at the beginning of each
   // document, that each snippet should start with that.
-  EXPECT_TRUE(StartsWithASCII(WideToUTF8(results[0].snippet.text()),
+  EXPECT_TRUE(StartsWithASCII(UTF16ToUTF8(results[0].snippet.text()),
                               "COUNTTAG", false));
-  EXPECT_TRUE(StartsWithASCII(WideToUTF8(results[1].snippet.text()),
+  EXPECT_TRUE(StartsWithASCII(UTF16ToUTF8(results[1].snippet.text()),
                               "COUNTTAG", false));
-  EXPECT_TRUE(StartsWithASCII(WideToUTF8(results[2].snippet.text()),
+  EXPECT_TRUE(StartsWithASCII(UTF16ToUTF8(results[2].snippet.text()),
                               "COUNTTAG", false));
 }
 
diff --git a/chrome/browser/history/url_database.cc b/chrome/browser/history/url_database.cc
index e1c8106..07f8881 100644
--- a/chrome/browser/history/url_database.cc
+++ b/chrome/browser/history/url_database.cc
@@ -53,7 +53,7 @@
   DCHECK(i);
   i->id_ = s.ColumnInt64(0);
   i->url_ = GURL(s.ColumnString(1));
-  i->title_ = UTF8ToWide(s.ColumnString(2));
+  i->title_ = s.ColumnString16(2);
   i->visit_count_ = s.ColumnInt(3);
   i->typed_count_ = s.ColumnInt(4);
   i->last_visit_ = base::Time::FromInternalValue(s.ColumnInt64(5));
@@ -118,7 +118,7 @@
   if (!statement)
     return false;
 
-  statement.BindString(0, WideToUTF8(info.title()));
+  statement.BindString16(0, info.title());
   statement.BindInt(1, info.visit_count());
   statement.BindInt(2, info.typed_count());
   statement.BindInt64(3, info.last_visit().ToInternalValue());
@@ -157,7 +157,7 @@
   }
 
   statement.BindString(0, GURLToDatabaseURL(info.url()));
-  statement.BindString(1, WideToUTF8(info.title()));
+  statement.BindString16(1, info.title());
   statement.BindInt(2, info.visit_count());
   statement.BindInt(3, info.typed_count());
   statement.BindInt64(4, info.last_visit().ToInternalValue());
@@ -246,7 +246,7 @@
   return statement.Step();
 }
 
-void URLDatabase::AutocompleteForPrefix(const std::wstring& prefix,
+void URLDatabase::AutocompleteForPrefix(const string16& prefix,
                                         size_t max_results,
                                         std::vector<history::URLRow>* results) {
   // NOTE: this query originally sorted by starred as the second parameter. But
@@ -265,7 +265,7 @@
   // followed by the maximum character size. Use 8-bit strings for everything
   // so we can be sure sqlite is comparing everything in 8-bit mode. Otherwise,
   // it will have to convert strings either to UTF-8 or UTF-16 for comparison.
-  std::string prefix_utf8(WideToUTF8(prefix));
+  std::string prefix_utf8(UTF16ToUTF8(prefix));
   std::string end_query(prefix_utf8);
   end_query.push_back(std::numeric_limits<unsigned char>::max());
 
@@ -347,7 +347,7 @@
 
 bool URLDatabase::SetKeywordSearchTermsForURL(URLID url_id,
                                               TemplateURL::IDType keyword_id,
-                                              const std::wstring& term) {
+                                              const string16& term) {
   DCHECK(url_id && keyword_id && !term.empty());
 
   sql::Statement exist_statement(GetDB().GetCachedStatement(SQL_FROM_HERE,
@@ -368,8 +368,8 @@
 
   statement.BindInt64(0, keyword_id);
   statement.BindInt64(1, url_id);
-  statement.BindString(2, UTF16ToUTF8(l10n_util::ToLower(WideToUTF16(term))));
-  statement.BindString(3, WideToUTF8(term));
+  statement.BindString16(2, l10n_util::ToLower(term));
+  statement.BindString16(3, term);
   return statement.Run();
 }
 
@@ -387,7 +387,7 @@
 
 void URLDatabase::GetMostRecentKeywordSearchTerms(
     TemplateURL::IDType keyword_id,
-    const std::wstring& prefix,
+    const string16& prefix,
     int max_count,
     std::vector<KeywordSearchTermVisit>* matches) {
   // NOTE: the keyword_id can be zero if on first run the user does a query
@@ -407,19 +407,19 @@
     return;
 
   // NOTE: Keep this ToLower() call in sync with search_provider.cc.
-  string16 lower_prefix = l10n_util::ToLower(WideToUTF16(prefix));
+  string16 lower_prefix = l10n_util::ToLower(prefix);
   // This magic gives us a prefix search.
   string16 next_prefix = lower_prefix;
   next_prefix[next_prefix.size() - 1] =
       next_prefix[next_prefix.size() - 1] + 1;
   statement.BindInt64(0, keyword_id);
-  statement.BindString(1, UTF16ToUTF8(lower_prefix));
-  statement.BindString(2, UTF16ToUTF8(next_prefix));
+  statement.BindString16(1, lower_prefix);
+  statement.BindString16(2, next_prefix);
   statement.BindInt(3, max_count);
 
   KeywordSearchTermVisit visit;
   while (statement.Step()) {
-    visit.term = UTF8ToWide(statement.ColumnString(0));
+    visit.term = statement.ColumnString16(0);
     visit.time = base::Time::FromInternalValue(statement.ColumnInt64(1));
     matches->push_back(visit);
   }
diff --git a/chrome/browser/history/url_database.h b/chrome/browser/history/url_database.h
index 9fb67b43..84c8dde 100644
--- a/chrome/browser/history/url_database.h
+++ b/chrome/browser/history/url_database.h
@@ -1,4 +1,4 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -139,7 +139,7 @@
   // Fills the given array with URLs matching the given prefix. They will be
   // sorted by typed count, then by visit count, then by visit date (most
   // recent first) up to the given maximum number. Called by HistoryURLProvider.
-  void AutocompleteForPrefix(const std::wstring& prefix,
+  void AutocompleteForPrefix(const string16& prefix,
                              size_t max_results,
                              std::vector<URLRow>* results);
 
@@ -160,7 +160,7 @@
   // Sets the search terms for the specified url/keyword pair.
   bool SetKeywordSearchTermsForURL(URLID url_id,
                                    TemplateURL::IDType keyword_id,
-                                   const std::wstring& term);
+                                   const string16& term);
 
   // Deletes all search terms for the specified keyword that have been added by
   // way of SetKeywordSearchTermsForURL.
@@ -170,7 +170,7 @@
   // keyword.
   void GetMostRecentKeywordSearchTerms(
       TemplateURL::IDType keyword_id,
-      const std::wstring& prefix,
+      const string16& prefix,
       int max_count,
       std::vector<KeywordSearchTermVisit>* matches);
 
diff --git a/chrome/browser/history/url_database_unittest.cc b/chrome/browser/history/url_database_unittest.cc
index d6dc43f..32ded0c6 100644
--- a/chrome/browser/history/url_database_unittest.cc
+++ b/chrome/browser/history/url_database_unittest.cc
@@ -7,6 +7,7 @@
 #include "base/file_util.h"
 #include "base/path_service.h"
 #include "base/string_util.h"
+#include "base/utf_string_conversions.h"
 #include "chrome/browser/history/url_database.h"
 #include "testing/gtest/include/gtest/gtest.h"
 
@@ -71,7 +72,7 @@
   // first, add two URLs
   const GURL url1("http://www.google.com/");
   URLRow url_info1(url1);
-  url_info1.set_title(L"Google");
+  url_info1.set_title(UTF8ToUTF16("Google"));
   url_info1.set_visit_count(4);
   url_info1.set_typed_count(2);
   url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
@@ -80,7 +81,7 @@
 
   const GURL url2("http://mail.google.com/");
   URLRow url_info2(url2);
-  url_info2.set_title(L"Google Mail");
+  url_info2.set_title(UTF8ToUTF16("Google Mail"));
   url_info2.set_visit_count(3);
   url_info2.set_typed_count(0);
   url_info2.set_last_visit(Time::Now() - TimeDelta::FromDays(2));
@@ -96,7 +97,7 @@
   EXPECT_TRUE(IsURLRowEqual(url_info2, info));
 
   // update the second
-  url_info2.set_title(L"Google Mail Too");
+  url_info2.set_title(UTF8ToUTF16("Google Mail Too"));
   url_info2.set_visit_count(4);
   url_info2.set_typed_count(1);
   url_info2.set_typed_count(91011);
@@ -125,7 +126,7 @@
 TEST_F(URLDatabaseTest, KeywordSearchTermVisit) {
   const GURL url1("http://www.google.com/");
   URLRow url_info1(url1);
-  url_info1.set_title(L"Google");
+  url_info1.set_title(UTF8ToUTF16("Google"));
   url_info1.set_visit_count(4);
   url_info1.set_typed_count(2);
   url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
@@ -134,20 +135,20 @@
   ASSERT_TRUE(url_id != 0);
 
   // Add a keyword visit.
-  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id, 1, L"visit"));
+  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id, 1, UTF8ToUTF16("visit")));
 
   // Make sure we get it back.
   std::vector<KeywordSearchTermVisit> matches;
-  GetMostRecentKeywordSearchTerms(1, L"visit", 10, &matches);
+  GetMostRecentKeywordSearchTerms(1, UTF8ToUTF16("visit"), 10, &matches);
   ASSERT_EQ(1U, matches.size());
-  ASSERT_EQ(L"visit", matches[0].term);
+  ASSERT_EQ(UTF8ToUTF16("visit"), matches[0].term);
 
   // Delete the keyword visit.
   DeleteAllSearchTermsForKeyword(1);
 
   // Make sure we don't get it back when querying.
   matches.clear();
-  GetMostRecentKeywordSearchTerms(1, L"visit", 10, &matches);
+  GetMostRecentKeywordSearchTerms(1, UTF8ToUTF16("visit"), 10, &matches);
   ASSERT_EQ(0U, matches.size());
 }
 
@@ -155,7 +156,7 @@
 TEST_F(URLDatabaseTest, DeleteURLDeletesKeywordSearchTermVisit) {
   const GURL url1("http://www.google.com/");
   URLRow url_info1(url1);
-  url_info1.set_title(L"Google");
+  url_info1.set_title(UTF8ToUTF16("Google"));
   url_info1.set_visit_count(4);
   url_info1.set_typed_count(2);
   url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
@@ -164,14 +165,14 @@
   ASSERT_TRUE(url_id != 0);
 
   // Add a keyword visit.
-  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id, 1, L"visit"));
+  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id, 1, UTF8ToUTF16("visit")));
 
   // Delete the url.
   ASSERT_TRUE(DeleteURLRow(url_id));
 
   // Make sure the keyword visit was deleted.
   std::vector<KeywordSearchTermVisit> matches;
-  GetMostRecentKeywordSearchTerms(1, L"visit", 10, &matches);
+  GetMostRecentKeywordSearchTerms(1, UTF8ToUTF16("visit"), 10, &matches);
   ASSERT_EQ(0U, matches.size());
 }
 
diff --git a/chrome/browser/importer/firefox3_importer.cc b/chrome/browser/importer/firefox3_importer.cc
index 775e8ca..2b95cc3 100644
--- a/chrome/browser/importer/firefox3_importer.cc
+++ b/chrome/browser/importer/firefox3_importer.cc
@@ -109,7 +109,7 @@
       continue;
 
     history::URLRow row(url);
-    row.set_title(s.column_wstring(1));
+    row.set_title(UTF8ToUTF16(s.column_string(1)));
     row.set_visit_count(s.column_int(2));
     row.set_hidden(s.column_int(3) == 1);
     row.set_typed_count(s.column_int(4));
diff --git a/chrome/browser/importer/importer_messages.h b/chrome/browser/importer/importer_messages.h
index 0ed8002..3feca84 100644
--- a/chrome/browser/importer/importer_messages.h
+++ b/chrome/browser/importer/importer_messages.h
@@ -81,7 +81,7 @@
   static bool Read(const Message* m, void** iter, param_type* p) {
     history::URLID id;
     GURL url;
-    std::wstring title;
+    string16 title;
     int visit_count, typed_count;
     base::Time last_visit;
     bool hidden;
diff --git a/chrome/browser/importer/mork_reader.cc b/chrome/browser/importer/mork_reader.cc
index ee7bbc5e..49dadb0 100644
--- a/chrome/browser/importer/mork_reader.cc
+++ b/chrome/browser/importer/mork_reader.cc
@@ -515,14 +515,13 @@
   if (CanImportURL(url)) {
     history::URLRow row(url);
 
-    // title is really a UTF-16 string at this point
-    std::wstring title;
+    string16 title;
     if (data.swap_bytes) {
-      base::CodepageToWide(values[kNameColumn], base::kCodepageUTF16BE,
-                           base::OnStringConversionError::SKIP, &title);
+      base::CodepageToUTF16(values[kNameColumn], base::kCodepageUTF16BE,
+                            base::OnStringConversionError::SKIP, &title);
     } else {
-      base::CodepageToWide(values[kNameColumn], base::kCodepageUTF16LE,
-                           base::OnStringConversionError::SKIP, &title);
+      base::CodepageToUTF16(values[kNameColumn], base::kCodepageUTF16LE,
+                            base::OnStringConversionError::SKIP, &title);
     }
     row.set_title(title);
 
diff --git a/chrome/browser/importer/safari_importer.mm b/chrome/browser/importer/safari_importer.mm
index 2cd776c..9a30073 100644
--- a/chrome/browser/importer/safari_importer.mm
+++ b/chrome/browser/importer/safari_importer.mm
@@ -352,7 +352,7 @@
 
   for (NSDictionary* history_item in safari_history_items) {
     using base::SysNSStringToUTF8;
-    using base::SysNSStringToWide;
+    using base::SysNSStringToUTF16;
     NSString* url_ns = [history_item objectForKey:@""];
     if (!url_ns)
       continue;
@@ -370,7 +370,7 @@
     if (!title_ns)
       title_ns = url_ns;
 
-    row.set_title(SysNSStringToWide(title_ns));
+    row.set_title(SysNSStringToUTF16(title_ns));
     int visit_count = [[history_item objectForKey:@"visitCount"]
                           intValue];
     row.set_visit_count(visit_count);
diff --git a/chrome/browser/importer/safari_importer_unittest.mm b/chrome/browser/importer/safari_importer_unittest.mm
index bb8b66d..081c26e3 100644
--- a/chrome/browser/importer/safari_importer_unittest.mm
+++ b/chrome/browser/importer/safari_importer_unittest.mm
@@ -9,6 +9,7 @@
 #include "base/file_util.h"
 #include "base/path_service.h"
 #include "base/sys_string_conversions.h"
+#include "base/utf_string_conversions.h"
 #include "chrome/browser/history/history_types.h"
 #include "chrome/common/chrome_paths.h"
 #include "chrome/test/file_test_utils.h"
@@ -58,7 +59,7 @@
 
   history::URLRow& it1 = history_items[0];
   EXPECT_EQ(it1.url(), GURL("http://www.firsthistoryitem.com/"));
-  EXPECT_EQ(it1.title(), L"First History Item Title");
+  EXPECT_EQ(it1.title(), UTF8ToUTF16("First History Item Title"));
   EXPECT_EQ(it1.visit_count(), 1);
   EXPECT_EQ(it1.hidden(), 0);
   EXPECT_EQ(it1.typed_count(), 0);
@@ -69,7 +70,7 @@
   std::string second_item_title("http://www.secondhistoryitem.com/");
   EXPECT_EQ(it2.url(), GURL(second_item_title));
   // The second item lacks a title so we expect the URL to be substituted.
-  EXPECT_EQ(base::SysWideToUTF8(it2.title()), second_item_title.c_str());
+  EXPECT_EQ(UTF16ToUTF8(it2.title()), second_item_title.c_str());
   EXPECT_EQ(it2.visit_count(), 55);
   EXPECT_EQ(it2.hidden(), 0);
   EXPECT_EQ(it2.typed_count(), 0);
diff --git a/chrome/browser/possible_url_model.cc b/chrome/browser/possible_url_model.cc
index 70b4319..370163d 100644
--- a/chrome/browser/possible_url_model.cc
+++ b/chrome/browser/possible_url_model.cc
@@ -53,7 +53,7 @@
         options.end_time - TimeDelta::FromDays(kPossibleURLTimeScope);
     options.max_count = 50;
 
-    hs->QueryHistory(std::wstring(), options, &consumer_,
+    hs->QueryHistory(string16(), options, &consumer_,
         NewCallback(this, &PossibleURLModel::OnHistoryQueryComplete));
   }
 }
@@ -69,7 +69,7 @@
     results_[i].index = i;
     results_[i].display_url =
         gfx::SortedDisplayURL((*result)[i].url(), languages);
-    results_[i].title = (*result)[i].title();
+    results_[i].title = UTF16ToWide((*result)[i].title());
   }
 
   // The old version of this code would filter out all but the most recent
diff --git a/chrome/browser/search_engines/template_url_model.cc b/chrome/browser/search_engines/template_url_model.cc
index 1aa8f441..a14dfda 100644
--- a/chrome/browser/search_engines/template_url_model.cc
+++ b/chrome/browser/search_engines/template_url_model.cc
@@ -386,7 +386,8 @@
       profile_->GetHistoryService(Profile::EXPLICIT_ACCESS) : NULL;
   if (!history)
     return;
-  history->SetKeywordSearchTermsForURL(url, t_url->id(), term);
+  history->SetKeywordSearchTermsForURL(url, t_url->id(),
+                                       WideToUTF16Hack(term));
 }
 
 void TemplateURLModel::RemoveFromMaps(const TemplateURL* template_url) {
diff --git a/chrome/browser/sync/glue/typed_url_change_processor.cc b/chrome/browser/sync/glue/typed_url_change_processor.cc
index e033b34..87cfa4fc 100644
--- a/chrome/browser/sync/glue/typed_url_change_processor.cc
+++ b/chrome/browser/sync/glue/typed_url_change_processor.cc
@@ -225,7 +225,7 @@
       }
 
       history::URLRow new_url(url);
-      new_url.set_title(UTF8ToWide(typed_url.title()));
+      new_url.set_title(UTF8ToUTF16(typed_url.title()));
 
       // When we add a new url, the last visit is always added, thus we set
       // the initial visit count to one.  This value will be automatically
@@ -268,7 +268,7 @@
       }
 
       history::URLRow new_url(url);
-      new_url.set_title(UTF8ToWide(typed_url.title()));
+      new_url.set_title(UTF8ToUTF16(typed_url.title()));
       new_url.set_visit_count(old_url.visit_count());
       new_url.set_typed_count(typed_url.typed_count());
       new_url.set_last_visit(old_url.last_visit());
@@ -278,8 +278,8 @@
         std::pair<history::URLID, history::URLRow>(old_url.id(), new_url));
 
       if (old_url.title().compare(new_url.title()) != 0) {
-        titles.push_back(std::pair<GURL, std::wstring>(new_url.url(),
-                                                       new_url.title()));
+        titles.push_back(std::pair<GURL, string16>(new_url.url(),
+                                                   new_url.title()));
       }
 
       std::vector<base::Time> added_visits;
diff --git a/chrome/browser/sync/glue/typed_url_model_associator.cc b/chrome/browser/sync/glue/typed_url_model_associator.cc
index 0c9ff628..f5c7e614 100644
--- a/chrome/browser/sync/glue/typed_url_model_associator.cc
+++ b/chrome/browser/sync/glue/typed_url_model_associator.cc
@@ -98,8 +98,8 @@
           WriteToSyncNode(new_url, visits, &write_node);
         }
         if (difference & DIFF_TITLE_CHANGED) {
-          titles.push_back(std::pair<GURL, std::wstring>(new_url.url(),
-                                                         new_url.title()));
+          titles.push_back(std::pair<GURL, string16>(new_url.url(),
+                                                     new_url.title()));
         }
         if (difference & DIFF_ROW_CHANGED) {
           updated_urls.push_back(
@@ -147,7 +147,7 @@
         std::vector<base::Time>& visits = new_visits.back().second;
         history::URLRow new_url(GURL(typed_url.url()));
 
-        new_url.set_title(UTF8ToWide(typed_url.title()));
+        new_url.set_title(UTF8ToUTF16(typed_url.title()));
 
         // When we add a new url, the last visit is always added, thus we set
         // the initial visit count to one.  This value will be automatically
@@ -331,7 +331,7 @@
   new_url->set_visit_count(visits->size());
 
   // Convert these values only once.
-  std::wstring typed_title(UTF8ToWide(typed_url.title()));
+  string16 typed_title(UTF8ToUTF16(typed_url.title()));
   base::Time typed_visit =
       base::Time::FromInternalValue(
           typed_url.visit(typed_url.visit_size() - 1));
@@ -426,16 +426,16 @@
 
 // static
 void TypedUrlModelAssociator::WriteToSyncNode(
-         const history::URLRow& url,
-         const history::VisitVector& visits,
-         sync_api::WriteNode* node) {
+    const history::URLRow& url,
+    const history::VisitVector& visits,
+    sync_api::WriteNode* node) {
   DCHECK(!url.last_visit().is_null());
   DCHECK(!visits.empty());
   DCHECK(url.last_visit() == visits.back().visit_time);
 
   sync_pb::TypedUrlSpecifics typed_url;
   typed_url.set_url(url.url().spec());
-  typed_url.set_title(WideToUTF8(url.title()));
+  typed_url.set_title(UTF16ToUTF8(url.title()));
   typed_url.set_typed_count(url.typed_count());
   typed_url.set_hidden(url.hidden());
 
diff --git a/chrome/browser/sync/glue/typed_url_model_associator.h b/chrome/browser/sync/glue/typed_url_model_associator.h
index 9aad3903..7557596b 100644
--- a/chrome/browser/sync/glue/typed_url_model_associator.h
+++ b/chrome/browser/sync/glue/typed_url_model_associator.h
@@ -12,6 +12,7 @@
 
 #include "base/basictypes.h"
 #include "base/scoped_ptr.h"
+#include "base/string16.h"
 #include "base/task.h"
 #include "chrome/browser/chrome_thread.h"
 #include "chrome/browser/history/history_types.h"
@@ -47,7 +48,7 @@
 class TypedUrlModelAssociator
   : public PerDataTypeAssociatorInterface<std::string, std::string> {
  public:
-  typedef std::vector<std::pair<GURL, std::wstring> > TypedUrlTitleVector;
+  typedef std::vector<std::pair<GURL, string16> > TypedUrlTitleVector;
   typedef std::vector<history::URLRow> TypedUrlVector;
   typedef std::vector<std::pair<history::URLID, history::URLRow> >
       TypedUrlUpdateVector;
diff --git a/chrome/browser/sync/glue/typed_url_model_associator_unittest.cc b/chrome/browser/sync/glue/typed_url_model_associator_unittest.cc
index 238b575..c36050da 100644
--- a/chrome/browser/sync/glue/typed_url_model_associator_unittest.cc
+++ b/chrome/browser/sync/glue/typed_url_model_associator_unittest.cc
@@ -23,7 +23,7 @@
                                          history::VisitVector* visits) {
     GURL gurl(url);
     history::URLRow history_url(gurl);
-    history_url.set_title(UTF8ToWide(title));
+    history_url.set_title(UTF8ToUTF16(title));
     history_url.set_typed_count(typed_count);
     history_url.set_last_visit(
         base::Time::FromInternalValue(last_visit));
diff --git a/chrome/browser/sync/profile_sync_service_typed_url_unittest.cc b/chrome/browser/sync/profile_sync_service_typed_url_unittest.cc
index cfe16427..2aae53f 100644
--- a/chrome/browser/sync/profile_sync_service_typed_url_unittest.cc
+++ b/chrome/browser/sync/profile_sync_service_typed_url_unittest.cc
@@ -242,7 +242,7 @@
           child_node.GetTypedUrlSpecifics());
       history::URLRow new_url(GURL(typed_url.url()));
 
-      new_url.set_title(UTF8ToWide(typed_url.title()));
+      new_url.set_title(UTF8ToUTF16(typed_url.title()));
       new_url.set_typed_count(typed_url.typed_count());
       DCHECK(typed_url.visit_size());
       new_url.set_visit_count(typed_url.visit_size());
@@ -279,7 +279,7 @@
                                            history::VisitVector* visits) {
     GURL gurl(url);
     URLRow history_url(gurl);
-    history_url.set_title(UTF8ToWide(title));
+    history_url.set_title(UTF8ToUTF16(title));
     history_url.set_typed_count(typed_count);
     history_url.set_last_visit(
         base::Time::FromInternalValue(last_visit));
diff --git a/chrome/browser/tab_contents/tab_contents.cc b/chrome/browser/tab_contents/tab_contents.cc
index 3164619..725db75 100644
--- a/chrome/browser/tab_contents/tab_contents.cc
+++ b/chrome/browser/tab_contents/tab_contents.cc
@@ -1728,20 +1728,20 @@
   // For file URLs without a title, use the pathname instead. In the case of a
   // synthesized title, we don't want the update to count toward the "one set
   // per page of the title to history."
-  std::wstring final_title;
+  string16 final_title;
   bool explicit_set;
   if (entry->url().SchemeIsFile() && title.empty()) {
-    final_title = UTF8ToWide(entry->url().ExtractFileName());
+    final_title = UTF8ToUTF16(entry->url().ExtractFileName());
     explicit_set = false;  // Don't count synthetic titles toward the set limit.
   } else {
-    TrimWhitespace(title, TRIM_ALL, &final_title);
+    TrimWhitespace(WideToUTF16Hack(title), TRIM_ALL, &final_title);
     explicit_set = true;
   }
 
-  if (final_title == UTF16ToWideHack(entry->title()))
+  if (final_title == entry->title())
     return false;  // Nothing changed, don't bother.
 
-  entry->set_title(WideToUTF16Hack(final_title));
+  entry->set_title(final_title);
 
   // Update the history system for this page.
   if (!profile()->IsOffTheRecord() && !received_page_title_) {
@@ -1755,7 +1755,7 @@
   }
 
   // Lastly, set the title for the view.
-  view_->SetPageTitle(final_title);
+  view_->SetPageTitle(UTF16ToWideHack(final_title));
 
   NotificationService::current()->Notify(
       NotificationType::TAB_CONTENTS_TITLE_UPDATED,
diff --git a/chrome/renderer/pepper_plugin_delegate_impl.cc b/chrome/renderer/pepper_plugin_delegate_impl.cc
index 40ef9614..bc11d6ca 100644
--- a/chrome/renderer/pepper_plugin_delegate_impl.cc
+++ b/chrome/renderer/pepper_plugin_delegate_impl.cc
@@ -7,6 +7,7 @@
 #include "app/surface/transport_dib.h"
 #include "base/scoped_ptr.h"
 #include "webkit/glue/plugins/pepper_plugin_instance.h"
+#include "webkit/glue/webkit_glue.h"
 
 #if defined(OS_MACOSX)
 #include "chrome/common/render_messages.h"
@@ -131,3 +132,16 @@
 
   return new PlatformImage2DImpl(width, height, dib);
 }
+
+bool PepperPluginDelegateImpl::OptimizedPluginPaintInRect(
+    skia::PlatformCanvas* canvas,
+    const gfx::Rect& rect) {
+  for (std::set<pepper::PluginInstance*>::iterator i = active_instances_.begin();
+       i != active_instances_.end(); ++i) {
+    if ((*i)->position().Contains(rect)) {
+      (*i)->Paint(webkit_glue::ToWebCanvas(canvas), (*i)->position(), rect);
+      return true;
+    }
+  }
+  return false;
+}
diff --git a/chrome/renderer/pepper_plugin_delegate_impl.h b/chrome/renderer/pepper_plugin_delegate_impl.h
index 2edb4dd..c7a106e 100644
--- a/chrome/renderer/pepper_plugin_delegate_impl.h
+++ b/chrome/renderer/pepper_plugin_delegate_impl.h
@@ -18,6 +18,14 @@
 class PluginInstance;
 }
 
+namespace skia {
+class PlatformCanvas;
+}
+
+namespace gfx {
+class Rect;
+}
+
 class PepperPluginDelegateImpl
     : public pepper::PluginDelegate,
       public base::SupportsWeakPtr<PepperPluginDelegateImpl> {
@@ -29,6 +37,9 @@
   void ViewInitiatedPaint();
   void ViewFlushedPaint();
 
+  bool OptimizedPluginPaintInRect(skia::PlatformCanvas* canvas,
+                                  const gfx::Rect& rect);
+
   // pepper::PluginDelegate implementation.
   virtual void InstanceCreated(pepper::PluginInstance* instance);
   virtual void InstanceDeleted(pepper::PluginInstance* instance);
diff --git a/chrome/renderer/render_view.cc b/chrome/renderer/render_view.cc
index 1c14bd0..19d99046 100644
--- a/chrome/renderer/render_view.cc
+++ b/chrome/renderer/render_view.cc
@@ -5007,6 +5007,11 @@
   edit_commands_.clear();
 }
 
+bool RenderView::OptimizedPluginPaintInRect(skia::PlatformCanvas* canvas,
+                                            const gfx::Rect& rect) {
+  return pepper_delegate_.OptimizedPluginPaintInRect(canvas, rect);
+}
+
 #if defined(OS_MACOSX)
 void RenderView::OnWasHidden() {
   RenderWidget::OnWasHidden();
diff --git a/chrome/renderer/render_view.h b/chrome/renderer/render_view.h
index e98d22c..1defef1 100644
--- a/chrome/renderer/render_view.h
+++ b/chrome/renderer/render_view.h
@@ -568,6 +568,8 @@
   virtual void OnWasHidden();
   virtual void OnWasRestored(bool needs_repainting);
 #endif
+  virtual bool OptimizedPluginPaintInRect(skia::PlatformCanvas* canvas,
+                                          const gfx::Rect& rect);
 
  private:
   // For unit tests.
diff --git a/chrome/renderer/render_widget.cc b/chrome/renderer/render_widget.cc
index fe0d3c1..37ea5a4 100644
--- a/chrome/renderer/render_widget.cc
+++ b/chrome/renderer/render_widget.cc
@@ -395,7 +395,8 @@
     canvas->drawPaint(paint);
   }
 
-  webwidget_->paint(webkit_glue::ToWebCanvas(canvas), rect);
+  if (!OptimizedPluginPaintInRect(canvas, rect))
+    webwidget_->paint(webkit_glue::ToWebCanvas(canvas), rect);
 
   PaintDebugBorder(rect, canvas);
 
@@ -975,3 +976,8 @@
     }
   }
 }
+
+bool RenderWidget::OptimizedPluginPaintInRect(skia::PlatformCanvas* canvas,
+                                              const gfx::Rect& rect) {
+  return false;
+}
diff --git a/chrome/renderer/render_widget.h b/chrome/renderer/render_widget.h
index 4086137a..d2c8347 100644
--- a/chrome/renderer/render_widget.h
+++ b/chrome/renderer/render_widget.h
@@ -215,6 +215,9 @@
   // just handled.
   virtual void DidHandleKeyEvent() {}
 
+  virtual bool OptimizedPluginPaintInRect(skia::PlatformCanvas* canvas,
+                                          const gfx::Rect& rect);
+
   // Routing ID that allows us to communicate to the parent browser process
   // RenderWidgetHost. When MSG_ROUTING_NONE, no messages may be sent.
   int32 routing_id_;