platform2: Fix issues with new version of libchrome
libchrome r334380 has the following breaking changes that need to be fixed:
- base::JSONWriter::Write() and base::JSONWriter::WriteWithOptions() take
"const base::Value&" instead of "const base::Value*"
- base::JSONReader::Read() and base::JSONReader::ReadAndReturnError()
return a scoped_ptr<base::Value> instead of base::Value*
- base/safe_strerror_posix.h is moved to base/posix/safe_strerror.h
- safe_strerror() is now in "base" namespace
- StartsWithASCII(), EndsWith(), StringToUpperASCII(), LowerCaseEqualsASCII()
are now in "base" namespace
- ObserverList<T> is now in "base" namespace
- base::PrintTo(base::FilePath) used in gtest is now moved to libchrome-test
library and as such, unit test runners need to link to this library now.
- crypto::RSAPrivateKey::CreateSensitive() is now removed from //crypto, so
some of tests in chromeos-login that used that function had to be changed
to use crypto::GenerateRSAKeyPairNSS() directly.
- UnixDomainSocket class is now in "base" namespace
- Pickle class is now in "base" namespace
BUG=chromium:496469
TEST=`./build_packages`
CQ-DEPEND=CL:277662
Change-Id: I36e5fbf2e36a92068873ffbd44020c862a3ed9e3
Reviewed-on: https://chromium-review.googlesource.com/277671
Reviewed-by: Alex Vakulenko <avakulenko@chromium.org>
Commit-Queue: Alex Vakulenko <avakulenko@chromium.org>
Trybot-Ready: Alex Vakulenko <avakulenko@chromium.org>
Tested-by: Alex Vakulenko <avakulenko@chromium.org>
diff --git a/libcurl_http_fetcher.cc b/libcurl_http_fetcher.cc
index e32d925..ee8820b 100644
--- a/libcurl_http_fetcher.cc
+++ b/libcurl_http_fetcher.cc
@@ -37,21 +37,21 @@
bool LibcurlHttpFetcher::GetProxyType(const string& proxy,
curl_proxytype* out_type) {
- if (StartsWithASCII(proxy, "socks5://", true) ||
- StartsWithASCII(proxy, "socks://", true)) {
+ if (base::StartsWithASCII(proxy, "socks5://", true) ||
+ base::StartsWithASCII(proxy, "socks://", true)) {
*out_type = CURLPROXY_SOCKS5_HOSTNAME;
return true;
}
- if (StartsWithASCII(proxy, "socks4://", true)) {
+ if (base::StartsWithASCII(proxy, "socks4://", true)) {
*out_type = CURLPROXY_SOCKS4A;
return true;
}
- if (StartsWithASCII(proxy, "http://", true) ||
- StartsWithASCII(proxy, "https://", true)) {
+ if (base::StartsWithASCII(proxy, "http://", true) ||
+ base::StartsWithASCII(proxy, "https://", true)) {
*out_type = CURLPROXY_HTTP;
return true;
}
- if (StartsWithASCII(proxy, kNoProxy, true)) {
+ if (base::StartsWithASCII(proxy, kNoProxy, true)) {
// known failure case. don't log.
return false;
}
@@ -167,7 +167,7 @@
// Lock down the appropriate curl options for HTTP or HTTPS depending on
// the url.
if (GetSystemState()->hardware()->IsOfficialBuild()) {
- if (StartsWithASCII(url_, "http://", false))
+ if (base::StartsWithASCII(url_, "http://", false))
SetCurlOptionsForHttp();
else
SetCurlOptionsForHttps();
diff --git a/mtd_file_descriptor.cc b/mtd_file_descriptor.cc
index 2e0d329..7ab2afa 100644
--- a/mtd_file_descriptor.cc
+++ b/mtd_file_descriptor.cc
@@ -157,7 +157,8 @@
bool UbiFileDescriptor::IsUbi(const char* path) {
base::FilePath device_node(path);
base::FilePath ubi_name(device_node.BaseName());
- TEST_AND_RETURN_FALSE(StartsWithASCII(ubi_name.MaybeAsASCII(), "ubi", true));
+ TEST_AND_RETURN_FALSE(
+ base::StartsWithASCII(ubi_name.MaybeAsASCII(), "ubi", true));
return static_cast<bool>(GetUbiVolumeInfo(path));
}
diff --git a/omaha_request_action.cc b/omaha_request_action.cc
index 9a0e3b2..251e607 100644
--- a/omaha_request_action.cc
+++ b/omaha_request_action.cc
@@ -394,7 +394,7 @@
const string path_suffix = string("/") + element;
- if (!EndsWith(data->current_path, path_suffix, true)) {
+ if (!base::EndsWith(data->current_path, path_suffix, true)) {
LOG(ERROR) << "Unexpected end element '" << element
<< "' with current_path='" << data->current_path << "'";
data->failed = true;
diff --git a/omaha_request_params.cc b/omaha_request_params.cc
index 9388715..abbd012 100644
--- a/omaha_request_params.cc
+++ b/omaha_request_params.cc
@@ -122,13 +122,12 @@
}
bool OmahaRequestParams::CollectECFWVersions() const {
- return
- StartsWithASCII(hwid_, string("SAMS ALEX"), true) ||
- StartsWithASCII(hwid_, string("BUTTERFLY"), true) ||
- StartsWithASCII(hwid_, string("LUMPY"), true) ||
- StartsWithASCII(hwid_, string("PARROT"), true) ||
- StartsWithASCII(hwid_, string("SPRING"), true) ||
- StartsWithASCII(hwid_, string("SNOW"), true);
+ return base::StartsWithASCII(hwid_, string("SAMS ALEX"), true) ||
+ base::StartsWithASCII(hwid_, string("BUTTERFLY"), true) ||
+ base::StartsWithASCII(hwid_, string("LUMPY"), true) ||
+ base::StartsWithASCII(hwid_, string("PARROT"), true) ||
+ base::StartsWithASCII(hwid_, string("SPRING"), true) ||
+ base::StartsWithASCII(hwid_, string("SNOW"), true);
}
bool OmahaRequestParams::SetTargetChannel(const string& new_target_channel,
diff --git a/omaha_response_handler_action.cc b/omaha_response_handler_action.cc
index 3e6f5d2..67639cc 100644
--- a/omaha_response_handler_action.cc
+++ b/omaha_response_handler_action.cc
@@ -166,7 +166,7 @@
// If we're using p2p, |install_plan_.download_url| may contain a
// HTTP URL even if |response.payload_urls| contain only HTTPS URLs.
- if (!StartsWithASCII(install_plan_.download_url, "https://", false)) {
+ if (!base::StartsWithASCII(install_plan_.download_url, "https://", false)) {
LOG(INFO) << "Mandating hash checks since download_url is not HTTPS.";
return true;
}
@@ -181,7 +181,7 @@
// on. It's really hard to do book-keeping based on each byte being
// downloaded to see whether we only used HTTPS throughout.
for (size_t i = 0; i < response.payload_urls.size(); i++) {
- if (!StartsWithASCII(response.payload_urls[i], "https://", false)) {
+ if (!base::StartsWithASCII(response.payload_urls[i], "https://", false)) {
LOG(INFO) << "Mandating payload hash checks since Omaha response "
<< "contains non-HTTPS URL(s)";
return true;
diff --git a/payload_state.cc b/payload_state.cc
index 5ed219e..ed7b775 100644
--- a/payload_state.cc
+++ b/payload_state.cc
@@ -527,9 +527,9 @@
current_download_source_ = kDownloadSourceHttpPeer;
} else if (GetUrlIndex() < candidate_urls_.size()) {
string current_url = candidate_urls_[GetUrlIndex()];
- if (StartsWithASCII(current_url, "https://", false))
+ if (base::StartsWithASCII(current_url, "https://", false))
current_download_source_ = kDownloadSourceHttpsServer;
- else if (StartsWithASCII(current_url, "http://", false))
+ else if (base::StartsWithASCII(current_url, "http://", false))
current_download_source_ = kDownloadSourceHttpServer;
}
@@ -1333,8 +1333,8 @@
candidate_urls_.clear();
for (size_t i = 0; i < response_.payload_urls.size(); i++) {
string candidate_url = response_.payload_urls[i];
- if (StartsWithASCII(candidate_url, "http://", false) && !http_url_ok)
- continue;
+ if (base::StartsWithASCII(candidate_url, "http://", false) && !http_url_ok)
+ continue;
candidate_urls_.push_back(candidate_url);
LOG(INFO) << "Candidate Url" << (candidate_urls_.size() - 1)
<< ": " << candidate_url;
diff --git a/test_http_server.cc b/test_http_server.cc
index 5c85dbf..988599e 100644
--- a/test_http_server.cc
+++ b/test_http_server.cc
@@ -78,7 +78,7 @@
exit(RC_ERR_READ);
}
headers.append(buf, r);
- } while (!EndsWith(headers, EOL EOL, true));
+ } while (!base::EndsWith(headers, EOL EOL, true));
LOG(INFO) << "got headers:\n--8<------8<------8<------8<----\n"
<< headers
@@ -107,7 +107,7 @@
CHECK_EQ(terms.size(), static_cast<vector<string>::size_type>(2));
string &range = terms[1];
LOG(INFO) << "range attribute: " << range;
- CHECK(StartsWithASCII(range, "bytes=", true) &&
+ CHECK(base::StartsWithASCII(range, "bytes=", true) &&
range.find('-') != string::npos);
request->start_offset = atoll(range.c_str() + strlen("bytes="));
// Decode end offset and increment it by one (so it is non-inclusive).
@@ -491,10 +491,10 @@
LOG(INFO) << "pid(" << getpid() << "): handling url " << url;
if (url == "/quitquitquit") {
HandleQuit(fd);
- } else if (StartsWithASCII(url, "/download/", true)) {
+ } else if (base::StartsWithASCII(url, "/download/", true)) {
const UrlTerms terms(url, 2);
HandleGet(fd, request, terms.GetSizeT(1));
- } else if (StartsWithASCII(url, "/flaky/", true)) {
+ } else if (base::StartsWithASCII(url, "/flaky/", true)) {
const UrlTerms terms(url, 5);
HandleGet(fd, request, terms.GetSizeT(1), terms.GetSizeT(2),
terms.GetInt(3), terms.GetInt(4));
@@ -502,7 +502,7 @@
HandleRedirect(fd, request);
} else if (url == "/error") {
HandleError(fd, request);
- } else if (StartsWithASCII(url, "/error-if-offset/", true)) {
+ } else if (base::StartsWithASCII(url, "/error-if-offset/", true)) {
const UrlTerms terms(url, 3);
HandleErrorIfOffset(fd, request, terms.GetSizeT(1), terms.GetInt(2));
} else {
diff --git a/test_utils.cc b/test_utils.cc
index 814432d..0c02279 100644
--- a/test_utils.cc
+++ b/test_utils.cc
@@ -129,7 +129,7 @@
// Bind to an unused loopback device, sanity check the device name.
lo_dev_name_p->clear();
if (!(utils::ReadPipe("losetup --show -f " + filename, lo_dev_name_p) &&
- StartsWithASCII(*lo_dev_name_p, "/dev/loop", true))) {
+ base::StartsWithASCII(*lo_dev_name_p, "/dev/loop", true))) {
ADD_FAILURE();
return false;
}
diff --git a/update_manager/chromeos_policy.cc b/update_manager/chromeos_policy.cc
index 5abf5c6..2acccaf 100644
--- a/update_manager/chromeos_policy.cc
+++ b/update_manager/chromeos_policy.cc
@@ -144,7 +144,7 @@
// Checks whether |url| can be used under given download restrictions.
bool IsUrlUsable(const string& url, bool http_allowed) {
- return http_allowed || !StartsWithASCII(url, "http://", false);
+ return http_allowed || !base::StartsWithASCII(url, "http://", false);
}
} // namespace
diff --git a/update_manager/evaluation_context.cc b/update_manager/evaluation_context.cc
index 0c4ef21..856c5cd 100644
--- a/update_manager/evaluation_context.cc
+++ b/update_manager/evaluation_context.cc
@@ -228,9 +228,8 @@
chromeos_update_engine::utils::ToString(evaluation_start_monotonic_));
string json_str;
- base::JSONWriter::WriteWithOptions(&value,
- base::JSONWriter::OPTIONS_PRETTY_PRINT,
- &json_str);
+ base::JSONWriter::WriteWithOptions(
+ value, base::JSONWriter::OPTIONS_PRETTY_PRINT, &json_str);
base::TrimWhitespaceASCII(json_str, base::TRIM_TRAILING, &json_str);
return json_str;
diff --git a/utils.cc b/utils.cc
index 1e94ffa..8fba231 100644
--- a/utils.cc
+++ b/utils.cc
@@ -74,8 +74,8 @@
// Return true if |disk_name| is an MTD or a UBI device. Note that this test is
// simply based on the name of the device.
bool IsMtdDeviceName(const string& disk_name) {
- return StartsWithASCII(disk_name, "/dev/ubi", true) ||
- StartsWithASCII(disk_name, "/dev/mtd", true);
+ return base::StartsWithASCII(disk_name, "/dev/ubi", true) ||
+ base::StartsWithASCII(disk_name, "/dev/mtd", true);
}
// Return the device name for the corresponding partition on a NAND device.
@@ -440,7 +440,7 @@
bool SplitPartitionName(const string& partition_name,
string* out_disk_name,
int* out_partition_num) {
- if (!StartsWithASCII(partition_name, "/dev/", true)) {
+ if (!base::StartsWithASCII(partition_name, "/dev/", true)) {
LOG(ERROR) << "Invalid partition device name: " << partition_name;
return false;
}
@@ -494,7 +494,7 @@
return string();
}
- if (!StartsWithASCII(disk_name, "/dev/", true)) {
+ if (!base::StartsWithASCII(disk_name, "/dev/", true)) {
LOG(ERROR) << "Invalid disk name: " << disk_name;
return string();
}
@@ -621,8 +621,8 @@
// non-empty, prepends it to |path|. Otherwise, prepends /tmp. Returns the
// resulting path.
static const string PrependTmpdir(const string& path) {
- if (path[0] == '/' || StartsWithASCII(path, "./", true) ||
- StartsWithASCII(path, "../", true))
+ if (path[0] == '/' || base::StartsWithASCII(path, "./", true) ||
+ base::StartsWithASCII(path, "../", true))
return path;
const char *tmpdir = getenv("TMPDIR");