// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// This job type handles Chrome plugin network interception. When a plugin
// wants to intercept a request, a job of this type is created. The intercept
// job communicates with the plugin to retrieve the response headers and data.
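//
// Lifecycle, as implemented below: Start() posts StartAsync(), which hands
// the request to the plugin via start_request(). Once the plugin has headers
// ready, OnStartCompleted() issues NotifyHeadersComplete(), and the body is
// then pulled through ReadRawData(); reads that return CPERR_IO_PENDING are
// finished later via OnReadCompleted().
//
// Illustrative sketch only (the actual glue lives in the CPAPI plumbing, not
// in this file): a plugin-side completion callback would typically recover
// the job from the CPRequest it was handed and re-enter it, roughly:
//
//   URLRequestInterceptJob* job = URLRequestInterceptJob::FromCPRequest(req);
//   if (job)
//     job->OnStartCompleted(result);  // or job->OnReadCompleted(bytes_read)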

#include "chrome/common/net/url_request_intercept_job.h"

#include <vector>

#include "base/compiler_specific.h"
#include "base/message_loop.h"
#include "base/string_util.h"
#include "chrome/common/chrome_plugin_lib.h"
#include "chrome/common/notification_source.h"
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/base/x509_certificate.h"
#include "net/http/http_response_headers.h"
#include "net/url_request/url_request.h"

using base::Time;
using base::TimeDelta;

//
// URLRequestInterceptJob
//

URLRequestInterceptJob::URLRequestInterceptJob(net::URLRequest* request,
                                               ChromePluginLib* plugin,
                                               ScopableCPRequest* cprequest)
    : net::URLRequestJob(request),
      cprequest_(cprequest),
      plugin_(plugin),
      read_buffer_(NULL),
      ALLOW_THIS_IN_INITIALIZER_LIST(method_factory_(this)) {
  cprequest_->data = this;  // see FromCPRequest().

  registrar_.Add(this, NotificationType::CHROME_PLUGIN_UNLOADED,
                 Source<ChromePluginLib>(plugin_));
}

URLRequestInterceptJob::~URLRequestInterceptJob() {
  if (plugin_) {
    plugin_->functions().request_funcs->end_request(cprequest_.get(),
                                                    CPERR_SUCCESS);
  }
}

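// Severs the connection to the plugin: stops observing plugin notifications
// and clears plugin_ so no further calls are made into the library. Used when
// the plugin is unloaded (see Observe()) and when the request is cancelled.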
void URLRequestInterceptJob::DetachPlugin() {
  registrar_.RemoveAll();
  plugin_ = NULL;
}

void URLRequestInterceptJob::Start() {
  // Start reading asynchronously so that all error reporting and data
  // callbacks happen as they would for network requests.
  MessageLoop::current()->PostTask(
      FROM_HERE,
      method_factory_.NewRunnableMethod(
          &URLRequestInterceptJob::StartAsync));
}

void URLRequestInterceptJob::Kill() {
  if (plugin_) {
    plugin_->functions().request_funcs->end_request(cprequest_.get(),
                                                    CPERR_CANCELLED);
    DetachPlugin();
  }
  net::URLRequestJob::Kill();
  method_factory_.RevokeAll();
}

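// Fills |dest| synchronously when the plugin already has data. If the plugin
// returns CPERR_IO_PENDING, the buffer is stashed and the job is marked
// IO_PENDING; the read is completed later through OnReadCompleted(). Any
// other negative result fails the request.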
bool URLRequestInterceptJob::ReadRawData(net::IOBuffer* dest, int dest_size,
                                         int* bytes_read) {
  DCHECK_NE(dest_size, 0);
  DCHECK(bytes_read);

  if (!plugin_)
    return false;

  int rv = plugin_->functions().request_funcs->read(cprequest_.get(),
                                                    dest->data(), dest_size);
  if (rv >= 0) {
    *bytes_read = rv;
    return true;
  }

  if (rv == CPERR_IO_PENDING) {
    read_buffer_ = dest;
    read_buffer_size_ = dest_size;
    SetStatus(net::URLRequestStatus(net::URLRequestStatus::IO_PENDING, 0));
  } else {
    // TODO(mpcomplete): better error code
    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
                                     net::ERR_FAILED));
  }

  return false;
}

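// The MIME type and charset come from the headers supplied by the plugin via
// GetResponseInfo(); these accessors assume the headers have already been
// populated (i.e. NotifyHeadersComplete() has run).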
bool URLRequestInterceptJob::GetMimeType(std::string* mime_type) const {
  return request_->response_headers()->GetMimeType(mime_type);
}

bool URLRequestInterceptJob::GetCharset(std::string* charset) {
  return request_->response_headers()->GetCharset(charset);
}

bool URLRequestInterceptJob::GetContentEncodings(
    std::vector<Filter::FilterType>* encoding_types) {
  DCHECK(encoding_types->empty());
  if (!request_->response_headers())
    return false;

  std::string encoding_type;
  void* iter = NULL;
  while (request_->response_headers()->EnumerateHeader(
      &iter, "Content-Encoding", &encoding_type)) {
    encoding_types->push_back(Filter::ConvertEncodingToType(encoding_type));
  }

  // Even if encoding types are empty, there is a chance that we need to add
  // some decoding, as some proxies strip encoding completely. In such cases,
  // we may need to add (for example) SDCH filtering (when the context
  // suggests it is appropriate).
  Filter::FixupEncodingTypes(*this, encoding_types);
  return !encoding_types->empty();
}

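// Raw headers are fetched from the plugin using the usual two-pass pattern:
// the first get_response_info() call with a NULL buffer returns the required
// size, and the second call fills the string. On failure we fall back to a
// synthesized 404 so the request still carries well-formed headers.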
void URLRequestInterceptJob::GetResponseInfo(net::HttpResponseInfo* info) {
  if (!plugin_)
    return;

  std::string raw_headers;
  int size = plugin_->functions().request_funcs->get_response_info(
      cprequest_.get(), CPRESPONSEINFO_HTTP_RAW_HEADERS, NULL, 0);
  int rv = size < 0 ? size :
      plugin_->functions().request_funcs->get_response_info(
          cprequest_.get(), CPRESPONSEINFO_HTTP_RAW_HEADERS,
          WriteInto(&raw_headers, size + 1), size);
  if (rv != CPERR_SUCCESS) {
    // TODO(mpcomplete): what should we do here?
    raw_headers = "HTTP/1.1 404 Not Found";
    raw_headers.push_back('\0');
  }

  info->headers = new net::HttpResponseHeaders(raw_headers);

  if (request_->url().SchemeIsSecure()) {
    // Make up a fake certificate for intercepted data since we don't have
    // access to the real SSL info.
    // TODO(mpcomplete): we should probably have the interception API transmit
    // the SSL info, but the only consumer of this API (Gears) doesn't keep
    // that around. We should change that.
    const char* kCertIssuer = "Chrome Internal";
    const int kLifetimeDays = 100;

    DLOG(WARNING) << "Issuing a fake SSL certificate for interception of URL "
                  << request_->url();

    info->ssl_info.cert =
        new net::X509Certificate(request_->url().GetWithEmptyPath().spec(),
                                 kCertIssuer,
                                 Time::Now(),
                                 Time::Now() +
                                     TimeDelta::FromDays(kLifetimeDays));
    info->ssl_info.cert_status = 0;
    info->ssl_info.security_bits = -1;
  }
}

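// Returns the HTTP status reported by the plugin, defaulting to 200 if the
// plugin does not provide one.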
int URLRequestInterceptJob::GetResponseCode() const {
  if (!plugin_)
    return -1;

  int status = 200;
  plugin_->functions().request_funcs->get_response_info(
      cprequest_.get(), CPRESPONSEINFO_HTTP_STATUS, &status, sizeof(status));

  return status;
}

bool URLRequestInterceptJob::IsRedirectResponse(GURL* location,
                                                int* http_status_code) {
  if (!request_->response_headers())
    return false;

  std::string value;
  if (!request_->response_headers()->IsRedirect(&value))
    return false;

  *location = request_->url().Resolve(value);
  *http_status_code = request_->response_headers()->response_code();
  return true;
}

void URLRequestInterceptJob::StartAsync() {
  // We may have been orphaned...
  if (!request_ || !plugin_)
    return;

  int rv = plugin_->functions().request_funcs->start_request(cprequest_.get());
  if (rv != CPERR_IO_PENDING)
    OnStartCompleted(rv);
}

void URLRequestInterceptJob::OnStartCompleted(int result) {
  if (result != CPERR_SUCCESS) {
    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
                                     net::ERR_CONNECTION_FAILED));
    return;
  }

  NotifyHeadersComplete();
}

void URLRequestInterceptJob::OnReadCompleted(int bytes_read) {
  if (bytes_read < 0) {
    NotifyDone(net::URLRequestStatus(net::URLRequestStatus::FAILED,
                                     net::ERR_FAILED));
    return;
  }

  SetStatus(net::URLRequestStatus());  // clear the async flag
  NotifyReadComplete(bytes_read);
}

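// The plugin library is being unloaded; drop our reference so we never call
// back into code that is about to go away.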
void URLRequestInterceptJob::Observe(NotificationType type,
                                     const NotificationSource& source,
                                     const NotificationDetails& details) {
  DCHECK(type == NotificationType::CHROME_PLUGIN_UNLOADED);
  DCHECK(plugin_ == Source<ChromePluginLib>(source).ptr());
  DetachPlugin();
}