AU: Beginnings of delta support
- proto file for delta files; still needs hardlink support
- code to generate a delta update from two directory trees (old, new).
- code to parse delta update
- Actions: postinst-runner, install, bootable flag setter, filesystem
  copier, Omaha response handler, Omaha request preparer.
- misc utility functions, like StringHasSuffix(), templatized Action
classes to feed/collect an object from another action.
- FilesystemIterator: iterates a directory tree with optional
  exclusion path. Tolerates deletion of files during iteration.
- Subprocess class: support for synchronously or asynchronously
running an external command. Doesn't pass any env variables.
- Integration test that strings many Actions together and tests using
actual Omaha/Lorry. Currently only tests full updates.
- New simple HTTP server for unittest that supports fake flaky
connections.
- Some refactoring.
Review URL: http://codereview.chromium.org/466036
git-svn-id: svn://chrome-svn/chromeos/trunk@334 06c00378-0e64-4dae-be16-12b19f9950a1
diff --git a/download_action.cc b/download_action.cc
index 3a039d1..833f806 100644
--- a/download_action.cc
+++ b/download_action.cc
@@ -2,23 +2,19 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include <glib.h>
-
-#include "update_engine/action_pipe.h"
#include "update_engine/download_action.h"
+#include <errno.h>
+#include <algorithm>
+#include <glib.h>
+#include "update_engine/action_pipe.h"
+
+using std::min;
namespace chromeos_update_engine {
-DownloadAction::DownloadAction(const std::string& url,
- const std::string& output_path,
- off_t size, const std::string& hash,
- const bool should_decompress,
- HttpFetcher* http_fetcher)
- : size_(size),
- url_(url),
- output_path_(output_path),
- hash_(hash),
- should_decompress_(should_decompress),
+DownloadAction::DownloadAction(HttpFetcher* http_fetcher)
+ : size_(0),
+ should_decompress_(false),
writer_(NULL),
http_fetcher_(http_fetcher) {}
@@ -29,14 +25,24 @@
CHECK(!writer_);
direct_file_writer_.reset(new DirectFileWriter);
+ // Get the InstallPlan and read it
+ CHECK(HasInputObject());
+ InstallPlan install_plan(GetInputObject());
+
+ should_decompress_ = install_plan.is_full_update;
+ url_ = install_plan.download_url;
+ output_path_ = install_plan.download_path;
+ hash_ = install_plan.download_hash;
+ install_plan.Dump();
+
if (should_decompress_) {
decompressing_file_writer_.reset(
new GzipDecompressingFileWriter(direct_file_writer_.get()));
writer_ = decompressing_file_writer_.get();
+ output_path_ = install_plan.install_path;
} else {
writer_ = direct_file_writer_.get();
}
-
int rc = writer_->Open(output_path_.c_str(),
O_TRUNC | O_WRONLY | O_CREAT | O_LARGEFILE, 0644);
if (rc < 0) {
@@ -58,35 +64,29 @@
void DownloadAction::ReceivedBytes(HttpFetcher *fetcher,
const char* bytes,
int length) {
- int bytes_written = 0;
- do {
- CHECK_GT(length, bytes_written);
- int rc = writer_->Write(bytes + bytes_written, length - bytes_written);
- // TODO(adlr): handle write error
- CHECK_GE(rc, 0);
- bytes_written += rc;
- } while (length != bytes_written);
+ int rc = writer_->Write(bytes, length);
+ TEST_AND_RETURN(rc >= 0);
omaha_hash_calculator_.Update(bytes, length);
}
void DownloadAction::TransferComplete(HttpFetcher *fetcher, bool successful) {
if (writer_) {
- CHECK_EQ(0, writer_->Close()) << errno;
+ CHECK_EQ(writer_->Close(), 0) << errno;
writer_ = NULL;
}
if (successful) {
// Make sure hash is correct
omaha_hash_calculator_.Finalize();
if (omaha_hash_calculator_.hash() != hash_) {
- LOG(INFO) << "Download of " << url_ << " failed. Expect hash "
- << hash_ << " but got hash " << omaha_hash_calculator_.hash();
+ LOG(ERROR) << "Download of " << url_ << " failed. Expect hash "
+ << hash_ << " but got hash " << omaha_hash_calculator_.hash();
successful = false;
}
}
// Write the path to the output pipe if we're successful
if (successful && HasOutputPipe())
- SetOutputObject(output_path_);
+ SetOutputObject(GetInputObject());
processor_->ActionComplete(this, successful);
}