/**
 * This file is part of the CernVM File System
 */

#include "swissknife_history.h"

#include <algorithm>
#include <cassert>
#include <ctime>

#include "catalog_rw.h"
#include "crypto/hash.h"
#include "crypto/signature.h"
#include "manifest_fetch.h"
#include "network/download.h"
#include "upload.h"

using namespace std; // NOLINT
using namespace swissknife; // NOLINT

const std::string CommandTag::kHeadTag = "trunk";
const std::string CommandTag::kPreviousHeadTag = "trunk-previous";

const std::string CommandTag::kHeadTagDescription = "current HEAD";
const std::string
    CommandTag::kPreviousHeadTagDescription = "default undo target";

static void InsertCommonParameters(ParameterList *r) {
  r->push_back(Parameter::Mandatory('w', "repository directory / url"));
  r->push_back(Parameter::Mandatory('t', "temporary scratch directory"));
  r->push_back(Parameter::Optional('p', "public key of the repository"));
  r->push_back(Parameter::Optional('f', "fully qualified repository name"));
  r->push_back(Parameter::Optional('r', "spooler definition string"));
  r->push_back(Parameter::Optional('m', "(unsigned) manifest file to edit"));
  r->push_back(Parameter::Optional('b', "mounted repository base hash"));
  r->push_back(
      Parameter::Optional('e', "hash algorithm to use (default SHA1)"));
  r->push_back(Parameter::Switch('L', "follow HTTP redirects"));
  r->push_back(Parameter::Optional('P', "session_token_file"));
  r->push_back(Parameter::Optional('@', "proxy url"));
}
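
// Illustrative only: the common options registered above map onto a command
// line of roughly this shape (the command name, URLs and paths are
// placeholders, not taken from this file):
//
//   cvmfs_swissknife <tag command> \
//       -w http://stratum0.example.org/cvmfs/repo.example.org \
//       -t /tmp/scratch \
//       -r local,/srv/spool,/srv/cvmfs/repo.example.org \
//       -m /tmp/manifest -f repo.example.org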

CommandTag::Environment *CommandTag::InitializeEnvironment(
    const ArgumentList &args, const bool read_write) {
  const string repository_url = MakeCanonicalPath(*args.find('w')->second);
  const string tmp_path = MakeCanonicalPath(*args.find('t')->second);
  const string spl_definition = (args.find('r') == args.end())
                                    ? ""
                                    : MakeCanonicalPath(
                                          *args.find('r')->second);
  const string manifest_path = (args.find('m') == args.end())
                                   ? ""
                                   : MakeCanonicalPath(*args.find('m')->second);
  const shash::Algorithms hash_algo = (args.find('e') == args.end())
                                          ? shash::kSha1
                                          : shash::ParseHashAlgorithm(
                                                *args.find('e')->second);
  const string pubkey_path = (args.find('p') == args.end())
                                 ? ""
                                 : MakeCanonicalPath(*args.find('p')->second);
  const shash::Any base_hash = (args.find('b') == args.end())
                                   ? shash::Any()
                                   : shash::MkFromHexPtr(
                                         shash::HexPtr(*args.find('b')->second),
                                         shash::kSuffixCatalog);
  const string repo_name = (args.find('f') == args.end())
                               ? ""
                               : *args.find('f')->second;

  string session_token_file;
  if (args.find('P') != args.end()) {
    session_token_file = *args.find('P')->second;
  }

  // Sanity checks
  if (hash_algo == shash::kAny) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to parse hash algorithm to use");
    return NULL;
  }

  if (read_write && spl_definition.empty()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "no upstream storage provided (-r)");
    return NULL;
  }

  if (read_write && manifest_path.empty()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "no (unsigned) manifest provided (-m)");
    return NULL;
  }

  if (!read_write && pubkey_path.empty()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "no public key provided (-p)");
    return NULL;
  }

  if (!read_write && repo_name.empty()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "no repository name provided (-f)");
    return NULL;
  }

  if (HasPrefix(spl_definition, "gw", false)) {
    if (session_token_file.empty()) {
      PrintError("Session token file has to be provided "
                 "when upstream type is gw.");
      return NULL;
    }
  }

  // create new environment
  // Note: We use this encapsulation because we cannot be sure that the
  // Command object gets deleted properly. With the Environment object at
  // hand we have full control and can make heavy and safe use of RAII
  UniquePtr<Environment> env(new Environment(repository_url, tmp_path));
  env->manifest_path.Set(manifest_path);
  env->history_path.Set(CreateTempPath(tmp_path + "/history", 0600));

  // initialize the (swissknife global) download manager
  const bool follow_redirects = (args.count('L') > 0);
  const std::string &proxy = (args.count('@') > 0) ? *args.find('@')->second
                                                   : "";
  if (!this->InitDownloadManager(follow_redirects, proxy)) {
    return NULL;
  }

  // initialize the (swissknife global) signature manager (if possible)
  if (!pubkey_path.empty() && !this->InitSignatureManager(pubkey_path)) {
    return NULL;
  }

  // open the (yet unsigned) manifest file if it is there, otherwise load the
  // latest manifest from the server
  env->manifest = (FileExists(env->manifest_path.path()))
                      ? OpenLocalManifest(env->manifest_path.path())
                      : FetchRemoteManifest(env->repository_url, repo_name,
                                            base_hash);

  if (!env->manifest.IsValid()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to load manifest file");
    return NULL;
  }

  // figure out the hash of the history from the previous revision if needed
  if (read_write && env->manifest->history().IsNull() && !base_hash.IsNull()) {
    env->previous_manifest = FetchRemoteManifest(env->repository_url, repo_name,
                                                 base_hash);
    if (!env->previous_manifest.IsValid()) {
      LogCvmfs(kLogCvmfs, kLogStderr, "failed to load previous manifest");
      return NULL;
    }

    LogCvmfs(kLogCvmfs, kLogDebug,
             "using history database '%s' from previous "
             "manifest (%s) as basis",
             env->previous_manifest->history().ToString().c_str(),
             env->previous_manifest->repository_name().c_str());
    env->manifest->set_history(env->previous_manifest->history());
    env->manifest->set_repository_name(
        env->previous_manifest->repository_name());
  }

  // download the history database referenced in the manifest
  env->history = GetHistory(env->manifest.weak_ref(), env->repository_url,
                            env->history_path.path(), read_write);
  if (!env->history.IsValid()) {
    return NULL;
  }

  // if the using Command is expected to change the history database, we need
  // to initialize the upload spooler for potential later history upload
  if (read_write) {
    const bool use_file_chunking = false;
    const bool generate_legacy_bulk_chunks = false;
    const upload::SpoolerDefinition sd(
        spl_definition, hash_algo, zlib::kZlibDefault,
        generate_legacy_bulk_chunks, use_file_chunking, 0, 0, 0,
        session_token_file);
    env->spooler = upload::Spooler::Construct(sd);
    if (!env->spooler.IsValid()) {
      LogCvmfs(kLogCvmfs, kLogStderr, "failed to initialize upload spooler");
      return NULL;
    }
  }

  // return the pointer of the Environment (passing the ownership along)
  return env.Release();
}

bool CommandTag::CloseAndPublishHistory(Environment *env) {
  assert(env->spooler.IsValid());

  // set the previous revision pointer of the history database
  env->history->SetPreviousRevision(env->manifest->history());

  // close the history database
  history::History *weak_history = env->history.Release();
  delete weak_history;

  // compress and upload the new history database
  Future<shash::Any> history_hash;
  upload::Spooler::CallbackPtr callback = env->spooler->RegisterListener(
      &CommandTag::UploadClosure, this, &history_hash);
  env->spooler->ProcessHistory(env->history_path.path());
  env->spooler->WaitForUpload();
  const shash::Any new_history_hash = history_hash.Get();
  env->spooler->UnregisterListener(callback);

  // retrieve the (async) uploader result
  if (new_history_hash.IsNull()) {
    return false;
  }

  // update the (yet unsigned) manifest file
  env->manifest->set_history(new_history_hash);
  if (!env->manifest->Export(env->manifest_path.path())) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to export the new manifest '%s'",
             env->manifest_path.path().c_str());
    return false;
  }

  // disable the unlink guard in order to keep the newly exported manifest file
  env->manifest_path.Disable();
  LogCvmfs(kLogCvmfs, kLogVerboseMsg,
           "exported manifest (%" PRIu64 ") with new history '%s'",
           env->manifest->revision(), new_history_hash.ToString().c_str());

  return true;
}


bool CommandTag::UploadCatalogAndUpdateManifest(
    CommandTag::Environment *env, catalog::WritableCatalog *catalog) {
  assert(env->spooler.IsValid());

  // gather information about catalog to be uploaded and update manifest
  UniquePtr<catalog::WritableCatalog> wr_catalog(catalog);
  const std::string catalog_path = wr_catalog->database_path();
  env->manifest->set_ttl(wr_catalog->GetTTL());
  env->manifest->set_revision(wr_catalog->GetRevision());
  env->manifest->set_publish_timestamp(wr_catalog->GetLastModified());

  // close the catalog
  catalog::WritableCatalog *weak_catalog = wr_catalog.Release();
  delete weak_catalog;

  // upload the catalog
  Future<shash::Any> catalog_hash;
  upload::Spooler::CallbackPtr callback = env->spooler->RegisterListener(
      &CommandTag::UploadClosure, this, &catalog_hash);
  env->spooler->ProcessCatalog(catalog_path);
  env->spooler->WaitForUpload();
  const shash::Any new_catalog_hash = catalog_hash.Get();
  env->spooler->UnregisterListener(callback);

  // check if the upload succeeded
  if (new_catalog_hash.IsNull()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to upload catalog '%s'",
             catalog_path.c_str());
    return false;
  }

  // update the catalog size and hash in the manifest
  const size_t catalog_size = GetFileSize(catalog_path);
  env->manifest->set_catalog_size(catalog_size);
  env->manifest->set_catalog_hash(new_catalog_hash);

  LogCvmfs(kLogCvmfs, kLogVerboseMsg, "uploaded new catalog (%lu bytes) '%s'",
           catalog_size, new_catalog_hash.ToString().c_str());

  return true;
}

void CommandTag::UploadClosure(const upload::SpoolerResult &result,
                               Future<shash::Any> *hash) {
  assert(!result.IsChunked());
  if (result.return_code != 0) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to upload history database (%d)",
             result.return_code);
    hash->Set(shash::Any());
  } else {
    hash->Set(result.content_hash);
  }
}

bool CommandTag::UpdateUndoTags(
    Environment *env, const history::History::Tag &current_head_template,
    const bool undo_rollback) {
  assert(env->history.IsValid());

  history::History::Tag current_head;
  history::History::Tag current_old_head;

  // remove previous HEAD tag
  if (!env->history->Remove(CommandTag::kPreviousHeadTag)) {
    LogCvmfs(kLogCvmfs, kLogVerboseMsg, "didn't find a previous HEAD tag");
  }

  // check if we have a current HEAD tag that needs to be renamed to previous
  // HEAD
  if (env->history->GetByName(CommandTag::kHeadTag, &current_head)) {
    // remove current HEAD tag
    if (!env->history->Remove(CommandTag::kHeadTag)) {
      LogCvmfs(kLogCvmfs, kLogStderr, "failed to remove current HEAD tag");
      return false;
    }

    // set previous HEAD tag where current HEAD used to be
    if (!undo_rollback) {
      current_old_head = current_head;
      current_old_head.name = CommandTag::kPreviousHeadTag;
      current_old_head.description = CommandTag::kPreviousHeadTagDescription;
      if (!env->history->Insert(current_old_head)) {
        LogCvmfs(kLogCvmfs, kLogStderr, "failed to set previous HEAD tag");
        return false;
      }
    }
  }

  // set the current HEAD to the catalog provided by the template HEAD
  current_head = current_head_template;
  current_head.name = CommandTag::kHeadTag;
  current_head.description = CommandTag::kHeadTagDescription;
  if (!env->history->Insert(current_head)) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to set new current HEAD");
    return false;
  }

  return true;
}

bool CommandTag::FetchObject(const std::string &repository_url,
                             const shash::Any &object_hash,
                             const std::string &destination_path) const {
  assert(!object_hash.IsNull());

  download::Failures dl_retval;
  const std::string url = repository_url + "/data/" + object_hash.MakePath();

  cvmfs::PathSink pathsink(destination_path);
  download::JobInfo download_object(&url, true, false, &object_hash, &pathsink);
  dl_retval = download_manager()->Fetch(&download_object);

  if (dl_retval != download::kFailOk) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to download object '%s' (%d - %s)",
             object_hash.ToStringWithSuffix().c_str(), dl_retval,
             download::Code2Ascii(dl_retval));
    return false;
  }

  return true;
}

history::History *CommandTag::GetHistory(const manifest::Manifest *manifest,
                                          const std::string &repository_url,
                                          const std::string &history_path,
                                          const bool read_write) const {
  const shash::Any history_hash = manifest->history();
  history::History *history;

  if (history_hash.IsNull()) {
    history = history::SqliteHistory::Create(history_path,
                                              manifest->repository_name());
    if (NULL == history) {
      LogCvmfs(kLogCvmfs, kLogStderr, "failed to create history database");
      return NULL;
    }
  } else {
    if (!FetchObject(repository_url, history_hash, history_path)) {
      return NULL;
    }

    history = (read_write) ? history::SqliteHistory::OpenWritable(history_path)
                           : history::SqliteHistory::Open(history_path);
    if (NULL == history) {
      LogCvmfs(kLogCvmfs, kLogStderr, "failed to open history database (%s)",
               history_path.c_str());
      unlink(history_path.c_str());
      return NULL;
    }

    assert(history->fqrn() == manifest->repository_name());
  }

  return history;
}

catalog::Catalog *CommandTag::GetCatalog(const std::string &repository_url,
                                         const shash::Any &catalog_hash,
                                         const std::string catalog_path,
                                         const bool read_write) const {
  assert(shash::kSuffixCatalog == catalog_hash.suffix);
  if (!FetchObject(repository_url, catalog_hash, catalog_path)) {
    return NULL;
  }

  const std::string catalog_root_path = "";
  return (read_write) ? catalog::WritableCatalog::AttachFreely(
                            catalog_root_path, catalog_path, catalog_hash)
                      : catalog::Catalog::AttachFreely(
                            catalog_root_path, catalog_path, catalog_hash);
}

void CommandTag::PrintTagMachineReadable(
    const history::History::Tag &tag) const {
  LogCvmfs(kLogCvmfs, kLogStdout, "%s %s %" PRIu64 " %" PRIu64 " %ld %s %s",
           tag.name.c_str(), tag.root_hash.ToString().c_str(), tag.size,
           tag.revision, tag.timestamp,
           (tag.branch == "") ? "(default)" : tag.branch.c_str(),
           tag.description.c_str());
}
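
// One tag per line; field order of the machine-readable output above:
//   <name> <root hash> <size> <revision> <timestamp> <branch> <description>
// An empty branch name is printed as "(default)".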

std::string CommandTag::AddPadding(const std::string &str, const size_t padding,
                                   const bool align_right,
                                   const std::string &fill_char) const {
  assert(str.size() <= padding);
  std::string result(str);
  result.reserve(padding);
  const size_t pos = (align_right) ? 0 : str.size();
  const size_t padding_width = padding - str.size();
  for (size_t i = 0; i < padding_width; ++i)
    result.insert(pos, fill_char);
  return result;
}
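
// Illustrative examples (assuming a single-character fill string):
//   AddPadding("v1", 5)        -> "v1   "  (padded on the right)
//   AddPadding("v1", 5, true)  -> "   v1"  (padded on the left)
//   AddPadding("", 3, false, "\u2500") yields a three-character horizontal rule.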

bool CommandTag::IsUndoTagName(const std::string &tag_name) const {
  return tag_name == CommandTag::kHeadTag
         || tag_name == CommandTag::kPreviousHeadTag;
}

//------------------------------------------------------------------------------

ParameterList CommandEditTag::GetParams() const {
  ParameterList r;
  InsertCommonParameters(&r);

  r.push_back(Parameter::Optional('d', "space separated tags to be deleted"));
  r.push_back(Parameter::Optional('a', "name of the new tag"));
  r.push_back(Parameter::Optional('D', "description of the tag"));
  r.push_back(Parameter::Optional('B', "branch of the new tag"));
  r.push_back(Parameter::Optional('P', "predecessor branch"));
  r.push_back(Parameter::Optional('h', "root hash of the new tag"));
  r.push_back(Parameter::Switch('x', "maintain undo tags"));
  return r;
}

int CommandEditTag::Main(const ArgumentList &args) {
  if ((args.find('d') == args.end()) && (args.find('a') == args.end())
      && (args.find('x') == args.end())) {
    LogCvmfs(kLogCvmfs, kLogStderr, "nothing to do");
    return 1;
  }

  // initialize the Environment (taking ownership)
  const bool history_read_write = true;
  const UniquePtr<Environment> env(
      InitializeEnvironment(args, history_read_write));
  if (!env.IsValid()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to init environment");
    return 1;
  }

  int retval;
  if (args.find('d') != args.end()) {
    retval = RemoveTags(args, env.weak_ref());
    if (retval != 0)
      return retval;
  }
  if ((args.find('a') != args.end()) || (args.find('x') != args.end())) {
    retval = AddNewTag(args, env.weak_ref());
    if (retval != 0)
      return retval;
  }

  // finalize processing and upload new history database
  if (!CloseAndPublishHistory(env.weak_ref())) {
    return 1;
  }
  return 0;
}

int CommandEditTag::AddNewTag(const ArgumentList &args, Environment *env) {
  const std::string tag_name = (args.find('a') != args.end())
                                   ? *args.find('a')->second
                                   : "";
  const std::string tag_description = (args.find('D') != args.end())
                                          ? *args.find('D')->second
                                          : "";
  const bool undo_tags = (args.find('x') != args.end());
  const std::string root_hash_string = (args.find('h') != args.end())
                                           ? *args.find('h')->second
                                           : "";
  const std::string branch_name = (args.find('B') != args.end())
                                      ? *args.find('B')->second
                                      : "";
  const std::string previous_branch_name = (args.find('P') != args.end())
                                               ? *args.find('P')->second
                                               : "";

  if (tag_name.find(" ") != std::string::npos) {
    LogCvmfs(kLogCvmfs, kLogStderr, "tag names must not contain spaces");
    return 1;
  }

  assert(!tag_name.empty() || undo_tags);

  if (IsUndoTagName(tag_name)) {
    LogCvmfs(kLogCvmfs, kLogStderr, "undo tags are managed internally");
    return 1;
  }

  // set the root hash to be tagged to the current HEAD if no other hash was
  // given by the user
  const shash::Any root_hash = GetTagRootHash(env, root_hash_string);
  if (root_hash.IsNull()) {
    return 1;
  }

  // open the catalog to be tagged (to check for existence and for meta info)
  const UnlinkGuard catalog_path(
      CreateTempPath(env->tmp_path + "/catalog", 0600));
  const bool catalog_read_write = false;
  const UniquePtr<catalog::Catalog> catalog(GetCatalog(
      env->repository_url, root_hash, catalog_path.path(), catalog_read_write));
  if (!catalog.IsValid()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "catalog with hash '%s' does not exist",
             root_hash.ToString().c_str());
    return 1;
  }

  // check if the catalog is a root catalog
  if (!catalog->root_prefix().IsEmpty()) {
    LogCvmfs(kLogCvmfs, kLogStderr,
             "cannot tag catalog '%s' that is not a "
             "root catalog.",
             root_hash.ToString().c_str());
    return 1;
  }

  // create a template for the new tag to be created, moved or used as undo tag
  history::History::Tag tag_template;
  tag_template.name = "<template>";
  tag_template.root_hash = root_hash;
  tag_template.size = GetFileSize(catalog_path.path());
  tag_template.revision = catalog->GetRevision();
  tag_template.timestamp = catalog->GetLastModified();
  tag_template.branch = branch_name;
  tag_template.description = tag_description;

  // manipulate the tag database by creating a new tag or moving an existing one
  if (!tag_name.empty()) {
    tag_template.name = tag_name;
    const bool user_provided_hash = (!root_hash_string.empty());

    if (!env->history->ExistsBranch(tag_template.branch)) {
      const history::History::Branch branch(
          tag_template.branch, previous_branch_name, tag_template.revision);
      if (!env->history->InsertBranch(branch)) {
        LogCvmfs(kLogCvmfs, kLogStderr, "cannot insert branch '%s'",
                 tag_template.branch.c_str());
        return 1;
      }
    }

    if (!ManipulateTag(env, tag_template, user_provided_hash)) {
      return 1;
    }
  }

  // handle undo tags ('trunk' and 'trunk-previous') if necessary
  if (undo_tags && !UpdateUndoTags(env, tag_template)) {
    return 1;
  }

  return 0;
}

shash::Any CommandEditTag::GetTagRootHash(
    Environment *env, const std::string &root_hash_string) const {
  shash::Any root_hash;

  if (root_hash_string.empty()) {
    LogCvmfs(kLogCvmfs, kLogVerboseMsg,
             "no catalog hash provided, using hash "
             "of current HEAD catalog (%s)",
             env->manifest->catalog_hash().ToString().c_str());
    root_hash = env->manifest->catalog_hash();
  } else {
    root_hash = shash::MkFromHexPtr(shash::HexPtr(root_hash_string),
                                    shash::kSuffixCatalog);
    if (root_hash.IsNull()) {
      LogCvmfs(kLogCvmfs, kLogStderr,
               "failed to read provided catalog hash '%s'",
               root_hash_string.c_str());
    }
  }

  return root_hash;
}

bool CommandEditTag::ManipulateTag(Environment *env,
                                   const history::History::Tag &tag_template,
                                   const bool user_provided_hash) {
  const std::string &tag_name = tag_template.name;

  // check if the tag already exists, otherwise create it and return
  if (!env->history->Exists(tag_name)) {
    return CreateTag(env, tag_template);
  }

  // tag does exist already, now we need to see if we can move it
  if (!user_provided_hash) {
    LogCvmfs(kLogCvmfs, kLogStderr,
             "a tag with the name '%s' already exists. Do you want to move it? "
             "(-h <root hash>)",
             tag_name.c_str());
    return false;
  }

  // move the already existing tag and return
  return MoveTag(env, tag_template);
}

bool CommandEditTag::MoveTag(Environment *env,
                             const history::History::Tag &tag_template) {
  const std::string &tag_name = tag_template.name;
  history::History::Tag new_tag = tag_template;

  // get the already existent tag
  history::History::Tag old_tag;
  if (!env->history->GetByName(tag_name, &old_tag)) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to retrieve tag '%s' for moving",
             tag_name.c_str());
    return false;
  }

  // check if we would move the tag to the same hash
  if (old_tag.root_hash == new_tag.root_hash) {
    LogCvmfs(kLogCvmfs, kLogStderr, "tag '%s' already points to '%s'",
             tag_name.c_str(), old_tag.root_hash.ToString().c_str());
    return false;
  }

  // copy over old description if no new description was given
  if (new_tag.description.empty()) {
    new_tag.description = old_tag.description;
  }
  new_tag.branch = old_tag.branch;

  // remove the old tag from the database
  if (!env->history->Remove(tag_name)) {
    LogCvmfs(kLogCvmfs, kLogStderr, "removing old tag '%s' before move failed",
             tag_name.c_str());
    return false;
  }
  if (!env->history->PruneBranches()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "could not prune unused branches");
    return false;
  }
  const bool retval = env->history->Vacuum();
  assert(retval);

  LogCvmfs(kLogCvmfs, kLogStdout, "moving tag '%s' from '%s' to '%s'",
           tag_name.c_str(), old_tag.root_hash.ToString().c_str(),
           tag_template.root_hash.ToString().c_str());

  // re-create the moved tag
  return CreateTag(env, new_tag);
}

bool CommandEditTag::CreateTag(Environment *env,
                               const history::History::Tag &new_tag) {
  if (!env->history->Insert(new_tag)) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to insert new tag '%s'",
             new_tag.name.c_str());
    return false;
  }

  return true;
}

int CommandEditTag::RemoveTags(const ArgumentList &args, Environment *env) {
  typedef std::vector<std::string> TagNames;
  const std::string tags_to_delete = *args.find('d')->second;

  const TagNames condemned_tags = SplitString(tags_to_delete, ' ');

  // check if user tries to remove a magic undo tag
  TagNames::const_iterator i = condemned_tags.begin();
  const TagNames::const_iterator iend = condemned_tags.end();
  for (; i != iend; ++i) {
    if (IsUndoTagName(*i)) {
      LogCvmfs(kLogCvmfs, kLogStderr,
               "undo tags are handled internally and cannot be deleted");
      return 1;
    }
  }

  LogCvmfs(kLogCvmfs, kLogDebug, "proceeding to delete %lu tags",
           condemned_tags.size());

  // check if the tags to be deleted exist
  bool all_exist = true;
  for (i = condemned_tags.begin(); i != iend; ++i) {
    if (!env->history->Exists(*i)) {
      LogCvmfs(kLogCvmfs, kLogStderr, "tag '%s' does not exist", i->c_str());
      all_exist = false;
    }
  }
  if (!all_exist) {
    return 1;
  }

  // delete the tags from the tag database and print their root hashes
  i = condemned_tags.begin();
  env->history->BeginTransaction();
  for (; i != iend; ++i) {
    // print some information about the tag to be deleted
    history::History::Tag condemned_tag;
    const bool found_tag = env->history->GetByName(*i, &condemned_tag);
    assert(found_tag);
    LogCvmfs(kLogCvmfs, kLogStdout, "deleting '%s' (%s)",
             condemned_tag.name.c_str(),
             condemned_tag.root_hash.ToString().c_str());

    // remove the tag
    if (!env->history->Remove(*i)) {
      LogCvmfs(kLogCvmfs, kLogStderr, "failed to remove tag '%s' from history",
               i->c_str());
      return 1;
    }
  }
  bool retval = env->history->PruneBranches();
  if (!retval) {
    LogCvmfs(kLogCvmfs, kLogStderr,
             "failed to prune unused branches from history");
    return 1;
  }
  env->history->CommitTransaction();
  retval = env->history->Vacuum();
  assert(retval);

  return 0;
}

//------------------------------------------------------------------------------


ParameterList CommandListTags::GetParams() const {
  ParameterList r;
  InsertCommonParameters(&r);
  r.push_back(Parameter::Switch('x', "machine readable output"));
  r.push_back(Parameter::Switch('B', "print branch hierarchy"));
  return r;
}

void CommandListTags::PrintHumanReadableTagList(
    const CommandListTags::TagList &tags) const {
  // go through the list of tags and figure out the column widths
  const std::string name_label = "Name";
  const std::string rev_label = "Revision";
  const std::string time_label = "Timestamp";
  const std::string branch_label = "Branch";
  const std::string desc_label = "Description";

  // figure out the maximal lengths of the fields in the lists
  TagList::const_reverse_iterator i = tags.rbegin();
  const TagList::const_reverse_iterator iend = tags.rend();
  size_t max_name_len = name_label.size();
  size_t max_rev_len = rev_label.size();
  size_t max_time_len = desc_label.size();
  size_t max_branch_len = branch_label.size();
  for (; i != iend; ++i) {
    max_name_len = std::max(max_name_len, i->name.size());
    max_rev_len = std::max(max_rev_len, StringifyInt(i->revision).size());
    max_time_len = std::max(max_time_len,
                            StringifyTime(i->timestamp, true).size());
    max_branch_len = std::max(max_branch_len, i->branch.size());
  }

  // print the list header
  LogCvmfs(kLogCvmfs, kLogStdout, "%s \u2502 %s \u2502 %s \u2502 %s \u2502 %s",
           AddPadding(name_label, max_name_len).c_str(),
           AddPadding(rev_label, max_rev_len).c_str(),
           AddPadding(time_label, max_time_len).c_str(),
           AddPadding(branch_label, max_branch_len).c_str(),
           desc_label.c_str());
  LogCvmfs(kLogCvmfs, kLogStdout,
           "%s\u2500\u253C\u2500%s\u2500\u253C\u2500%s"
           "\u2500\u253C\u2500%s\u2500\u253C\u2500%s",
           AddPadding("", max_name_len, false, "\u2500").c_str(),
           AddPadding("", max_rev_len, false, "\u2500").c_str(),
           AddPadding("", max_time_len, false, "\u2500").c_str(),
           AddPadding("", max_branch_len, false, "\u2500").c_str(),
           AddPadding("", desc_label.size() + 1, false, "\u2500").c_str());

  // print the rows of the list
  i = tags.rbegin();
  for (; i != iend; ++i) {
    LogCvmfs(
        kLogCvmfs, kLogStdout, "%s \u2502 %s \u2502 %s \u2502 %s \u2502 %s",
        AddPadding(i->name, max_name_len).c_str(),
        AddPadding(StringifyInt(i->revision), max_rev_len, true).c_str(),
        AddPadding(StringifyTime(i->timestamp, true), max_time_len).c_str(),
        AddPadding(i->branch, max_branch_len).c_str(), i->description.c_str());
  }

  // print the list footer
  LogCvmfs(kLogCvmfs, kLogStdout,
           "%s\u2500\u2534\u2500%s\u2500\u2534\u2500%s"
           "\u2500\u2534\u2500%s\u2500\u2534\u2500%s",
           AddPadding("", max_name_len, false, "\u2500").c_str(),
           AddPadding("", max_rev_len, false, "\u2500").c_str(),
           AddPadding("", max_time_len, false, "\u2500").c_str(),
           AddPadding("", max_branch_len, false, "\u2500").c_str(),
           AddPadding("", desc_label.size() + 1, false, "\u2500").c_str());

  // print the number of tags listed
  LogCvmfs(kLogCvmfs, kLogStdout, "listing contains %lu tags", tags.size());
}

void CommandListTags::PrintMachineReadableTagList(const TagList &tags) const {
  TagList::const_iterator i = tags.begin();
  const TagList::const_iterator iend = tags.end();
  for (; i != iend; ++i) {
    PrintTagMachineReadable(*i);
  }
}


void CommandListTags::PrintHumanReadableBranchList(
    const BranchHierarchy &branches) const {
  const unsigned N = branches.size();
  for (unsigned i = 0; i < N; ++i) {
    for (unsigned l = 0; l < branches[i].level; ++l) {
      LogCvmfs(kLogCvmfs, kLogStdout | kLogNoLinebreak, "%s",
               ((l + 1) == branches[i].level) ? "\u251c " : "\u2502 ");
    }
    LogCvmfs(kLogCvmfs, kLogStdout, "%s @%" PRIu64,
             branches[i].branch.branch.c_str(),
             branches[i].branch.initial_revision);
  }
}


void CommandListTags::PrintMachineReadableBranchList(
    const BranchHierarchy &branches) const {
  const unsigned N = branches.size();
  for (unsigned i = 0; i < N; ++i) {
    LogCvmfs(kLogCvmfs, kLogStdout, "[%u] %s%s @%" PRIu64, branches[i].level,
             AddPadding("", branches[i].level, false, " ").c_str(),
             branches[i].branch.branch.c_str(),
             branches[i].branch.initial_revision);
  }
}


void CommandListTags::SortBranchesRecursively(
    unsigned level,
    const string &parent_branch,
    const BranchList &branches,
    BranchHierarchy *hierarchy) const {
  // For large numbers of branches, this should be turned into the O(n) version
  // using a linked list
  const unsigned N = branches.size();
  for (unsigned i = 0; i < N; ++i) {
    if (branches[i].branch == "")
      continue;
    if (branches[i].parent == parent_branch) {
      hierarchy->push_back(BranchLevel(branches[i], level));
      SortBranchesRecursively(level + 1, branches[i].branch, branches,
                              hierarchy);
    }
  }
}


CommandListTags::BranchHierarchy CommandListTags::SortBranches(
    const BranchList &branches) const {
  BranchHierarchy hierarchy;
  hierarchy.push_back(
      BranchLevel(history::History::Branch("(default)", "", 0), 0));
  SortBranchesRecursively(1, "", branches, &hierarchy);
  return hierarchy;
}


int CommandListTags::Main(const ArgumentList &args) {
  const bool machine_readable = (args.find('x') != args.end());
  const bool branch_hierarchy = (args.find('B') != args.end());

  // initialize the Environment (taking ownership)
  const bool history_read_write = false;
  const UniquePtr<Environment> env(
      InitializeEnvironment(args, history_read_write));
  if (!env.IsValid()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to init environment");
    return 1;
  }

  if (branch_hierarchy) {
    BranchList branch_list;
    if (!env->history->ListBranches(&branch_list)) {
      LogCvmfs(kLogCvmfs, kLogStderr,
               "failed to list branches in history database");
      return 1;
    }
    const BranchHierarchy branch_hierarchy = SortBranches(branch_list);

    if (machine_readable) {
      PrintMachineReadableBranchList(branch_hierarchy);
    } else {
      PrintHumanReadableBranchList(branch_hierarchy);
    }
  } else {
    // obtain a full list of all tags
    TagList tags;
    if (!env->history->List(&tags)) {
      LogCvmfs(kLogCvmfs, kLogStderr,
               "failed to list tags in history database");
      return 1;
    }

    if (machine_readable) {
      PrintMachineReadableTagList(tags);
    } else {
      PrintHumanReadableTagList(tags);
    }
  }

  return 0;
}

//------------------------------------------------------------------------------

ParameterList CommandInfoTag::GetParams() const {
  ParameterList r;
  InsertCommonParameters(&r);

  r.push_back(Parameter::Mandatory('n', "name of the tag to be inspected"));
  r.push_back(Parameter::Switch('x', "machine readable output"));
  return r;
}

std::string CommandInfoTag::HumanReadableFilesize(const size_t filesize) const {
  const size_t kiB = 1024;
  const size_t MiB = kiB * 1024;
  const size_t GiB = MiB * 1024;

  if (filesize > GiB) {
    return StringifyDouble(static_cast<double>(filesize) / GiB) + " GiB";
  } else if (filesize > MiB) {
    return StringifyDouble(static_cast<double>(filesize) / MiB) + " MiB";
  } else if (filesize > kiB) {
    return StringifyDouble(static_cast<double>(filesize) / kiB) + " kiB";
  } else {
    return StringifyInt(filesize) + " Byte";
  }
}
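
// Illustrative: a 2048-byte catalog is reported as roughly "2 kiB" and a
// 3 GiB one as roughly "3 GiB"; the exact digits depend on StringifyDouble().
// Values of exactly 1024 bytes or less fall through to the "Byte" branch
// because the comparisons above are strict.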

void CommandInfoTag::PrintHumanReadableInfo(
    const history::History::Tag &tag) const {
  LogCvmfs(kLogCvmfs, kLogStdout,
           "Name: %s\n"
           "Revision: %" PRIu64 "\n"
           "Timestamp: %s\n"
           "Branch: %s\n"
           "Root Hash: %s\n"
           "Catalog Size: %s\n"
           "%s",
           tag.name.c_str(), tag.revision,
           StringifyTime(tag.timestamp, true /* utc */).c_str(),
           tag.branch.c_str(), tag.root_hash.ToString().c_str(),
           HumanReadableFilesize(tag.size).c_str(), tag.description.c_str());
}

int CommandInfoTag::Main(const ArgumentList &args) {
  const std::string tag_name = *args.find('n')->second;
  const bool machine_readable = (args.find('x') != args.end());

  // initialize the Environment (taking ownership)
  const bool history_read_write = false;
  const UniquePtr<Environment> env(
      InitializeEnvironment(args, history_read_write));
  if (!env.IsValid()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to init environment");
    return 1;
  }

  history::History::Tag tag;
  const bool found = env->history->GetByName(tag_name, &tag);
  if (!found) {
    LogCvmfs(kLogCvmfs, kLogStderr, "tag '%s' does not exist",
             tag_name.c_str());
    return 1;
  }

  if (machine_readable) {
    PrintTagMachineReadable(tag);
  } else {
    PrintHumanReadableInfo(tag);
  }

  return 0;
}

//------------------------------------------------------------------------------

ParameterList CommandRollbackTag::GetParams() const {
  ParameterList r;
  InsertCommonParameters(&r);

  r.push_back(Parameter::Optional('n', "name of the tag to be republished"));
  return r;
}

int CommandRollbackTag::Main(const ArgumentList &args) {
  const bool undo_rollback = (args.find('n') == args.end());
  const std::string tag_name = (!undo_rollback) ? *args.find('n')->second
                                                : CommandTag::kPreviousHeadTag;

  // initialize the Environment (taking ownership)
  const bool history_read_write = true;
  const UniquePtr<Environment> env(
      InitializeEnvironment(args, history_read_write));
  if (!env.IsValid()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to init environment");
    return 1;
  }

  // find tag to be rolled back to
  history::History::Tag target_tag;
  const bool found = env->history->GetByName(tag_name, &target_tag);
  if (!found) {
    if (undo_rollback) {
      LogCvmfs(kLogCvmfs, kLogStderr,
               "only one anonymous rollback supported - "
               "perhaps you want to provide a tag name?");
    } else {
      LogCvmfs(kLogCvmfs, kLogStderr, "tag '%s' does not exist",
               tag_name.c_str());
    }
    return 1;
  }
  if (target_tag.branch != "") {
    LogCvmfs(kLogCvmfs, kLogStderr,
             "rollback is only supported on the default branch");
    return 1;
  }

  // list the tags that will be deleted
  TagList affected_tags;
  if (!env->history->ListTagsAffectedByRollback(tag_name, &affected_tags)) {
    LogCvmfs(kLogCvmfs, kLogStderr,
             "failed to list condemned tags prior to rollback to '%s'",
             tag_name.c_str());
    return 1;
  }

  // check if tag is valid to be rolled back to
  const uint64_t current_revision = env->manifest->revision();
  assert(target_tag.revision <= current_revision);
  if (target_tag.revision == current_revision) {
    LogCvmfs(kLogCvmfs, kLogStderr,
             "not rolling back to current head (%" PRIu64 ")",
             current_revision);
    return 1;
  }

  // open the catalog to be rolled back to
  const UnlinkGuard catalog_path(
      CreateTempPath(env->tmp_path + "/catalog", 0600));
  const bool catalog_read_write = true;
  UniquePtr<catalog::WritableCatalog> catalog(
      dynamic_cast<catalog::WritableCatalog *>(
          GetCatalog(env->repository_url, target_tag.root_hash,
                     catalog_path.path(), catalog_read_write)));
  if (!catalog.IsValid()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to open catalog with hash '%s'",
             target_tag.root_hash.ToString().c_str());
    return 1;
  }

  // check if the catalog has a supported schema version
  if (catalog->schema() < catalog::CatalogDatabase::kLatestSupportedSchema
                              - catalog::CatalogDatabase::kSchemaEpsilon) {
    LogCvmfs(kLogCvmfs, kLogStderr,
             "not rolling back to outdated and "
             "incompatible catalog schema (%.1f < %.1f)",
             catalog->schema(),
             catalog::CatalogDatabase::kLatestSupportedSchema);
    return 1;
  }

  // update the catalog to be republished
  catalog->Transaction();
  catalog->UpdateLastModified();
  catalog->SetRevision(current_revision + 1);
  catalog->SetPreviousRevision(env->manifest->catalog_hash());
  catalog->Commit();

  // Upload catalog (handing over ownership of catalog pointer)
  if (!UploadCatalogAndUpdateManifest(env.weak_ref(), catalog.Release())) {
    LogCvmfs(kLogCvmfs, kLogStderr, "catalog upload failed");
    return 1;
  }

  // update target tag with newly published root catalog information
  history::History::Tag updated_target_tag(target_tag);
  updated_target_tag.root_hash = env->manifest->catalog_hash();
  updated_target_tag.size = env->manifest->catalog_size();
  updated_target_tag.revision = env->manifest->revision();
  updated_target_tag.timestamp = env->manifest->publish_timestamp();
  if (!env->history->Rollback(updated_target_tag)) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to rollback history to '%s'",
             updated_target_tag.name.c_str());
    return 1;
  }
  const bool retval = env->history->Vacuum();
  assert(retval);

  // set the magic undo tags
  if (!UpdateUndoTags(env.weak_ref(), updated_target_tag, undo_rollback)) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to update magic undo tags");
    return 1;
  }

  // finalize the history and upload it
  if (!CloseAndPublishHistory(env.weak_ref())) {
    return 1;
  }

  // print the tags that have been removed by the rollback
  PrintDeletedTagList(affected_tags);

  return 0;
}

void CommandRollbackTag::PrintDeletedTagList(const TagList &tags) const {
  size_t longest_name = 0;
  TagList::const_iterator i = tags.begin();
  const TagList::const_iterator iend = tags.end();
  for (; i != iend; ++i) {
    longest_name = std::max(i->name.size(), longest_name);
  }

  i = tags.begin();
  for (; i != iend; ++i) {
    LogCvmfs(kLogCvmfs, kLogStdout, "removed tag %s (%s)",
             AddPadding(i->name, longest_name).c_str(),
             i->root_hash.ToString().c_str());
  }
}

//------------------------------------------------------------------------------

ParameterList CommandEmptyRecycleBin::GetParams() const {
  ParameterList r;
  InsertCommonParameters(&r);
  return r;
}

int CommandEmptyRecycleBin::Main(const ArgumentList &args) {
  // initialize the Environment (taking ownership)
  const bool history_read_write = true;
  const UniquePtr<Environment> env(
      InitializeEnvironment(args, history_read_write));
  if (!env.IsValid()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to init environment");
    return 1;
  }

  if (!env->history->EmptyRecycleBin()) {
    LogCvmfs(kLogCvmfs, kLogStderr, "failed to empty recycle bin");
    return 1;
  }

  // finalize the history and upload it
  if (!CloseAndPublishHistory(env.weak_ref())) {
    return 1;
  }

  return 0;
}