@@ -288,12 +288,12 @@ namespace LOTRO_DAT {
 }
 auto file_id = data.options["fid"].as<long long>();
- Subfile *file = dictionary_[file_id];
- if (file == nullptr) {
+ if (dictionary_.count(file_id) == 0) {
 LOG(ERROR) << "Cannot patch file - there is no file in dictionary with file_id = " << file_id;
 return NO_FILE_ERROR;
 }
+ Subfile *file = dictionary_[file_id];
 if (!CorrectSubfile(file)) {
 LOG(ERROR) << "Incorrect subfile with id " << file->file_id() << " (headers do not match). Cannot patch it";
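
(Aside: a minimal, self-contained sketch of why the lookup is reordered above, assuming dictionary_ behaves like std::map<long long, Subfile*> — operator[] default-constructs a null entry for a missing key, so the old nullptr test also left a stray record in the dictionary. Subfile and the map below are stand-ins, not the library's real types.)

    #include <cassert>
    #include <map>

    struct Subfile {};

    int main() {
        std::map<long long, Subfile*> dictionary;
        long long missing_id = 42;

        // Old pattern: the lookup itself creates dictionary[42] == nullptr.
        Subfile* f = dictionary[missing_id];
        assert(f == nullptr && dictionary.size() == 1);

        dictionary.clear();

        // New pattern: reject unknown ids without mutating the dictionary.
        if (dictionary.count(missing_id) == 0) {
            assert(dictionary.empty());
        }
        return 0;
    }
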
@@ -580,6 +580,12 @@ namespace LOTRO_DAT {
 DAT_RESULT DatFile::ApplyFilePatch(Subfile *file, BinaryData &data) {
 LOG(DEBUG) << "Applying " << file->file_id() << " patch.";
+
+
+
+
+
+
 if (data.Empty()) {
 LOG(ERROR) << "Error caused during making file for import. Cannot patch file " << file->file_id();
 return FAILED;
@@ -605,10 +611,10 @@ namespace LOTRO_DAT {
 file->file_offset_ = file_size_;
 file->block_size_ = std::max((long long)data.size(), file->block_size_);
- free_buffered_size_ = std::max(0ll, free_buffered_size_ - file->block_size_);
+ free_buffered_size_ = std::max(0ll, free_buffered_size_ - file->block_size_ - 8);
 AddBufferedSize();
- this->file_size_ += file->block_size_;
+ this->file_size_ += file->block_size_ + 8;
 }
 file->file_size_ = data.size() - 8;
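
(Aside: the two +8 adjustments above mirror the `file->file_size_ = data.size() - 8;` line that follows, which suggests each appended block carries 8 extra bytes that are not counted in the subfile's own size. A standalone sketch of that bookkeeping, with invented names and that assumption made explicit:)

    #include <algorithm>

    // Hypothetical mirror of the changed lines, assuming the 8 bytes are a
    // per-subfile prefix excluded from file_size_ (not confirmed by this diff alone).
    struct Accounting {
        long long dat_size = 0;            // plays the role of DatFile::file_size_
        long long free_buffered_size = 0;  // reserve that new blocks are drawn from

        // Reserve block_size + 8 bytes at the end of the .dat for a new block.
        long long AppendBlock(long long data_size, long long old_block_size) {
            long long block_size = std::max(data_size, old_block_size);
            long long offset = dat_size;    // becomes file->file_offset_
            free_buffered_size = std::max(0ll, free_buffered_size - block_size - 8);
            dat_size += block_size + 8;
            return offset;
        }
    };
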
@@ -694,10 +700,13 @@ namespace LOTRO_DAT {
 }
- CommitLocales();
- CommitDirectories();
- UpdateHeader();
- ClearFragmentationJournal();
+
+ if (!pending_dictionary_.empty()) {
+ CommitLocales();
+ CommitDirectories();
+ UpdateHeader();
+ }
+
 orig_dict_.clear();
 pending_patch_.clear();
@@ -725,18 +734,32 @@ namespace LOTRO_DAT {
 DAT_RESULT DatFile::InitLocales() {
 LOG(INFO) << "Initialising locales...";
- BinaryData dicts_data;
+ BinaryData dicts_data(4);
+
+ ReadData(dicts_data, 4, 300);
+ long long dict_offset = dicts_data.ToNumber<4>(0);
+
+ if (dict_offset == 0) {
+ LOG(INFO) << "Dictionary offset is empty. Passing.";
+ return SUCCESS;
+ }
+
+ ReadData(dicts_data, 4, dict_offset);
+ long long dict_size = dicts_data.ToNumber<4>(0);
- if (dictionary_.count(2013266257) != 0)
- dicts_data = GetFileData(dictionary_[2013266257]);
+ ReadData(dicts_data, 4, dict_offset + 4);
+ long long dict_version = dicts_data.ToNumber<4>(0);
+ LOG(INFO) << "Dictionary size is " << dict_size << ". Version is " << dict_version;
- if (dicts_data.size() < 29) {
- LOG(WARNING) << "Locales file is empty.. Initialising locale dicts as empty";
- LOG(INFO) << "Could't find locales file or it's corrupted/empty... Continuing without locales";
+ if (dict_version != 100) {
+ LOG(WARNING) << "DICTIONARY IS OLD!!!";
 return SUCCESS;
 }
- BinaryData hi_data = dicts_data.CutData(14, 29) + BinaryData("\0", 1);
+ dicts_data = BinaryData((unsigned)dict_size);
+ ReadData(dicts_data, dict_size, dict_offset + 8);
+
+ BinaryData hi_data = dicts_data.CutData(0, 15) + BinaryData("\0", 1);
 std::string hi = std::string((char *) (hi_data.data()));
 LOG(DEBUG) << "hi info is " << hi;
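
(Aside: the ReadData() calls above imply the following layout for the locale dictionary header. Below is a standalone parsing sketch with invented names, assuming little-endian 4-byte fields as in ToNumber<4>.)

    // Layout reconstructed from the reads above:
    //   offset 300      : uint32 dict_offset  -> 0 means "no locale dictionary"
    //   dict_offset + 0 : uint32 dict_size
    //   dict_offset + 4 : uint32 dict_version -> expected to be 100
    //   dict_offset + 8 : dict_size bytes of payload:
    //                       15-byte greeting ("Hi from Gi1dor!"),
    //                       4-byte locale tag ("ORIG" / "PATC"),
    //                       then the orig/patch dictionaries and inactive categories.
    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct LocaleDictHeader {
        uint32_t offset = 0;   // value stored at byte 300 of the .dat file
        uint32_t size = 0;
        uint32_t version = 0;  // 100 in the format written by CommitLocales()
    };

    // Parses the three header fields out of a whole-file buffer.
    inline LocaleDictHeader ParseLocaleDictHeader(const std::vector<uint8_t>& dat) {
        auto read_u32 = [&](std::size_t pos) {
            uint32_t v = 0;
            std::memcpy(&v, dat.data() + pos, sizeof v);
            return v;
        };
        LocaleDictHeader h;
        h.offset = read_u32(300);
        if (h.offset != 0) {
            h.size = read_u32(h.offset);
            h.version = read_u32(h.offset + 4);
        }
        return h;
    }
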
@@ -746,7 +769,7 @@ namespace LOTRO_DAT {
 return SUCCESS;
 }
- int offset = 29;
+ int offset = 15;
 BinaryData current_locale_data = dicts_data.CutData(offset, offset + 4) + BinaryData("\0", 1);
 std::string locale((char *) (current_locale_data.data()));
 offset += 4;
@@ -759,7 +782,6 @@ namespace LOTRO_DAT {
 }
 current_locale_ = (locale == "PATC" ? PATCHED : ORIGINAL);
-
@@ -929,7 +951,6 @@ namespace LOTRO_DAT {
 }
 DAT_RESULT DatFile::RepairPatches(Database *db) {
-
 LOG(INFO) << "Repairing patches";
 SubfileData data;
 data = db->GetNextFile();
@@ -963,13 +984,6 @@ namespace LOTRO_DAT {
 DAT_RESULT DatFile::CommitLocales() {
 LOG(INFO) << "Committing locales...";
- SubfileData data = dictionary_[2013266257]->PrepareForExport(GetFileData(dictionary_[2013266257]));
- data.options["fid"] = "2013266257";
- data.options["ext"] = ".unknown";
-
- BinaryData old_data = BinaryData(GetFileData(dictionary_[2013266257u]));
-
-
@@ -979,52 +993,50 @@ namespace LOTRO_DAT {
- data.binary_data = BinaryData(14 + 15 + 4
+ BinaryData binary_data = BinaryData(14 + 15 + 4
 + 4 + (32 + 4) * orig_dict_.size()
 + 4 + (32 + 4) * patch_dict_.size()
 + 4 + 4 * inactive_categories.size());
 size_t current_size = 0;
- data.binary_data.Append(GetFileData(dictionary_[2013266257u]).CutData(0, 14), current_size);
- current_size += 14;
-
- data.binary_data.Append(BinaryData("Hi from Gi1dor!", 15), current_size);
+ binary_data.Append(BinaryData("Hi from Gi1dor!", 15), current_size);
 current_size += 15;
- data.binary_data.Append(BinaryData((current_locale_ == ORIGINAL ? "ORIG" : "PATC"), 4), current_size);
+ binary_data.Append(BinaryData((current_locale_ == ORIGINAL ? "ORIG" : "PATC"), 4), current_size);
 current_size += 4;
- data.binary_data.Append(BinaryData::FromNumber<4>(orig_dict_.size()), current_size);
+ binary_data.Append(BinaryData::FromNumber<4>(orig_dict_.size()), current_size);
 current_size += 4;
 for (auto file : orig_dict_) {
- data.binary_data.Append(file.second->MakeHeaderData(), current_size);
+ binary_data.Append(file.second->MakeHeaderData(), current_size);
 current_size += 32;
- data.binary_data.Append(BinaryData::FromNumber<4>(file.second->category), current_size);
+ binary_data.Append(BinaryData::FromNumber<4>(file.second->category), current_size);
 current_size += 4;
 }
- data.binary_data.Append(BinaryData::FromNumber<4>(patch_dict_.size()), current_size);
+ binary_data.Append(BinaryData::FromNumber<4>(patch_dict_.size()), current_size);
 current_size += 4;
 for (auto file : patch_dict_) {
- data.binary_data.Append(file.second->MakeHeaderData(), current_size);
+ binary_data.Append(file.second->MakeHeaderData(), current_size);
 current_size += 32;
- data.binary_data.Append(BinaryData::FromNumber<4>(file.second->category), current_size);
+ binary_data.Append(BinaryData::FromNumber<4>(file.second->category), current_size);
 current_size += 4;
 }
- data.binary_data.Append(BinaryData::FromNumber<4>(inactive_categories.size()), current_size);
+ binary_data.Append(BinaryData::FromNumber<4>(inactive_categories.size()), current_size);
 current_size += 4;
 for (auto patch_id : inactive_categories) {
- data.binary_data.Append(BinaryData::FromNumber<4>(patch_id), current_size);
+ binary_data.Append(BinaryData::FromNumber<4>(patch_id), current_size);
 current_size += 4;
 }
- DAT_RESULT result = PatchFile(data);
- if (result != SUCCESS)
- return result;
-
+ WriteData(BinaryData::FromNumber<4>(binary_data.size()), 4, file_size_);
+ WriteData(BinaryData::FromNumber<4>(100), 4, file_size_ + 4);
+ WriteData(binary_data, binary_data.size(), file_size_ + 8);
+ WriteData(BinaryData::FromNumber<4>(file_size_), 4, 300);
+ file_size_ += binary_data.size() + 8;
 LOG(INFO) << "Locales commited successfully";
 return SUCCESS;
 }
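
(Aside: reconstructed from the Append()/WriteData() calls above, the blob CommitLocales() now appends to the end of the .dat is sketched below; the 14-byte term kept from the removed header copy still appears in the size computation, so the blob seems to carry 14 bytes of slack, assuming Append() writes in place. Names below are invented.)

    // Committed layout, as written above:
    //   [4] blob size      (written at file_size_)
    //   [4] version = 100  (written at file_size_ + 4)
    //   [blob]             (written at file_size_ + 8):
    //       15  greeting "Hi from Gi1dor!"
    //        4  locale tag "ORIG" / "PATC"
    //        4  orig_dict_ count,  then per file: 32-byte header + 4-byte category
    //        4  patch_dict_ count, then per file: 32-byte header + 4-byte category
    //        4  inactive category count, then 4 bytes per category id
    //   and the 4-byte pointer at offset 300 is updated to the blob's start.
    #include <cstddef>

    inline std::size_t LocaleBlobSize(std::size_t orig_files,
                                      std::size_t patch_files,
                                      std::size_t inactive_categories) {
        return 14                              // slack kept from the removed header copy
             + 15 + 4                          // greeting + locale tag
             + 4 + (32 + 4) * orig_files       // orig dictionary
             + 4 + (32 + 4) * patch_files      // patch dictionary
             + 4 + 4 * inactive_categories;    // inactive category ids
    }
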
@@ -1095,7 +1107,18 @@ namespace LOTRO_DAT {
 }
 DAT_RESULT DatFile::CommitDirectories() {
+
+
+
+
+
+
+
+
+
 for (auto file_id : pending_dictionary_) {
+ if (dictionary_[file_id] == nullptr)
+ continue;
 WriteData(dictionary_[file_id]->MakeHeaderData(), 32, dictionary_[file_id]->dictionary_offset());
 }
 pending_dictionary_.clear();