Ivan Arkhipov 6 years ago
parent
commit
1239393675

+ 7 - 1
CHANGELOG

@@ -38,4 +38,10 @@ Version 4.1.0
 Version 4.2.0
     * Fixed critical bug, which caused .dat file corruption, if 2 different texts less than 256 bytes size were patched with different versions.
 ----------------------------------------------------------------------
-
+Version 5.0.0
+    * Changed the subfile dictionary structure, as the old one wasn't completely correct and caused problems with game updates.
+    * Changed the way patches are applied, so the CommitChanges function is no longer needed.
+    * Optimised the library code and improved its speed.
+    * The whole fragmentation journal is now removed on startup.
+    * Writes appended to the end of the .dat file now use pre-allocated buffered space, which increases patching speed.
+----------------------------------------------------------------------
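
Note on the removed CommitChanges step: since 5.0.0 a patching session only needs the patch calls themselves followed by CloseDatFile, which now commits locales, directory entries and the header. A minimal sketch of the new flow, using only calls visible in this commit (opening the .dat file and the patch database is left schematic; see src/Examples/patcher_example.cpp):

    LOTRO_DAT::DatFile file;
    // ... open the .dat file here (as in src/Examples/patcher_example.cpp) ...

    LOTRO_DAT::Database db;
    // ... open the patch database here ...

    file.PatchAllDatabase(&db);   // applies every patch; changed headers are staged in pending_dictionary_
    db.CloseDatabase();

    // CloseDatFile() now commits locales, directory entries and the header itself,
    // so the old file.CommitChanges() call is no longer needed (and no longer exists).
    file.CloseDatFile();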

+ 1 - 1
CMakeLists.txt

@@ -5,7 +5,7 @@ set(CMAKE_CXX_STANDARD 14)
 set(PROJECT_BINARY_DIR bin)
 set(PROJECT_VERSION 0.1.0)
 
-SET(CMAKE_CXX_FLAGS  "${CMAKE_CXX_FLAGS} ${GCC_COVERAGE_COMPILE_FLAGS} -O3 -Wall -Wextra" )
+SET(CMAKE_CXX_FLAGS  "${CMAKE_CXX_FLAGS} ${GCC_COVERAGE_COMPILE_FLAGS} -O3 -pipe -fomit-frame-pointer -Wall -Wextra" )
 SET(CMAKE_EXE_LINKER_FLAGS  "${CMAKE_EXE_LINKER_FLAGS} ${GCC_COVERAGE_LINK_FLAGS}")
 
 if (MSVS)

BIN
bin/LotRO_dat_extractor.exe


BIN
bin/LotRO_dat_patcher.exe


+ 32 - 19
include/DatFile.h

@@ -98,6 +98,8 @@ namespace LOTRO_DAT {
 
         ~DatFile();
 
+        // EXTRACT BASE SECTION
+
         DAT_RESULT ExtractFile(long long file_id, const std::string &path = "");
 
         DAT_RESULT ExtractFile(long long file_id, Database *db);
@@ -106,13 +108,14 @@ namespace LOTRO_DAT {
 
         int ExtractAllFilesByType(FILE_TYPE type, Database *db);
 
-        DAT_RESULT PatchFile(const char *filename, YAML::Node options);
+        // PATCH BASE SECTION
+
+        //DAT_RESULT PatchFile(const char *filename, YAML::Node options);
 
         DAT_RESULT PatchFile(const SubfileData &data);
 
         DAT_RESULT PatchAllDatabase(Database *db);
 
-        DAT_RESULT WriteUnorderedDictionary(std::string path) const;
 
         long long files_number() const;
 
@@ -120,8 +123,6 @@ namespace LOTRO_DAT {
 
         BinaryData GetFileData(const Subfile *file, long long offset = 0);
 
-        DAT_RESULT CommitChanges();
-
         DAT_RESULT CloseDatFile();
 
     private:
@@ -134,6 +135,7 @@ namespace LOTRO_DAT {
 
         DAT_RESULT MakeDictionary();
 
+        DAT_RESULT ClearFragmentationJournal();
         // READ-WRITE SECTION
 
         DAT_RESULT ReadData(BinaryData &data, long long size, long long offset = 0, long long data_offset = 0);
@@ -142,15 +144,19 @@ namespace LOTRO_DAT {
 
         // PATCH SECTION
 
-        DAT_RESULT ApplyFilePatch(Subfile *file, const BinaryData &data);
+        DAT_RESULT ApplyFilePatch(Subfile *file, BinaryData &data);
 
-        std::vector<std::pair<long long, long long> > GetFragmentationJournal();
+    private:
+        long long free_buffered_size_;
 
-        DAT_RESULT UpdateHeader();
+        const long long MAX_BUFFERED_SIZE = 50 * 1024 * 1024; // 50 megabytes;
+        const long long MIN_BUFFERED_SIZE = 5 * 1024 * 1024; // 5 megabytes;
 
-        DAT_RESULT UpdateSubdirectories();
+        void AddBufferedSize();
 
-        DAT_RESULT UpdateFragmentationJournal(const std::vector<std::pair<long long, long long> > &journal);
+        // COMMIT UPDATE SECTION
+
+        DAT_RESULT UpdateHeader();
 
         // LOCALE MANAGING SECTION
     private:
@@ -158,10 +164,25 @@ namespace LOTRO_DAT {
 
         DAT_RESULT CommitLocales();
 
+        DAT_RESULT CommitDirectories();
+
     public:
         DAT_RESULT SetLocale(LOCALE locale);
 
+        LOCALE current_locale();
+
+        // CATEGORY MANAGEMENT SECTION
+
+        DAT_RESULT EnableCategory(int category);
+
+        DAT_RESULT DisableCategory(int category);
+
+        const std::set<long long>& GetInactiveCategoriesList();
+
+        // SOME PRIOR TOOLS
+
         DAT_RESULT RepairDatFile();
+
         bool CorrectSubfile(Subfile *file);
 
         bool CheckIfUpdatedByGame();
@@ -170,15 +191,8 @@ namespace LOTRO_DAT {
 
         DAT_RESULT FinishRepairingPatches();
 
-        LOCALE current_locale();
-
-        DAT_RESULT EnableCategory(int category);
-
-        DAT_RESULT DisableCategory(int category);
-
-        const std::set<long long>& GetInactiveCategoriesList();
+        DAT_RESULT WriteUnorderedDictionary(std::string path) const;
 
-    public:
         bool CheckIfNotPatched();
 
         bool CheckIfPatchedByOldLauncher();
@@ -201,8 +215,7 @@ namespace LOTRO_DAT {
 
         SubDirectory *root_directory_;
 
-        std::unordered_set<long long> patched_list;
-
+        std::unordered_set<long long> pending_dictionary_;
         std::unordered_map<long long, Subfile *> dictionary_;
 
         long long constant1_;
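
Note on the relocated category management section: EnableCategory, DisableCategory and GetInactiveCategoriesList keep their signatures; they now stage header updates in pending_dictionary_ instead of relying on CommitChanges. A small usage sketch (the category id 100 and the opening step are placeholders, not values from the library):

    LOTRO_DAT::DatFile file;
    // ... open the .dat file here ...

    file.DisableCategory(100);   // affected subfile headers revert to the orig_dict_ values
    file.EnableCategory(100);    // patched headers are restored from patch_dict_

    for (long long category : file.GetInactiveCategoriesList())
        std::cout << "Inactive category: " << category << std::endl;

    file.CloseDatFile();         // pending dictionary entries are written out here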

+ 4 - 7
include/SubDirectory.h

@@ -27,15 +27,12 @@ namespace LOTRO_DAT
         SubDirectory(long long offset, DatFile *dat, long long max_subdirs = 63);
         ~SubDirectory();
         void MakeDictionary(std::unordered_map<long long, Subfile*> &dict);
-
-        void UpdateDirectories(std::unordered_set<long long> &patched_files, std::unordered_map<long long, Subfile*> &dict);
-
     private:
-        void MakeSubDirectories();
-        void MakeSubFiles();
+        bool MakeSubDirectories();
+        bool MakeSubFiles();
 
-        Subfile* MakeSubfile(long long dictionary_offset, long long fragments_count, long long unknown1, long long file_id, long long file_offset,
-                             long long file_size, long long timestamp, long long version, long long block_size);
+        Subfile* MakeSubfile(long long dictionary_offset, long long unknown1, long long file_id, long long file_offset,
+                             long long file_size, long long timestamp, long long version, long long block_size, long long unknown2);
         FILE_TYPE GetSubfileType(long long file_id, long long file_offset) const;
 
         DatFile *dat_;

+ 2 - 2
include/Subfile.h

@@ -41,7 +41,6 @@ namespace LOTRO_DAT
 		BinaryData MakeHeaderData() const;
 
 		long long dictionary_offset() const;
-        long long fragments_count() const;
         long long unknown1() const;
         long long file_id() const;
         long long file_offset() const;
@@ -49,6 +48,7 @@ namespace LOTRO_DAT
         long long timestamp() const;
         long long version() const;
         long long block_size() const;
+		long long unknown2() const;
 
         long long category;
 
@@ -56,7 +56,6 @@ namespace LOTRO_DAT
 		DatFile *dat_;
         long long dictionary_offset_;
 
-		long long fragments_count_;
         long long unknown1_;
         long long file_id_;
         long long file_offset_;
@@ -64,6 +63,7 @@ namespace LOTRO_DAT
         long long timestamp_;
         long long version_;
         long long block_size_;
+		long long unknown2_;
 	};
 }
 };

BIN
lib/libLotroDat.dll.a


BIN
lib/libLotroDat_static.a


+ 121 - 196
src/DatFile.cpp

@@ -13,11 +13,11 @@
 #include <EasyLogging++/easylogging++.h>
 #include <unistd.h>
 
+#include <locale>
+
 #define ELPP_FEATURE_CRASH_LOG
 INITIALIZE_EASYLOGGINGPP
 
-#include <locale>
-
 #ifdef WIN32
 #define fseek _fseeki64
 #define ftell _ftelli64
@@ -31,6 +31,7 @@ namespace LOTRO_DAT {
         dat_state_ = CLOSED;
         root_directory_ = nullptr;
         file_handler_ = nullptr;
+        free_buffered_size_ = 0;
 
         el::Configurations defaultConf;
         defaultConf.setToDefault();
@@ -71,6 +72,7 @@ namespace LOTRO_DAT {
         current_locale_ = ORIGINAL;
         root_directory_ = nullptr;
         file_handler_ = nullptr;
+        free_buffered_size_ = 0;
         filename_ = "none";
 
         DAT_RESULT result;
@@ -116,14 +118,20 @@ namespace LOTRO_DAT {
         }
         return_value = std::max(return_value, result);
 
+        LOG(INFO) << "File " << filename << " opened successfully!";
+        filename_ = filename;
+        dat_state_ = READY;
+
+        LOG(INFO) << "Making last preparations...";
+        return_value = std::max(return_value, result);
+
         if (return_value >= 2) {
-            LOG(WARNING) << "Dat file is corrupted. Trying to delete corrupted dictionary rows";
+            LOG(WARNING) << "Dat file could be corrupted. Trying to delete corrupted dictionary rows";
             if (RepairDatFile() != SUCCESS)
                 return CRITICAL_DAT_ERROR;
         }
-        LOG(INFO) << "File " << filename << " opened successfully!";
-        filename_ = filename;
-        dat_state_ = READY;
+
+        LOG(INFO) << "Preparations made successfully! Init return value = " << return_value;
         return return_value;
     }
 
@@ -270,48 +278,6 @@ namespace LOTRO_DAT {
         return success;
     }
 
-    // TODO: Write description and make asserts
-    DAT_RESULT DatFile::PatchFile(const char *filename, YAML::Node options) {
-        LOG(DEBUG) << "Patching file with filename" << filename << " and id = " << options["fid"].as<long long>();
-        if (dat_state_ < READY) {
-            LOG(ERROR) << "Dat state isn't READY. Cannot patch.";
-            return INCORRECT_STATE_ERROR;
-        }
-
-        if (options["did"].IsDefined() && options["did"].as<int>() != dat_id_)
-            return INCORRECT_DAT_ID;
-
-        BinaryData data;
-        data.ReadFromFile(filename);
-
-        auto file_id = options["fid"].as<long long>();
-
-        if (dictionary_[file_id] == nullptr) {
-            LOG(ERROR) << "Cannot patch file - there is no file in dictionary with file_id = " << file_id;
-            return NO_FILE_ERROR;
-        }
-
-        BinaryData old_data = GetFileData(dictionary_[file_id]);
-        if (old_data.Empty()) {
-            LOG(ERROR) << "GetFileData returned empty data. Aborting.";
-            return DAT_PATCH_FILE_ERROR;
-        }
-
-        data = dictionary_[file_id]->MakeForImport(old_data, SubfileData(data, u"", options));
-
-        try {
-            DAT_RESULT result = ApplyFilePatch(dictionary_[file_id], data);
-            if (result != SUCCESS)
-                return result;
-        } catch (std::exception &e) {
-            LOG(ERROR) << "Caught " << e.what() << " exception.";
-            return FAILED;
-        }
-        LOG(DEBUG) << "Successfully patched file with filename = " << filename << " and id = "
-                   << options["fid"].as<long long>();
-        return SUCCESS;
-    }
-
     // TODO: Write description and make asserts
     DAT_RESULT DatFile::PatchFile(const SubfileData &data) {
         LOG(DEBUG) << "Patching file with id = " << data.options["fid"].as<long long>() << ".";
@@ -358,14 +324,10 @@ namespace LOTRO_DAT {
         }
 
         BinaryData patch_data = file->MakeForImport(old_data, data);
-        try {
-            DAT_RESULT result = ApplyFilePatch(file, patch_data);
-            if (result != SUCCESS)
-                return result;
-        } catch (std::exception &e) {
-            LOG(ERROR) << "Caught " << e.what() << " exception";
-            return FAILED;
-        }
+        DAT_RESULT result = ApplyFilePatch(file, patch_data);
+        if (result != SUCCESS)
+            return result;
+
         LOG(DEBUG) << "Patched successfully file " << data.options["fid"].as<long long>() << ".";
         return SUCCESS;
     }
@@ -387,9 +349,6 @@ namespace LOTRO_DAT {
                 LOG(ERROR) << "Cannot patch file" << data.options["fid"].as<long long>() << " continuing";
             data = db->GetNextFile();
         }
-        DAT_RESULT result = CommitChanges();
-        if (result != SUCCESS)
-            return result;
         LOG(INFO) << "Successfully patched whole database";
         return SUCCESS;
     }
@@ -408,10 +367,11 @@ namespace LOTRO_DAT {
             return WRITE_TO_FILE_ERROR;
         }
 
-        fprintf(f, "file_id offset size size2 extension\n");
+        fprintf(f, "unk1 file_id offset size1 timestamp version size2 unknown2 type\n");
         for (auto i : dictionary_) {
-            fprintf(f, "%lld %lld %lld %lld %s\n", i.second->file_id(), i.second->file_offset(), i.second->file_size(),
-                    i.second->block_size(), i.second->Extension().c_str());
+            fprintf(f, "%lld %lld %lld %lld %lld %lld %lld %lld %s\n", i.second->unknown1(), i.second->file_id(),
+                    i.second->file_offset(), i.second->file_size(), i.second->timestamp(), i.second->version(),
+                    i.second->block_size(), i.second->unknown2(), i.second->Extension().c_str());
         }
         fclose(f);
         LOG(INFO) << "Unordered dictionary was written successfully to " << path << "dict.txt";
@@ -430,54 +390,50 @@ namespace LOTRO_DAT {
     // TODO: ASSERTS
     BinaryData DatFile::GetFileData(const Subfile *file, long long int offset) {
         LOG(DEBUG) << "Getting file " << file->file_id() << " data";
-        try {
-            BinaryData mfile_id(20);
-            ReadData(mfile_id, 20, file->file_offset() + 8);
-            if (mfile_id.Empty()) {
-                LOG(ERROR) << "Error while reading file " << file->file_id() << " header (offset = "
-                           << file->file_offset() << "); Aborting.";
-                return BinaryData(0);
-            }
-            if (!mfile_id.CheckCompression() && file->file_id() != mfile_id.ToNumber<4>(0)) {
-                LOG(ERROR) << "Bad DatFile::GetFileData() - file_id in Subfile ("
-                           << file->file_id()
-                           << ") doesn't match to file_id (" << mfile_id.ToNumber<4>(0) << ")in DatFile.";
-                return BinaryData(0);
-            }
+        BinaryData mfile_id(20);
+        ReadData(mfile_id, 20, file->file_offset() + 8);
+        if (mfile_id.Empty()) {
+            LOG(ERROR) << "Error while reading file " << file->file_id() << " header (offset = "
+                       << file->file_offset() << "); Aborting.";
+            return BinaryData(0);
+        }
 
-            BinaryData data((unsigned) (file->file_size() + (8 - offset)));
-            if (file->block_size() >= file->file_size() + 8) {
-                ReadData(data, file->file_size() + (8 - offset), file->file_offset() + offset);
-                return data;
-            }
+        if (!mfile_id.CheckCompression() && file->file_id() != mfile_id.ToNumber<4>(0)) {
+            LOG(ERROR) << "Bad DatFile::GetFileData() - file_id in Subfile ("
+                       << file->file_id()
+                       << ") doesn't match file_id (" << mfile_id.ToNumber<4>(0) << ") in DatFile.";
+            return BinaryData(0);
+        }
 
-            BinaryData fragments_count(4);
-            ReadData(fragments_count, 4, file->file_offset());
+        BinaryData data((unsigned)(file->file_size() + (8 - offset)));
+        if (file->block_size() >= file->file_size() + 8) {
+            ReadData(data, file->file_size() + (8 - offset), file->file_offset() + offset);
+            return data;
+        }
 
-            long long fragments_number = fragments_count.ToNumber<4>(0);
+        BinaryData fragments_count(4);
+        ReadData(fragments_count, 4, file->file_offset());
 
-            long long current_block_size = file->block_size() - offset - 8 * fragments_number;
+        long long fragments_number = fragments_count.ToNumber<4>(0);
 
-            ReadData(data, current_block_size, file->file_offset() + offset);
+        long long current_block_size = file->block_size() - offset - 8 * fragments_number;
 
-            BinaryData FragmentsDictionary(8 * unsigned(fragments_number));
-            ReadData(FragmentsDictionary, 8 * unsigned(fragments_number),
-                     file->file_offset() + file->block_size() - 8 * fragments_number);
+        ReadData(data, current_block_size, file->file_offset() + offset);
 
+        BinaryData FragmentsDictionary(8 * unsigned(fragments_number));
+        ReadData(FragmentsDictionary, 8 * unsigned(fragments_number),
+                 file->file_offset() + file->block_size() - 8 * fragments_number);
 
-            for (long long i = 0; i < fragments_number; i++) {
-                long long fragment_size = FragmentsDictionary.ToNumber<4>(8 * i);
-                long long fragment_offset = FragmentsDictionary.ToNumber<4>(8 * i + 4);
-                ReadData(data, std::min(fragment_size, file->file_size() - current_block_size), fragment_offset,
-                         current_block_size);
-                current_block_size += fragment_size;
-            }
-            LOG(DEBUG) << "Successfully got file " << file->file_id() << " data";
-            return data;
-        } catch (std::exception &e) {
-            LOG(ERROR) << "Caught " << e.what() << " exception";
+
+        for (long long i = 0; i < fragments_number; i++) {
+            long long fragment_size = FragmentsDictionary.ToNumber<4>(8 * i);
+            long long fragment_offset = FragmentsDictionary.ToNumber<4>(8 * i + 4);
+            ReadData(data, std::min(fragment_size, file->file_size() - current_block_size), fragment_offset,
+                     current_block_size);
+            current_block_size += fragment_size;
         }
-        return BinaryData(0);
+        LOG(DEBUG) << "Successfully got file " << file->file_id() << " data";
+        return data;
     }
 
     /// DatFile special functions for opening and reading/writing raw data.
@@ -553,6 +509,7 @@ namespace LOTRO_DAT {
 
         root_directory_ = new SubDirectory((unsigned) root_directory_offset_, this);
         dat_state_ = SUCCESS_DIRECTORIES;
+
         LOG(DEBUG) << "Directories made successfully";
         return SUCCESS;
     }
@@ -619,19 +576,16 @@ namespace LOTRO_DAT {
 
     /// Special functions used by patch process.
     /// Shouldn't be used by any external class.
-    DAT_RESULT DatFile::ApplyFilePatch(Subfile *file, const BinaryData &data) {
+
+    DAT_RESULT DatFile::ApplyFilePatch(Subfile *file, BinaryData &data) {
         LOG(DEBUG) << "Applying " << file->file_id() << " patch.";
+
         if (data.Empty()) {
             LOG(ERROR) << "Error caused during making file for import. Cannot patch file " << file->file_id();
             return FAILED;
         }
 
         auto file_id = file->file_id();
-        if (patched_list.count(file_id) != 0) {
-            LOG(WARNING) << "Warning: DatFile::ApplyFilePatch - found 2 files in patch with the same file_id = "
-                         << file->file_id() << " Passing last...";
-            return DUBLICATE_PATCH_FILES_ERROR;
-        }
 
         if (current_locale() != PATCHED && file_id != 2013266257) {
             LOG(INFO) << "Changing locale to PATCHED(RU) in order to patch file";
@@ -644,55 +598,42 @@ namespace LOTRO_DAT {
             orig_dict_[file_id] = new Subfile(this, file->MakeHeaderData());
         }
 
-        auto journal = GetFragmentationJournal();
 
-        if ((patch_dict_.count(file_id) == 0 && file_id != 2013266257)
-            || data.size() > file->block_size() || file->file_size() + 8 > file->block_size()) {
-            if (journal[0].second != file_size_) {
-                journal[0].second = file_size_;
-            }
+        if ((patch_dict_.count(file_id) == 0 && file_id != 2013266257) || data.size() > file->block_size()
+            || file->file_size() + 8 > file->block_size()) {
 
-            file->file_size_ = data.size() - 8;
-            file->file_offset_ = journal[0].second;
+            file->file_offset_ = file_size_;
             file->block_size_ = std::max((long long)data.size(), file->block_size_);
 
-            journal[0].second += file->block_size_;
-
-            BinaryData nulls((unsigned)file->block_size_);
-            WriteData(nulls, nulls.size(), file_size_);
+            free_buffered_size_ = std::max(0ll, free_buffered_size_ - file->block_size_);
+            AddBufferedSize();
 
             this->file_size_ += file->block_size_;
         }
 
         file->file_size_ = data.size() - 8;
-        file->block_size_ = std::max(file->block_size_, file->file_size_ + 8);
-
-        BinaryData fragments_count(4);
-        fragments_count = BinaryData::FromNumber<4>(0);
 
-        BinaryData file_data = fragments_count + data.CutData(4);
+        data.Append(BinaryData::FromNumber<4>(0), 0); // set additional fragments count to zero
 
-        if (file_id != file_data.ToNumber<4>(8)) {
+        if (file_id != data.ToNumber<4>(8)) {
             LOG(ERROR) << "Created data's file_id doesn't match to original! Patch wasn't written to .dat file";
             return INCORRECT_PATCH_FILE;
         }
 
-        WriteData(file_data, file_data.size(), file->file_offset());
-
-        patched_list.insert(file_id);
+        WriteData(data, data.size(), file->file_offset());
 
+        patch_dict_.erase(file_id); // Removed the old value from the Russian dictionary
         if (file_id != 2013266257) {
-            patch_dict_.erase(file_id); // Removed the old value from the Russian dictionary
+            patch_dict_[file_id] = new Subfile(this, file->MakeHeaderData()); // Created the new value
         }
 
         // If category is forbidden, then return file header data to original state
         if (inactive_categories.count(file->category) != 0) {
-            dictionary_[file_id]->file_offset_ = orig_dict_[file_id]->file_offset_;
-            dictionary_[file_id]->file_size_ = orig_dict_[file_id]->file_size_;
-            dictionary_[file_id]->block_size_ = orig_dict_[file_id]->block_size_;
-            dictionary_[file_id]->timestamp_ = orig_dict_[file_id]->timestamp_;
-            dictionary_[file_id]->version_ = orig_dict_[file_id]->version_;
+            file->file_offset_ = orig_dict_[file_id]->file_offset_;
+            file->file_size_ = orig_dict_[file_id]->file_size_;
+            file->block_size_ = orig_dict_[file_id]->block_size_;
+            file->timestamp_ = orig_dict_[file_id]->timestamp_;
+            file->version_ = orig_dict_[file_id]->version_;
         }
 
         if (orig_dict_.count(file_id) != 0 && file_id != 2013266257)
@@ -700,31 +641,36 @@ namespace LOTRO_DAT {
         if (patch_dict_.count(file_id) != 0 && file_id != 2013266257)
             patch_dict_[file_id]->category = file->category;
 
-        UpdateFragmentationJournal(journal);
+
+        // Applying file info in directory
+        pending_dictionary_.insert(file_id);
+
         LOG(DEBUG) << "Successfully applied file " << file->file_id() << " patch.";
         return SUCCESS;
     }
 
-    DAT_RESULT DatFile::UpdateSubdirectories() {
-        // TODO: asserts
-        LOG(DEBUG) << "Started updating subdirectories";
-        root_directory_->UpdateDirectories(patched_list, dictionary_);
-        LOG(DEBUG) << "Finished updating subdirectories";
-        return SUCCESS;
-    }
+    DAT_RESULT DatFile::ClearFragmentationJournal() {
+        LOG(DEBUG) << "Clearing fragmentation journal";
+
+        long long offset = 0;
+        BinaryData data(32);
+        DAT_RESULT res = ReadData(data, 32, fragmentation_journal_offset_ + 8 + offset);
 
-    std::vector<std::pair<long long, long long> > DatFile::GetFragmentationJournal() {
-        LOG(DEBUG) << "Getting fragmentation journal";
-        BinaryData data(8);
-        DAT_RESULT res = ReadData(data, 8, fragmentation_journal_offset_ + 8);
-        std::vector<std::pair<long long, long long> > result;
         if (res != SUCCESS) {
             LOG(ERROR) << "Error " << res << " while reading data";
-            return result;
+            return FAILED;
         }
-        result.emplace_back(std::make_pair(data.ToNumber<4>(0), data.ToNumber<4>(4)));
+
+        BinaryData nulls = BinaryData(32);
+
+        while (data != nulls && !data.Empty()) {
+            WriteData(nulls, 32, fragmentation_journal_offset_ + 8 + offset);
+            offset += 32;
+            ReadData(data, 32, fragmentation_journal_offset_ + 8 + offset);
+        }
+        //fragmentation_journal_.emplace_back(std::make_pair(data.ToNumber<4>(0), data.ToNumber<4>(4)));
         LOG(DEBUG) << "Finished getting fragmentation journal";
-        return result;
+        return SUCCESS;
     }
 
     DAT_RESULT DatFile::UpdateHeader() {
@@ -740,42 +686,6 @@ namespace LOTRO_DAT {
         return SUCCESS;
     }
 
-    DAT_RESULT DatFile::UpdateFragmentationJournal(const std::vector<std::pair<long long, long long> > &journal) {
-        LOG(DEBUG) << "Updating fragmentation journal";
-        for (unsigned i = 0; i < journal.size(); i++) {
-            long long size = journal[i].first;
-            long long offset = journal[i].second;
-
-            WriteData(BinaryData::FromNumber<4>(size), 4, fragmentation_journal_offset_ + 8 * (i + 1));
-            WriteData(BinaryData::FromNumber<4>(offset), 4, fragmentation_journal_offset_ + 8 * (i + 1) + 4);
-        }
-        LOG(DEBUG) << "Finished updating fragmentation journal";
-        return SUCCESS;
-    }
-
-    DAT_RESULT DatFile::CommitChanges() {
-        LOG(INFO) << "Started commiting changes";
-        if (dat_state_ != UPDATED) {
-            LOG(DEBUG) << "Commiting changes to file with state != UPDATED. Nothing to do";
-            return SUCCESS;
-        }
-
-        LOG(INFO) << "There are some updated files. Rewriting dictionary...";
-        CommitLocales();
-
-        auto journal = GetFragmentationJournal();
-        UpdateFragmentationJournal(journal);
-
-        UpdateHeader();
-        UpdateSubdirectories();
-        LOG(INFO) << "Changed " << patched_list.size() << " files...";
-
-        patched_list.clear();
-        dat_state_ = READY;
-        LOG(INFO) << "Done Commiting changes!";
-        return SUCCESS;
-    }
-
     DAT_RESULT DatFile::CloseDatFile() {
         LOG(INFO) << "Closing DatFile";
         if (dat_state_ == CLOSED) {
@@ -783,10 +693,13 @@ namespace LOTRO_DAT {
             return SUCCESS;
         }
 
-        CommitChanges();
+        // Committing changes and updating/writing locales and header info
+        CommitLocales();
+        CommitDirectories();
+        UpdateHeader();
+        ClearFragmentationJournal();
 
         orig_dict_.clear();
-        patched_list.clear();
         pending_patch_.clear();
 
         current_locale_ = ORIGINAL;
@@ -798,7 +711,7 @@ namespace LOTRO_DAT {
         delete root_directory_;
 
         dictionary_.clear();
-        patched_list.clear();
+        free_buffered_size_ = 0;
 
         truncate64(filename_.c_str(), file_size_);
         filename_ = "none";
@@ -943,7 +856,6 @@ namespace LOTRO_DAT {
         return SUCCESS;
     }
 
-
     DAT_RESULT DatFile::SetLocale(LOCALE locale) {
         LOG(INFO) << "Setting locale to " << (locale == PATCHED ? " PATCHED" : " ORIGINAL");
         if (dat_state_ < READY) {
@@ -978,12 +890,11 @@ namespace LOTRO_DAT {
             dictionary_[file_id]->timestamp_ = new_file->timestamp_;
             dictionary_[file_id]->version_ = new_file->version_;
 
-            patched_list.insert(file.first);
+            pending_dictionary_.insert(file_id);
             dat_state_ = UPDATED;
         }
 
         current_locale_ = locale;
-        CommitChanges();
         LOG(INFO) << "Locale set successfull";
         return SUCCESS;
     }
@@ -1013,7 +924,6 @@ namespace LOTRO_DAT {
                 dat_state_ = UPDATED;
             }
         }
-        CommitChanges();
         LOG(INFO) << "Dat file " << (updated ? "WAS " : "WASN'T ") << "updated by game.";
         return updated;
     }
@@ -1029,7 +939,6 @@ namespace LOTRO_DAT {
             }
             data = db->GetNextFile();
         }
-        CommitChanges();
         LOG(INFO) << "Successfully repaired with database";
         return SUCCESS;
     }
@@ -1136,7 +1045,7 @@ namespace LOTRO_DAT {
                 file.second->block_size_ = patch_dict_[file_id]->block_size_;
                 file.second->timestamp_ = patch_dict_[file_id]->timestamp_;
                 file.second->version_ = patch_dict_[file_id]->version_;
-                patched_list.insert(file_id);
+                pending_dictionary_.insert(file_id);
             }
         }
         LOG(INFO) << "Category " << category << " enabled successfully";
@@ -1158,7 +1067,7 @@ namespace LOTRO_DAT {
                 file.second->block_size_ = orig_dict_[file_id]->block_size_;
                 file.second->timestamp_ = orig_dict_[file_id]->timestamp_;
                 file.second->version_ = orig_dict_[file_id]->version_;
-                patched_list.insert(file_id);
+                pending_dictionary_.insert(file_id);
             }
         }
         LOG(INFO) << "Category " << category << " disabled successfully";
@@ -1184,5 +1093,21 @@ namespace LOTRO_DAT {
     const std::string &DatFile::filename() const {
         return filename_;
     }
+
+    DAT_RESULT DatFile::CommitDirectories() {
+        for (auto file_id : pending_dictionary_) {
+            WriteData(dictionary_[file_id]->MakeHeaderData(), 32, dictionary_[file_id]->dictionary_offset());
+        }
+        pending_dictionary_.clear();
+        return SUCCESS;
+    }
+
+    void DatFile::AddBufferedSize() {
+        if (free_buffered_size_ >= MIN_BUFFERED_SIZE)
+            return;
+        BinaryData nulls(MAX_BUFFERED_SIZE);
+        WriteData(nulls, MAX_BUFFERED_SIZE, file_size_);
+        free_buffered_size_ = MAX_BUFFERED_SIZE;
+    }
 }
 }
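
Note on the buffered appends used above: instead of writing a nulls block for every relocated subfile, ApplyFilePatch now consumes space from a zero-filled reservation at the end of the .dat file, and AddBufferedSize tops the reservation up to MAX_BUFFERED_SIZE whenever less than MIN_BUFFERED_SIZE remains. A standalone, simplified sketch of that bookkeeping (the real WriteData call is only hinted at in a comment; names are illustrative):

    #include <algorithm>
    #include <cstdio>

    constexpr long long kMaxBuffered = 50 * 1024 * 1024;  // mirrors MAX_BUFFERED_SIZE
    constexpr long long kMinBuffered = 5 * 1024 * 1024;   // mirrors MIN_BUFFERED_SIZE

    struct AppendBuffer {
        long long file_size = 0;       // current end of the .dat file
        long long free_buffered = 0;   // zero-filled bytes already reserved at the end

        long long Reserve(long long block_size) {
            long long offset = file_size;  // the relocated subfile will be written here
            free_buffered = std::max(0LL, free_buffered - block_size);
            if (free_buffered < kMinBuffered) {
                // In DatFile this is WriteData(BinaryData(kMaxBuffered), kMaxBuffered, file_size_);
                std::printf("reserving %lld zero bytes at offset %lld\n", kMaxBuffered, file_size);
                free_buffered = kMaxBuffered;
            }
            file_size += block_size;
            return offset;
        }
    };

    int main() {
        AppendBuffer buf;
        buf.Reserve(4096);       // the first call triggers a full reservation
        buf.Reserve(10 * 1024);  // later small blocks reuse the reserved space
        return 0;
    }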

+ 1 - 1
src/Examples/extractor_example.cpp

@@ -31,7 +31,7 @@ bool exportUnknownToDb = false;
 // There is no need to change anything else
 
 int main() {
-    std::cout << "Gi1dor's LotRO .dat extractor ver. 4.2.0" << std::endl;
+    std::cout << "Gi1dor's LotRO .dat extractor ver. 5.0.0" << std::endl;
 
     std::cout << "Hello! I'm a basic shell version of .dat file extractor. I can open .dat file directly, "
             "if you write path to it (with name of file) in file \"dat_file_path.txt\"\n";

+ 1 - 4
src/Examples/patcher_example.cpp

@@ -15,7 +15,7 @@ using namespace LOTRO_DAT;
 using namespace std;
 
 int main() {
-    std::cout << "Gi1dor's LotRO .dat patcher ver. 4.2.0" << std::endl;
+    std::cout << "Gi1dor's LotRO .dat patcher ver. 5.0.0" << std::endl;
     freopen("patcher_errors.log", "w", stderr);
 
     setbuf(stdout, nullptr);
@@ -136,7 +136,6 @@ int main() {
                 }
 
                 db.CloseDatabase();
-                file.CommitChanges();
 
                 fprintf(stdout, "Spent %f seconds on patching! Thank you for your patience!\n",
                         float(clock() - begin_time) / CLOCKS_PER_SEC);
@@ -162,7 +161,6 @@ int main() {
             std::cin >> category_id;
             file.EnableCategory(category_id);
             std::cout << "Category successfully enabled!" << std::endl;
-            file.CommitChanges();
         }
 
         if (cmd == 5) {
@@ -171,7 +169,6 @@ int main() {
             std::cin >> category_id;
             file.DisableCategory(category_id);
             std::cout << "Category successfully disabled!" << std::endl;
-            file.CommitChanges();
         }
 
         if (cmd == 6) {

+ 84 - 105
src/SubDirectory.cpp

@@ -29,162 +29,141 @@ namespace LOTRO_DAT {
 
     SubDirectory::SubDirectory(long long offset, DatFile *dat, long long max_subdirs) :
             dat_(dat), offset_(offset), max_subdirs_(max_subdirs) {
-        LOG(DEBUG) << "Initialising SubDirectory";
-        try {
-            LOG(DEBUG) << "Processing SubDirectories";
-            MakeSubDirectories();
-        } catch (std::exception &e) {
-            LOG(ERROR) << "Caught " << e.what() << " exception.";
-            LOG(WARNING) << "Unable to initialize directory at offset " << offset << ". Initializing it as empty directory...";
+        LOG(DEBUG) << "Initialising " << offset_ << " SubDirectory";
+        LOG(DEBUG) << "Processing SubDirectories";
 
-            subdirs_.clear();
+        if (!MakeSubDirectories()) {
             subfiles_.clear();
+            subdirs_.clear();
             return;
         }
 
-        try {
-            LOG(DEBUG) << "Processing SubFiles";
-            MakeSubFiles();
-        } catch (std::exception &e) {
-            LOG(ERROR) << "Caught " << e.what() << " exception.";
-            LOG(WARNING) << "Unable to initialize directory at offset " << offset << ". Initializing it as empty directory...";
-
-            subdirs_.clear();
+        LOG(DEBUG) << "Processing SubFiles";
+        if (!MakeSubFiles()) {
             subfiles_.clear();
+            subdirs_.clear();
             return;
         }
+        LOG(DEBUG) << "SubDirectory " << offset_ << " initialized successfully";
     }
 
-    void SubDirectory::MakeSubDirectories() {
+    bool SubDirectory::MakeSubDirectories() {
         BinaryData data(1024);
         dat_->ReadData(data, 63 * 8, offset_);
+
+        if (data.Empty()) {
+            LOG(ERROR) << "(READ ERROR) Incorrect directory at offset " << offset_;
+            return false;
+        }
+
         if (data.ToNumber<4>(0) != 0 || data.ToNumber<4>(4) != 0) {
             LOG(DEBUG) << "first 8 bytes are not equal to 0 at offset " << offset_;
-            std::string err =
-                    std::string("Bad SubDirectory::MakeSubDirectories - first 8 bytes are not equal to 0 at offset ")
-                    + std::to_string(offset_);
-            throw DatException(err.c_str(), SUBDIR_EXCEPTION);
+            return false;
         }
 
         for (unsigned int i = 8; i < 63 * 8; i += 8) {
             if (data.ToNumber<4>(i) == 0 || data.ToNumber<4>(i + 4) == 0)
                 break;
 
-            try {
-                SubDirectory *subdir = new SubDirectory(data.ToNumber<4>(i + 4), dat_);
-                if (subdir->subfiles_.empty() && subdir->subdirs_.empty())
-                    LOG(WARNING) << "Cannot initialize sub-subdirectory. Dictionary offset = " << offset_ + i << ";";
-                else
-                    subdirs_.push_back(subdir);
-            } catch (std::exception &e) {
-                LOG(ERROR) << "Caught " << e.what() << " exception.";
-                LOG(DEBUG) << "Making SubDirectory at offset " << data.ToNumber<4>(i + 4) << " failed, continuing";
-            }
+            SubDirectory *subdir = new SubDirectory(data.ToNumber<4>(i + 4), dat_);
+
+            if (subdir->subfiles_.empty() && subdir->subdirs_.empty()) {
+                LOG(WARNING) << "Sub-subdirectory is empty or made empty... Dictionary offset = " << offset_ + i << "; Passing others";
+                break;
+            } else
+                subdirs_.push_back(subdir);
         }
+        return true;
     }
 
-    void SubDirectory::MakeSubFiles() {
-        try {
-            BinaryData data(2048);
-            dat_->ReadData(data, 64 * 32, offset_ + 63 * 8);
-
-            for (unsigned int i = 0; i < 61 * 32; i += 32) {
-                if (data.ToNumber<4>(i + 8) < 0x32 || data.ToNumber<4>(i + 12) < 0x32)
-                    continue;
-
-                BinaryData mfile_id(20);
-                dat_->ReadData(mfile_id, 20, data.ToNumber<4>(i + 12) + 8);
-                if (mfile_id.Empty() || (!mfile_id.CheckCompression() && data.ToNumber<4>(i + 8) != mfile_id.ToNumber<4>(0))) {
-                    LOG(DEBUG) << "File id in file doesn't match to file_id in dictionary (offset = " << data.ToNumber<4>(i + 12) << ")";
-                    continue;
-                }
-
-                subfiles_.push_back(
-                        MakeSubfile(
-                                offset_ + 63 * 8 + i,
-                                data.ToNumber<4>(i), // fragments_count
-                                data.ToNumber<4>(i + 4), // unknown1
-                                data.ToNumber<4>(i + 8), // file_id
-                                data.ToNumber<4>(i + 12), // file_offset
-                                data.ToNumber<4>(i + 16), // block_size
-                                data.ToNumber<4>(i + 20), // timestamp
-                                data.ToNumber<4>(i + 24), // version
-                                data.ToNumber<4>(i + 28) // block_size
-                        )
-                );
-            }
-        } catch (std::exception &e) {
-            LOG(ERROR) << "Caught exception " << e.what();
-            subdirs_.clear();
-            subfiles_.clear();
+    bool SubDirectory::MakeSubFiles() {
+        BinaryData data = BinaryData(4);
+        dat_->ReadData(data, 4, offset_ + 63 * 8);
+
+        if (data.Empty()) {
+            LOG(ERROR) << "(READ ERROR) Incorrect directory at offset " << offset_;
+            return false;
         }
-    }
 
-    void SubDirectory::MakeDictionary(std::unordered_map<long long, Subfile *> &dict) {
-        for (Subfile *i : subfiles_) {
-            if (dict.count(i->file_id()) == 0 || dict[i->file_id()]->timestamp() < i->timestamp())
-                dict[i->file_id()] = i;
+        auto subfiles_number = data.ToNumber<4>(0);
+        if (subfiles_number >= 64) {
+            LOG(ERROR) << "Incorrect directory at offset " << offset_;
+            return false;
         }
 
-        for (SubDirectory *i : subdirs_)
-            i->MakeDictionary(dict);
-    }
+        for (int i = 0; i < subfiles_number; i++) {
+            BinaryData header(32);
+            dat_->ReadData(header, 32, offset_ + 63 * 8 + 4 + 32 * i);
 
-    void SubDirectory::UpdateDirectories(std::unordered_set<long long> &patched_files, std::unordered_map<long long, Subfile*> &dict) {
-        for (auto subfile : subfiles_) {
-            long long file_id = subfile->file_id();
-            if (!dat_->CorrectSubfile(subfile)) {
-                LOG(ERROR) << "While updating directories encountered incorrect subfile with id = "
-                           << file_id << " (offset = " << subfile->file_offset() << ").";
-                //dat_->WriteData(BinaryData::FromNumber<4>(62079999), 4, subfile->dictionary_offset() + 8);
-                continue;
+            if (header.Empty()) {
+                LOG(ERROR) << "(READ ERROR) Incorrect directory at offset " << offset_;
+                return false;
             }
 
-            if (patched_files.count(file_id) != 0) {
-                BinaryData data(32);
-                dat_->ReadData(data, 32, subfile->dictionary_offset());
+            if (header.ToNumber<4>(20) == 0 || header.ToNumber<4>(28) != 0)
+                continue;
 
-                auto new_subfile = dict[file_id];
+            subfiles_.push_back(
+                    MakeSubfile(
+                            offset_ + 63 * 8 + 4 + 32 * i,
+                            header.ToNumber<4>(0), // unknown1
+                            header.ToNumber<4>(4), // file_id
+                            header.ToNumber<4>(8), // file_offset
+                            header.ToNumber<4>(12), // file_size
+                            header.ToNumber<4>(16), // timestamp
+                            header.ToNumber<4>(20), // version
+                            header.ToNumber<4>(24), // block_size
+                            header.ToNumber<4>(28) // unknown2 - must be zero??
+                    )
+            );
+
+            if (!dat_->CorrectSubfile(subfiles_.back())) {
+                LOG(WARNING) << "Incorrect Subfile in directory at offset " << offset_ + 63 * 8 + 4 + 32 * i;
+                subfiles_.pop_back();
+                //break;
+            }
+        }
+        return true;
+    }
 
-                //dat_->WriteData(BinaryData::FromNumber<4>(new_subfile->fragments_count()), 4, subfile->dictionary_offset());
-                //dat_->WriteData(BinaryData::FromNumber<4>(new_subfile->unknown1()), 4, subfile->dictionary_offset() + 4);
-                //dat_->WriteData(BinaryData::FromNumber<4>(new_subfile->file_id()), 4, subfile->dictionary_offset() + 8);
-                dat_->WriteData(BinaryData::FromNumber<4>(new_subfile->file_offset()), 4, subfile->dictionary_offset() + 12);
-                dat_->WriteData(BinaryData::FromNumber<4>(new_subfile->file_size()), 4, subfile->dictionary_offset() + 16);
-                //dat_->WriteData(BinaryData::FromNumber<4>(new_subfile->timestamp()), 4, subfile->dictionary_offset() + 20);
-                //dat_->WriteData(BinaryData::FromNumber<4>(new_subfile->version()), 4, subfile->dictionary_offset() + 24);
-                dat_->WriteData(BinaryData::FromNumber<4>(new_subfile ->block_size()), 4, subfile->dictionary_offset() + 28);
+    void SubDirectory::MakeDictionary(std::unordered_map<long long, Subfile *> &dict) {
+        for (Subfile *i : subfiles_) {
+            if (dict.count(i->file_id()) != 0) {
+                LOG(WARNING) << "Found multiple instances of file " << i->file_id() << " at dictionary offset "
+                             << i->dictionary_offset() << ". Base offset = " << dict[i->file_id()]->dictionary_offset();
+                continue;
             }
+            dict[i->file_id()] = i;
         }
 
         for (SubDirectory *i : subdirs_)
-            i->UpdateDirectories(patched_files, dict);
+            i->MakeDictionary(dict);
     }
 
-    Subfile *SubDirectory::MakeSubfile(long long dictionary_offset, long long fragments_count, long long unknown1, long long file_id,
+    Subfile *SubDirectory::MakeSubfile(long long dictionary_offset, long long unknown1, long long file_id,
                                        long long file_offset, long long file_size, long long timestamp,
-                                       long long version, long long block_size) {
+                                       long long version, long long block_size, long long unknown2) {
 
         FILE_TYPE type = GetSubfileType(file_id, file_offset);
 
         switch (type) {
             case TEXT:
-                return dynamic_cast<Subfile *>(new TextSubfile(dat_, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp, version, block_size));
+                return dynamic_cast<Subfile *>(new TextSubfile(dat_, dictionary_offset, unknown1, file_id, file_offset, file_size, timestamp, version, block_size, unknown2));
             case JPG:
-                return dynamic_cast<Subfile *>(new JpgSubfile(dat_, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp, version, block_size));
+                return dynamic_cast<Subfile *>(new JpgSubfile(dat_, dictionary_offset, unknown1, file_id, file_offset, file_size, timestamp, version, block_size, unknown2));
             case DDS:
-                return dynamic_cast<Subfile *>(new DdsSubfile(dat_, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp, version, block_size));
+                return dynamic_cast<Subfile *>(new DdsSubfile(dat_, dictionary_offset, unknown1, file_id, file_offset, file_size, timestamp, version, block_size, unknown2));
             case WAV:
-                return dynamic_cast<Subfile *>(new WavSubfile(dat_, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp, version, block_size));
+                return dynamic_cast<Subfile *>(new WavSubfile(dat_, dictionary_offset, unknown1, file_id, file_offset, file_size, timestamp, version, block_size, unknown2));
             case OGG:
-                return dynamic_cast<Subfile *>(new OggSubfile(dat_, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp, version, block_size));
+                return dynamic_cast<Subfile *>(new OggSubfile(dat_, dictionary_offset, unknown1, file_id, file_offset, file_size, timestamp, version, block_size, unknown2));
             case FONT:
-                return dynamic_cast<Subfile *>(new FontSubfile(dat_, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp, version, block_size));
+                return dynamic_cast<Subfile *>(new FontSubfile(dat_, dictionary_offset, unknown1, file_id, file_offset, file_size, timestamp, version, block_size, unknown2));
             case UNKNOWN:
-                return dynamic_cast<Subfile *>(new UnknownSubfile(dat_, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp, version, block_size));
+                return dynamic_cast<Subfile *>(new UnknownSubfile(dat_, dictionary_offset, unknown1, file_id, file_offset, file_size, timestamp, version, block_size, unknown2));
         }
-        LOG(ERROR) << "Unknown file type";
-        throw DatException("Bad SubDirectory::MakeSubfile() - unable to recognize file_type!", SUBFILE_EXCEPTION);
+        LOG(ERROR) << "Incorrect file type..";
+        return dynamic_cast<Subfile *>(new UnknownSubfile(dat_, dictionary_offset, unknown1, file_id, file_offset, file_size, timestamp, version, block_size, unknown2));
     }
 
     FILE_TYPE SubDirectory::GetSubfileType(long long file_id, long long file_offset) const {
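
Note on the directory layout implied by the rewritten MakeSubDirectories / MakeSubFiles: a directory block starts with an 8-byte pair that must be zero, followed by up to 62 further 8-byte subdirectory entries (the second half of each entry is the child directory offset; a zero half ends the list), then a 4-byte subfile count (which must be below 64) and that many packed 32-byte subfile headers. A few schematic constants summarising the offsets (names are illustrative, not from the library):

    // Schematic layout constants for a directory block starting at dir_offset.
    constexpr long long kSubdirTableSize   = 63 * 8;            // zero pair + up to 62 subdirectory entries
    constexpr long long kSubfileCountOff   = kSubdirTableSize;  // 4-byte subfile count, must be < 64
    constexpr long long kSubfileHeaderSize = 32;

    // The i-th subfile header is read from:
    //     dir_offset + kSubfileCountOff + 4 + kSubfileHeaderSize * i
    // Entries with version == 0 or unknown2 != 0 are skipped by MakeSubFiles.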

+ 19 - 19
src/Subfile.cpp

@@ -23,22 +23,22 @@ namespace LOTRO_DAT {
 
         dat_ = dat;
 
-        fragments_count_ = header.ToNumber<4>(0); // fragments_count
-        unknown1_ = header.ToNumber<4>(4); // unknown1
-        file_id_ = header.ToNumber<4>(8); // file_id
-        file_offset_ = header.ToNumber<4>(12); // file_offset
-        file_size_ = header.ToNumber<4>(16); // block_size
-        timestamp_ = header.ToNumber<4>(20); // timestamp
-        version_ = header.ToNumber<4>(24); // version
-        block_size_ = header.ToNumber<4>(28); // block_size
+        unknown1_ = header.ToNumber<4>(0); // unknown1
+        file_id_ = header.ToNumber<4>(4); // file_id
+        file_offset_ = header.ToNumber<4>(8); // file_offset
+        file_size_ = header.ToNumber<4>(12); // file_size
+        timestamp_ = header.ToNumber<4>(16); // timestamp
+        version_ = header.ToNumber<4>(20); // version
+        block_size_ = header.ToNumber<4>(24); // block_size
+        unknown2_ = header.ToNumber<4>(28); // unknown2
     }
 
-    Subfile::Subfile(DatFile *dat, long long dictionary_offset, long long fragments_count, long long unknown1, long long file_id,
+    Subfile::Subfile(DatFile *dat, long long dictionary_offset, long long unknown1, long long file_id,
                                 long long file_offset,
-                                long long file_size, long long timestamp, long long version, long long block_size) :
-            category(0), dat_(dat), dictionary_offset_(dictionary_offset), fragments_count_(fragments_count), unknown1_(unknown1), file_id_(file_id),
+                                long long file_size, long long timestamp, long long version, long long block_size, long long unknown2) :
+            category(0), dat_(dat), dictionary_offset_(dictionary_offset), unknown1_(unknown1), file_id_(file_id),
             file_offset_(file_offset),
-            file_size_(file_size), timestamp_(timestamp), version_(version), block_size_(block_size) {
+            file_size_(file_size), timestamp_(timestamp), version_(version), block_size_(block_size), unknown2_(unknown2) {
 
         if (file_size_ > MAXSIZE)
             throw DatException("Bad Subfile::Subfile() - File size is too much... Maybe it's incorrect..?",
@@ -52,10 +52,6 @@ namespace LOTRO_DAT {
         return dictionary_offset_;
     }
 
-    long long Subfile::fragments_count() const {
-        return fragments_count_;
-    }
-
     long long Subfile::unknown1() const {
         return unknown1_;
     }
@@ -84,6 +80,10 @@ namespace LOTRO_DAT {
         return block_size_;
     }
 
+    long long Subfile::unknown2() const {
+        return unknown2_;
+    }
+
     /// bool Subfile::FileType(...);
     /// Virtual function, can (and should) be redefined in child class, otherwise an exception will be thrown while exporting/importing file.
     /// Returns enum FILE_TYPE value, which is declared in DatFile.h
@@ -127,14 +127,14 @@ namespace LOTRO_DAT {
     }
 
     BinaryData Subfile::MakeHeaderData() const {
-        BinaryData header = BinaryData::FromNumber<4>(fragments_count_)
-                            + BinaryData::FromNumber<4>(unknown1_)
+        BinaryData header = BinaryData::FromNumber<4>(unknown1_)
                             + BinaryData::FromNumber<4>(file_id_)
                             + BinaryData::FromNumber<4>(file_offset_)
                             + BinaryData::FromNumber<4>(file_size_)
                             + BinaryData::FromNumber<4>(timestamp_)
                             + BinaryData::FromNumber<4>(version_)
-                            + BinaryData::FromNumber<4>(block_size_);
+                            + BinaryData::FromNumber<4>(block_size_)
+                            + BinaryData::FromNumber<4>(unknown2_);
         return header;
     }
 };
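
Note on the new header field order: dropping fragments_count moves each remaining field 4 bytes earlier and adds unknown2 as the last 4-byte value, and MakeHeaderData writes the fields back in exactly the same order. A schematic view of the resulting 32-byte dictionary entry (this struct is not part of the library; the code reads each field with BinaryData::ToNumber<4>):

    #include <cstdint>

    struct SubfileHeader {                // schematic only
        std::uint32_t unknown1;           // +0
        std::uint32_t file_id;            // +4
        std::uint32_t file_offset;        // +8
        std::uint32_t file_size;          // +12
        std::uint32_t timestamp;          // +16
        std::uint32_t version;            // +20
        std::uint32_t block_size;         // +24
        std::uint32_t unknown2;           // +28 (expected to be zero)
    };
    static_assert(sizeof(SubfileHeader) == 32, "dictionary entries are 32 bytes");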

+ 4 - 4
src/Subfiles/DdsSubfile.cpp

@@ -12,11 +12,11 @@
 namespace LOTRO_DAT {
     DdsSubfile::DdsSubfile() = default;
 
-    DdsSubfile::DdsSubfile(DatFile *dat, long long dictionary_offset, long long fragments_count, long long unknown1,
+    DdsSubfile::DdsSubfile(DatFile *dat, long long dictionary_offset, long long unknown1,
                            long long file_id, long long file_offset, long long file_size,
-                           long long timestamp, long long version, long long block_size)
-            : Subfile(dat, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size,
-                      timestamp, version, block_size) {
+                           long long timestamp, long long version, long long block_size, long long unknown2)
+            : Subfile(dat, dictionary_offset, unknown1, file_id, file_offset, file_size,
+                      timestamp, version, block_size, unknown2) {
     }
 
     FILE_TYPE DdsSubfile::FileType() const {

+ 5 - 5
src/Subfiles/FontSubfile.cpp

@@ -12,11 +12,11 @@
 namespace LOTRO_DAT {
     FontSubfile::FontSubfile() = default;
 
-    FontSubfile::FontSubfile(DatFile *dat, long long dictionary_offset, long long fragments_count, long long unknown1,
-                             long long file_id, long long file_offset, long long file_size, long long timestamp,
-                             long long version, long long block_size)
-            : Subfile(dat, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp,
-                      version, block_size) {
+    FontSubfile::FontSubfile(DatFile *dat, long long dictionary_offset, long long unknown1,
+                             long long file_id, long long file_offset, long long file_size,
+                             long long timestamp, long long version, long long block_size, long long unknown2)
+            : Subfile(dat, dictionary_offset, unknown1, file_id, file_offset, file_size,
+                      timestamp, version, block_size, unknown2) {
     }
 
     FILE_TYPE FontSubfile::FileType() const {

+ 5 - 5
src/Subfiles/JpgSubfile.cpp

@@ -12,11 +12,11 @@
 namespace LOTRO_DAT {
     JpgSubfile::JpgSubfile() = default;
 
-    JpgSubfile::JpgSubfile(DatFile *dat, long long dictionary_offset, long long fragments_count, long long unknown1,
-                             long long file_id, long long file_offset, long long file_size, long long timestamp,
-                             long long version, long long block_size)
-            : Subfile(dat, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp,
-                      version, block_size) {
+    JpgSubfile::JpgSubfile(DatFile *dat, long long dictionary_offset, long long unknown1,
+                           long long file_id, long long file_offset, long long file_size,
+                           long long timestamp, long long version, long long block_size, long long unknown2)
+            : Subfile(dat, dictionary_offset, unknown1, file_id, file_offset, file_size,
+                      timestamp, version, block_size, unknown2) {
     }
 
     FILE_TYPE JpgSubfile::FileType() const {

+ 4 - 4
src/Subfiles/OggSubfile.cpp

@@ -12,11 +12,11 @@
 namespace LOTRO_DAT {
     OggSubfile::OggSubfile() = default;
 
-    OggSubfile::OggSubfile(DatFile *dat, long long dictionary_offset, long long fragments_count, long long unknown1,
+    OggSubfile::OggSubfile(DatFile *dat, long long dictionary_offset, long long unknown1,
                            long long file_id, long long file_offset, long long file_size,
-                           long long timestamp,
-                           long long version, long long block_size)
-            : Subfile(dat, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp, version, block_size) {
+                           long long timestamp, long long version, long long block_size, long long unknown2)
+            : Subfile(dat, dictionary_offset, unknown1, file_id, file_offset, file_size,
+                      timestamp, version, block_size, unknown2) {
     }
 
     FILE_TYPE OggSubfile::FileType() const {

+ 22 - 69
src/Subfiles/TextSubfile.cpp

@@ -51,11 +51,11 @@ std::string argumentsFromUtf16(const std::u16string &args) {
 namespace LOTRO_DAT {
     TextSubfile::TextSubfile() = default;
 
-    TextSubfile::TextSubfile(DatFile *dat, long long dictionary_offset, long long fragments_count, long long unknown1,
-                           long long file_id, long long file_offset, long long file_size,
-                           long long timestamp,
-                           long long version, long long block_size)
-            : Subfile(dat, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp, version, block_size) {
+    TextSubfile::TextSubfile(DatFile *dat, long long dictionary_offset, long long unknown1,
+                             long long file_id, long long file_offset, long long file_size,
+                             long long timestamp, long long version, long long block_size, long long unknown2)
+            : Subfile(dat, dictionary_offset, unknown1, file_id, file_offset, file_size,
+                      timestamp, version, block_size, unknown2) {
     }
 
     FILE_TYPE TextSubfile::FileType() const {
@@ -136,60 +136,19 @@ namespace LOTRO_DAT {
             new_data = new_data + old_data.CutData(offset - 8, offset);
 
             if (patch_fragments.count(fragment_id) == 0) {
-                try {
-                    // Retrieving old pieces
-                    new_data = new_data + GetPieceData(old_data, offset);
-                } catch (std::exception &e) {
-                    LOG(ERROR) << "Caught " << e.what() << " exception.";
-                    LOG(DEBUG) << "Unable to get piece data for file_id " << file_id() << " and fragment_id " << fragment_id;
-                    throw DatException("Bad TextSubfile::MakeForImport()", IMPORT_EXCEPTION);
-                }
-
-                try {
-                    // Retrieving old references
-                    new_data = new_data + GetArgumentReferenceData(old_data, offset);
-                } catch (std::exception &e) {
-                    LOG(ERROR) << "Caught " << e.what() << " exception.";
-                    LOG(DEBUG) << "Unable to get argument reference data for file_id " << file_id() << " and fragment_id " << fragment_id;
-                    throw DatException("Bad TextSubfile::MakeForImport()", IMPORT_EXCEPTION);
-                }
-
-                try {
-                    // Retrieving old ref_strings
-                    new_data = new_data + GetArgumentStringsData(old_data, offset);
-                } catch (std::exception &e) {
-                    LOG(ERROR) << "Caught " << e.what() << " exception.";
-                    LOG(DEBUG) << "Unable to get argument string for file_id " << file_id() << " and fragment_id " << fragment_id;
-                    throw DatException("Bad TextSubfile::MakeForImport()", IMPORT_EXCEPTION);
-                }
-
+                // Retrieving old pieces
+                new_data = new_data + GetPieceData(old_data, offset);
+                // Retrieving old references
+                new_data = new_data + GetArgumentReferenceData(old_data, offset);
+                // Retrieving old ref_strings
+                new_data = new_data + GetArgumentStringsData(old_data, offset);
             } else {
-                try {
-                    // Making and adding new pieces
-                    new_data = new_data + BuildPieces(old_data, patch_fragments[fragment_id], offset);
-                } catch (std::exception &e) {
-                    LOG(ERROR) << "Caught " << e.what() << " exception.";
-                    LOG(DEBUG) << "Unable to build piece data for file_id " << file_id() << " and fragment_id " << fragment_id;
-                    throw DatException("Bad TextSubfile::MakeForImport()", IMPORT_EXCEPTION);
-                }
-
-                try {
-                    // Making and adding new references
-                    new_data = new_data + BuildArgumentReferences(old_data, patch_fragments[fragment_id], offset);
-                } catch (std::exception &e) {
-                    LOG(ERROR) << "Caught " << e.what() << " exception.";
-                    LOG(DEBUG) << "Unable to build argument data for file_id " << file_id() << " and fragment_id " << fragment_id;
-                    throw DatException("Bad TextSubfile::MakeForImport()", IMPORT_EXCEPTION);
-                }
-
-                try {
-                    // Making and adding new strings
-                    new_data = new_data + BuildArgumentStrings(old_data, patch_fragments[fragment_id], offset);
-                } catch (std::exception &e) {
-                    LOG(ERROR) << "Caught " << e.what() << " exception.";
-                    LOG(DEBUG) << "Unable to build argument string data for file_id " << file_id() << " and fragment_id " << fragment_id;
-                    throw DatException("Bad TextSubfile::MakeForImport()", IMPORT_EXCEPTION);
-                }
+                // Making and adding new pieces
+                new_data = new_data + BuildPieces(old_data, patch_fragments[fragment_id], offset);
+                // Making and adding new references
+                new_data = new_data + BuildArgumentReferences(old_data, patch_fragments[fragment_id], offset);
+                // Making and adding new strings
+                new_data = new_data + BuildArgumentStrings(old_data, patch_fragments[fragment_id], offset);
             }
         }
         new_data = new_data + old_data.CutData(offset); // Adding elapsed file data
@@ -313,14 +272,8 @@ namespace LOTRO_DAT {
 
     BinaryData TextSubfile::BuildPieces(const BinaryData &data, const SubfileData &new_data, long long &offset) {
         LOG(DEBUG) << "Started building pieces";
-        try {
-            // Moving &offset pointer in &data
-            GetPieceData(data, offset);
-        } catch (std::exception &e) {
-            LOG(ERROR) << "Caught " << e.what() << " exception.";
-            LOG(DEBUG) << "Unable to get piece data for file_id " << file_id();
-            throw DatException("Bad TextSubfile::BuildPieces()", IMPORT_EXCEPTION);
-        }
+        // Moving &offset pointer in &data
+        GetPieceData(data, offset);
 
         // Deleting '[' and ']' brackets
         std::u16string text_data = new_data.text_data.substr(1, new_data.text_data.size() - 2);
@@ -332,12 +285,12 @@ namespace LOTRO_DAT {
 
         while (next != std::string::npos) {
             std::u16string piece = text_data.substr(prev, next - prev);
-            pieces.push_back(piece);
+            pieces.emplace_back(piece);
             prev = next + DNT.length();
             next = text_data.find(DNT, prev);
         }
-        std::u16string piece = text_data.substr(prev);
-        pieces.push_back(piece);
+
+        pieces.emplace_back(text_data.substr(prev));
 
         // Building BinaryData from pieces
         BinaryData result;

+ 5 - 5
src/Subfiles/UnknownSubfile.cpp

@@ -12,11 +12,11 @@
 namespace LOTRO_DAT {
     UnknownSubfile::UnknownSubfile() = default;
 
-    UnknownSubfile::UnknownSubfile(DatFile *dat, long long dictionary_offset, long long fragments_count, long long unknown1,
-                                   long long file_id, long long file_offset, long long file_size, long long timestamp,
-                                   long long version, long long block_size)
-            : Subfile(dat, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp,
-                      version, block_size) {
+    UnknownSubfile::UnknownSubfile(DatFile *dat, long long dictionary_offset, long long unknown1,
+                                   long long file_id, long long file_offset, long long file_size,
+                                   long long timestamp, long long version, long long block_size, long long unknown2)
+            : Subfile(dat, dictionary_offset, unknown1, file_id, file_offset, file_size,
+                      timestamp, version, block_size, unknown2) {
     }
 
     FILE_TYPE UnknownSubfile::FileType() const {

+ 5 - 5
src/Subfiles/WavSubfile.cpp

@@ -12,11 +12,11 @@
 namespace LOTRO_DAT {
     WavSubfile::WavSubfile() = default;
 
-    WavSubfile::WavSubfile(DatFile *dat, long long dictionary_offset, long long fragments_count, long long unknown1,
-                           long long file_id, long long file_offset, long long file_size, long long timestamp,
-                           long long version, long long block_size)
-            : Subfile(dat, dictionary_offset, fragments_count, unknown1, file_id, file_offset, file_size, timestamp,
-                      version, block_size) {
+    WavSubfile::WavSubfile(DatFile *dat, long long dictionary_offset, long long unknown1,
+                           long long file_id, long long file_offset, long long file_size,
+                           long long timestamp, long long version, long long block_size, long long unknown2)
+            : Subfile(dat, dictionary_offset, unknown1, file_id, file_offset, file_size,
+                      timestamp, version, block_size, unknown2) {
     }
 
     FILE_TYPE WavSubfile::FileType() const {