12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511661167116811691170117111721173117411751176117711781179118011811182 |
- //
- // Created by Иван_Архипов on 31.10.2017.
- //
- #include "DatFile.h"
- #include "BinaryData.h"
- #include "DatException.h"
- #include "SubDirectory.h"
- #include "Subfile.h"
- #include "SubfileData.h"
- #include <EasyLogging++/easylogging++.h>
- #include <unistd.h>
- #define ELPP_FEATURE_CRASH_LOG
- INITIALIZE_EASYLOGGINGPP
- #include <locale>
- #ifdef WIN32
- #define fseek _fseeki64
- #define ftell _ftelli64
- #endif
- extern "C++"
- {
- namespace LOTRO_DAT {
- DatFile::DatFile() {
- dat_state_ = CLOSED;
- root_directory_ = nullptr;
- file_handler_ = nullptr;
- el::Configurations defaultConf;
- defaultConf.setToDefault();
- defaultConf.setGlobally(el::ConfigurationType::Format,
- "%datetime %level %fbase (line %line) : %msg (function: %func)");
- defaultConf.setGlobally(el::ConfigurationType::ToFile, "true");
- defaultConf.setGlobally(el::ConfigurationType::Filename, "dat_library.log");
- defaultConf.setGlobally(el::ConfigurationType::ToStandardOutput, "false");
- defaultConf.setGlobally(el::ConfigurationType::PerformanceTracking, "true");
- defaultConf.setGlobally(el::ConfigurationType::MaxLogFileSize, "5242880"); // 5MB
- defaultConf.setGlobally(el::ConfigurationType::LogFlushThreshold, "1"); // Flush after every one log
- defaultConf.set(el::Level::Debug, el::ConfigurationType::Enabled, "false");
- defaultConf.set(el::Level::Debug, el::ConfigurationType::Filename, "dat_library_debug.log");
- el::Loggers::reconfigureAllLoggers(defaultConf);
- LOG(INFO) << "==================================================================";
- LOG(INFO) << "Starting new DatFile class instance";
- }
/// Opens and fully initialises a .dat file.
/// Pipeline: open OS handle -> read superblock -> build directory tree ->
/// build file dictionary -> load locale dictionaries.
/// Re-initialising with the currently open filename is a no-op; a different
/// filename closes the current file first.
/// @param filename path to the .dat file
/// @param dat_id   caller-chosen id stored in dat_id_ (matched against
///                 patch metadata later)
/// @return SUCCESS, or the largest non-fatal warning code produced by any
///         step; on a fatal step failure the file is closed and that
///         step's error code is returned.
DAT_RESULT DatFile::InitDatFile(const std::string &filename, int dat_id) {
    LOG(DEBUG) << "Started initialisation of DatFile " << filename;
    if (dat_state_ != CLOSED && filename == filename_) {
        LOG(DEBUG) << "Trying to reopen the same file: " << filename << ". Doing nothing.";
        return SUCCESS;
    }
    if (dat_state_ != CLOSED && filename != filename_) {
        LOG(DEBUG) << "DatFile wasn't closed yet. Closing in order to reopen.";
        if (CloseDatFile() != SUCCESS) {
            LOG(ERROR) << "Unable to perform CloseDatFile()! Aborting initialization!";
            return FAILED;
        }
    }
    // Reset all per-file state before opening the new file.
    dat_id_ = dat_id;
    dat_state_ = CLOSED;
    current_locale_ = ORIGINAL;
    root_directory_ = nullptr;
    file_handler_ = nullptr;
    filename_ = "none";
    DAT_RESULT result;
    // return_value accumulates the worst (numerically largest) non-fatal
    // result seen across all steps; values >= 2 are treated as
    // "corrupted file" warnings below. Fatal errors appear to be <= 0
    // (checked as `result <= 0`) -- TODO confirm against DAT_RESULT enum.
    DAT_RESULT return_value = SUCCESS;
    result = OpenDatFile(filename.c_str());
    if (result != SUCCESS) {
        LOG(ERROR) << "Unable to perform opening file. Aborting.";
        CloseDatFile();
        return result;
    }
    return_value = std::max(return_value, result);
    result = ReadSuperBlock();
    if (result <= 0) {
        LOG(ERROR) << "Unable to read super block. Aborting.";
        CloseDatFile();
        return result;
    }
    return_value = std::max(return_value, result);
    result = MakeDirectories();
    if (result <= 0) {
        LOG(ERROR) << "Unable to make directories. Aborting.";
        CloseDatFile();
        return result;
    }
    return_value = std::max(return_value, result);
    result = MakeDictionary();
    if (result <= 0) {
        LOG(ERROR) << "Unable to make dictionary. Aborting.";
        CloseDatFile();
        return result;
    }
    return_value = std::max(return_value, result);
    result = InitLocales();
    if (result <= 0) {
        LOG(ERROR) << "Unable to initialize locales. Aborting.";
        CloseDatFile();
        return result;
    }
    return_value = std::max(return_value, result);
    // Any accumulated warning >= 2 means the dictionary may hold bad rows;
    // try to repair before declaring the file READY.
    if (return_value >= 2) {
        LOG(WARNING) << "Dat file is corrupted. Trying to delete corrupted dictionary rows";
        if (RepairDatFile() != SUCCESS)
            return CRITICAL_DAT_ERROR;
    }
    LOG(INFO) << "File " << filename << " opened successfully!";
    filename_ = filename;
    dat_state_ = READY;
    return return_value;
}
- DAT_STATE DatFile::DatFileState() const {
- return dat_state_;
- }
- DatFile::~DatFile() {
- CloseDatFile();
- }
- /// Extracts file with file_id.
- /// If path is undefined then it will be recognised as current working directory
- /// Output file path consists of "path + file_id + file_extension";
- /// NOTICE: The directory, mentioned in "std::string path" variable SHOULD BE ALREADY CREATED;
- /// Otherwise DatException() will be thrown.
- /// Returns true, if file was successfully extracted;
- /// Throws DatException() if undefined behaviour happened
- DAT_RESULT DatFile::ExtractFile(long long file_id, const std::string &path) {
- LOG(DEBUG) << "Extracting file " << file_id << " to path " << path;
- if (dat_state_ < READY) {
- LOG(ERROR) << "Dat state isn't READY. Cannot perform extraction.";
- return INCORRECT_STATE_ERROR;
- }
- BinaryData file_data = GetFileData(dictionary_[file_id], 8);
- if (file_data.size() == 0) {
- LOG(ERROR) << "File data is empty. Aborting extraction.";
- return NO_FILE_ERROR;
- }
- SubfileData export_data = dictionary_[file_id]->PrepareForExport(file_data);
- if (export_data.Empty()) {
- LOG(ERROR) << "Export data is empty. Aborting extraction.";
- return NO_FILE_ERROR;
- }
- if (export_data.binary_data.WriteToFile(path + export_data.options["ext"].as<std::string>()) != SUCCESS) {
- LOG(ERROR) << "Cannot write to file.";
- return WRITE_TO_FILE_ERROR;
- }
- LOG(DEBUG) << "File " << file_id << " extracted successfully";
- return SUCCESS;
- }
- /// Extracts file with file_id to database "db".
- /// DATABASE SHOULD BE ALREADY CREATED; Otherwise DatException will be called.
- /// NOTICE: The directory, mentioned in "std::string path" variable SHOULD BE ALREADY CREATED;
- /// Otherwise DatException() will be thrown.
- /// Returns true, if file was successfully extracted;
- /// Throws DatException() if undefined behaviour happened
- DAT_RESULT DatFile::ExtractFile(long long file_id, Database *db) {
- LOG(DEBUG) << "Extracting file " << file_id << " to database.";
- if (dat_state_ < READY) {
- LOG(ERROR) << "Dat state isn't READY. Cannot perform extraction.";
- return INCORRECT_STATE_ERROR;
- }
- BinaryData file_data;
- try {
- file_data = GetFileData(dictionary_[file_id], 8);
- } catch (std::exception &e) {
- LOG(ERROR) << "Caught" << e.what() << " exception.";
- return FAILED;
- }
- SubfileData export_data;
- try {
- export_data = dictionary_[file_id]->PrepareForExport(file_data);
- export_data.options["did"] = dat_id_;
- } catch (std::exception &e) {
- LOG(ERROR) << "Caught" << e.what() << " exception.";
- return FAILED;
- }
- if (export_data == SubfileData()) {
- LOG(WARNING) << "file with id " << dictionary_[file_id]->file_id() << " is empty. Passing it.";
- return SUCCESS;
- }
- try {
- db->PushFile(export_data);
- } catch (std::exception &e) {
- LOG(ERROR) << "Caught " << e.what() << " exception.";
- return FAILED;
- }
- LOG(DEBUG) << "File " << file_id << " extracted successfully";
- return SUCCESS;
- }
- /// Extracts all files with specific type to "path + type + file_id + file_part + extension" files;
- /// If path is undefined then it will be recognised as current working directory
- /// NOTICE: The directory, mentioned in "std::string path" variable SHOULD BE ALREADY CREATED;
- /// Otherwise DatException() will be thrown.
- /// Returns number of successfully extracted files
- /// Throws DatException() if undefined behaviour happened
- int DatFile::ExtractAllFilesByType(FILE_TYPE type, std::string path) {
- LOG(INFO) << "Extracting all files to path " << path;
- if (dat_state_ < READY) {
- LOG(ERROR) << "Dat state isn't READY. Cannot perform extraction.";
- return INCORRECT_STATE_ERROR;
- }
- int success = 0;
- for (auto i : dictionary_) {
- FILE_TYPE file_type = i.second->FileType();
- if (file_type == type) {
- success += (ExtractFile(i.second->file_id(), (path + std::to_string(i.second->file_id()))) == SUCCESS
- ? 1 : 0);
- }
- }
- LOG(INFO) << "Successfully extracted " << success << " files";
- return success;
- }
- /// Extracts all files with specific type to database "db";
- /// DATABASE SHOULD BE ALREADY CREATED; Otherwise DatException will be called.
- /// Returns number of successfully extracted files
- /// Throws DatException() if undefined behaviour happened
- int DatFile::ExtractAllFilesByType(FILE_TYPE type, Database *db) {
- LOG(INFO) << "Extracting all files to database...";
- if (dat_state_ < READY) {
- LOG(ERROR) << "Dat state isn't READY. Cannot perform extraction.";
- return INCORRECT_STATE_ERROR;
- }
- int success = 0;
- for (auto i : dictionary_) {
- FILE_TYPE file_type = i.second->FileType();
- if (file_type == type) {
- success += (ExtractFile(i.second->file_id(), db) == SUCCESS ? 1 : 0);
- }
- }
- LOG(INFO) << "Extracted " << success << " files";
- return success;
- }
/// Patches a single subfile from a file on disk.
/// Reads `filename` into memory, merges it with the current in-dat data
/// via MakeForImport, and writes the result back with ApplyFilePatch.
/// @param filename path of the replacement payload on disk
/// @param options  YAML options; "fid" (required) is the target file id,
///                 "did" (optional) must match this DatFile's dat_id_
/// @return SUCCESS, or INCORRECT_STATE_ERROR / INCORRECT_DAT_ID /
///         NO_FILE_ERROR / DAT_PATCH_FILE_ERROR / FAILED on error.
DAT_RESULT DatFile::PatchFile(const char *filename, YAML::Node options) {
    LOG(DEBUG) << "Patching file with filename" << filename << " and id = " << options["fid"].as<long long>();
    if (dat_state_ < READY) {
        LOG(ERROR) << "Dat state isn't READY. Cannot patch.";
        return INCORRECT_STATE_ERROR;
    }
    // A patch tagged for a different .dat file must not be applied here.
    if (options["did"].IsDefined() && options["did"].as<int>() != dat_id_)
        return INCORRECT_DAT_ID;
    BinaryData data;
    // NOTE(review): ReadFromFile's result is not checked -- an unreadable
    // file presumably yields empty data; verify against BinaryData.
    data.ReadFromFile(filename);
    auto file_id = options["fid"].as<long long>();
    // operator[] inserts a nullptr entry for an unknown id; the explicit
    // nullptr check below catches that case.
    if (dictionary_[file_id] == nullptr) {
        LOG(ERROR) << "Cannot patch file - there is no file in dictionary with file_id = " << file_id;
        return NO_FILE_ERROR;
    }
    BinaryData old_data = GetFileData(dictionary_[file_id]);
    if (old_data.Empty()) {
        LOG(ERROR) << "GetFileData returned empty data. Aborting.";
        return DAT_PATCH_FILE_ERROR;
    }
    // Merge the new payload with the existing in-dat representation.
    data = dictionary_[file_id]->MakeForImport(old_data, SubfileData(data, u"", options));
    try {
        DAT_RESULT result = ApplyFilePatch(dictionary_[file_id], data);
        if (result != SUCCESS)
            return result;
    } catch (std::exception &e) {
        LOG(ERROR) << "Caught " << e.what() << " exception.";
        return FAILED;
    }
    LOG(DEBUG) << "Successfully patched file with filename = " << filename << " and id = "
               << options["fid"].as<long long>();
    return SUCCESS;
}
/// Patches a single subfile from an in-memory SubfileData record.
/// @param data              payload plus YAML options; "fid" (required) is
///                          the target id, "cat" (optional) sets the
///                          file's category (defaults to 1)
/// @param rewrite_original  if true, patch the ORIGINAL-locale version of
///                          the file instead of the PATCHED one
/// @return SUCCESS, or INCORRECT_STATE_ERROR / NO_FILE_ERROR /
///         DAT_PATCH_FILE_ERROR / FAILED on error.
DAT_RESULT DatFile::PatchFile(const SubfileData &data, bool rewrite_original) {
    LOG(DEBUG) << "Patching file with id = " << data.options["fid"].as<long long>()
               << (rewrite_original ? " REWRITING ORIGINAL FILE." : ".");
    if (dat_state_ < READY) {
        LOG(ERROR) << "Dat state isn't READY. Cannot patch.";
        return INCORRECT_STATE_ERROR;
    }
    auto file_id = data.options["fid"].as<long long>();
    Subfile *file = dictionary_[file_id];
    if (file == nullptr) {
        LOG(ERROR) << "Cannot patch file - there is no file in dictionary with file_id = " << file_id;
        return NO_FILE_ERROR;
    }
    // If file has inactive category, then we should set it to patched state in order to commit patch and
    // then in ApplyFilePatch(), if new category is still inactive, return dictionary to its original state;
    if (inactive_categories.count(file->category) != 0 && patch_dict_.count(file_id) != 0) {
        dictionary_[file_id]->file_offset_ = patch_dict_[file_id]->file_offset_;
        dictionary_[file_id]->file_size_ = patch_dict_[file_id]->file_size_;
        dictionary_[file_id]->block_size_ = patch_dict_[file_id]->block_size_;
        dictionary_[file_id]->timestamp_ = patch_dict_[file_id]->timestamp_;
        dictionary_[file_id]->version_ = patch_dict_[file_id]->version_;
    }
    // Category comes from the patch options when present; 1 otherwise.
    if (data.options["cat"].IsDefined()) {
        file->category = data.options["cat"].as<long long>();
    } else {
        file->category = 1;
    }
    BinaryData old_data = GetFileData(file);
    if (old_data.Empty()) {
        LOG(ERROR) << "GetFileData returned empty data. Aborting.";
        return DAT_PATCH_FILE_ERROR;
    }
    // Merge the incoming payload with the current in-dat representation.
    BinaryData patch_data = file->MakeForImport(old_data, data);
    try {
        DAT_RESULT result = ApplyFilePatch(file, patch_data, rewrite_original);
        if (result != SUCCESS)
            return result;
    } catch (std::exception &e) {
        LOG(ERROR) << "Caught " << e.what() << " exception";
        return FAILED;
    }
    LOG(DEBUG) << "Patched successfully file " << data.options["fid"].as<long long>()
               << (rewrite_original ? " REWRITING ORIGINAL FILE." : ".");
    return SUCCESS;
}
- // TODO: Write description
- DAT_RESULT DatFile::PatchAllDatabase(Database *db) {
- LOG(INFO) << "Patching all database";
- if (dat_state_ < READY) {
- LOG(ERROR) << "Dat state isn't READY. Cannot patch.";
- return INCORRECT_STATE_ERROR;
- }
- SubfileData data;
- data = db->GetNextFile();
- while (!data.Empty()) {
- DAT_RESULT result = PatchFile(data);
- if (result != SUCCESS)
- LOG(ERROR) << "Cannot patch file" << data.options["fid"].as<long long>() << " continuing";
- data = db->GetNextFile();
- }
- DAT_RESULT result = CommitChanges();
- if (result != SUCCESS)
- return result;
- LOG(INFO) << "Successfully patched whole database";
- return SUCCESS;
- }
- /// DatFile::WriteUnorderedDictionary(...);
- /// Prints list of all found files with some information about them to file.
- /// Gets std::string path - path to directory, where the file will be written with name "dict.txt"
- DAT_RESULT DatFile::WriteUnorderedDictionary(std::string path) const {
- LOG(INFO) << "Writing unordered dictionary to " << path << "dict.txt";
- FILE *f = nullptr;
- fopen_s(&f, (path + "dict.txt").c_str(), "w");
- if (f == nullptr) {
- LOG(ERROR) << "Cannot open file " << path + "dict.txt";
- return WRITE_TO_FILE_ERROR;
- }
- fprintf(f, "file_id offset size size2 extension\n");
- for (auto i : dictionary_) {
- fprintf(f, "%lld %lld %lld %lld %s\n", i.second->file_id(), i.second->file_offset(), i.second->file_size(),
- i.second->block_size(), i.second->Extension().c_str());
- }
- fclose(f);
- LOG(INFO) << "Unordered dictionary was written successfully to " << path << "dict.txt";
- return SUCCESS;
- }
- /// DatFile::files_number();
- /// Returns amount of files, found in dictionaries of DatFile. Some if them may be empty or erased.
- long long DatFile::files_number() const {
- return dictionary_.size();
- }
/// DatFile::GetFileData()
/// Reassembles the full payload of `file` from the .dat file, following
/// the fragment table when the subfile does not fit in its first block.
/// @param file   dictionary entry describing the subfile
/// @param offset byte offset inside the subfile's block at which the
///               payload starts (callers pass 8 to skip the header)
/// @return the assembled bytes, or an empty BinaryData(0) on any error.
BinaryData DatFile::GetFileData(const Subfile *file, long long int offset) {
    LOG(DEBUG) << "Getting file " << file->file_id() << " data";
    try {
        // Sanity-check the on-disk header: the id stored 8 bytes into the
        // block must match the dictionary entry (unless compressed).
        BinaryData mfile_id(20);
        ReadData(mfile_id, 20, file->file_offset() + 8);
        if (mfile_id.Empty()) {
            LOG(ERROR) << "Error while reading file " << file->file_id() << " header (offset = "
                       << file->file_offset() << "); Aborting.";
            return BinaryData(0);
        }
        if (!mfile_id.CheckCompression() && file->file_id() != mfile_id.ToNumber<4>(0)) {
            LOG(ERROR) << "Bad DatFile::GetFileData() - file_id in Subfile ("
                       << file->file_id()
                       << ") doesn't match to file_id (" << mfile_id.ToNumber<4>(0) << ")in DatFile.";
            return BinaryData(0);
        }
        BinaryData data((unsigned) (file->file_size() + (8 - offset)));
        // Fast path: the whole subfile fits inside its first block.
        if (file->block_size() >= file->file_size() + 8) {
            ReadData(data, file->file_size() + (8 - offset), file->file_offset() + offset);
            return data;
        }
        // Fragmented path: the first 4 bytes of the block hold the
        // fragment count; a table of (size, offset) pairs sits at the end
        // of the block, 8 bytes per fragment.
        BinaryData fragments_count(4);
        ReadData(fragments_count, 4, file->file_offset());
        long long fragments_number = fragments_count.ToNumber<4>(0);
        long long current_block_size = file->block_size() - offset - 8 * fragments_number;
        // Read the portion stored inline in the first block...
        ReadData(data, current_block_size, file->file_offset() + offset);
        BinaryData FragmentsDictionary(8 * unsigned(fragments_number));
        ReadData(FragmentsDictionary, 8 * unsigned(fragments_number),
                 file->file_offset() + file->block_size() - 8 * fragments_number);
        // ...then append each fragment, clamping the final fragment to
        // the remaining file size.
        for (long long i = 0; i < fragments_number; i++) {
            long long fragment_size = FragmentsDictionary.ToNumber<4>(8 * i);
            long long fragment_offset = FragmentsDictionary.ToNumber<4>(8 * i + 4);
            ReadData(data, std::min(fragment_size, file->file_size() - current_block_size), fragment_offset,
                     current_block_size);
            current_block_size += fragment_size;
        }
        LOG(DEBUG) << "Successfully got file " << file->file_id() << " data";
        return data;
    } catch (std::exception &e) {
        LOG(ERROR) << "Caught " << e.what() << " exception";
    }
    return BinaryData(0);
}
- /// DatFile special functions for opening and reading/writing raw data.
- /// Shouldn't be used by any external classes except Subfile and Subdirectory.
- DAT_RESULT DatFile::OpenDatFile(const char *dat_name) {
- LOG(DEBUG) << "Started opening DatFile";
- if (dat_state_ != CLOSED) {
- CloseDatFile();
- }
- file_handler_ = fopen(dat_name, "r+b");
- if (file_handler_ == nullptr) {
- LOG(ERROR) << "Unable to open file " << dat_name;
- return NO_FILE_ERROR;
- }
- fseek(file_handler_, 0, SEEK_END);
- file_size_ = ftell(file_handler_);
- fseek(file_handler_, 0, SEEK_SET);
- dat_state_ = SUCCESS_OPENED;
- LOG(DEBUG) << "Successfully opened DatFile";
- return SUCCESS;
- }
/// Reads and validates the 1 KiB superblock at the start of the .dat
/// file. Known fields (byte offsets within the superblock):
///   0x100 magic constant (must be 0x4C5000)
///   0x140 magic constant (must be 0x5442)
///   0x148 recorded file size
///   0x14C / 0x150 version numbers
///   0x154 fragmentation journal offset
///   0x160 root directory offset
/// @return SUCCESS; CORRUPTED_FILE_WARNING if the recorded size differs
///         from the actual size (state still advances); or
///         INCORRECT_STATE_ERROR / INCORRECT_SUPERBLOCK_ERROR.
DAT_RESULT DatFile::ReadSuperBlock() {
    LOG(DEBUG) << "Started reading superblock";
    if (dat_state_ != SUCCESS_OPENED) {
        LOG(ERROR) << "Dat state isn't SUCCESS_OPENED. Cannot perform extraction.";
        return INCORRECT_STATE_ERROR;
    }
    BinaryData data(1024);
    ReadData(data, 1024);
    constant1_ = data.ToNumber<4>(0x100);
    constant2_ = data.ToNumber<4>(0x140);
    version1_ = data.ToNumber<4>(0x14C);
    version2_ = data.ToNumber<4>(0x150);
    fragmentation_journal_offset_ = data.ToNumber<4>(0x154);
    root_directory_offset_ = data.ToNumber<4>(0x160);
    auto size1 = data.ToNumber<4>(0x148);
    if (constant1_ != 0x4C5000) {
        LOG(ERROR) << "variable at position 0x100 is not equal to .dat file constant!";
        return INCORRECT_SUPERBLOCK_ERROR;
    }
    if (constant2_ != 0x5442) {
        LOG(ERROR) << "variable at position 0x140 is not equal to .dat file constant!";
        return INCORRECT_SUPERBLOCK_ERROR;
    }
    // Size mismatch is recoverable: trust the recorded size and warn.
    if (file_size_ != size1) {
        LOG(ERROR) << "variable at 0x148 position is not equal to .dat file size!";
        file_size_ = size1;
        dat_state_ = SUCCESS_SUPERBLOCK;
        return CORRUPTED_FILE_WARNING;
    }
    dat_state_ = SUCCESS_SUPERBLOCK;
    LOG(DEBUG) << "Superblock read successfully";
    return SUCCESS;
}
- DAT_RESULT DatFile::MakeDirectories() {
- LOG(DEBUG) << "Started making directories";
- if (dat_state_ != SUCCESS_SUPERBLOCK) {
- LOG(ERROR) << "Dat state isn't SUCCESS_SUPERBLOCK. Cannot make directories.";
- return INCORRECT_STATE_ERROR;
- }
- root_directory_ = new SubDirectory((unsigned) root_directory_offset_, this);
- dat_state_ = SUCCESS_DIRECTORIES;
- LOG(DEBUG) << "Directories made successfully";
- return SUCCESS;
- }
- DAT_RESULT DatFile::MakeDictionary() {
- LOG(DEBUG) << "Started making dictionary";
- if (dat_state_ != SUCCESS_DIRECTORIES) {
- LOG(ERROR) << "Dat state isn't SUCCESS_DIRECTORIES. Cannot make directories.";
- return INCORRECT_STATE_ERROR;
- }
- if (root_directory_ == nullptr) {
- LOG(ERROR) << "root_directory is nullptr!!";
- return INIT_ERROR;
- }
- root_directory_->MakeDictionary(dictionary_);
- dat_state_ = SUCCESS_DICTIONARY;
- LOG(DEBUG) << "Dictionary made successfull";
- return SUCCESS;
- }
- DAT_RESULT DatFile::ReadData(BinaryData &data, long long size, long long offset, long long data_offset) {
- if (dat_state_ == CLOSED) {
- LOG(ERROR) << "Dat state is CLOSED. Cannot read data.";
- data = BinaryData(0);
- return INIT_ERROR;
- }
- if (data_offset + size > data.size()) {
- LOG(ERROR) << "Trying to read more than BinaryData size: Reading " << size << " bytes from " << offset
- << " position.";
- data = BinaryData(0);
- return DAT_READ_ERROR;
- }
- if (offset + size > file_size_) {
- LOG(ERROR) << "Trying to read more than DatFile size elapsed: Reading " << size << " bytes from " << offset
- << " position.";
- data = BinaryData(0);
- return DAT_READ_ERROR;
- }
- fseek(file_handler_, offset, SEEK_SET);
- fread(data.data() + data_offset, unsigned(size), 1, file_handler_);
- return SUCCESS;
- }
- DAT_RESULT DatFile::WriteData(const BinaryData &data, long long size, long long offset, long long data_offset) {
- if (dat_state_ < READY) {
- LOG(ERROR) << "Dat state isn't READY. Cannot write data.";
- return INCORRECT_STATE_ERROR;
- }
- fseek(file_handler_, offset, SEEK_SET);
- if (data_offset + size > data.size()) {
- LOG(ERROR) << "Trying to write more than BinaryData size";
- return DAT_WRITE_ERROR;
- }
- fwrite(data.data() + data_offset, unsigned(size), 1, file_handler_);
- return SUCCESS;
- }
/// Special functions used by patch process.
/// Shouldn't be used by any external class.
/// Writes a prepared patch payload for `file` into the .dat file,
/// allocating fresh space at the end of the file when the payload does
/// not fit in the subfile's current block, and keeps the original/
/// patched locale dictionaries in sync.
/// @param file             dictionary entry being patched (mutated in place)
/// @param data             payload produced by MakeForImport
/// @param rewrite_original patch the ORIGINAL version rather than adding
///                         a PATCHED one
/// @return SUCCESS, DUBLICATE_PATCH_FILES_ERROR if this id was already
///         patched in the current session, or INCORRECT_PATCH_FILE if
///         the payload's embedded id does not match.
DAT_RESULT DatFile::ApplyFilePatch(Subfile *file, const BinaryData &data, bool rewrite_original) {
    LOG(DEBUG) << "Applying " << file->file_id() << " patch.";
    auto file_id = file->file_id();
    // Only one patch per file id per session; later duplicates are skipped.
    if (patched_list.count(file_id) != 0) {
        LOG(WARNING) << "Warning: DatFile::ApplyFilePatch - found 2 files in patch with the same file_id = "
                     << file->file_id() << " Passing last...";
        return DUBLICATE_PATCH_FILES_ERROR;
    }
    // Make sure the locale matches the kind of patch being applied.
    if (current_locale() != PATCHED && !rewrite_original) {
        LOG(INFO) << "Changing locale to PATCHED(RU) in order to patch file";
        SetLocale(PATCHED);
    }
    if (current_locale() == PATCHED && rewrite_original && patch_dict_.count(file_id) != 0) {
        LOG(INFO) << "Changing locale to ORIGINAL in order to patch original version of file, which has both "
                  << "original and patched versions.";
        SetLocale(ORIGINAL);
    }
    dat_state_ = UPDATED;
    // First time this file is patched: preserve its original header so
    // the ORIGINAL locale can be restored later.
    if (orig_dict_.count(file_id) == 0 && !rewrite_original) {
        orig_dict_[file_id] = new Subfile(this, file->MakeHeaderData());
    }
    auto journal = GetFragmentationJournal();
    file->file_size_ = data.size() - 8;
    // If there is no previous patched copy (or the payload outgrew its
    // block), append the data at the end of the .dat file and grow it.
    if ((patch_dict_.count(file_id) == 0 && !rewrite_original) || data.size() > file->block_size()) {
        if (journal[0].second != file_size_) {
            journal[0].second = file_size_;
        }
        file->file_offset_ = journal[0].second;
        file->block_size_ = std::max(data.size(), 256u);
        journal[0].second += data.size();
        // Reserve the new region with zeros before the real write below.
        BinaryData nulls(data.size());
        WriteData(nulls, nulls.size(), file_size_);
        this->file_size_ += data.size();
    }
    // Rebuild the block header: zero fragment count + payload (minus its
    // first 4 bytes), then verify the embedded file id before writing.
    BinaryData fragments_count(4);
    fragments_count = BinaryData::FromNumber<4>(0);
    BinaryData file_data = fragments_count + data.CutData(4);
    if (file_id != file_data.ToNumber<4>(8)) {
        LOG(ERROR) << "Created data's file_id doesn't match to original! Patch wasn't written to .dat file";
        return INCORRECT_PATCH_FILE;
    }
    WriteData(file_data, file_data.size(), file->file_offset());
    patched_list.insert(file_id);
    if (!rewrite_original) {
        patch_dict_.erase(file_id); // Drop the stale entry from the patched (RU) dictionary
        patch_dict_[file_id] = new Subfile(this, file->MakeHeaderData()); // Store the fresh header
    }
    // If category is forbidden, then return file header data to original state
    if (inactive_categories.count(file->category) != 0) {
        dictionary_[file_id]->file_offset_ = orig_dict_[file_id]->file_offset_;
        dictionary_[file_id]->file_size_ = orig_dict_[file_id]->file_size_;
        dictionary_[file_id]->block_size_ = orig_dict_[file_id]->block_size_;
        dictionary_[file_id]->timestamp_ = orig_dict_[file_id]->timestamp_;
        dictionary_[file_id]->version_ = orig_dict_[file_id]->version_;
    }
    // Keep category in sync across both locale dictionaries.
    if (orig_dict_.count(file_id) != 0)
        orig_dict_[file_id]->category = file->category;
    if (patch_dict_.count(file_id) != 0)
        patch_dict_[file_id]->category = file->category;
    UpdateFragmentationJournal(journal);
    LOG(DEBUG) << "Successfully applied file " << file->file_id() << " patch.";
    return SUCCESS;
}
- DAT_RESULT DatFile::UpdateSubdirectories() {
- // TODO: asserts
- LOG(DEBUG) << "Started updating subdirectories";
- root_directory_->UpdateDirectories(patched_list, dictionary_);
- LOG(DEBUG) << "Finished updating subdirectories";
- return SUCCESS;
- }
- std::vector<std::pair<long long, long long> > DatFile::GetFragmentationJournal() {
- LOG(DEBUG) << "Getting fragmentation journal";
- BinaryData data(8);
- DAT_RESULT res = ReadData(data, 8, fragmentation_journal_offset_ + 8);
- std::vector<std::pair<long long, long long> > result;
- if (res != SUCCESS) {
- LOG(ERROR) << "Error " << res << " while reading data";
- return result;
- }
- result.emplace_back(std::make_pair(data.ToNumber<4>(0), data.ToNumber<4>(4)));
- LOG(DEBUG) << "Finished getting fragmentation journal";
- return result;
- }
- DAT_RESULT DatFile::UpdateHeader() {
- LOG(DEBUG) << "Updating header";
- WriteData(BinaryData::FromNumber<4>(constant1_), 4, 0x100);
- WriteData(BinaryData::FromNumber<4>(constant2_), 4, 0x140);
- WriteData(BinaryData::FromNumber<4>(file_size_), 4, 0x148);
- WriteData(BinaryData::FromNumber<4>(version1_), 4, 0x14C);
- WriteData(BinaryData::FromNumber<4>(version2_), 4, 0x150);
- WriteData(BinaryData::FromNumber<4>(fragmentation_journal_offset_), 4, 0x154);
- WriteData(BinaryData::FromNumber<4>(root_directory_offset_), 4, 0x160);
- LOG(DEBUG) << "Finished updating header";
- return SUCCESS;
- }
- DAT_RESULT DatFile::UpdateFragmentationJournal(const std::vector<std::pair<long long, long long> > &journal) {
- LOG(DEBUG) << "Updating fragmentation journal";
- for (unsigned i = 0; i < journal.size(); i++) {
- long long size = journal[i].first;
- long long offset = journal[i].second;
- WriteData(BinaryData::FromNumber<4>(size), 4, fragmentation_journal_offset_ + 8 * (i + 1));
- WriteData(BinaryData::FromNumber<4>(offset), 4, fragmentation_journal_offset_ + 8 * (i + 1) + 4);
- }
- LOG(DEBUG) << "Finished updating fragmentation journal";
- return SUCCESS;
- }
- DAT_RESULT DatFile::CommitChanges() {
- LOG(INFO) << "Started commiting changes";
- if (dat_state_ != UPDATED) {
- LOG(DEBUG) << "Commiting changes to file with state != UPDATED. Nothing to do";
- return SUCCESS;
- }
- LOG(INFO) << "There are some updated files. Rewriting dictionary...";
- CommitLocales();
- auto journal = GetFragmentationJournal();
- UpdateFragmentationJournal(journal);
- UpdateHeader();
- UpdateSubdirectories();
- LOG(INFO) << "Changed " << patched_list.size() << " files...";
- patched_list.clear();
- dat_state_ = READY;
- LOG(INFO) << "Done Commiting changes!";
- return SUCCESS;
- }
- DAT_RESULT DatFile::CloseDatFile() {
- LOG(INFO) << "Closing DatFile";
- if (dat_state_ == CLOSED) {
- LOG(INFO) << "DatFile is already closed. Nothing to do";
- return SUCCESS;
- }
- CommitChanges();
- orig_dict_.clear();
- patched_list.clear();
- pending_patch_.clear();
- current_locale_ = ORIGINAL;
- if (file_handler_ != nullptr) {
- fclose(file_handler_);
- }
- delete root_directory_;
- dictionary_.clear();
- patched_list.clear();
- truncate64(filename_.c_str(), file_size_);
- filename_ = "none";
- dat_state_ = CLOSED;
- LOG(INFO) << "File closed successfully.";
- return SUCCESS;
- }
- // LOCALE MANAGING SECTION
/// Loads the locale dictionaries (original/patched headers plus the set
/// of inactive categories) from the special subfile 2013266257. Any
/// missing, short or unrecognised locale blob leaves the dictionaries
/// empty and still returns SUCCESS -- locales are optional.
DAT_RESULT DatFile::InitLocales() {
    LOG(INFO) << "Initialising locales...";
    BinaryData dicts_data;
    if (dictionary_.count(2013266257) != 0)
        dicts_data = GetFileData(dictionary_[2013266257]);
    if (dicts_data.size() < 29) {
        LOG(WARNING) << "Locales file is empty.. Initialising locale dicts as empty";
        LOG(INFO) << "Could't find locales file or it's corrupted/empty... Continuing without locales";
        return SUCCESS;
    }
    // Magic greeting string written by the patcher; absence means this is
    // not a locales blob we wrote.
    BinaryData hi_data = dicts_data.CutData(14, 29) + BinaryData("\0", 1);
    std::string hi = std::string((char *) (hi_data.data()));
    LOG(DEBUG) << "hi info is " << hi;
    if (hi != "Hi from Gi1dor!") {
        LOG(WARNING) << "Didn't receive 'hi' from Gi1dor... Initialising locale dicts as empty";
        LOG(INFO) << "Could't init locales' file... Continuing without them";
        return SUCCESS;
    }
    int offset = 29;
    // 4-char tag of the locale that was active when the blob was written.
    BinaryData current_locale_data = dicts_data.CutData(offset, offset + 4) + BinaryData("\0", 1);
    std::string locale((char *) (current_locale_data.data()));
    offset += 4;
    LOG(DEBUG) << "current locale:" << locale;
    if (locale != "PATC" && locale != "ORIG") {
        LOG(WARNING) << "Incorrect locale... Initialising locale dicts as empty";
        LOG(INFO) << "Could't recognize locale... Continuing without locales";
        return SUCCESS;
    }
    current_locale_ = (locale == "PATC" ? PATCHED : ORIGINAL);
    // Blob layout:
    // 14 bytes for old data
    // 15 bytes for "Hi from Gi1dor"
    // 4 bytes for LOCALE
    // 4 bytes for orig_dict.size()
    // (32 + 4) * orig_dict.size() bytes for orig_dict data
    // 4 bytes for patch_dict.size()
    // (32 + 4) * patch_dict.size() bytes for patch_dict data
    // 4 bytes for inactive_categories dict
    // 4 * inactive_categories.size() bytes for inactive_categories data
    size_t orig_dict_size = size_t(dicts_data.CutData(offset, offset + 4).ToNumber<4>(0));
    offset += 4;
    // Each entry: 32-byte Subfile header followed by a 4-byte category.
    for (size_t i = 0; i < orig_dict_size; i++) {
        auto file = new Subfile(this, dicts_data.CutData(offset, offset + 32));
        orig_dict_[file->file_id()] = file;
        offset += 32;
        orig_dict_[file->file_id()]->category = dicts_data.ToNumber<4>(offset);
        offset += 4;
        if (orig_dict_[file->file_id()]->category == 0)
            LOG(DEBUG) << "file category is undefined (0)!";
    }
    size_t patch_dict_size = size_t(dicts_data.CutData(offset, offset + 4).ToNumber<4>(0));
    offset += 4;
    for (size_t i = 0; i < patch_dict_size; i++) {
        auto file = new Subfile(this, dicts_data.CutData(offset, offset + 32));
        patch_dict_[file->file_id()] = file;
        offset += 32;
        patch_dict_[file->file_id()]->category = dicts_data.ToNumber<4>(offset);
        offset += 4;
        if (patch_dict_[file->file_id()]->category == 0)
            LOG(DEBUG) << "file category is undefined (0)!";
    }
    // Trailing list of category ids whose patches are currently disabled.
    size_t active_patches_dict_size = size_t(dicts_data.CutData(offset, offset + 4).ToNumber<4>(0));
    offset += 4;
    for (size_t i = 0; i < active_patches_dict_size; i++) {
        inactive_categories.insert(dicts_data.ToNumber<4>(offset));
        offset += 4;
    }
    LOG(INFO) << "There are " << patch_dict_.size() << " files in patch locale dictionary";
    LOG(INFO) << "There are " << orig_dict_.size() << " files in original locale dictionary";
    std::string inactive_cat_s;
    for (auto i : inactive_categories) {
        inactive_cat_s += std::to_string(i) + " ";
    }
    LOG(INFO) << "Unactive patches now: " << inactive_cat_s;
    LOG(INFO) << "Finished initialising locales";
    return SUCCESS;
}
- std::map<long long, Subfile *> *DatFile::GetLocaleDictReference(LOCALE locale) {
- switch (locale) {
- case PATCHED:
- return &patch_dict_;
- case ORIGINAL:
- return &orig_dict_;
- default:
- LOG(ERROR) << "Unknown locale! Returning original";
- return &orig_dict_;
- }
- }
// Sanity-checks a dictionary entry against the bytes actually stored in the
// .dat file: reads 20 bytes at file_offset + 8 and verifies that the file id
// stored on disk matches the entry's id. An entry whose data reads back as
// compressed is accepted without the id comparison — presumably the id is
// laid out differently for compressed subfiles (TODO confirm).
bool DatFile::CorrectSubfile(Subfile *file) {
    BinaryData mfile_id(20);
    ReadData(mfile_id, 20, file->file_offset() + 8);
    if (mfile_id.Empty())
        return false;  // read produced nothing — entry points at bad data
    return mfile_id.CheckCompression() || file->file_id() == mfile_id.ToNumber<4>(0);
}
// Restores corrupted dictionary entries from the original-locale dictionary.
// For every subfile whose on-disk data no longer matches its dictionary entry
// (per CorrectSubfile), the entry's location fields are rolled back to the
// values remembered in orig_dict_, and the file is dropped from both locale
// dictionaries. Returns CRITICAL_DAT_ERROR when a broken entry cannot be
// repaired: either no original-locale backup exists, or the backup points at
// the same offset as the broken entry (restoring it would change nothing).
DAT_RESULT DatFile::RepairDatFile() {
    for (auto file : dictionary_) {
        auto subfile = file.second;
        auto file_id = file.first;
        if (CorrectSubfile(subfile))
            continue;  // entry is consistent with the .dat contents
        if (orig_dict_.count(file_id) == 0 || subfile->file_offset() == orig_dict_[file_id]->file_offset())
            return CRITICAL_DAT_ERROR;
        // Roll the live entry back to the original-locale copy.
        dictionary_[file_id]->file_offset_ = orig_dict_[file_id]->file_offset_;
        dictionary_[file_id]->file_size_ = orig_dict_[file_id]->file_size_;
        dictionary_[file_id]->block_size_ = orig_dict_[file_id]->block_size_;
        dictionary_[file_id]->timestamp_ = orig_dict_[file_id]->timestamp_;
        dictionary_[file_id]->version_ = orig_dict_[file_id]->version_;
        // The locale dictionaries no longer describe this file accurately.
        // (Erasing from patch_dict_/orig_dict_ while iterating dictionary_ is
        // safe — they are distinct containers.)
        patch_dict_.erase(file_id);
        orig_dict_.erase(file_id);
    }
    return SUCCESS;
}
- DAT_RESULT DatFile::SetLocale(LOCALE locale) {
- LOG(INFO) << "Setting locale to " << (locale == PATCHED ? " PATCHED" : " ORIGINAL");
- if (dat_state_ < READY) {
- LOG(ERROR) << "Dat state isn't READY. Cannot set locale.";
- return INCORRECT_STATE_ERROR;
- }
- if (current_locale_ == locale) {
- return SUCCESS;
- }
- dat_state_ = UPDATED;
- auto dict = GetLocaleDictReference(locale);
- for (auto file : *dict) {
- if (dictionary_[file.first] == nullptr) {
- LOG(WARNING) << "In locale dictionary there is file with file_id = " << file.first
- << "which is not in .dat file! Passing it and removing from locale dictionary";
- dict->erase(file.first);
- continue;
- }
- if (dictionary_[file.first]->MakeHeaderData().CutData(8, 16) ==
- file.second->MakeHeaderData().CutData(8, 16) ||
- inactive_categories.count(orig_dict_[file.first]->category) != 0)
- continue;
- long long file_id = file.first;
- Subfile *new_file = file.second;
- dictionary_[file_id]->file_offset_ = new_file->file_offset_;
- dictionary_[file_id]->file_size_ = new_file->file_size_;
- dictionary_[file_id]->block_size_ = new_file->block_size_;
- dictionary_[file_id]->timestamp_ = new_file->timestamp_;
- dictionary_[file_id]->version_ = new_file->version_;
- patched_list.insert(file.first);
- dat_state_ = UPDATED;
- }
- current_locale_ = locale;
- CommitChanges();
- LOG(INFO) << "Locale set successfull";
- return SUCCESS;
- }
- bool DatFile::CheckIfUpdatedByGame() {
- LOG(INFO) << "Checking if DatFile was updated by LotRO";
- if (!pending_patch_.empty())
- return true;
- if (current_locale_ == ORIGINAL)
- return false;
- bool updated = false;
- for (auto i : dictionary_) {
- long long file_id = i.first;
- Subfile *subfile = i.second;
- if (inactive_categories.count(subfile->category) > 0)
- continue;
- if (patch_dict_.count(file_id) > 0
- && (subfile->file_size() != patch_dict_[file_id]->file_size()
- || subfile->file_offset() != patch_dict_[file_id]->file_offset()
- || subfile->block_size() != patch_dict_[file_id]->block_size())) {
- orig_dict_.erase(file_id);
- patch_dict_.erase(file_id);
- pending_patch_.insert(file_id);
- updated = true;
- dat_state_ = UPDATED;
- }
- }
- CommitChanges();
- LOG(INFO) << "Dat file " << (updated ? "WAS " : "WASN'T ") << "updated by game.";
- return updated;
- }
- DAT_RESULT DatFile::RepairPatches(Database *db) {
- // TODO: Find errors
- LOG(INFO) << "Repairing patches";
- SubfileData data;
- data = db->GetNextFile();
- while (!data.Empty()) {
- if (pending_patch_.count(data.options["fid"].as<long long>()) > 0) {
- PatchFile(data);
- }
- data = db->GetNextFile();
- }
- CommitChanges();
- LOG(INFO) << "Successfully repaired with database";
- return SUCCESS;
- }
// Clears the queue of file ids awaiting repair. Always returns SUCCESS.
DAT_RESULT DatFile::FinishRepairingPatches() {
    LOG(INFO) << "Finishing repairing patches";
    pending_patch_.clear();
    return SUCCESS;
}
- LOCALE DatFile::current_locale() {
- if (dat_state_ < READY) {
- LOG(ERROR) << "dat_file is in incorrect state!";
- return ORIGINAL;
- }
- if (current_locale_ != PATCHED && current_locale_ != ORIGINAL) {
- LOG(ERROR) << "locale has incorrect value. Setting it to original";
- current_locale_ = ORIGINAL;
- }
- return current_locale_;
- }
- DAT_RESULT DatFile::CommitLocales() {
- LOG(INFO) << "Committing locales...";
- SubfileData data = dictionary_[2013266257]->PrepareForExport(GetFileData(dictionary_[2013266257]));
- data.options["fid"] = "2013266257";
- data.options["ext"] = ".unknown";
- BinaryData old_data = BinaryData(GetFileData(dictionary_[2013266257u]));
- // 14 bytes for old data
- // 15 bytes for "Hi from Gi1dor"
- // 4 bytes for LOCALE
- // 4 bytes for orig_dict.size()
- // (32 + 4) * orig_dict.size() bytes for orig_dict data
- // 4 bytes for patch_dict.size()
- // (32 + 4) * patch_dict.size() bytes for patch_dict data
- // 4 bytes for inactive_categories list
- // 4 * inactive_categories.size() bytes for inactive_categories data
- data.binary_data = BinaryData(14 + 15 + 4
- + 4 + (32 + 4) * orig_dict_.size()
- + 4 + (32 + 4) * patch_dict_.size()
- + 4 + 4 * inactive_categories.size());
- size_t current_size = 0;
- data.binary_data.Append(GetFileData(dictionary_[2013266257u]).CutData(0, 14), current_size);
- current_size += 14;
- data.binary_data.Append(BinaryData("Hi from Gi1dor!", 15), current_size);
- current_size += 15;
- data.binary_data.Append(BinaryData((current_locale_ == ORIGINAL ? "ORIG" : "PATC"), 4), current_size);
- current_size += 4;
- data.binary_data.Append(BinaryData::FromNumber<4>(orig_dict_.size()), current_size);
- current_size += 4;
- for (auto file : orig_dict_) {
- data.binary_data.Append(file.second->MakeHeaderData(), current_size);
- current_size += 32;
- data.binary_data.Append(BinaryData::FromNumber<4>(file.second->category), current_size);
- current_size += 4;
- }
- data.binary_data.Append(BinaryData::FromNumber<4>(patch_dict_.size()), current_size);
- current_size += 4;
- for (auto file : patch_dict_) {
- data.binary_data.Append(file.second->MakeHeaderData(), current_size);
- current_size += 32;
- data.binary_data.Append(BinaryData::FromNumber<4>(file.second->category), current_size);
- current_size += 4;
- }
- data.binary_data.Append(BinaryData::FromNumber<4>(inactive_categories.size()), current_size);
- current_size += 4;
- for (auto patch_id : inactive_categories) {
- data.binary_data.Append(BinaryData::FromNumber<4>(patch_id), current_size);
- current_size += 4;
- }
- DAT_RESULT result = PatchFile(data, true);
- if (result != SUCCESS)
- return result;
- LOG(INFO) << "Locales commited successfully";
- return SUCCESS;
- }
- DAT_RESULT DatFile::EnableCategory(int category) {
- LOG(INFO) << "Enabling category " << category;
- if (inactive_categories.count(category) == 0)
- return SUCCESS;
- inactive_categories.erase(category);
- dat_state_ = UPDATED;
- for (auto file : dictionary_) {
- auto file_id = file.first;
- if (patch_dict_.count(file_id) > 0 && patch_dict_[file_id]->category == category) {
- file.second->file_offset_ = patch_dict_[file_id]->file_offset_;
- file.second->file_size_ = patch_dict_[file_id]->file_size_;
- file.second->block_size_ = patch_dict_[file_id]->block_size_;
- file.second->timestamp_ = patch_dict_[file_id]->timestamp_;
- file.second->version_ = patch_dict_[file_id]->version_;
- patched_list.insert(file_id);
- }
- }
- LOG(INFO) << "Category " << category << " enabled successfully";
- return SUCCESS;
- }
- DAT_RESULT DatFile::DisableCategory(int category) {
- LOG(INFO) << "Disabling category " << category;
- if (inactive_categories.count(category) != 0)
- return SUCCESS;
- inactive_categories.insert(category);
- dat_state_ = UPDATED;
- for (auto file : dictionary_) {
- auto file_id = file.first;
- if (orig_dict_.count(file_id) && orig_dict_[file_id]->category == category) {
- file.second->file_offset_ = orig_dict_[file_id]->file_offset_;
- file.second->file_size_ = orig_dict_[file_id]->file_size_;
- file.second->block_size_ = orig_dict_[file_id]->block_size_;
- file.second->timestamp_ = orig_dict_[file_id]->timestamp_;
- file.second->version_ = orig_dict_[file_id]->version_;
- patched_list.insert(file_id);
- }
- }
- LOG(INFO) << "Category " << category << " disabled successfully";
- return SUCCESS;
- }
// Read-only accessor for the set of currently disabled category ids.
const std::set<long long> &DatFile::GetInactiveCategoriesList() {
    return inactive_categories;
}
- bool DatFile::CheckIfNotPatched() {
- LOG(INFO) << "DatFile " << (patch_dict_.empty() ? "HASN'T " : "HAS already")
- << " been patched by LEGACY launcher!";
- return patch_dict_.empty();
- }
- bool DatFile::CheckIfPatchedByOldLauncher() {
- LOG(INFO) << "DatFile " << (dictionary_.count(620750000) == 0 ? "HASN'T " : "HAS already")
- << " been patched by OLD LAUNCHER!";
- return dictionary_.count(620750000) > 0;
- }
// Path of the .dat file this object operates on.
const std::string &DatFile::filename() const {
    return filename_;
}
- }
- }
|