//
// Created by Иван_Архипов on 31.10.2017.
//

#include "DatFile.h"

#include "BinaryData.h"
#include "DatException.h"
#include "SubDirectory.h"
#include "Subfile.h"
#include "SubfileData.h"

#include <locale>
#include <algorithm>

extern "C++"
{
namespace LOTRO_DAT {
    /// Constructs a DatFile in the CLOSED state, with no open file handle
    /// and no directory tree loaded.
    DatFile::DatFile() {
        file_handler_ = nullptr;
        root_directory_ = nullptr;
        dat_state_ = CLOSED;
    }

    /// Opens .dat file "filename" and drives the full initialisation chain:
    /// OpenDatFile -> ReadSuperBlock -> MakeDirectories -> MakeDictionary ->
    /// InitLocales. Each stage advances dat_state_; the file only becomes
    /// READY when the whole chain succeeded.
    /// Returns true on success. On any failure the file is closed and false
    /// is returned; no exception escapes this method.
    bool DatFile::InitDatFile(const std::string &filename, int dat_id) {
        try {
            // Already initialised with this very file - nothing to do.
            if (dat_state_ != CLOSED && filename == filename_)
                return true;

            // Switching to a different file - release the current one first.
            if (dat_state_ != CLOSED && filename != filename_)
                CloseDatFile();

            dat_id_ = dat_id;
            dat_state_ = CLOSED;
            current_locale_ = ORIGINAL;
            root_directory_ = nullptr;
            file_handler_ = nullptr;

            filename_ = filename;

            OpenDatFile(filename.c_str());
            ReadSuperBlock();
            MakeDirectories();
            MakeDictionary();

            InitLocales();

            // MakeDictionary() leaves dat_state_ at SUCCESS_DICTIONARY only if
            // every earlier stage completed; anything else is a failed init.
            if (dat_state_ == SUCCESS_DICTIONARY)
                dat_state_ = READY;
            else
                throw DatException("Bad DatFile initialization! Not all init states were successfully passed!",
                                   INIT_EXCEPTION);
        } catch (std::exception &e) {
            fprintf(stderr, "Bad DatFile::InitDatFile() - caught exception %s. File closed\n", e.what());
            CloseDatFile();
            return false;
        }
        return true;
    }

    /// Accessor for the current lifecycle state of this DatFile.
    DAT_STATE DatFile::DatFileState() const { return dat_state_; }

    /// Destructor: delegates to CloseDatFile(), which commits pending changes
    /// (if any) and releases all resources; CloseDatFile() catches its own
    /// exceptions, so none can escape the destructor.
    DatFile::~DatFile() {
        CloseDatFile();
    }

    /// Extracts file with file_id.
    /// If path is undefined then it will be recognised as current working directory
    /// Output file path consists of "path + file_id + file_extension";
    /// NOTICE: The directory, mentioned in "std::string path" variable SHOULD BE ALREADY CREATED;
    /// Otherwise extraction fails and false is returned.
    /// Returns true, if file was successfully extracted;
    /// Throws DatException() if DatFile is not in READY (or later) state.

    bool DatFile::ExtractFile(long long file_id, const std::string &path) {
        if (dat_state_ < READY) {
            throw DatException("Bad DatFile::ExtractFile() - invalid DatFile state!", EXPORT_EXCEPTION);
        }

        // Look the file up with find(): operator[] would silently insert a
        // nullptr entry into dictionary_ for an unknown file_id and crash on
        // dereference below.
        auto it = dictionary_.find(file_id);
        if (it == dictionary_.end() || it->second == nullptr) {
            fprintf(stderr, "ERROR DatFile::ExtractFile() - there is no file with file_id = %lld in dictionary.\n", file_id);
            return false;
        }

        BinaryData file_data;

        try {
            // Skip the 8-byte subfile preamble (see GetFileData's offset parameter).
            file_data = GetFileData(it->second, 8);
        } catch (std::exception &e) {
            fprintf(stderr, "Caught %s exception.\n", e.what());
            fprintf(stderr, "Unable to extract file due to uncaught exception while getting file data. Passing...\n");
            return false;
        }

        try {
            SubfileData export_data = it->second->PrepareForExport(file_data);
            export_data.binary_data.WriteToFile(path + export_data.options["ext"].as<std::string>());
        } catch (std::exception &e) {
            fprintf(stderr, "Caught %s exception.\n", e.what());
            fprintf(stderr, "Unable to extract file due to uncaught exception while preparing file for export. Passing...\n");
            return false;
        }
        return true;
    }

    /// Extracts file with file_id to database "db".
    /// DATABASE SHOULD BE ALREADY CREATED; Otherwise DatException will be called.
    /// NOTICE: The directory, mentioned in "std::string path" variable SHOULD BE ALREADY CREATED;
    /// Otherwise DatException() will be thrown.
    /// Returns true, if file was successfully extracted;
    /// Throws DatException() if undefined behaviour happened

    bool DatFile::ExtractFile(long long file_id, Database *db) {
        if (dat_state_ < READY) {
            throw DatException("Bad DatFile::ExtractFile() - invalid DatFile state!", EXPORT_EXCEPTION);
        }

        BinaryData file_data;

        try {
            file_data = GetFileData(dictionary_[file_id], 8);
        } catch (std::exception &e) {
            fprintf(stderr, "Caught %s exception.", e.what());

            fprintf(stderr, "Unable to extract file due to uncaught exception while getting file data. Passing...\n");
            return false;
        }

        SubfileData export_data;

        try {
            export_data = dictionary_[file_id]->PrepareForExport(file_data);
            export_data.options["did"] = dat_id_;
        } catch (std::exception &e) {
            fprintf(stderr, "Caught %s exception.", e.what());

            fprintf(stderr, "Unable to extract file due to uncaught exception while preparing file for export. Passing...\n");
            return false;
        }

        if (export_data == SubfileData()) {
            fprintf(stderr, "WARNING: file with id %lld is empty. Passing it\n", dictionary_[file_id]->file_id());
            return true;
        }

        try {
            db->PushFile(export_data);
        } catch (std::exception &e) {
            fprintf(stderr, "Caught %s exception.", e.what());
            printf("Caught %s exception.", e.what());
            fflush(stdout);

            fprintf(stderr, "Unable to put file or it's part to database. Continuing without this part. Database may be not complete\n");
        }
        return true;
    }

    /// Extracts every file whose FileType() equals "type" to
    /// "path + file_id" (the extension is appended by ExtractFile()).
    /// If path is empty the current working directory is used; the target
    /// directory must already exist.
    /// Returns the number of successfully extracted files.
    /// Throws DatException() if DatFile is not in READY (or later) state.

    int DatFile::ExtractAllFilesByType(FILE_TYPE type, std::string path) {
        if (dat_state_ < READY) {
            throw DatException("Bad DatFile::ExtractAllFilesByType() - invalid DatFile state!", EXPORT_EXCEPTION);
        }

        int extracted = 0;
        for (const auto &entry : dictionary_) {
            Subfile *subfile = entry.second;
            if (subfile->FileType() != type)
                continue;
            if (ExtractFile(subfile->file_id(), path + std::to_string(subfile->file_id())))
                ++extracted;
        }
        return extracted;
    }

    /// Extracts every file whose FileType() equals "type" into database "db".
    /// The database must already be created.
    /// Returns the number of successfully extracted files.
    /// Throws DatException() if DatFile is not in READY (or later) state.

    int DatFile::ExtractAllFilesByType(FILE_TYPE type, Database *db) {
        if (dat_state_ < READY) {
            throw DatException("Bad DatFile::ExtractAllFilesByType() - invalid DatFile state!", EXPORT_EXCEPTION);
        }

        int extracted = 0;
        for (const auto &entry : dictionary_) {
            Subfile *subfile = entry.second;
            if (subfile->FileType() != type)
                continue;
            if (ExtractFile(subfile->file_id(), db))
                ++extracted;
        }
        return extracted;
    }

    // TODO: Write description and make asserts
    bool DatFile::PatchFile(const char *filename, YAML::Node options) {
        if (dat_state_ <  READY) {
            throw DatException("Bad DatFile::PatchFile() - invalid DatFile state!", EXPORT_EXCEPTION);
        }

        if (options["did"].IsDefined() && options["did"].as<int>() != dat_id_)
            return false;

        BinaryData data;
        data.ReadFromFile(filename);

        auto file_id = options["fid"].as<long long>();

        if (dictionary_[file_id] == nullptr) {
            fprintf(stderr, "ERROR DatFile::PatchFile() - Cannot patch file - there is no file in dictionary with file_id = %lld.\n", file_id);
            return false;
        }

        BinaryData old_data = GetFileData(dictionary_[file_id]);
        data = dictionary_[file_id]->MakeForImport(old_data, SubfileData(data, u"", options));

        try {
            ApplyFilePatch(dictionary_[file_id], data);
        } catch (std::exception &e) {
            fprintf(stderr, "Caught %s exception.", e.what());

            fprintf(stderr,
                    "Some errors happened while patching file with id = %lld. Continuing process without this file..\n"
                            "WARNING: DAT FILE CAN BE CORRUPTED!\n", file_id);
            printf("Some errors happened while patching file with id = %lld. Continuing process without this file..\n"
                           "WARNING: DAT FILE CAN BE CORRUPTED!\n", file_id);
            fflush(stdout);
            return false;
        }
        return true;
    }

    // TODO: Write description and make asserts
    bool DatFile::PatchFile(const SubfileData &data, bool rewrite_original) {
        if (dat_state_ <  READY) {
            throw DatException("Bad DatFile::PatchFile() - invalid DatFile state!", EXPORT_EXCEPTION);
        }

        auto file_id = data.options["fid"].as<long long>();
        Subfile *file = dictionary_[file_id];

        if (file == nullptr) {
            fprintf(stderr, "ERROR DatFile::PatchFile() - Cannot patch file - there is no file in dictionary with file_id = %lld.\n", file_id);
            return false;
        }

        // If file has inactive category, then we should set it to patched state in order to commit patch and
        // then in ApplyFilePatch(), if new category is still inactive, return dictionary to its original state;

        if (inactive_categories.count(file->category) != 0 && patch_dict_.count(file_id) != 0) {
            dictionary_[file_id]->file_offset_ = patch_dict_[file_id]->file_offset_;
            dictionary_[file_id]->file_size_ = patch_dict_[file_id]->file_size_;
            dictionary_[file_id]->block_size_ = patch_dict_[file_id]->block_size_;
            dictionary_[file_id]->timestamp_ = patch_dict_[file_id]->timestamp_;
            dictionary_[file_id]->version_ = patch_dict_[file_id]->version_;
        }

        if (data.options["cat"].IsDefined()) {
            file->category = data.options["cat"].as<long long>();
        } else {
            fprintf(stderr, "WARNING DatFile::PatchFile() - category option 'cat' was not "
                            "set in patch subfile with id = %lld. Setting it to 1\n", file_id);
            file->category = 1;
        }

        BinaryData old_data = GetFileData(file);
        BinaryData patch_data = file->MakeForImport(old_data, data);
        ApplyFilePatch(file, patch_data, rewrite_original);
        return true;
    }

    /// Applies every patch record stored in database "db", fetching them one
    /// by one with GetNextFile() until the sentinel (default-constructed
    /// SubfileData) is returned, then commits all changes.
    /// A failed single patch is logged and skipped; a failed fetch aborts and
    /// returns false. Returns true once the whole database was processed.
    /// Throws DatException() if DatFile is not in READY (or later) state.
    bool DatFile::PatchAllDatabase(Database *db) {
        if (dat_state_ < READY) {
            throw DatException("Bad DatFile::PatchAllDatabase() - invalid DatFile state!", EXPORT_EXCEPTION);
        }

        const SubfileData sentinel;
        while (true) {
            SubfileData entry;
            try {
                entry = db->GetNextFile();
            } catch (std::exception &e) {
                fprintf(stderr, "Caught %s exception.\n", e.what());
                fprintf(stderr, "DatFile::PatchAllDatabase() error! Caught exception while fetching file from database! Stopping...\n");
                return false;
            }

            // A default-constructed record marks the end of the database.
            if (entry == sentinel)
                break;

            try {
                PatchFile(entry);
            } catch (std::exception &e) {
                fprintf(stderr, "Caught %s exception.\n", e.what());
                fprintf(stderr, "DatFile::PatchAllDatabase() error! Caught exception while patching file! Passing...\n");
            }
        }
        CommitChanges();
        return true;
    }

    /// DatFile::WriteUnorderedDictionary(...);
    /// Prints list of all found files with some information about them to file.
    /// Gets std::string path - path to directory, where the file will be written with name "dict.txt"

    void DatFile::WriteUnorderedDictionary(std::string path) const {
        FILE *f = nullptr;
        fopen_s(&f, (path + "dict.txt").c_str(), "w");

        // Guard against an unwritable path: the original dereferenced a
        // nullptr FILE* when fopen_s failed.
        if (f == nullptr) {
            fprintf(stderr, "ERROR DatFile::WriteUnorderedDictionary() - unable to open %sdict.txt for writing\n", path.c_str());
            return;
        }

        fprintf(f, "file_id offset size size2 extension\n");
        for (const auto &i : dictionary_) {
            fprintf(f, "%lld %lld %lld %lld %s\n", i.second->file_id(), i.second->file_offset(), i.second->file_size(),
                    i.second->block_size(), i.second->Extension().c_str());
        }
        fclose(f);
    }

    /// DatFile::files_number();
    /// Number of entries in the main dictionary of this DatFile.
    /// Some of them may be empty or erased.

    long long DatFile::files_number() const {
        return static_cast<long long>(dictionary_.size());
    }

    /// DatFile::GetFileData()
    /// Assembles and returns the (possibly fragmented) contents of "file",
    /// starting "offset" bytes into the subfile's first on-disk block.
    /// Throws DatException(READ_EXCEPTION) if the id stored on disk does not
    /// match the dictionary entry.

    BinaryData DatFile::GetFileData(const Subfile *file, long long int offset) {
        // The subfile's own id is stored 8 bytes into its block; verify it
        // against the dictionary entry to detect a stale/corrupted offset.
        BinaryData mfile_id(4);
        ReadData(mfile_id, 4, file->file_offset() + 8);
        if (file->file_id() != mfile_id.ToNumber<4>(0))
            throw DatException("Bad DatFile::GetFileData() - file_id in Subfile doesn't match to file_id in DatFile.", READ_EXCEPTION);

        BinaryData data((unsigned)(file->file_size() + (8 - offset)));
        // Unfragmented case: the whole file fits into one contiguous block.
        if (file->block_size() >= file->file_size() + 8) {
            ReadData(data, file->file_size() + (8 - offset), file->file_offset() + offset);
            return data;
        }

        // Fragmented case: the first 4 bytes of the block hold the number of
        // additional fragments.
        BinaryData fragments_count(4);
        ReadData(fragments_count, 4, file->file_offset());

        long long fragments_number = fragments_count.ToNumber<4>(0);

        // Payload bytes stored in the first block, before the per-fragment
        // table that occupies the last 8 * fragments_number bytes of it.
        long long current_block_size = file->block_size() - offset - 8 * fragments_number;

        ReadData(data, current_block_size , file->file_offset() + offset);

        // Fragment table: 8 bytes per fragment - a 4-byte size followed by a
        // 4-byte offset into the .dat file.
        BinaryData FragmentsDictionary(8 * unsigned(fragments_number));
        ReadData(FragmentsDictionary, 8 * unsigned(fragments_number), file->file_offset() + file->block_size() - 8 * fragments_number);


        // Append each fragment to the buffer; the read size is clamped so the
        // last fragment cannot overrun the declared file size.
        for (long long i = 0; i < fragments_number; i++) {
            long long fragment_size = FragmentsDictionary.ToNumber<4>(8 * i);
            long long fragment_offset = FragmentsDictionary.ToNumber<4>(8 * i + 4);
            ReadData(data, std::min(fragment_size, file->file_size() - current_block_size), fragment_offset, current_block_size );
            current_block_size += fragment_size;
        }

        return data;
    }

    /// DatFile special functions for opening and reading/writing raw data.
    /// Shouldn't be used by any external classes except Subfile and Subdirectory.

    void DatFile::OpenDatFile(const char *dat_name) {
        if (dat_state_ != CLOSED)
            throw DatException("Bad initialisation of DatFile - current DatFile isn't in correct state!",
                               INIT_EXCEPTION);

        fopen_s(&file_handler_, dat_name, "r+b");

        if (file_handler_ == nullptr) {
            std::string err = "Bad DatFile::OpenDatFile. Unable to open file ";
            err += dat_name;
            throw DatException(err.c_str(), NOFILE_EXCEPTION);
        }

        fseek(file_handler_, 0, SEEK_END);
        file_size_ = ftell(file_handler_);
        fseek(file_handler_, 0, SEEK_SET);

        dat_state_ = SUCCESS_OPENED;
    }

    /// Parses and validates the 1 KiB superblock at the start of the file:
    /// reads the two magic constants, version fields, fragmentation journal
    /// offset and root directory offset, and cross-checks the size recorded
    /// in the header against the real file size.
    /// Advances dat_state_ to SUCCESS_SUPERBLOCK on success;
    /// throws DatException(INIT_EXCEPTION) on any mismatch.
    void DatFile::ReadSuperBlock() {
        if (dat_state_ != SUCCESS_OPENED)
            throw DatException("Bad DatFile::ReadSuperBlock() - DatFile isn't in valid state!", INIT_EXCEPTION);

        BinaryData data(1024);
        ReadData(data, 1024);

        constant1_ = data.ToNumber<4>(0x100); // magic, must equal 0x4C5000
        constant2_ = data.ToNumber<4>(0x140); // magic, must equal 0x5442
        version1_ = data.ToNumber<4>(0x14C);
        version2_ = data.ToNumber<4>(0x150);
        fragmentation_journal_offset_ = data.ToNumber<4>(0x154);
        root_directory_offset_ = data.ToNumber<4>(0x160);
        auto size1 = data.ToNumber<4>(0x148); // file size as recorded in the header

        if (constant1_ != 0x4C5000)
            throw DatException(
                    "Bad DatFile::ReadSuperBlock - variable at position 0x100 is not equal to .dat file constant!",
                    INIT_EXCEPTION);

        if (constant2_ != 0x5442)
            throw DatException(
                    "Bad DatFile::ReadSuperBlock - variable at position 0x140 is not equal to .dat file constant!",
                    INIT_EXCEPTION);

        if (file_size_ != size1)
            throw DatException(
                    "Bad DatFile::ReadSuperBlock - variable at 0x148 position is not equal to .dat file size!",
                    INIT_EXCEPTION);

        dat_state_ = SUCCESS_SUPERBLOCK;
    }

    /// Builds the directory tree by constructing the root SubDirectory from
    /// the offset found in the superblock.
    /// Advances dat_state_ to SUCCESS_DIRECTORIES;
    /// throws DatException(INIT_EXCEPTION) when called out of order.
    void DatFile::MakeDirectories() {
        if (dat_state_ != SUCCESS_SUPERBLOCK)
            throw DatException("Bad DatFile::MakeDirectories() - DatFile isn't in valid state!", INIT_EXCEPTION);

        root_directory_ = new SubDirectory(static_cast<unsigned>(root_directory_offset_), this);
        dat_state_ = SUCCESS_DIRECTORIES;
    }

    /// Fills dictionary_ by walking the directory tree built by
    /// MakeDirectories(). On success dat_state_ becomes SUCCESS_DICTIONARY;
    /// if the walk throws, the error is logged and the state is left as-is
    /// (which InitDatFile() later treats as a failed initialisation).
    void DatFile::MakeDictionary() {
        if (dat_state_ != SUCCESS_DIRECTORIES)
            throw DatException("Bad DatFile::MakeDictionary() - DatFile isn't in valid state!", INIT_EXCEPTION);

        try {
            if (root_directory_ == nullptr)
                throw DatException("Bad DatFile::MakeDictionary() - root_directory is nullptr!", INIT_EXCEPTION);

            root_directory_->MakeDictionary(dictionary_);
            dat_state_ = SUCCESS_DICTIONARY;
        } catch (std::exception &e) {
            fprintf(stderr, "Caught %s exception.", e.what());
            fprintf(stderr, "Bad DatFile::MakeDictionary() - File is corrupted?\n");
        }
    }

    /// Reads "size" bytes from the .dat file at position "offset" into "data"
    /// starting at "data_offset" inside the buffer, then runs the buffer's
    /// compression check.
    /// Throws DatException(READ_EXCEPTION) on invalid state, out-of-range
    /// arguments, or a failed read.
    void DatFile::ReadData(BinaryData &data, long long size, long long offset, long long data_offset) {
        if (dat_state_ == CLOSED)
            throw DatException("Bad DatFile::ReadData() - DatFile isn't in valid state!", READ_EXCEPTION);

        if (data_offset + size > data.size()) {
            std::string err = "Bad DatFile::ReadData - trying to read more than BinaryData size\n";
            err += std::string("Reading ") + std::to_string(size) + std::string(" bytes from ")
                   + std::to_string(offset) + std::string(" position in dat file.");
            throw DatException(err.c_str(), READ_EXCEPTION);
        }

        if (offset + size > file_size_) {
            std::string err = "Bad DatFile::ReadData - trying to read more than DatFile size elapsed\n";
            err += std::string("Reading ") + std::to_string(size) + std::string(" bytes from ")
                   + std::to_string(offset) + std::string(" position in dat file.");
            throw DatException(err.c_str(), READ_EXCEPTION);
        }

        _fseeki64(file_handler_, offset, SEEK_SET);
        // Check the fread result: the original ignored it, so a short read
        // (I/O error, truncated file) left the buffer partially filled and
        // went unnoticed.
        if (size > 0 && fread(data.data() + data_offset, unsigned(size), 1, file_handler_) != 1) {
            std::string err = "Bad DatFile::ReadData - fread failed\n";
            err += std::string("Reading ") + std::to_string(size) + std::string(" bytes from ")
                   + std::to_string(offset) + std::string(" position in dat file.");
            throw DatException(err.c_str(), READ_EXCEPTION);
        }
        data.CheckCompression();
    }

    /// Writes "size" bytes of "data" (starting at "data_offset" inside the
    /// buffer) to the .dat file at position "offset".
    /// Throws DatException(WRITE_EXCEPTION) on invalid state, out-of-range
    /// arguments, or a failed write.
    void DatFile::WriteData(const BinaryData &data, long long size, long long offset, long long data_offset) {
        if (dat_state_ < READY)
            throw DatException("Bad DatFile::WriteData() - DatFile isn't in valid state!", WRITE_EXCEPTION);

        // Validate the source range before touching the file position (the
        // original seeked first and could throw after moving the cursor).
        if (data_offset + size > data.size())
            throw DatException("Bad DatFile::WriteData - trying to write more than BinaryData size", WRITE_EXCEPTION);

        _fseeki64(file_handler_, offset, SEEK_SET);

        // Check the fwrite result so a silent short write cannot corrupt the
        // .dat file without anyone noticing.
        if (size > 0 && fwrite(data.data() + data_offset, unsigned(size), 1, file_handler_) != 1)
            throw DatException("Bad DatFile::WriteData - fwrite failed", WRITE_EXCEPTION);
    }

    /// Special functions used by patch process.
    /// Shouldn't be used by any external class.

    /// Writes patched "data" for "file" into the .dat file. New space is
    /// allocated at the end of the file when the data does not fit the file's
    /// current block; locale dictionaries (orig_dict_/patch_dict_) are kept
    /// in sync unless rewrite_original is true.
    void DatFile::ApplyFilePatch(Subfile *file, const BinaryData &data, bool rewrite_original) {
        auto file_id = file->file_id();
        // Each file may be patched at most once per commit cycle.
        if (patched_list.count(file_id) != 0) {
            fprintf(stderr, "Warning: DatFile::ApplyFilePatch - found 2 files in patch with the same file_id = %lld. Passing last...\n", file->file_id());
            return;
        }

        // Patches go to the PATCHED locale unless the caller explicitly asked
        // to overwrite the original data in place.
        if (current_locale() != PATCHED && !rewrite_original) {
            std::cout << "Changing locale to RU in order to patch file" << std::endl;
            SetLocale(PATCHED);
        }
        dat_state_ = UPDATED;

        // Remember the original header the first time this file is patched.
        if (orig_dict_.count(file_id) == 0 && !rewrite_original) {
            orig_dict_[file_id] = new Subfile(this, file->MakeHeaderData());
        }

        // journal[0].second tracks where new data is appended; keep it at the
        // current end of the file.
        auto journal = GetFragmentationJournal();
        if (journal[0].second != file_size_) {
            journal[0].second = file_size_;
        }

        file->file_size_ = data.size() - 8;

        // Allocate fresh space at the end of the .dat file when this file has
        // no patched copy yet, or the new data no longer fits its block.
        if ((patch_dict_.count(file_id) == 0 && !rewrite_original) || data.size() > file->block_size()) {
            file->file_offset_ = journal[0].second;
            file->block_size_ = std::max(data.size(), 256u);

            journal[0].second += data.size();

            // Extend the file with zeroes before the real data is written.
            BinaryData nulls(data.size());
            WriteData(nulls, nulls.size(), file_size_);

            this->file_size_ += data.size();
        }

        // Patched data is always stored unfragmented: fragment count = 0.
        BinaryData fragments_count(4);
        fragments_count = BinaryData::FromNumber<4>(0);

        BinaryData file_data = fragments_count + data.CutData(4);

        // Sanity check: the id embedded in the assembled block must match.
        if (file_id != file_data.ToNumber<4>(8))
            throw DatException("Bad DatFile::ApplyFilePatch() - Created data's file_id doesn't match to original! "
                                       "Patch wasn't written to .dat file");

        WriteData(file_data, file_data.size(), file->file_offset());

        patched_list.insert(file_id);

        if (!rewrite_original) {
            patch_dict_.erase(file_id); // drop the old entry from the patched-locale dictionary
            patch_dict_[file_id] = new Subfile(this, file->MakeHeaderData()); // and store the new header
        }

        // If category is forbidden, then return file header data to original state
        if (inactive_categories.count(file->category) != 0) {
            dictionary_[file_id]->file_offset_ = orig_dict_[file_id]->file_offset_;
            dictionary_[file_id]->file_size_ = orig_dict_[file_id]->file_size_;
            dictionary_[file_id]->block_size_ = orig_dict_[file_id]->block_size_;
            dictionary_[file_id]->timestamp_ = orig_dict_[file_id]->timestamp_;
            dictionary_[file_id]->version_ = orig_dict_[file_id]->version_;
        }

        // Keep the category consistent across both locale dictionaries.
        if (orig_dict_.count(file_id) != 0)
            orig_dict_[file_id]->category = file->category;
        if (patch_dict_.count(file_id) != 0)
            patch_dict_[file_id]->category = file->category;

        UpdateFragmentationJournal(journal);
    }

    /// Propagates the header changes of all patched files (patched_list)
    /// from dictionary_ back into the on-disk directory tree.
    void DatFile::UpdateSubdirectories() {
        root_directory_->UpdateDirectories(patched_list, dictionary_);
    }

    std::vector<std::pair<long long, long long> > DatFile::GetFragmentationJournal() {
        BinaryData data(8);
        ReadData(data, 8, fragmentation_journal_offset_ + 8);
        std::vector<std::pair<long long, long long> > result;
        result.emplace_back(std::make_pair(data.ToNumber<4>(0), data.ToNumber<4>(4)));
        return result;
    }

    void DatFile::UpdateHeader() {
        WriteData(BinaryData::FromNumber<4>(constant1_), 4, 0x100);
        WriteData(BinaryData::FromNumber<4>(constant2_), 4, 0x140);
        WriteData(BinaryData::FromNumber<4>(file_size_), 4, 0x148);
        WriteData(BinaryData::FromNumber<4>(version1_), 4, 0x14C);
        WriteData(BinaryData::FromNumber<4>(version2_), 4, 0x150);
        WriteData(BinaryData::FromNumber<4>(fragmentation_journal_offset_), 4, 0x154);
        WriteData(BinaryData::FromNumber<4>(root_directory_offset_), 4, 0x160);
    }

    /// Writes the given journal entries back to disk. Entry i occupies
    /// 8 bytes at fragmentation_journal_offset_ + 8 * (i + 1): a 4-byte size
    /// followed by a 4-byte offset.
    void DatFile::UpdateFragmentationJournal(const std::vector<std::pair<long long, long long> > &journal) {
        long long entry_position = fragmentation_journal_offset_ + 8;
        for (const auto &entry : journal) {
            WriteData(BinaryData::FromNumber<4>(entry.first), 4, entry_position);
            WriteData(BinaryData::FromNumber<4>(entry.second), 4, entry_position + 4);
            entry_position += 8;
        }
    }

    /// Flushes everything the patch process changed to disk: rewrites the
    /// locale service data, reserves fresh space for the fragmentation
    /// journal's block, then rewrites the journal, the superblock header and
    /// the directory headers of all patched files.
    /// Returns true when there was nothing to commit or the commit succeeded,
    /// false if an exception interrupted it.
    bool DatFile::CommitChanges() {
        try {
            // Nothing was patched since the last commit - nothing to write.
            if (dat_state_ != UPDATED)
                return true;
            std::cout << "There are some updated files. Rewriting dictionary..." << std::endl << std::flush;

            std::cout << "Updating locales..." << std::endl;
            CommitLocales();

            auto journal = GetFragmentationJournal();
            if (!patched_list.empty()) {
                // Point the journal entry at the current end of file and extend
                // the file with journal[0].first zero bytes to cover it.
                // NOTE(review): journal[0].first is presumed to be the size of
                // that reserved block - confirm against the .dat format spec.
                journal[0].second = file_size_;
                BinaryData nulls(size_t(journal[0].first));
                WriteData(nulls, nulls.size(), file_size_);
                file_size_ += journal[0].first;
            }
            UpdateFragmentationJournal(journal);
            std::cout << "Updated fragmentation journal..." << std::endl << std::flush;

            UpdateHeader();
            std::cout << "Updated header..." << std::endl << std::flush;
            UpdateSubdirectories();
            std::cout << "Updated subdirectories..." << std::endl << std::flush;
            std::cout << "Changed " << patched_list.size() << " files..." << std::endl << std::flush;

            std::cout << "Done!" << std::endl;
            patched_list.clear();
            dat_state_ = READY;
            return true;
        } catch (std::exception &e) {
            fprintf(stderr, "Bad DatFile::CommitChanges - caught exception %s\n", e.what());
            return false;
        }
    }

    /// Closes the .dat file: commits pending changes (if the state is
    /// UPDATED), releases the file handle and directory tree, and resets the
    /// object to the CLOSED state. Safe to call repeatedly.
    /// Returns true on success (or if already closed), false if an exception
    /// was caught during teardown.
    bool DatFile::CloseDatFile() {
        if (dat_state_ == CLOSED) {
            fprintf(stderr, "DatFile::CloseDatFile() - dat state is already closed. Nothing to do\n");
            return true;
        }
        try {
            if (dat_state_ == UPDATED) {
                CommitChanges();
            }

            orig_dict_.clear();
            patched_list.clear();
            pending_patch_.clear();

            current_locale_ = ORIGINAL;
            filename_.clear();

            // fclose() releases everything fopen_s() allocated. The original
            // additionally did "delete file_handler_;", which is undefined
            // behaviour on a FILE* - removed.
            if (file_handler_ != nullptr) {
                fclose(file_handler_);
                file_handler_ = nullptr;
            }

            delete root_directory_;
            root_directory_ = nullptr;

            dictionary_.clear();

            // NOTE(review): patch_dict_ is left untouched here to preserve the
            // original behaviour; confirm whether it should be cleared (its
            // Subfile* entries are never deleted).

            dat_state_ = CLOSED;
        } catch (std::exception &e) {
            fprintf(stderr, "Bad DatFile::CloseDatFile() - caught exception %s\n", e.what());
            return false;
        }
        return true;
    }

    // LOCALE MANAGING SECTION

    /// Loads the locale dictionaries (orig_dict_, patch_dict_) and the set of
    /// inactive categories from the service subfile with id 2013266257.
    /// If that file is missing, too short, or fails the marker/locale checks,
    /// the dictionaries are left empty and the .dat file is treated as
    /// unpatched.
    void DatFile::InitLocales() {
        std::cout << "Initialising locales..." << std::endl;
        // NOTE(review): assumes file 2013266257 is present in dictionary_ -
        // operator[] inserts nullptr for an unknown id; confirm GetFileData()
        // tolerates a nullptr Subfile here.
        BinaryData dicts_data = GetFileData(dictionary_[2013266257]);

        // 29 = 14 bytes of legacy data + 15 bytes of the marker string (see
        // the layout summary below); anything shorter holds no locale data.
        if (dicts_data.size() < 29) {
            fprintf(stderr, "WARNING: DatFile::InitLocales() - locales file is empty.. Initialising locale dicts as empty\n");
            std::cout << "Could't find locales file... Continuing without them" << std::endl;;
            return;
        }

        // Bytes [14, 29) hold the 15-character marker; append a NUL byte so
        // the buffer can be read as a C string.
        BinaryData hi_data = dicts_data.CutData(14, 29) + BinaryData("\0", 1);
        std::string hi = std::string((char*)(hi_data.data()));
        std::cout << hi << std::endl;

        if (hi != "Hi from Gi1dor!") {
            fprintf(stderr, "WARNING: DatFile::InitLocales() - Didn't receive 'hi' from Gi1dor... Initialising locale dicts as empty\n");
            std::cout << "Could't init locales' file... Continuing without them" << std::endl;
            return;
        }

        // Next 4 bytes: the active locale tag, "PATC" or "ORIG".
        int offset = 29;
        BinaryData current_locale_data = dicts_data.CutData(offset, offset + 4) + BinaryData("\0", 1);
        std::string locale((char*)(current_locale_data.data()));
        offset += 4;
        std::cout << locale << std::endl;

        if (locale != "PATC" && locale != "ORIG") {
            fprintf(stderr, "WARNING: DatFile::InitLocales() - Incorrect locale... Initialising locale dicts as empty\n");
            std::cout << "Could't recognize locale... Continuing without locales" << std::endl;;
            return;
        }
        current_locale_ = (locale == "PATC" ? PATCHED : ORIGINAL);

        // Layout of the locale service file:
        // 14 bytes for old data
        // 15 bytes for "Hi from Gi1dor"
        // 4 bytes for LOCALE
        // 4 bytes for orig_dict.size()
        // (32 + 4) * orig_dict.size() bytes for orig_dict data
        // 4 bytes for patch_dict.size()
        // (32 + 4) * patch_dict.size() bytes for patch_dict data
        // 4 bytes for inactive_categories dict
        // 4 * inactive_categories.size() bytes for inactive_categories data

        // Original-locale dictionary: each entry is a 32-byte subfile header
        // followed by a 4-byte category id.
        size_t orig_dict_size = size_t(dicts_data.CutData(offset, offset + 4).ToNumber<4>(0));
        offset += 4;
        for (size_t i = 0; i < orig_dict_size; i++) {
            auto file = new Subfile(this, dicts_data.CutData(offset, offset + 32));
            orig_dict_[file->file_id()] = file;
            offset += 32;
            orig_dict_[file->file_id()]->category = dicts_data.ToNumber<4>(offset);
            offset += 4;

            if (orig_dict_[file->file_id()]->category == 0)
                fprintf(stderr, "WARNING DatFile::InitLocales() - file category is undefined (0)!\n");
        }

        // Patched-locale dictionary: same entry format as above.
        size_t patch_dict_size = size_t(dicts_data.CutData(offset, offset + 4).ToNumber<4>(0));
        offset += 4;
        for (size_t i = 0; i < patch_dict_size; i++) {
            auto file = new Subfile(this, dicts_data.CutData(offset, offset + 32));
            patch_dict_[file->file_id()] = file;
            offset += 32;
            patch_dict_[file->file_id()]->category = dicts_data.ToNumber<4>(offset);
            offset += 4;
            if (patch_dict_[file->file_id()]->category == 0)
                fprintf(stderr, "WARNING DatFile::InitLocales() - file category is undefined (0)!\n");

        }

        // Trailing list: ids of categories whose patches are switched off.
        size_t active_patches_dict_size = size_t(dicts_data.CutData(offset, offset + 4).ToNumber<4>(0));
        offset += 4;
        for (size_t i = 0; i < active_patches_dict_size; i++) {
            inactive_categories.insert(dicts_data.ToNumber<4>(offset));
            offset += 4;
        }

        std::cout << "There are " << patch_dict_.size() << " files in patch locale dictionary" << std::endl;
        std::cout << "There are " << orig_dict_.size() << " files in original locale dictionary" << std::endl;
        std::cout << "Unactive patches now: ";
        for (auto i : inactive_categories)
            std:: cout << i;
        std::cout << std::endl;
    }

    /// Maps a LOCALE value to the corresponding locale dictionary:
    /// PATCHED -> patch_dict_, ORIGINAL -> orig_dict_.
    /// Throws DatException(LOCALE_EXCEPTION) for any other value.
    std::unordered_map<long long, Subfile *> *DatFile::GetLocaleDictReference(LOCALE locale) {
        if (locale == PATCHED)
            return &patch_dict_;
        if (locale == ORIGINAL)
            return &orig_dict_;
        throw DatException("Bad DatFile::GetLocaleDictReference() - unknown locale!!!", LOCALE_EXCEPTION);
    }

    /// Switches the active locale: copies the header data of every file in
    /// the target locale's dictionary into the main dictionary (skipping
    /// unchanged files and files of inactive categories), records the changed
    /// files in patched_list and commits. Errors are logged, never thrown.
    void DatFile::SetLocale(LOCALE locale) {
        try {
            if (dat_state_ < READY) {
                fprintf(stderr, "Bad DatFile::SetLocale() - DatFile is in incorrect state... Cannot set locale\n");
                return;
            }
            if (current_locale_ == locale) {
                return;
            }
            dat_state_ = UPDATED;
            auto dict = GetLocaleDictReference(locale);
            // Iterate with an explicit iterator: the original used a range-for
            // and erased the element it was visiting, invalidating the loop
            // iterator (undefined behaviour for unordered_map).
            for (auto it = dict->begin(); it != dict->end();) {
                long long file_id = it->first;
                Subfile *new_file = it->second;

                // Drop locale entries that no longer exist in the .dat file.
                if (dictionary_[file_id] == nullptr) {
                    fprintf(stderr,
                            "WARNING: In locale dictionary there is file with file_id = %lld, which is not in .dat "
                                    "file! Passing it and removing from locale dictionary\n", file_id);
                    it = dict->erase(it);
                    continue;
                }

                // Skip files whose headers already match and files belonging
                // to an inactive category.
                if (dictionary_[file_id]->MakeHeaderData().CutData(8, 16) ==
                    new_file->MakeHeaderData().CutData(8, 16) ||
                    inactive_categories.count(orig_dict_[file_id]->category) != 0) {
                    ++it;
                    continue;
                }

                dictionary_[file_id]->file_offset_ = new_file->file_offset_;
                dictionary_[file_id]->file_size_ = new_file->file_size_;
                dictionary_[file_id]->block_size_ = new_file->block_size_;
                dictionary_[file_id]->timestamp_ = new_file->timestamp_;
                dictionary_[file_id]->version_ = new_file->version_;

                patched_list.insert(file_id);
                dat_state_ = UPDATED;
                ++it;
            }

            current_locale_ = locale;
            CommitChanges();
        } catch (std::exception &e) {
            fprintf(stderr, "Bad DatFile::SetLocale() - caught exception %s. Locale wasn't set.\n", e.what());
            return;
        }
    }

    /// Stub: detection of game-side updates is not implemented yet, so this
    /// unconditionally reports "not updated". TODO: implement real check.
    bool DatFile::CheckIfUpdatedByGame() {
        return false;
    }

    /// Stub: patch repair from the given database is not implemented yet;
    /// the parameter is currently unused. TODO: implement.
    void DatFile::RepairPatches(Database *db) {

    }

    /// Returns the locale currently applied to the .dat file.
    /// Falls back to ORIGINAL (with a warning on stderr) when the file is not
    /// in a READY state, and repairs a corrupted stored value in place.
    LOCALE DatFile::current_locale() {
        if (dat_state_ < READY) {
            fprintf(stderr, "Bad DatFile::current_locale() - dat_file is in incorrect state!\n");
            return ORIGINAL;
        }
        const bool value_is_sane = (current_locale_ == PATCHED) || (current_locale_ == ORIGINAL);
        if (!value_is_sane) {
            fprintf(stderr, "Bad DatFile::current_locale() - locale has incorrect value. Setting it to original\n");
            current_locale_ = ORIGINAL;
        }
        return current_locale_;
    }

    void DatFile::CommitLocales() {
        std::cout << "Committing locales..." << std::endl;
        SubfileData data = dictionary_[2013266257]->PrepareForExport(GetFileData(dictionary_[2013266257]));
        data.options["fid"] = "2013266257";
        data.options["ext"] = ".unknown";

        BinaryData old_data = BinaryData(GetFileData(dictionary_[2013266257u]));

        // 14 bytes for old data
        // 15 bytes for "Hi from Gi1dor"
        // 4 bytes for LOCALE
        // 4 bytes for orig_dict.size()
        // (32 + 4) * orig_dict.size() bytes for orig_dict data
        // 4 bytes for patch_dict.size()
        // (32 + 4) * patch_dict.size() bytes for patch_dict data
        // 4 bytes for inactive_categories list
        // 4 * inactive_categories.size() bytes for inactive_categories data

        data.binary_data = BinaryData(14 + 15 + 4
                                      + 4 + (32 + 4) * orig_dict_.size()
                                      + 4 + (32 + 4) * patch_dict_.size()
                                      + 4 + 4 * inactive_categories.size());

        size_t current_size = 0;
        data.binary_data.Append(GetFileData(dictionary_[2013266257u]).CutData(0, 14), current_size);
        current_size += 14;

        data.binary_data.Append(BinaryData("Hi from Gi1dor!", 15), current_size);
        current_size += 15;

        data.binary_data.Append(BinaryData((current_locale_ == ORIGINAL ? "ORIG" : "PATC"), 4), current_size);
        current_size += 4;

        data.binary_data.Append(BinaryData::FromNumber<4>(orig_dict_.size()), current_size);
        current_size += 4;

        for (auto file : orig_dict_) {
            data.binary_data.Append(file.second->MakeHeaderData(), current_size);
            current_size += 32;
            data.binary_data.Append(BinaryData::FromNumber<4>(file.second->category), current_size);
            current_size += 4;
        }

        data.binary_data.Append(BinaryData::FromNumber<4>(patch_dict_.size()), current_size);
        current_size += 4;

        for (auto file : patch_dict_) {
            data.binary_data.Append(file.second->MakeHeaderData(), current_size);
            current_size += 32;
            data.binary_data.Append(BinaryData::FromNumber<4>(file.second->category), current_size);
            current_size += 4;
        }

        data.binary_data.Append(BinaryData::FromNumber<4>(inactive_categories.size()), current_size);
        current_size += 4;
        for (auto patch_id : inactive_categories) {
            data.binary_data.Append(BinaryData::FromNumber<4>(patch_id), current_size);
            current_size += 4;
        }

        PatchFile(data, true);
        std::cout << "Done!" << std::endl;
    }

    void DatFile::EnableCategory(int category) {
        std::cout << "Disabling category " << category << std::endl;
        if (inactive_categories.count(category) == 0)
            return;
        inactive_categories.erase(category);

        for (auto file : dictionary_) {
            auto file_id = file.first;
            if (patch_dict_.count(file_id) > 0 && patch_dict_[file_id]->category == category) {
                dat_state_ = UPDATED;

                file.second->file_offset_ = patch_dict_[file_id]->file_offset_;
                file.second->file_size_ = patch_dict_[file_id]->file_size_;
                file.second->block_size_ = patch_dict_[file_id]->block_size_;
                file.second->timestamp_ = patch_dict_[file_id]->timestamp_;
                file.second->version_ = patch_dict_[file_id]->version_;
                patched_list.insert(file_id);
            }
        }
    }

    void DatFile::DisableCategory(int category) {
        std::cout << "Disabling category " << category << std::endl;
        if (inactive_categories.count(category) != 0)
            return;
        inactive_categories.insert(category);

        for (auto file : dictionary_) {
            auto file_id = file.first;

            if (orig_dict_.count(file_id) && orig_dict_[file_id]->category == category) {
                dat_state_ = UPDATED;

                file.second->file_offset_ = orig_dict_[file_id]->file_offset_;
                file.second->file_size_ = orig_dict_[file_id]->file_size_;
                file.second->block_size_ = orig_dict_[file_id]->block_size_;
                file.second->timestamp_ = orig_dict_[file_id]->timestamp_;
                file.second->version_ = orig_dict_[file_id]->version_;
                patched_list.insert(file_id);
            }
        }
    }

    /// Read-only accessor for the set of currently disabled category ids.
    /// The returned reference stays valid for the lifetime of this DatFile.
    const std::unordered_set<long long>& DatFile::GetInactiveCategoriesList() {
        return inactive_categories;
    }

}
}