|
@@ -8,7 +8,6 @@
|
|
|
#include "Common/DatException.h"
|
|
|
#include "SubDirectory.h"
|
|
|
#include "Subfile.h"
|
|
|
-#include "yaml-cpp/yaml.h"
|
|
|
|
|
|
#include <locale>
|
|
|
|
|
@@ -58,16 +57,31 @@ namespace LOTRO_DAT {
|
|
|
if (dat_state_ != READY) {
|
|
|
throw DatException("Bad DatFile::ExtractFile() - invalid DatFile state!", EXPORT_EXCEPTION);
|
|
|
}
|
|
|
- BinaryData file_data = GetFileData(dictionary_[file_id], 8);
|
|
|
+ BinaryData file_data;
|
|
|
+
|
|
|
+ try {
|
|
|
+ file_data = GetFileData(dictionary_[file_id], 8);
|
|
|
+ } catch (...) {
|
|
|
+ fprintf(stderr, "Unable to extract file due to uncaught exception while getting file data. Passing...\n");
|
|
|
+ return false;
|
|
|
+ }
|
|
|
+
|
|
|
long long export_size = 0;
|
|
|
std::vector<BinaryData> binary_data;
|
|
|
std::vector<std::u16string> text_data;
|
|
|
std::vector<YAML::Node> options;
|
|
|
- dictionary_[file_id]->PrepareForExport(file_data, export_size, binary_data, text_data, options);
|
|
|
+
|
|
|
+ try {
|
|
|
+ dictionary_[file_id]->PrepareForExport(file_data, export_size, binary_data, text_data, options);
|
|
|
+ } catch (...) {
|
|
|
+ fprintf(stderr, "Unable to extract file due to uncaught exception while preparing file for export. Passing...\n");
|
|
|
+ return false;
|
|
|
+ }
|
|
|
|
|
|
for (int i = 0; i < export_size; ++i) {
|
|
|
binary_data[i].WriteToFile(path + "_" + std::to_string(i) + options[i]["extension"].as<std::string>());
|
|
|
}
|
|
|
+ return true;
|
|
|
}
|
|
|
|
|
|
/// Extracts file with file_id to database "db".
|
|
@@ -82,16 +96,40 @@ namespace LOTRO_DAT {
|
|
|
throw DatException("Bad DatFile::ExtractFile() - invalid DatFile state!", EXPORT_EXCEPTION);
|
|
|
}
|
|
|
|
|
|
- BinaryData file_data = GetFileData(dictionary_[file_id], 8);
|
|
|
+ BinaryData file_data;
|
|
|
+
|
|
|
+ try {
|
|
|
+ file_data = GetFileData(dictionary_[file_id], 8);
|
|
|
+ } catch (...) {
|
|
|
+ fprintf(stderr, "Unable to extract file due to uncaught exception while getting file data. Passing...\n");
|
|
|
+ return false;
|
|
|
+ }
|
|
|
+
|
|
|
long long export_size = 0;
|
|
|
std::vector<BinaryData> binary_data;
|
|
|
std::vector<std::u16string> text_data;
|
|
|
std::vector<YAML::Node> options;
|
|
|
- dictionary_[file_id]->PrepareForExport(file_data, export_size, binary_data, text_data, options);
|
|
|
- // TODO: Complete this function
|
|
|
+
|
|
|
+ try {
|
|
|
+ dictionary_[file_id]->PrepareForExport(file_data, export_size, binary_data, text_data, options);
|
|
|
+ } catch (...) {
|
|
|
+ fprintf(stderr, "Unable to extract file due to uncaught exception while preparing file for export. Passing...\n");
|
|
|
+ return false;
|
|
|
+ }
|
|
|
+
|
|
|
+ for (int i = 0; i < export_size; ++i) {
|
|
|
+ std::stringstream option;
|
|
|
+ option << options[i];
|
|
|
+ try {
|
|
|
+ db->PushFile(binary_data[i], text_data[i], option.str());
|
|
|
+ } catch (...) {
|
|
|
+ fprintf(stderr, "Unable to put file or it's part to database. Continuing without this part. Database may be not complete\n");
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return true;
|
|
|
}
|
|
|
|
|
|
- /// Extracts all files with specific type to "path + type + file_id + extension" files;
|
|
|
+ /// Extracts all files with specific type to "path + type + file_id + file_part + extension" files;
|
|
|
/// If path is undefined then it will be recognised as current working directory
|
|
|
/// NOTICE: The directory, mentioned in "std::string path" variable SHOULD BE ALREADY CREATED;
|
|
|
/// Otherwise DatException() will be thrown.
|
|
@@ -105,8 +143,8 @@ namespace LOTRO_DAT {
|
|
|
|
|
|
int success = 0;
|
|
|
for (auto i : dictionary_) {
|
|
|
- FILE_TYPE ext = i.second->FileType();
|
|
|
- if (ext == type) {
|
|
|
+ FILE_TYPE file_type = i.second->FileType();
|
|
|
+ if (file_type == type) {
|
|
|
success += ExtractFile(i.second->file_id(), (path + std::to_string(i.second->file_id())));
|
|
|
}
|
|
|
}
|
|
@@ -125,14 +163,98 @@ namespace LOTRO_DAT {
|
|
|
|
|
|
int success = 0;
|
|
|
for (auto i : dictionary_) {
|
|
|
- FILE_TYPE ext = i.second->FileType();
|
|
|
- if (ext == type) {
|
|
|
+ FILE_TYPE file_type = i.second->FileType();
|
|
|
+ if (file_type == type) {
|
|
|
success += ExtractFile(i.second->file_id(), db);
|
|
|
}
|
|
|
}
|
|
|
return success;
|
|
|
}
|
|
|
|
|
|
+ /// DatFile::WriteUnorderedDictionary(...);
|
|
|
+ /// Prints list of all found files with some information about them to file.
|
|
|
+ /// Gets std::string path - path to directory, where the file will be written with name "dict.txt"
|
|
|
+
|
|
|
+ void DatFile::WriteUnorderedDictionary(std::string path) const {
|
|
|
+ FILE *f;
|
|
|
+ fopen_s(&f, (path + "dict.txt").c_str(), "w");
|
|
|
+ fprintf(f, "file_id offset size size2 extension\n");
|
|
|
+ for (auto i : dictionary_) {
|
|
|
+ fprintf(f, "%lld %lld %lld %lld %s\n", i.second->file_id(), i.second->file_offset(), i.second->file_size(),
|
|
|
+ i.second->block_size(), i.second->Extension().c_str());
|
|
|
+ }
|
|
|
+ fclose(f);
|
|
|
+ }
|
|
|
+
|
|
|
+ /// DatFile::files_number();
|
|
|
+ /// Returns amount of files, found in dictionaries of DatFile. Some if them may be empty or erased.
|
|
|
+
|
|
|
+ long long DatFile::files_number() const {
|
|
|
+ return dictionary_.size();
|
|
|
+ }
|
|
|
+
|
|
|
/// DatFile::GetFileData()
/// Returns BinaryData, which contains of subfile data, made from parts of file in DatFile
///
/// Reads the raw contents of `file` out of the .dat archive. A subfile is
/// stored either as a single contiguous block, or as a first block followed
/// by a table of (size, offset) fragments at the tail of the block; both
/// layouts are handled here.
///
/// @param file   Dictionary entry describing the subfile to read.
/// @param offset Byte offset within the subfile's block where payload data
///               begins (callers in this file pass 8 — presumably the
///               per-file header size; TODO confirm against the .dat spec).
/// @throws DatException (READ_EXCEPTION) if the file_id stored in the
///         archive does not match the dictionary entry.

BinaryData DatFile::GetFileData(const Subfile *file, long long int offset) {
    // Sanity check: the file_id recorded inside the archive block must
    // match the dictionary entry we were handed.
    BinaryData mfile_id(4);
    ReadData(mfile_id, 4, file->file_offset() + 8);
    if (file->file_id() != mfile_id.ToNumber<4>(0))
        throw DatException("Bad DatFile::GetFileData() - file_id in Subfile doesn't match to file_id in DatFile.", READ_EXCEPTION);

    BinaryData data((unsigned)(file->file_size()));
    // Fast path: the whole payload fits in one block — read it and return.
    if (file->block_size() >= file->file_size() + 8) {
        ReadData(data, file->file_size(), file->file_offset() + offset);
        return data;
    }

    // Fragmented layout: the first 4 bytes of the block hold the number of
    // additional fragments the payload is split into.
    BinaryData fragments_count(4);
    ReadData(fragments_count, 4, file->file_offset());

    long long fragments_number = fragments_count.ToNumber<4>(0);

    // Bytes of payload stored in the first block itself: the block minus the
    // leading `offset` bytes and minus the 8-byte-per-entry fragment table
    // kept at the block's tail.
    long long current_block_size = file->block_size() - offset - 8 * fragments_number;

    ReadData(data, current_block_size , file->file_offset() + offset);

    // Fragment table: fragments_number records of 8 bytes each
    // (4-byte size followed by 4-byte absolute offset), located at the end
    // of the first block.
    BinaryData FragmentsDictionary(8 * unsigned(fragments_number));
    ReadData(FragmentsDictionary, 8 * unsigned(fragments_number), file->file_offset() + file->block_size() - 8 * fragments_number);


    // Append each fragment after the data already read;
    // current_block_size doubles as the write cursor into `data`.
    for (long long i = 0; i < fragments_number; i++) {
        long long fragment_size = FragmentsDictionary.ToNumber<4>(8 * i);
        long long fragment_offset = FragmentsDictionary.ToNumber<4>(8 * i + 4);
        // Clamp the read so the final fragment cannot overrun file_size().
        ReadData(data, std::min(fragment_size, file->file_size() - current_block_size), fragment_offset, current_block_size );
        current_block_size += fragment_size;
    }

    return data;
}
|
|
|
+
|
|
|
/// DatFile constants' getters.
/// Trivial read-only accessors for values parsed from the .dat header when
/// the file was opened. Their exact format semantics are not visible here.

// Header constant #1 (meaning defined by the .dat format).
long long DatFile::constant1() const {
    return constant1_;
}

// Header constant #2 (meaning defined by the .dat format).
long long DatFile::constant2() const {
    return constant2_;
}

// Total size of the .dat file as recorded in its header.
long long DatFile::file_size() const {
    return file_size_;
}

// First header version field.
long long DatFile::version1() const {
    return version1_;
}

// Second header version field.
long long DatFile::version2() const {
    return version2_;
}
|
|
|
+
|
|
|
+ /// DatFile special functions for opening and reading/writing raw data.
|
|
|
+ /// Shouldn't be used by any external classes except Subfile and Subdirectory.
|
|
|
|
|
|
void DatFile::OpenDatFile(const char *dat_name) {
|
|
|
if (dat_state_ != CLOSED)
|
|
@@ -224,7 +346,7 @@ namespace LOTRO_DAT {
|
|
|
}
|
|
|
|
|
|
_fseeki64(file_handler_, offset, SEEK_SET);
|
|
|
- fread(data.data() + data_offset, size, 1, file_handler_);
|
|
|
+ fread(data.data() + data_offset, unsigned(size), 1, file_handler_);
|
|
|
data.CheckCompression();
|
|
|
}
|
|
|
|
|
@@ -236,77 +358,7 @@ namespace LOTRO_DAT {
|
|
|
if (data_offset + size > data.size())
|
|
|
throw DatException("Bad DatFile::WriteData - trying to write more than BinaryData size", WRITE_EXCEPTION);
|
|
|
|
|
|
- fwrite(data.data() + data_offset, size, 1, file_handler_);
|
|
|
- }
|
|
|
-
|
|
|
- long long DatFile::constant1() const {
|
|
|
- return constant1_;
|
|
|
- }
|
|
|
-
|
|
|
- long long DatFile::constant2() const {
|
|
|
- return constant2_;
|
|
|
- }
|
|
|
-
|
|
|
- long long DatFile::file_size() const {
|
|
|
- return file_size_;
|
|
|
- }
|
|
|
-
|
|
|
- long long DatFile::version1() const {
|
|
|
- return version1_;
|
|
|
- }
|
|
|
-
|
|
|
- long long DatFile::version2() const {
|
|
|
- return version2_;
|
|
|
- }
|
|
|
-
|
|
|
- void DatFile::WriteUnorderedDictionary(std::string path) const {
|
|
|
- FILE *f;
|
|
|
- fopen_s(&f, (path + "dict.txt").c_str(), "w");
|
|
|
- fprintf(f, "file_id offset size size2 extension\n");
|
|
|
- for (auto i : dictionary_) {
|
|
|
- fprintf(f, "%lld %lld %lld %lld %s\n", i.second->file_id(), i.second->file_offset(), i.second->file_size(),
|
|
|
- i.second->block_size(), i.second->Extension());
|
|
|
- }
|
|
|
- fclose(f);
|
|
|
- }
|
|
|
-
|
|
|
- long long DatFile::files_number() const {
|
|
|
- return dictionary_.size();
|
|
|
- }
|
|
|
-
|
|
|
- BinaryData DatFile::GetFileData(const Subfile *file, long long int offset = 0) {
|
|
|
- BinaryData mfile_id(4);
|
|
|
- ReadData(mfile_id, 4, file->file_offset() + 8);
|
|
|
- if (file->file_id() != mfile_id.ToNumber<4>(0))
|
|
|
- throw DatException("Bad DatFile::GetFileData() - file_id in Subfile doesn't match to file_id in DatFile.", READ_EXCEPTION);
|
|
|
-
|
|
|
- BinaryData data((unsigned)(file->file_size()));
|
|
|
- if (file->block_size() >= file->file_size() + 8) {
|
|
|
- ReadData(data, file->file_size(), file->file_offset() + offset);
|
|
|
- return data;
|
|
|
- }
|
|
|
-
|
|
|
- BinaryData fragments_count(4);
|
|
|
- ReadData(fragments_count, 4, file->file_offset());
|
|
|
-
|
|
|
- long long fragments_number = fragments_count.ToNumber<4>(0);
|
|
|
-
|
|
|
- long long current_block_size = file->block_size() - offset - 8 * fragments_number;
|
|
|
-
|
|
|
- ReadData(data, current_block_size , file->file_offset() + offset);
|
|
|
-
|
|
|
- BinaryData FragmentsDictionary(8 * unsigned(fragments_number));
|
|
|
- ReadData(FragmentsDictionary, 8 * unsigned(fragments_number), file->file_offset() + file->block_size() - 8 * fragments_number);
|
|
|
-
|
|
|
-
|
|
|
- for (long long i = 0; i < fragments_number; i++) {
|
|
|
- long long fragment_size = FragmentsDictionary.ToNumber<4>(8 * i);
|
|
|
- long long fragment_offset = FragmentsDictionary.ToNumber<4>(8 * i + 4);
|
|
|
- ReadData(data, std::min(fragment_size, file->file_size() - current_block_size), fragment_offset, current_block_size );
|
|
|
- current_block_size += fragment_size;
|
|
|
- }
|
|
|
-
|
|
|
- return data;
|
|
|
+ fwrite(data.data() + data_offset, unsigned(size), 1, file_handler_);
|
|
|
}
|
|
|
}
|
|
|
}
|