mirror of https://gitee.com/openharmony/filemanagement_app_file_service
synced 2024-11-24 00:20:18 +00:00

Change the file analysis return type and add report fields
Signed-off-by: zhuruigan <zhuruigan@huawei.com>
Change-Id: If595a481d7bc32fbc8dbd2853232bd79d696a494
This commit is contained in:
parent 678212522a
commit 416164e31b
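The diff below switches the file-analysis containers from std::map to std::unordered_map and threads a new userTar report field through the backup extension. As a rough, standalone illustration of the lookup pattern the updated CompareFiles logic relies on (check presence in the cloud report first, then compare hashes), here is a minimal sketch with made-up data; it is not code from this repository.

    // Minimal sketch only: mimics the presence-then-hash comparison that the
    // updated CompareFiles logic performs on unordered_map report entries.
    // The struct and sample values below are hypothetical stand-ins.
    #include <iostream>
    #include <string>
    #include <unordered_map>

    struct FakeReportInfo {
        std::string hash;
        bool isDir = false;
    };

    int main()
    {
        std::unordered_map<std::string, FakeReportInfo> cloudFiles = {
            {"files/a.txt", {"aaa111", false}},
        };
        std::unordered_map<std::string, FakeReportInfo> storageFiles = {
            {"files/a.txt", {"bbb222", false}},  // hash differs -> needs backup
            {"files/b.txt", {"ccc333", false}},  // absent from cloud -> needs backup
        };

        for (const auto &[path, info] : storageFiles) {
            auto it = cloudFiles.find(path);
            bool isExist = it != cloudFiles.end();
            if (!isExist || (!info.isDir && it->second.hash != info.hash)) {
                std::cout << path << " needs to be backed up" << std::endl;
            }
        }
        return 0;
    }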
@@ -34,7 +34,7 @@ public:
static UntarFile &GetInstance();
int UnPacket(const std::string &tarFile, const std::string &rootPath);
int IncrementalUnPacket(const std::string &tarFile, const std::string &rootPath,
- const std::map<std::string, struct ReportFileInfo> &includes);
+ const std::unordered_map<std::string, struct ReportFileInfo> &includes);

private:
UntarFile() = default;
@@ -147,7 +147,7 @@ private:
off_t tarFileBlockCnt_ {0};
off_t pos_ {0};
size_t readCnt_ {0};
- std::map<std::string, struct ReportFileInfo> includes_;
+ std::unordered_map<std::string, struct ReportFileInfo> includes_;
};
} // namespace OHOS::FileManagement::Backup
@@ -23,8 +23,10 @@
#include <regex>
#include <string>
+ #include <tuple>
+ #include <unordered_map>
#include <vector>

#include <directory_ex.h>
#include <sys/stat.h>
#include <unistd.h>

@@ -47,8 +49,8 @@
#include "b_resources/b_constants.h"
#include "b_tarball/b_tarball_factory.h"
#include "filemgmt_libhilog.h"
- #include "service_proxy.h"
#include "hitrace_meter.h"
+ #include "service_proxy.h"
#include "tar_file.h"
#include "untar_file.h"
@@ -84,7 +86,7 @@ void BackupExtExtension::VerifyCaller()
}
}

- static bool CheckAndCreateDirectory(const string& filePath)
+ static bool CheckAndCreateDirectory(const string &filePath)
{
size_t pos = filePath.rfind('/');
if (pos == string::npos) {
@@ -184,7 +186,7 @@ ErrCode BackupExtExtension::GetIncrementalFileHandle(const string &fileName)
if (access(tarName.c_str(), F_OK) == 0) {
throw BError(BError::Codes::EXT_INVAL_ARG, string("The file already exists"));
}
- UniqueFd fd (open(tarName.data(), O_RDWR | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR));
+ UniqueFd fd(open(tarName.data(), O_RDWR | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR));
if (fd < 0) {
HILOGE("Failed to open tar file = %{private}s, err = %{public}d", tarName.c_str(), errno);
throw BError(BError::Codes::EXT_INVAL_ARG, string("open tar file failed"));
@@ -195,7 +197,7 @@ ErrCode BackupExtExtension::GetIncrementalFileHandle(const string &fileName)
if (access(reportName.c_str(), F_OK) == 0) {
throw BError(BError::Codes::EXT_INVAL_ARG, string("The report file already exists"));
}
- UniqueFd reportFd (open(reportName.data(), O_RDWR | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR));
+ UniqueFd reportFd(open(reportName.data(), O_RDWR | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR));
if (reportFd < 0) {
HILOGE("Failed to open report file = %{private}s, err = %{public}d", reportName.c_str(), errno);
throw BError(BError::Codes::EXT_INVAL_ARG, string("open report file failed"));
@@ -431,8 +433,7 @@ static bool IsUserTar(const string &tarFile, const string &indexFile)
BJsonCachedEntity<BJsonEntityExtManage> cachedEntity(UniqueFd(open(filePath.data(), O_RDONLY)));
auto cache = cachedEntity.Structuralize();
auto info = cache.GetExtManageInfo();
- auto iter = find_if(info.begin(), info.end(),
- [&tarFile](const auto& item) { return item.hashName == tarFile; });
+ auto iter = find_if(info.begin(), info.end(), [&tarFile](const auto &item) { return item.hashName == tarFile; });
if (iter != info.end()) {
HILOGI("tarFile:%{public}s isUserTar:%{public}d", tarFile.data(), iter->isUserTar);
return iter->isUserTar;
@@ -543,7 +544,7 @@ int BackupExtExtension::DoRestore(const string &fileName)
return ERR_OK;
}

- static map<string, struct ReportFileInfo> GetTarIncludes(const string &tarName)
+ static unordered_map<string, struct ReportFileInfo> GetTarIncludes(const string &tarName)
{
// Get the contents of the report file
string reportName = GetReportFileName(tarName);
@@ -617,7 +618,7 @@ void BackupExtExtension::AsyncTaskBackup(const string config)
static void RestoreBigFilesForSpecialCloneCloud(ExtManageInfo item)
{
HITRACE_METER_NAME(HITRACE_TAG_FILEMANAGEMENT, __PRETTY_FUNCTION__);
- struct stat& sta = item.sta;
+ struct stat &sta = item.sta;
string fileName = item.hashName;
if (chmod(fileName.c_str(), sta.st_mode) != 0) {
HILOGE("Failed to chmod filePath, err = %{public}d", errno);
@@ -681,8 +682,7 @@ static ErrCode RestoreFilesForSpecialCloneCloud()
return ERR_OK;
}

- static bool RestoreBigFilePrecheck(string& fileName, const string& path,
- const string& hashName, const string& filePath)
+ static bool RestoreBigFilePrecheck(string &fileName, const string &path, const string &hashName, const string &filePath)
{
if (filePath.empty()) {
HILOGE("file path is empty. %{public}s", filePath.c_str());
@@ -703,8 +703,9 @@ static bool RestoreBigFilePrecheck(string& fileName, const string& path,
return true;
}

- static void RestoreBigFileAfter(const string& fileName, const string& filePath, const struct stat& sta,
- const set<string>& lks)
+ static void RestoreBigFileAfter(const string &fileName,
+ const string &filePath,
+ const struct stat &sta)
{
if (chmod(filePath.c_str(), sta.st_mode) != 0) {
HILOGE("Failed to chmod filePath, err = %{public}d", errno);
@@ -729,12 +730,6 @@ static void RestoreBigFileAfter(const string& fileName, const string& filePath,
}
}

- for (const auto &lksPath : lks) {
- if (link(filePath.data(), lksPath.data())) {
- HILOGE("failed to create hard link file, errno : %{public}d", errno);
- }
- }
-
struct timespec tv[2] = {sta.st_atim, sta.st_mtim};
UniqueFd fd(open(filePath.data(), O_RDONLY));
if (futimens(fd.Get(), tv) != 0) {
@@ -768,7 +763,7 @@ static void RestoreBigFiles(bool appendTargetPath)
continue;
}

- RestoreBigFileAfter(fileName, filePath, item.sta, cache.GetHardLinkInfo(item.hashName));
+ RestoreBigFileAfter(fileName, filePath, item.sta);
}
}
@@ -849,8 +844,8 @@ void BackupExtExtension::AsyncTaskRestore()
}
// Restore user tar packages and big files
// Whether the destination needs the path (temporary directory) appended: FullBackupOnly is true and this is not a special scenario
- bool appendTargetPath = ptr->extension_->UseFullBackupOnly() &&
- !ptr->extension_->SpeicalVersionForCloneAndCloud();
+ bool appendTargetPath =
+ ptr->extension_->UseFullBackupOnly() && !ptr->extension_->SpeicalVersionForCloneAndCloud();
RestoreBigFiles(appendTargetPath);

// delete 1.tar/manage.json
@@ -904,8 +899,8 @@ void BackupExtExtension::AsyncTaskIncrementalRestore()
}
// Restore user tar packages and big files
// Whether the destination needs the path (temporary directory) appended: FullBackupOnly is true and this is not a special scenario
- bool appendTargetPath = ptr->extension_->UseFullBackupOnly() &&
- !ptr->extension_->SpeicalVersionForCloneAndCloud();
+ bool appendTargetPath =
+ ptr->extension_->UseFullBackupOnly() && !ptr->extension_->SpeicalVersionForCloneAndCloud();
RestoreBigFiles(appendTargetPath);

// delete 1.tar/manage.json
@@ -1137,6 +1132,11 @@ ErrCode BackupExtExtension::HandleRestore()
return 0;
}

+ static bool CheckTar(const string &fileName)
+ {
+ return ExtractFileExt(fileName) == "tar";
+ }
+
using CompareFilesResult = tuple<map<string, struct ReportFileInfo>,
map<string, struct ReportFileInfo>,
map<string, struct stat>,
@@ -1145,9 +1145,9 @@ using CompareFilesResult = tuple<map<string, struct ReportFileInfo>,
static CompareFilesResult CompareFiles(const UniqueFd &cloudFd, const UniqueFd &storageFd)
{
BReportEntity cloudRp(UniqueFd(cloudFd.Get()));
- map<string, struct ReportFileInfo> cloudFiles = cloudRp.GetReportInfos();
+ unordered_map<string, struct ReportFileInfo> cloudFiles = cloudRp.GetReportInfos();
BReportEntity storageRp(UniqueFd(storageFd.Get()));
- map<string, struct ReportFileInfo> storageFiles = storageRp.GetReportInfos();
+ unordered_map<string, struct ReportFileInfo> storageFiles = storageRp.GetReportInfos();
map<string, struct ReportFileInfo> allFiles;
map<string, struct ReportFileInfo> smallFiles;
map<string, struct stat> bigFiles;
@@ -1155,7 +1155,8 @@ static CompareFilesResult CompareFiles(const UniqueFd &cloudFd, const UniqueFd &
for (auto &item : storageFiles) {
// Compare the files
string path = item.first;
- if (item.second.isIncremental == true && item.second.isDir == true) {
+ bool isExist = cloudFiles.find(path) != cloudFiles.end() ? true : false;
+ if (item.second.isIncremental == true && item.second.isDir == true && !isExist) {
smallFiles.try_emplace(path, item.second);
}
if (item.second.isIncremental == true && item.second.isDir == false) {
@@ -1165,13 +1166,14 @@ static CompareFilesResult CompareFiles(const UniqueFd &cloudFd, const UniqueFd &
}
item.second.hash = fileHash;
item.second.isIncremental = true;
} else {
item.second.hash = (cloudFiles.find(path) == cloudFiles.end()) ? cloudFiles[path].hash : "";
}

+ if (item.second.isDir == false && CheckTar(path)) {
+ item.second.userTar = 1;
+ }
+
allFiles.try_emplace(path, item.second);
- if (cloudFiles.find(path) == cloudFiles.end() ||
- (item.second.isDir == false && item.second.isIncremental == true &&
+ if (!isExist || (item.second.isDir == false && item.second.isIncremental == true &&
cloudFiles.find(path)->second.hash != item.second.hash)) {
// Not present in the cloud report, or the hash differs
if (item.second.size < BConstants::BIG_FILE_BOUNDARY) {
@@ -1216,12 +1218,12 @@ static void WriteFile(const string &filename, const map<string, struct ReportFil
fstream f;
f.open(filename.data(), ios::out);
// Write the two header lines first
- f << "version=1.0&attrNum=6" << endl;
- f << "path;mode;dir;size;mtime;hash" << endl;
+ f << "version=1.0&attrNum=7" << endl;
+ f << "path;mode;dir;size;mtime;hash;usertar" << endl;
for (auto item : srcFiles) {
struct ReportFileInfo info = item.second;
string str = item.first + ";" + info.mode + ";" + to_string(info.isDir) + ";" + to_string(info.size);
- str += ";" + to_string(info.mtime) + ";" + info.hash;
+ str += ";" + to_string(info.mtime) + ";" + info.hash + ";" + to_string(info.userTar);
f << str << endl;
}
f.close();
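With the WriteFile change above, a serialized report entry now carries a seventh usertar column. The standalone sketch below prints one such entry with made-up values; it is only an illustration of the new line format, not the repository's WriteFile implementation.

    // Standalone illustration: emits a report entry in the new 7-field format
    // ("path;mode;dir;size;mtime;hash;usertar"). All values are hypothetical.
    #include <iostream>
    #include <string>

    int main()
    {
        std::string path = "data/files/a.txt";                 // hypothetical path
        std::string mode = "0100644";                           // hypothetical mode
        int isDir = 0;
        long long size = 4096;
        long long mtime = 1700000000;
        std::string hash = "1f3870be274f6c49b3e31a0c6728957f";  // hypothetical hash
        int userTar = 0;                                        // new trailing column

        std::cout << "version=1.0&attrNum=7" << std::endl;
        std::cout << "path;mode;dir;size;mtime;hash;usertar" << std::endl;
        std::cout << path << ";" << mode << ";" << isDir << ";" << size << ";"
                  << mtime << ";" << hash << ";" << userTar << std::endl;
        return 0;
    }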
@ -114,6 +114,8 @@ bool TarFile::TraversalFile(string &filePath)
|
||||
}
|
||||
|
||||
if (currentTarFileSize_ >= DEFAULT_SLICE_SIZE) {
|
||||
HILOGI("Current tar file size is over %{public}d, start to slice",
|
||||
static_cast<int32_t>(DEFAULT_SLICE_SIZE / MB_TO_BYTE));
|
||||
fileCount_ = 0;
|
||||
FillSplitTailBlocks();
|
||||
CreateSplitTarFile();
|
||||
|
@@ -95,7 +95,7 @@ int UntarFile::UnPacket(const string &tarFile, const string &rootPath)
}

int UntarFile::IncrementalUnPacket(const string &tarFile, const string &rootPath,
- const map<string, struct ReportFileInfo> &includes)
+ const unordered_map<string, struct ReportFileInfo> &includes)
{
includes_ = includes;
tarFilePtr_ = fopen(tarFile.c_str(), "rb");
@@ -279,7 +279,6 @@ HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0300, testing::ext::
* 0: Simulate test coverage of the index file's (unbounded) content by writing 3 valid records into the index file
* 0: Simulate test coverage of an index file containing (zero, some, unbounded) hard links by writing 0, 1 and 2
* valid hard-link records into records one, two and three of the index file
- * 0: Add hard links to the corresponding records in the index file by calling the SetHardLinkInfo interface
* 1: Call the SetExtManage interface to write data into the index file
* 2: Call the GetExtManage interface to read the file names from the index file
* 3: Call the GetExtManageInfo interface to read detailed file data (file names and the corresponding stat data) from the index file
@@ -319,15 +318,6 @@ HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0400, testing::ext::
info.emplace(testFile3HexName, make_tuple(pathTestFile3, GetFileStat(pathTestFile3), true));
cache.SetExtManage(info);

- // Append 0, 1 and 2 hard-link records respectively to the three records in the index file
- set<string> hardLinks1, hardLinks2, hardLinks3;
- cache.SetHardLinkInfo(testFile1HexName, hardLinks1);
- hardLinks2.emplace(root + "testFile2hardlink1");
- cache.SetHardLinkInfo(testFile2HexName, hardLinks2);
- hardLinks3.emplace(root + "testFile3hardlink1");
- hardLinks3.emplace(root + "testFile3hardlink2");
- cache.SetHardLinkInfo(testFile3HexName, hardLinks3);
-
// Preset the result set used to verify the results after reading the index file
set<string> resultFileName {testFile1HexName, testFile2HexName, testFile3HexName};
@@ -338,15 +328,6 @@ HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0400, testing::ext::
auto fileInfo = cache.GetExtManageInfo();
EXPECT_EQ(fileInfo.size(), info.size());
EXPECT_TRUE(IsEqual(fileInfo, info));
- // Pass the invalid file name "00000000" to check that the hard-link query interface returns correctly
- auto testFile0HardLinks = cache.GetHardLinkInfo("00000000");
- EXPECT_TRUE(testFile0HardLinks.empty());
- auto testFile1HardLinks = cache.GetHardLinkInfo(testFile1HexName);
- EXPECT_TRUE(testFile1HardLinks.empty());
- auto testFile2HardLinks = cache.GetHardLinkInfo(testFile2HexName);
- EXPECT_EQ(testFile2HardLinks, hardLinks2);
- auto testFile3HardLinks = cache.GetHardLinkInfo(testFile3HexName);
- EXPECT_EQ(testFile3HardLinks, hardLinks3);
} catch (...) {
EXPECT_TRUE(false);
GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-an exception occurred.";
@@ -394,24 +375,14 @@ HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0500, testing::ext::
auto cache = cachedEntity.Structuralize();

// Generate three useful records and write them into the index file
- // Append (0/1/2) hard-link file records to each record by reusing the stat of the original file
map<string, tuple<string, struct stat, bool>> info;
struct stat sta = {};
info.emplace(testFile1HexName, make_tuple(pathTestFile1, GetFileStat(pathTestFile1), true));
info.emplace(testFile2HexName, make_tuple(pathTestFile2, sta = GetFileStat(pathTestFile2), true));
- info.emplace("testFile2hardlink1", make_tuple(root + "testFile2hardlink1", sta, true));
info.emplace(testFile3HexName, make_tuple(pathTestFile3, sta = GetFileStat(pathTestFile3), true));
- info.emplace("testFile3hardlink1", make_tuple(root + "testFile3hardlink1", sta, true));
- info.emplace("testFile3hardlink2", make_tuple(root + "testFile3hardlink2", sta, true));
cache.SetExtManage(info);

// Preset the result set used to verify the results after reading the index file
- // Remove the hard-link entries from info and keep only the original files as the reference for later result checks
- info.erase("testFile2hardlink1");
- info.erase("testFile3hardlink1");
- info.erase("testFile3hardlink2");
- set<string> hardLinks2 {root + "testFile2hardlink1"};
- set<string> hardLinks3 {root + "testFile3hardlink1", root + "testFile3hardlink2"};
set<string> resultFileName {testFile1HexName, testFile2HexName, testFile3HexName};

// Read the index file content and verify the results
@@ -421,12 +392,6 @@ HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0500, testing::ext::
auto fileInfo = cache.GetExtManageInfo();
EXPECT_EQ(fileInfo.size(), info.size());
EXPECT_TRUE(IsEqual(fileInfo, info));
- auto testFile1HardLinks = cache.GetHardLinkInfo(testFile1HexName);
- EXPECT_TRUE(testFile1HardLinks.empty());
- auto testFile2HardLinks = cache.GetHardLinkInfo(testFile2HexName);
- EXPECT_EQ(testFile2HardLinks, hardLinks2);
- auto testFile3HardLinks = cache.GetHardLinkInfo(testFile3HexName);
- EXPECT_EQ(testFile3HardLinks, hardLinks3);
} catch (...) {
EXPECT_TRUE(false);
GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-an exception occurred.";
@@ -434,38 +399,6 @@ HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0500, testing::ext::
GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-end b_json_entity_ext_manage_0500";
}

- /**
- * @tc.number: SUB_backup_b_json_entity_ext_manage_0600
- * @tc.name: b_json_entity_ext_manage_0600
- * @tc.desc: Test whether FindLinks inside the SetExtManage interface still allows the related information to be retrieved via GetExtManage when the device number or inode count is 0
- * @tc.size: MEDIUM
- * @tc.type: FUNC
- * @tc.level Level 0
- * @tc.require: I6F3GV
- */
- HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0600, testing::ext::TestSize.Level0)
- {
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-begin b_json_entity_ext_manage_0600";
- try {
- map<string, tuple<string, struct stat, bool>> mp = {{"key", {"first", {}, true}}};
- Json::Value jv;
- BJsonEntityExtManage extMg(jv);
-
- extMg.SetExtManage(mp);
- set<string> ss = extMg.GetExtManage();
- EXPECT_EQ(ss.size(), 1);
-
- std::get<INDEX_SECOND>(mp.at("key")).st_dev = 1;
- extMg.SetExtManage(mp);
- ss = extMg.GetExtManage();
- EXPECT_EQ(ss.size(), 1);
- } catch (...) {
- EXPECT_TRUE(false);
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-an exception occurred.";
- }
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-end b_json_entity_ext_manage_0600";
- }
-
/**
* @tc.number: SUB_backup_b_json_entity_ext_manage_0700
* @tc.name: b_json_entity_ext_manage_0700
@@ -590,187 +523,4 @@ HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0803, testing::ext::
}
GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-end b_json_entity_ext_manage_0803";
}

- /**
- * @tc.number: SUB_backup_bb_json_entity_ext_manage_0900
- * @tc.name: b_json_entity_ext_manage_0900
- * @tc.desc: Test whether the SetHardLinkInfo and GetHardLinkInfo interfaces return false and an empty set when the relevant conditions are not met
- * @tc.size: MEDIUM
- * @tc.type: FUNC
- * @tc.level Level 0
- * @tc.require: I6F3GV
- */
- HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0900, testing::ext::TestSize.Level0)
- {
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-begin b_json_entity_ext_manage_0900";
- try {
- string_view sv = R"({"key":1})";
- BJsonCachedEntity<BJsonEntityExtManage> cachedEntity(sv);
- auto cache = cachedEntity.Structuralize();
- EXPECT_FALSE(cache.SetHardLinkInfo("", {}));
-
- Json::Value jv;
- BJsonEntityExtManage extMg(jv);
- EXPECT_FALSE(extMg.SetHardLinkInfo("1", {}));
-
- EXPECT_FALSE(cache.SetHardLinkInfo("1", {}));
-
- EXPECT_EQ(cache.GetHardLinkInfo(""), set<string>());
-
- EXPECT_EQ(extMg.GetHardLinkInfo("1"), set<string>());
-
- EXPECT_EQ(cache.GetHardLinkInfo("1"), set<string>());
- } catch (...) {
- EXPECT_TRUE(false);
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-an exception occurred.";
- }
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-end b_json_entity_ext_manage_0900";
- }
-
- /**
- * @tc.number: SUB_backup_b_json_entity_ext_manage_0901
- * @tc.name: b_json_entity_ext_manage_0901
- * @tc.desc: Test whether the SetHardLinkInfo and GetHardLinkInfo interfaces return false and an empty set when the relevant conditions are not met
- * @tc.size: MEDIUM
- * @tc.type: FUNC
- * @tc.level Level 0
- * @tc.require: I6F3GV
- */
- HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0901, testing::ext::TestSize.Level0)
- {
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-begin b_json_entity_ext_manage_0901";
- try {
- string_view sv = R"({"key":1})";
- BJsonCachedEntity<BJsonEntityExtManage> cachedEntity(sv);
- auto cache = cachedEntity.Structuralize();
- EXPECT_FALSE(cache.SetHardLinkInfo("#4$5%", {}));
-
- Json::Value jv;
- BJsonEntityExtManage extMg(jv);
- EXPECT_FALSE(extMg.SetHardLinkInfo("1", {}));
-
- EXPECT_FALSE(cache.SetHardLinkInfo("1", {}));
-
- EXPECT_EQ(cache.GetHardLinkInfo("#4$5%"), set<string>());
-
- EXPECT_EQ(extMg.GetHardLinkInfo("1"), set<string>());
-
- EXPECT_EQ(cache.GetHardLinkInfo("1"), set<string>());
- } catch (...) {
- EXPECT_TRUE(false);
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-an exception occurred.";
- }
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-end b_json_entity_ext_manage_0901";
- }
-
- /**
- * @tc.number: SUB_backup_b_json_entity_ext_manage_0902
- * @tc.name: b_json_entity_ext_manage_0902
- * @tc.desc: Test whether the SetHardLinkInfo and GetHardLinkInfo interfaces return false and an empty set when the relevant conditions are not met
- * @tc.size: MEDIUM
- * @tc.type: FUNC
- * @tc.level Level 0
- * @tc.require: I6F3GV
- */
- HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0902, testing::ext::TestSize.Level0)
- {
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-begin b_json_entity_ext_manage_0902";
- try {
- string_view sv = R"({"key":1})";
- BJsonCachedEntity<BJsonEntityExtManage> cachedEntity(sv);
- auto cache = cachedEntity.Structuralize();
- EXPECT_FALSE(cache.SetHardLinkInfo("测试代码", {}));
-
- Json::Value jv;
- BJsonEntityExtManage extMg(jv);
- EXPECT_FALSE(extMg.SetHardLinkInfo("1", {}));
-
- EXPECT_FALSE(cache.SetHardLinkInfo("1", {}));
-
- EXPECT_EQ(cache.GetHardLinkInfo("测试代码"), set<string>());
-
- EXPECT_EQ(extMg.GetHardLinkInfo("1"), set<string>());
-
- EXPECT_EQ(cache.GetHardLinkInfo("1"), set<string>());
- } catch (...) {
- EXPECT_TRUE(false);
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-an exception occurred.";
- }
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-end b_json_entity_ext_manage_0902";
- }
-
- /**
- * @tc.number: SUB_backup_b_json_entity_ext_manage_0903
- * @tc.name: b_json_entity_ext_manage_0903
- * @tc.desc: Test whether the SetHardLinkInfo and GetHardLinkInfo interfaces return false and an empty set when the relevant conditions are not met
- * @tc.size: MEDIUM
- * @tc.type: FUNC
- * @tc.level Level 0
- * @tc.require: I6F3GV
- */
- HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0903, testing::ext::TestSize.Level0)
- {
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-begin b_json_entity_ext_manage_0903";
- try {
- string_view sv = R"({"key":1})";
- BJsonCachedEntity<BJsonEntityExtManage> cachedEntity(sv);
- auto cache = cachedEntity.Structuralize();
- EXPECT_FALSE(cache.SetHardLinkInfo("ABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDA\
- BCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCD", {}));
-
- Json::Value jv;
- BJsonEntityExtManage extMg(jv);
- EXPECT_FALSE(extMg.SetHardLinkInfo("1", {}));
-
- EXPECT_FALSE(cache.SetHardLinkInfo("1", {}));
-
- EXPECT_EQ(cache.GetHardLinkInfo("ABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDA\
- BCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCDABCD"), set<string>());
-
- EXPECT_EQ(extMg.GetHardLinkInfo("1"), set<string>());
-
- EXPECT_EQ(cache.GetHardLinkInfo("1"), set<string>());
- } catch (...) {
- EXPECT_TRUE(false);
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-an exception occurred.";
- }
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-end b_json_entity_ext_manage_0903";
- }
-
- /**
- * @tc.number: SUB_backup_b_json_entity_ext_manage_0904
- * @tc.name: b_json_entity_ext_manage_0904
- * @tc.desc: Test whether the SetHardLinkInfo and GetHardLinkInfo interfaces return false and an empty set when the relevant conditions are not met
- * @tc.size: MEDIUM
- * @tc.type: FUNC
- * @tc.level Level 0
- * @tc.require: I6F3GV
- */
- HWTEST_F(BJsonEntityExtManageTest, b_json_entity_ext_manage_0904, testing::ext::TestSize.Level0)
- {
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-begin b_json_entity_ext_manage_0904";
- try {
- string_view sv = R"({"key":1})";
- BJsonCachedEntity<BJsonEntityExtManage> cachedEntity(sv);
- auto cache = cachedEntity.Structuralize();
- EXPECT_FALSE(cache.SetHardLinkInfo("", {""""""""""""""""""""""""""""""""""""""""""""""""""}));
-
- Json::Value jv;
- BJsonEntityExtManage extMg(jv);
- EXPECT_FALSE(extMg.SetHardLinkInfo("测试代码", {}));
-
- EXPECT_FALSE(cache.SetHardLinkInfo("#4$5%", {}));
-
- EXPECT_EQ(cache.GetHardLinkInfo(""), set<string>());
-
- EXPECT_EQ(extMg.GetHardLinkInfo("测试代码"), set<string>());
-
- EXPECT_EQ(cache.GetHardLinkInfo("#4$5%"), set<string>());
- } catch (...) {
- EXPECT_TRUE(false);
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-an exception occurred.";
- }
- GTEST_LOG_(INFO) << "BJsonEntityExtManageTest-end b_json_entity_ext_manage_0904";
- }
-
} // namespace OHOS::FileManagement::Backup
@@ -78,7 +78,7 @@ HWTEST_F(BReportEntityTest, b_report_entity_GetReportInfos_0100, testing::ext::T
const auto [filePath, res] = GetTestFile(tm, content);

BReportEntity cloudRp(UniqueFd(open(filePath.data(), O_RDONLY, 0)));
- map<string, struct ReportFileInfo> cloudFiles = cloudRp.GetReportInfos();
+ unordered_map<string, struct ReportFileInfo> cloudFiles = cloudRp.GetReportInfos();

bool flag = false;
@@ -62,24 +62,6 @@ public:
*/
std::vector<ExtManageInfo> GetExtManageInfo() const;

- /**
- * @brief Set the hard link Information
- *
- * @param origin name of the original file
- * @param hardLinks names of the hard-link files
- * @return true on success
- * @return false on failure
- */
- bool SetHardLinkInfo(const std::string origin, const std::set<std::string> hardLinks);
-
- /**
- * @brief Get the hard link Information
- *
- * @param origin name of the original file
- * @return const set of hard links
- */
- const std::set<std::string> GetHardLinkInfo(const string origin);
-
public:
/**
* @brief Constructor with T(Json::Value&, std::any) capability
@@ -32,6 +32,7 @@ struct ReportFileInfo {
off_t mtime {0};
std::string hash;
bool isIncremental {false};
+ off_t userTar {0};
};

class BReportEntity {
@@ -41,7 +42,7 @@ public:
*
* @return std::map<string, ReportFileInfo>
*/
- std::map<std::string, struct ReportFileInfo> GetReportInfos();
+ std::unordered_map<std::string, struct ReportFileInfo> GetReportInfos();

public:
/**
@@ -32,15 +32,8 @@ namespace {
const int32_t DEFAULT_MODE = 0100660; // 0660
}

- static bool CheckBigFile(const string &tarFile)
+ static bool CheckBigFile(struct stat sta)
{
- HILOGI("CheckBigFile tarFile:%{public}s", tarFile.data());
- struct stat sta;
- int ret = stat(tarFile.c_str(), &sta);
- if (ret != 0) {
- HILOGE("stat file failed, file:%{public}s", tarFile.c_str());
- return false;
- }
if (sta.st_size > BConstants::BIG_FILE_BOUNDARY) {
return true;
}
@@ -54,8 +47,8 @@ static bool CheckBigFile(const string &tarFile)
*/
static bool CheckOwnPackTar(const string &fileName)
{
- if (access(fileName.c_str(), F_OK) != 0) {
- HILOGE("file does not exists");
+ if (ExtractFileExt(fileName) != "tar") {
return false;
}
// If the package is not under the default path, it is not a tar file packed by this module
@@ -66,6 +58,8 @@ static bool CheckOwnPackTar(const string &fileName)
} catch (const BError &e) {
HILOGE("file is not backup path");
return false;
+ } catch (...) {
+ return false;
}

string::size_type pathPos = absPath.find(defaultBackupPath);
|
||||
}
|
||||
|
||||
string firstName = string(tarFile).substr(0, pos);
|
||||
// 判断文件名是否包含part (兼容增量)
|
||||
string::size_type partPos = firstName.find("part");
|
||||
if (partPos == string::npos) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return (firstName == "part") && (ExtractFileExt(tarFile) == "tar");
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool CheckUserTar(const string &fileName)
|
||||
static bool CheckUserTar(const string &fileName, struct stat sta)
|
||||
{
|
||||
if (access(fileName.c_str(), F_OK) != 0) {
|
||||
HILOGI("file does not exists");
|
||||
return false;
|
||||
}
|
||||
return (ExtractFileExt(fileName) == "tar") && CheckBigFile(fileName);
|
||||
return (ExtractFileExt(fileName) == "tar") && CheckBigFile(sta);
|
||||
}
|
||||
|
||||
Json::Value Stat2JsonValue(struct stat sta)
|
||||
@ -157,49 +156,14 @@ void BJsonEntityExtManage::SetExtManage(const map<string, tuple<string, struct s
|
||||
{
|
||||
obj_.clear();
|
||||
|
||||
vector<bool> vec(info.size(), false);
|
||||
|
||||
auto FindLinks = [&vec](map<string, tuple<string, struct stat, bool>>::const_iterator it,
|
||||
unsigned long index) -> set<string> {
|
||||
if (std::get<1>(it->second).st_dev == 0 || std::get<1>(it->second).st_ino == 0) {
|
||||
return {};
|
||||
}
|
||||
|
||||
set<string> lks;
|
||||
auto item = it;
|
||||
item++;
|
||||
|
||||
for (auto i = index + 1; i < vec.size(); ++i, ++item) {
|
||||
if (std::get<1>(it->second).st_dev == std::get<1>(item->second).st_dev &&
|
||||
std::get<1>(it->second).st_ino == std::get<1>(item->second).st_ino) {
|
||||
vec[i] = true;
|
||||
lks.insert(std::get<0>(item->second));
|
||||
HILOGI("lks insert %{public}s", std::get<0>(item->second).c_str());
|
||||
}
|
||||
HILOGI("lks doesn't insert %{public}s", std::get<0>(item->second).c_str());
|
||||
}
|
||||
return lks;
|
||||
};
|
||||
|
||||
unsigned long index = 0;
|
||||
for (auto item = info.begin(); item != info.end(); ++item, ++index) {
|
||||
if (vec[index]) {
|
||||
HILOGI("skipped file is %{public}s", item->first.c_str());
|
||||
continue;
|
||||
}
|
||||
HILOGI("file name is %{public}s", item->first.c_str());
|
||||
|
||||
for (auto item = info.begin(); item != info.end(); ++item) {
|
||||
Json::Value value;
|
||||
value["fileName"] = item->first;
|
||||
auto [path, sta, isBeforeTar] = item->second;
|
||||
value["information"]["path"] = path;
|
||||
value["information"]["stat"] = Stat2JsonValue(sta);
|
||||
value["isUserTar"] = isBeforeTar && CheckUserTar(path);
|
||||
value["isBigFile"] = !CheckOwnPackTar(path) && CheckBigFile(path);
|
||||
set<string> lks = FindLinks(item, index);
|
||||
for (const auto &lk : lks) {
|
||||
value["hardlinks"].append(lk);
|
||||
}
|
||||
value["isUserTar"] = isBeforeTar && CheckUserTar(path, sta);
|
||||
value["isBigFile"] = !CheckOwnPackTar(path) && CheckBigFile(sta);
|
||||
|
||||
obj_.append(value);
|
||||
}
|
||||
@@ -268,71 +232,4 @@ std::vector<ExtManageInfo> BJsonEntityExtManage::GetExtManageInfo() const

return infos;
}
-
- bool BJsonEntityExtManage::SetHardLinkInfo(const string origin, const set<string> hardLinks)
- {
- if (origin.empty()) {
- HILOGE("origin file name can not empty");
- return false;
- }
- if (!obj_) {
- HILOGE("Uninitialized JSon Object reference");
- return false;
- }
- if (!obj_.isArray()) {
- HILOGE("json object isn't an array");
- return false;
- }
-
- for (Json::Value &item : obj_) {
- string fileName = item.isObject() && item.isMember("fileName") && item["fileName"].isString()
- ? item["fileName"].asString()
- : "";
- if (origin == fileName) {
- for (const auto &lk : hardLinks) {
- item["hardlinks"].append(lk);
- }
- return true;
- }
- }
-
- return false;
- }
-
- const set<string> BJsonEntityExtManage::GetHardLinkInfo(const string origin)
- {
- if (origin.empty()) {
- HILOGE("origin file name can not empty");
- return {};
- }
- if (!obj_) {
- HILOGE("Uninitialized JSon Object reference");
- return {};
- }
- if (!obj_.isArray()) {
- HILOGE("json object isn't an array");
- return {};
- }
-
- set<string> hardlinks;
- for (const Json::Value &item : obj_) {
- if (!item.isObject()) {
- continue;
- }
- string fileName = item.isMember("fileName") && item["fileName"].isString() ? item["fileName"].asString() : "";
- if (origin != fileName) {
- continue;
- }
- if (!(item.isMember("hardlinks") && item["hardlinks"].isArray())) {
- break;
- }
- for (const auto &lk : item["hardlinks"]) {
- if (lk.isString()) {
- hardlinks.emplace(lk.asString());
- }
- }
- }
-
- return hardlinks;
- }
} // namespace OHOS::FileManagement::Backup
@@ -111,7 +111,7 @@ static ErrCode ParseReportInfo(struct ReportFileInfo &fileStat,
static void DealLine(unordered_map<string, int> &keys,
int &num,
const string &line,
- map<string, struct ReportFileInfo> &infos)
+ unordered_map<string, struct ReportFileInfo> &infos)
{
string currentLine = line;
if (currentLine[currentLine.length() - 1] == LINE_WRAP) {
@@ -137,9 +137,9 @@ static void DealLine(unordered_map<string, int> &keys,
}
}

- map<string, struct ReportFileInfo> BReportEntity::GetReportInfos()
+ unordered_map<string, struct ReportFileInfo> BReportEntity::GetReportInfos()
{
- map<string, struct ReportFileInfo> infos {};
+ unordered_map<string, struct ReportFileInfo> infos {};

char buffer[HASH_BUFFER_SIZE];
ssize_t bytesRead;