Mirror of https://github.com/valitydev/osquery-1.git (synced 2024-11-07 09:58:54 +00:00)

Merge pull request #1448 from theopolis/strol-speedup

Speedup type conversions, yara, and 10.10 symbols at runtime

This commit is contained in: commit 43cf5f1a0a
@@ -10,6 +10,7 @@
 #pragma once
 
+#include <deque>
 #include <map>
 #include <memory>
 #include <vector>
@@ -77,7 +78,7 @@ namespace osquery {
 /// Helper alias for TablePlugin names.
 typedef std::string TableName;
 typedef std::vector<std::pair<std::string, std::string> > TableColumns;
-typedef std::map<std::string, std::vector<std::string> > TableData;
+typedef std::map<std::string, std::deque<std::string> > TableData;
 
 /**
  * @brief A ConstraintOperator is applied in an query predicate.
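The TableData change above swaps std::vector for std::deque as the per-column row container. The point is constant-time push_front, which the xFilter rewrite further down relies on when it drains the response from the back while keeping column values in their original row order. A minimal standalone sketch of that idiom (plain C++, not osquery code):

#include <deque>
#include <iostream>
#include <string>
#include <vector>

int main() {
  // push_front is O(1) on a deque; a vector would shift every existing
  // element, making repeated front insertion quadratic overall.
  std::deque<std::string> column;
  std::vector<std::string> rows = {"row1", "row2", "row3"};

  // Consume rows from the back (cheap pop_back) while preserving the
  // original order in the destination by inserting at the front.
  while (!rows.empty()) {
    column.push_front(std::move(rows.back()));
    rows.pop_back();
  }

  for (const auto& value : column) {
    std::cout << value << std::endl;  // prints row1, row2, row3
  }
  return 0;
}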
@@ -28,9 +28,7 @@ class BenchmarkTablePlugin : public TablePlugin {
   QueryData generate(QueryContext& ctx) {
     QueryData results;
     results.push_back({{"test_int", "0"}});
-    results.push_back({
-        {"test_int", "0"}, {"test_text", "hello"},
-    });
+    results.push_back({{"test_int", "0"}, {"test_text", "hello"}});
     return results;
   }
 };
@@ -47,17 +45,6 @@ static void SQL_virtual_table_registry(benchmark::State& state) {
 
 BENCHMARK(SQL_virtual_table_registry);
 
-static void SQL_select_metadata(benchmark::State& state) {
-  auto dbc = SQLiteDBManager::get();
-  while (state.KeepRunning()) {
-    QueryData results;
-    queryInternal("select count(*) from sqlite_temp_master;", results,
-                  dbc.db());
-  }
-}
-
-BENCHMARK(SQL_select_metadata);
-
 static void SQL_virtual_table_internal(benchmark::State& state) {
   Registry::add<BenchmarkTablePlugin>("table", "benchmark");
   PluginResponse res;
@@ -75,6 +62,49 @@ static void SQL_virtual_table_internal(benchmark::State& state) {
 
 BENCHMARK(SQL_virtual_table_internal);
 
+class BenchmarkLongTablePlugin : public TablePlugin {
+ private:
+  TableColumns columns() const {
+    return {{"test_int", "INTEGER"}, {"test_text", "TEXT"}};
+  }
+
+  QueryData generate(QueryContext& ctx) {
+    QueryData results;
+    for (int i = 0; i < 1000; i++) {
+      results.push_back({{"test_int", "0"}, {"test_text", "hello"}});
+    }
+    return results;
+  }
+};
+
+static void SQL_virtual_table_internal_long(benchmark::State& state) {
+  Registry::add<BenchmarkLongTablePlugin>("table", "long_benchmark");
+  PluginResponse res;
+  Registry::call("table", "long_benchmark", {{"action", "columns"}}, res);
+
+  // Attach a sample virtual table.
+  auto dbc = SQLiteDBManager::get();
+  attachTableInternal("long_benchmark", columnDefinition(res), dbc.db());
+
+  while (state.KeepRunning()) {
+    QueryData results;
+    queryInternal("select * from long_benchmark", results, dbc.db());
+  }
+}
+
+BENCHMARK(SQL_virtual_table_internal_long);
+
+static void SQL_select_metadata(benchmark::State& state) {
+  auto dbc = SQLiteDBManager::get();
+  while (state.KeepRunning()) {
+    QueryData results;
+    queryInternal(
+        "select count(*) from sqlite_temp_master;", results, dbc.db());
+  }
+}
+
+BENCHMARK(SQL_select_metadata);
+
 static void SQL_select_basic(benchmark::State& state) {
   // Profile executing a query against an internal, already attached table.
   while (state.KeepRunning()) {
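For context, these cases follow the Google Benchmark pattern used throughout this file: a free function taking benchmark::State&, the measured work inside the state.KeepRunning() loop, and a BENCHMARK(...) registration. A minimal self-contained harness along the same lines (an illustrative example, not part of this change):

#include <benchmark/benchmark.h>

#include <string>

// A trivial case: time string construction plus concatenation.
static void BM_string_concat(benchmark::State& state) {
  while (state.KeepRunning()) {
    std::string s = std::string("hello") + " world";
    benchmark::DoNotOptimize(s);
  }
}

// Register the case; the framework picks the iteration count.
BENCHMARK(BM_string_concat);

// Expands to a main() that runs every registered benchmark.
BENCHMARK_MAIN();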
@@ -17,7 +17,7 @@ namespace tables {
 
 int xOpen(sqlite3_vtab *pVTab, sqlite3_vtab_cursor **ppCursor) {
   int rc = SQLITE_NOMEM;
-  BaseCursor *pCur;
+  BaseCursor *pCur = nullptr;
 
   pCur = new BaseCursor;
 
@@ -126,30 +126,30 @@ int xColumn(sqlite3_vtab_cursor *cur, sqlite3_context *ctx, int col) {
   if (type == "TEXT") {
     sqlite3_result_text(ctx, value.c_str(), value.size(), SQLITE_STATIC);
   } else if (type == "INTEGER") {
-    int afinite;
-    try {
-      afinite = boost::lexical_cast<int>(value);
-    } catch (const boost::bad_lexical_cast &e) {
+    char *end = nullptr;
+    long int afinite = strtol(value.c_str(), &end, 10);
+    if (end == nullptr || end == value.c_str() || *end != '\0' ||
+        ((afinite == LONG_MIN || afinite == LONG_MAX) && errno == ERANGE) ||
+        afinite < INT_MIN || afinite > INT_MAX) {
       afinite = -1;
       VLOG(1) << "Error casting " << column_name << " (" << value
               << ") to INTEGER";
     }
-    sqlite3_result_int(ctx, afinite);
+    sqlite3_result_int(ctx, (int)afinite);
   } else if (type == "BIGINT") {
-    long long int afinite;
-    try {
-      afinite = boost::lexical_cast<long long int>(value);
-    } catch (const boost::bad_lexical_cast &e) {
+    char *end = nullptr;
+    long long int afinite = strtoll(value.c_str(), &end, 10);
+    if (end == nullptr || end == value.c_str() || *end != '\0' ||
+        ((afinite == LLONG_MIN || afinite == LLONG_MAX) && errno == ERANGE)) {
       afinite = -1;
       VLOG(1) << "Error casting " << column_name << " (" << value
               << ") to BIGINT";
     }
     sqlite3_result_int64(ctx, afinite);
   } else if (type == "DOUBLE") {
-    double afinite;
-    try {
-      afinite = boost::lexical_cast<double>(value);
-    } catch (const boost::bad_lexical_cast &e) {
+    char *end = nullptr;
+    double afinite = strtod(value.c_str(), &end);
+    if (end == nullptr || end == value.c_str() || *end != '\0') {
       afinite = 0;
       VLOG(1) << "Error casting" << column_name << " (" << value
               << ") to DOUBLE";
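The xColumn rewrite above drops boost::lexical_cast, whose failure path throws and catches an exception for every non-numeric value, in favor of strtol/strtoll/strtod with explicit end-pointer, errno, and range checks. A standalone sketch of the same checking pattern for the INTEGER case (illustrative; unlike the hunk above it clears errno before the call, since strtol only ever sets it):

#include <cerrno>
#include <climits>
#include <cstdlib>
#include <string>

// Convert a decimal string to int, returning `fallback` on any parse or
// range error instead of throwing.
int safeStrtoi(const std::string& value, int fallback) {
  char* end = nullptr;
  errno = 0;
  long parsed = strtol(value.c_str(), &end, 10);
  if (end == value.c_str() || *end != '\0' ||
      (errno == ERANGE && (parsed == LONG_MIN || parsed == LONG_MAX)) ||
      parsed < INT_MIN || parsed > INT_MAX) {
    return fallback;
  }
  return static_cast<int>(parsed);
}

Avoiding the exception machinery is what makes the conversion cheaper when a table emits many rows with empty or malformed numeric columns.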
@@ -227,25 +227,28 @@ static int xFilter(sqlite3_vtab_cursor *pVtabCursor,
 
   // Now organize the response rows by column instead of row.
   auto &data = pVtab->content->data;
-  for (auto &row : response) {
+  auto row = response.rbegin();
+  while (row != response.rend()) {
     for (const auto &column : pVtab->content->columns) {
-      if (row.count(column.first) == 0) {
+      if (row->count(column.first) == 0) {
         VLOG(1) << "Table " << pVtab->content->name << " row "
                 << pVtab->content->n << " did not include column "
                 << column.first;
-        data[column.first].push_back("");
+        data[column.first].push_front("");
         continue;
       }
 
-      auto &value = row.at(column.first);
+      auto &value = row->at(column.first);
       if (value.size() > FLAGS_value_max) {
-        data[column.first].push_back(value.substr(0, FLAGS_value_max));
+        data[column.first].push_front(value.substr(0, FLAGS_value_max));
         value.clear();
       } else {
-        data[column.first].push_back(std::move(value));
+        data[column.first].push_front(std::move(value));
      }
     }
 
+    response.erase((row + 1).base());
+    row = response.rbegin();
     pVtab->content->n++;
   }
 
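Two details carry the xFilter change: each value is moved (or truncated and cleared) out of its row as soon as it lands in the column deque, and the consumed row is erased immediately via response.erase((row + 1).base()), so peak memory stays near one copy of the result set instead of two. The (row + 1).base() expression is the standard way to turn a reverse iterator into the forward iterator that erase() expects; a small standalone illustration:

#include <cassert>
#include <vector>

int main() {
  std::vector<int> v = {10, 20, 30};

  // A reverse iterator refers to the element before its base(): rbegin()
  // refers to 30, and (rbegin() + 1).base() is the forward iterator to 30.
  auto rit = v.rbegin();
  assert(*rit == 30);
  assert(*((rit + 1).base()) == 30);

  // Erase the element the reverse iterator refers to.
  v.erase((rit + 1).base());
  assert(v.size() == 2 && v.back() == 20);
  return 0;
}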
@@ -64,7 +64,6 @@ Status doYARAScan(YR_RULES* rules,
 
 QueryData genYara(QueryContext& context) {
   QueryData results;
-  Status status;
 
   auto paths = context.constraints["path"].getAll(EQUALS);
   auto patterns = context.constraints["pattern"].getAll(EQUALS);
@@ -82,11 +81,13 @@ QueryData genYara(QueryContext& context) {
   if (parser == nullptr) {
     return results;
   }
   const auto& yaraParser = std::static_pointer_cast<YARAConfigParserPlugin>(parser);
   if (yaraParser == nullptr) {
     return results;
   }
-  auto rules = yaraParser->rules();
+
+  auto& rules = yaraParser->rules();
 
   // Store resolved paths in a vector of pairs.
   // Each pair has the first element as the path to scan and the second
@@ -119,22 +120,20 @@ QueryData genYara(QueryContext& context) {
   }
 
   // Compile all sigfiles into a map.
-  std::map<std::string, YR_RULES*> compiled_rules;
   for (const auto& file : sigfiles) {
-    YR_RULES *rules = nullptr;
+    // Check if this on-demand sigfile has not been used/compiled.
+    if (rules.count(file) == 0) {
+      auto path = (file[0] != '/') ? std::string("/etc/osquery/yara/") : "";
+      path += file;
 
-    std::string full_path;
-    if (file[0] != '/') {
-      full_path = std::string("/etc/osquery/yara/") + file;
-    } else {
-      full_path = file;
-    }
-
-    status = compileSingleFile(full_path, &rules);
-    if (!status.ok()) {
-      VLOG(1) << "YARA error: " << status.toString();
-    } else {
-      compiled_rules[file] = rules;
+      YR_RULES* tmp_rules = nullptr;
+      auto status = compileSingleFile(path, &tmp_rules);
+      if (!status.ok()) {
+        VLOG(1) << "YARA error: " << status.toString();
+      } else {
+        rules[file] = tmp_rules;
+        groups.insert(file);
+      }
     }
   }
 
@@ -147,35 +146,16 @@ QueryData genYara(QueryContext& context) {
       }
 
       VLOG(1) << "Scanning with group: " << group;
-      status = doYARAScan(rules[group],
-                          path_pair.first.c_str(),
-                          path_pair.second,
-                          results,
-                          group,
-                          "");
+      auto status = doYARAScan(rules[group],
+                               path_pair.first.c_str(),
+                               path_pair.second,
+                               results,
+                               group,
+                               group);
       if (!status.ok()) {
         VLOG(1) << "YARA error: " << status.toString();
       }
     }
-
-    // Scan using files.
-    for (const auto& element : compiled_rules) {
-      VLOG(1) << "Scanning with file: " << element.first;
-      status = doYARAScan(element.second,
-                          path_pair.first.c_str(),
-                          path_pair.second,
-                          results,
-                          "",
-                          element.first);
-      if (!status.ok()) {
-        VLOG(1) << "YARA error: " << status.toString();
-      }
-    }
   }
-
-  // Cleanup compiled rules
-  for (const auto& element : compiled_rules) {
-    yr_rules_destroy(element.second);
-  }
-
   return results;
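For readers unfamiliar with libyara, compileSingleFile and handleRuleFiles wrap its compiler API: create a YR_COMPILER, feed it rule files, extract an immutable YR_RULES object that can be scanned repeatedly, and eventually free it with yr_rules_destroy. A simplified standalone sketch of that lifecycle (error reporting trimmed; the compileRules name is illustrative, and it assumes yr_initialize() has already been called, as osquery's config parser does during setup):

#include <yara.h>

#include <cstdio>

// Compile one rule file into a reusable YR_RULES object.
// Returns nullptr on any failure; the caller owns the result and must
// release it with yr_rules_destroy().
YR_RULES* compileRules(const char* rule_path) {
  YR_RULES* rules = nullptr;
  YR_COMPILER* compiler = nullptr;
  if (yr_compiler_create(&compiler) != ERROR_SUCCESS) {
    return nullptr;
  }

  FILE* rule_file = fopen(rule_path, "r");
  if (rule_file == nullptr) {
    yr_compiler_destroy(compiler);
    return nullptr;
  }

  // yr_compiler_add_file returns the number of errors found in the source.
  int errors = yr_compiler_add_file(compiler, rule_file, nullptr, rule_path);
  fclose(rule_file);

  if (errors == 0) {
    yr_compiler_get_rules(compiler, &rules);
  }
  yr_compiler_destroy(compiler);
  return rules;
}

Caching the compiled YR_RULES per signature group, as the code above now does through the parser's rules() map, means each group is compiled once and then reused for every scanned path.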
@@ -28,8 +28,7 @@ void YARACompilerCallback(int error_level,
                           void* user_data) {
   if (error_level == YARA_ERROR_LEVEL_ERROR) {
     VLOG(1) << file_name << "(" << line_number << "): error: " << message;
-  }
-  else {
+  } else {
     VLOG(1) << file_name << "(" << line_number << "): warning: " << message;
   }
 }
@@ -108,29 +107,24 @@ Status compileSingleFile(const std::string& file, YR_RULES** rules) {
  * Given a vector of strings, attempt to compile them and store the result
  * in the map under the given category.
  */
-Status handleRuleFiles(const std::string& category,
-                       const pt::ptree& rule_files,
-                       std::map<std::string, YR_RULES*>* rules) {
+Status handleRuleFiles(const std::string &category,
+                       const pt::ptree &rule_files,
+                       std::map<std::string, YR_RULES *> &rules) {
   YR_COMPILER *compiler = nullptr;
   int result = yr_compiler_create(&compiler);
   if (result != ERROR_SUCCESS) {
-    VLOG(1) << "Could not create compiler: " + std::to_string(result);
-    return Status(1, "Could not create compiler: " + std::to_string(result));
+    VLOG(1) << "Could not create compiler: error " + std::to_string(result);
+    return Status(1, "YARA compile error " + std::to_string(result));
   }
 
   yr_compiler_set_callback(compiler, YARACompilerCallback, nullptr);
 
   bool compiled = false;
   for (const auto& item : rule_files) {
-    YR_RULES *tmp_rules;
-    const auto rule = item.second.get("", "");
-    VLOG(1) << "Loading " << rule;
-
-    std::string full_path;
+    YR_RULES *tmp_rules = nullptr;
+    auto rule = item.second.get("", "");
     if (rule[0] != '/') {
-      full_path = std::string("/etc/osquery/yara/") + rule;
-    } else {
-      full_path = rule;
+      rule = std::string("/etc/osquery/yara/") + rule;
     }
 
     // First attempt to load the file, in case it is saved (pre-compiled)
@@ -148,28 +142,29 @@ Status handleRuleFiles(const std::string& category,
     //
     // If you want to use saved rule files you must have them all in a single
    // file. This is easy to accomplish with yarac(1).
-    result = yr_rules_load(full_path.c_str(), &tmp_rules);
+    result = yr_rules_load(rule.c_str(), &tmp_rules);
     if (result != ERROR_SUCCESS && result != ERROR_INVALID_FILE) {
       yr_compiler_destroy(compiler);
-      return Status(1, "Error loading YARA rules: " + std::to_string(result));
+      return Status(1, "YARA load error " + std::to_string(result));
     } else if (result == ERROR_SUCCESS) {
       // If there are already rules there, destroy them and put new ones in.
-      if (rules->count(category) > 0) {
-        yr_rules_destroy((*rules)[category]);
+      if (rules.count(category) > 0) {
+        yr_rules_destroy(rules[category]);
       }
-      (*rules)[category] = tmp_rules;
+
+      rules[category] = tmp_rules;
     } else {
       compiled = true;
       // Try to compile the rules.
-      FILE *rule_file = fopen(full_path.c_str(), "r");
+      FILE *rule_file = fopen(rule.c_str(), "r");
 
       if (rule_file == nullptr) {
         yr_compiler_destroy(compiler);
-        return Status(1, "Could not open file: " + full_path);
+        return Status(1, "Could not open file: " + rule);
       }
 
       int errors =
-          yr_compiler_add_file(compiler, rule_file, nullptr, full_path.c_str());
+          yr_compiler_add_file(compiler, rule_file, nullptr, rule.c_str());
 
       fclose(rule_file);
       rule_file = nullptr;
@@ -184,7 +179,7 @@ Status handleRuleFiles(const std::string& category,
 
   if (compiled) {
     // All the rules for this category have been compiled, save them in the map.
-    result = yr_compiler_get_rules(compiler, &((*rules)[category]));
+    result = yr_compiler_get_rules(compiler, &rules[category]);
 
     if (result != ERROR_SUCCESS) {
       yr_compiler_destroy(compiler);
@@ -256,21 +251,27 @@ Status YARAConfigParserPlugin::setUp() {
   return Status(0, "OK");
 }
 
-Status YARAConfigParserPlugin::update(const std::map<std::string, ConfigTree>& config) {
-  Status status;
+Status YARAConfigParserPlugin::update(
+    const std::map<std::string, ConfigTree> &config) {
   // The YARA config parser requested the "yara" top-level key in the config.
   const auto& yara_config = config.at("yara");
 
   // Look for a "signatures" key with the group/file content.
   if (yara_config.count("signatures") > 0) {
     const auto& signatures = yara_config.get_child("signatures");
     data_.add_child("signatures", signatures);
     for (const auto& element : signatures) {
       VLOG(1) << "Compiling YARA signature group: " << element.first;
-      status = handleRuleFiles(element.first, element.second, &rules_);
+      auto status = handleRuleFiles(element.first, element.second, rules_);
       if (!status.ok()) {
         VLOG(1) << "YARA rule compile error: " << status.getMessage();
         return status;
       }
     }
   }
 
   // The "file_paths" set maps the rule groups to the "file_paths" top level
   // configuration key. That similar key keeps the groups of file paths.
   if (yara_config.count("file_paths") > 0) {
     const auto& file_paths = yara_config.get_child("file_paths");
     data_.add_child("file_paths", file_paths);
 
@@ -28,7 +28,7 @@ Status compileSingleFile(const std::string& file, YR_RULES** rule);
 
 Status handleRuleFiles(const std::string& category,
                        const pt::ptree& rule_files,
-                       std::map<std::string, YR_RULES *>* rules);
+                       std::map<std::string, YR_RULES*>& rules);
 
 int YARACallback(int message, void *message_data, void *user_data);
 
@@ -48,7 +48,7 @@ class YARAConfigParserPlugin : public ConfigParserPlugin {
   std::vector<std::string> keys() { return {"yara"}; }
 
   // Retrieve compiled rules.
-  std::map<std::string, YR_RULES *> rules() { return rules_; }
+  std::map<std::string, YR_RULES*>& rules() { return rules_; }
 
   Status setUp();
@@ -313,10 +313,22 @@ QueryData genAppSchemes(QueryContext& context) {
     }
 
     // Check the default handler assigned to the protocol scheme.
-#if !defined(DARWIN_10_9)
-    auto default_app =
-        LSCopyDefaultApplicationURLForURL(url, kLSRolesAll, nullptr);
-#endif
+    // This only applies to 10.10, so resolve the symbol at runtime.
+    CFBundleRef ls_bundle =
+        CFBundleGetBundleWithIdentifier(CFSTR("com.apple.LaunchServices"));
+    CFURLRef default_app = nullptr;
+    if (ls_bundle != nullptr) {
+      auto _LSCopyDefaultApplicationURLForURL =
+          (CFURLRef (*)(CFURLRef, LSRolesMask, CFErrorRef*))
+              CFBundleGetFunctionPointerForName(
+                  ls_bundle, CFSTR("LSCopyDefaultApplicationURLForURL"));
+      // If the symbol did not exist we will not have a handle.
+      if (_LSCopyDefaultApplicationURLForURL != nullptr) {
+        default_app =
+            _LSCopyDefaultApplicationURLForURL(url, kLSRolesAll, nullptr);
+      }
+    }
 
     CFRelease(url);
     for (CFIndex i = 0; i < CFArrayGetCount(apps); i++) {
       Row r;
@@ -336,24 +348,21 @@ QueryData genAppSchemes(QueryContext& context) {
       r["handler"] = stringFromCFString(path);
       CFRelease(path);
       // Check if the handler is set (in the OS) as the default.
-#if !defined(DARWIN_10_9)
       if (default_app != nullptr &&
           CFEqual((CFTypeRef)app, (CFTypeRef)default_app)) {
         r["enabled"] = "1";
       } else {
         r["enabled"] = "0";
       }
-#endif
 
       r["external"] = (scheme.second & kSchemeSystemDefault) ? "0" : "1";
       r["protected"] = (scheme.second & kSchemeProtected) ? "1" : "0";
       results.push_back(r);
     }
 
-#if !defined(DARWIN_10_9)
     if (default_app != nullptr) {
       CFRelease(default_app);
     }
-#endif
     CFRelease(apps);
   }
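The app_schemes hunk above replaces the DARWIN_10_9 preprocessor guard with a runtime lookup through CFBundleGetFunctionPointerForName, so one binary runs on 10.9 (where the default-handler check is skipped) and on 10.10 (where LSCopyDefaultApplicationURLForURL exists). The same effect can be achieved with the POSIX loader; a sketch of an equivalent dlsym lookup, shown only to illustrate the technique and not what this commit uses:

#include <dlfcn.h>

#include <iostream>

int main() {
  // Search the images already loaded into the process; nullptr means the
  // running OS does not provide the symbol and the call should be skipped.
  void* sym = dlsym(RTLD_DEFAULT, "LSCopyDefaultApplicationURLForURL");
  if (sym == nullptr) {
    std::cout << "Symbol unavailable; skipping default-handler check\n";
    return 0;
  }

  // Before calling, cast `sym` to the documented signature, e.g.
  //   CFURLRef (*)(CFURLRef, LSRolesMask, CFErrorRef*)
  // (omitted here to keep the sketch free of CoreFoundation headers).
  std::cout << "Symbol resolved at runtime\n";
  return 0;
}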
@@ -14,10 +14,12 @@
 #include <boost/filesystem.hpp>
 #include <boost/property_tree/json_parser.hpp>
 
-#include <osquery/logger.h>
-#include <osquery/tables.h>
 #include <osquery/filesystem.h>
-#include <osquery/core/conversions.h>
+#include <osquery/logger.h>
+#include <osquery/sql.h>
+#include <osquery/tables.h>
+
+#include "osquery/core/conversions.h"
 
 namespace osquery {
 namespace tables {
@@ -166,29 +168,11 @@ void parseQuarantineFile(QueryData &results, const std::string &path) {
     return;
   }
 
-  CFTypeRef quarantine_properties;
-#if defined(DARWIN_10_9)
-  FSRef fs_url;
-  if (!CFURLGetFSRef(url, &fs_url)) {
-    VLOG(1) << "Error obtaining FSRef for " << path;
-    VLOG(1) << "Unable to fetch quarantine data";
-    CFRelease(url);
-    return;
-  }
-  if (LSCopyItemAttribute(&fs_url, kLSRolesAll, kLSItemQuarantineProperties,
-                          &quarantine_properties) != noErr) {
-    VLOG(1) << "Error retrieving quarantine properties for " << path;
-    CFRelease(url);
-    return;
-  }
-#else
-  if (!CFURLCopyResourcePropertyForKey(url, kCFURLQuarantinePropertiesKey,
-                                       &quarantine_properties, nullptr)) {
-    VLOG(1) << "Error retrieving quarantine properties for " << path;
-    CFRelease(url);
-    return;
-  }
-#endif
+  CFTypeRef quarantine_properties = nullptr;
+  // This is the non-10.10-symbolic version of kCFURLQuarantinePropertiesKey.
+  CFStringRef qp_key = CFSTR("NSURLQuarantinePropertiesKey");
+  CFURLCopyResourcePropertyForKey(url, qp_key, &quarantine_properties, nullptr);
+  CFRelease(qp_key);
 
   if (quarantine_properties == nullptr) {
     VLOG(1) << "Error retrieving quarantine properties for " << path;
@@ -196,16 +180,17 @@ void parseQuarantineFile(QueryData &results, const std::string &path) {
     return;
   }
 
-  CFTypeRef property;
+  CFTypeRef property = nullptr;
   for (const auto &kv : kQuarantineKeys) {
     CFStringRef key = CFStringCreateWithCString(
         kCFAllocatorDefault, kv.second.c_str(), kCFStringEncodingUTF8);
 
-    if (CFDictionaryGetValueIfPresent((CFDictionaryRef)quarantine_properties,
-                                      key, &property)) {
-      extractQuarantineProperty(kv.first, property, path, results);
+    if (key != nullptr) {
+      if (CFDictionaryGetValueIfPresent(
+              (CFDictionaryRef)quarantine_properties, key, &property)) {
+        extractQuarantineProperty(kv.first, property, path, results);
+      }
+      CFRelease(key);
     }
-    CFRelease(key);
   }
 
   CFRelease(quarantine_properties);