mirror of https://github.com/valitydev/osquery-1.git
parent 5652877ee0
commit 383e07e5be
@@ -100,9 +100,9 @@ class Config : private boost::noncopyable {
   /**
    * @brief Calculate the hash of the osquery config
    *
-   * @return The MD5 of the osquery config
+   * @return The SHA1 hash of the osquery config
    */
-  Status getMD5(std::string& hash);
+  Status genHash(std::string& hash);

   /**
    * @brief Hash a source's config data
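Note for downstream consumers: this is a straight rename plus an algorithm change, so call sites only need the new method name. A minimal sketch of an updated caller (the logging and the <osquery/config.h> include are illustrative assumptions, not part of this commit):

#include <string>

#include <osquery/config.h>  // assumed location of the Config singleton
#include <osquery/logger.h>

void logConfigHash() {
  std::string hash;
  // genHash() replaces getMD5() and now yields a SHA1 hex digest.
  auto status = osquery::Config::getInstance().genHash(hash);
  if (status.ok()) {
    LOG(INFO) << "config hash: " << hash;
  } else {
    LOG(WARNING) << "config is not valid, no hash computed";
  }
}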
@@ -21,7 +21,6 @@
 #include <osquery/extensions.h>
 #include <osquery/filesystem.h>
 #include <osquery/flags.h>
-#include <osquery/hash.h>
 #include <osquery/logger.h>
 #include <osquery/registry.h>
 #include <osquery/sql.h>
@@ -130,4 +129,3 @@ REGISTER_INTERNAL(ExternalSQLPlugin, "sql", "sql");
 #undef CREATE_LAZY_REGISTRY
 #define CREATE_LAZY_REGISTRY "Do not CREATE_LAZY_REGISTRY in the osquery SDK"
 }
-
@@ -89,8 +89,6 @@ if(NOT WINDOWS)
   ADD_OSQUERY_LINK_ADDITIONAL("crypto")
   ADD_OSQUERY_LINK_ADDITIONAL("libpthread")

-  ADD_OSQUERY_LINK_CORE("crypto")
-
   # Linenoise will be used when compiling osqueryi.
   ADD_OSQUERY_LINK_ADDITIONAL("linenoise")
 endif()
@@ -19,7 +19,6 @@
 #include <osquery/database.h>
 #include <osquery/filesystem.h>
 #include <osquery/flags.h>
-#include <osquery/hash.h>
 #include <osquery/logger.h>
 #include <osquery/packs.h>
 #include <osquery/registry.h>
@@ -716,11 +715,10 @@ void Config::getPerformanceStats(

 void Config::hashSource(const std::string& source, const std::string& content) {
   WriteLock wlock(config_hash_mutex_);
-  hash_[source] =
-      hashFromBuffer(HASH_TYPE_MD5, &(content.c_str())[0], content.size());
+  hash_[source] = getBufferSHA1(content.c_str(), content.size());
 }

-Status Config::getMD5(std::string& hash) {
+Status Config::genHash(std::string& hash) {
   if (!valid_) {
     return Status(1, "Current config is not valid");
   }
@@ -736,8 +734,8 @@ Status Config::getMD5(std::string& hash) {
   for (const auto& it : hash_) {
     add(it.second);
   }
+  hash = getBufferSHA1(buffer.data(), buffer.size());

-  hash = hashFromBuffer(HASH_TYPE_MD5, &buffer[0], buffer.size());
   return Status(0, "OK");
 }
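The buffer and add() helper referenced here sit outside the hunk's context lines, but the pattern is: append every per-source digest to one buffer, then hash the concatenation once with the new SHA1 helper. A rough standalone sketch of that idea (names are illustrative, not the committed code):

#include <map>
#include <string>
#include <vector>

// Stand-in for the getBufferSHA1() added to conversions.cpp in this commit.
std::string getBufferSHA1(const char* buffer, size_t size);

std::string combineSourceHashes(const std::map<std::string, std::string>& hashes) {
  std::vector<char> buffer;
  for (const auto& it : hashes) {
    // Append each source's hex digest to one contiguous buffer.
    buffer.insert(buffer.end(), it.second.begin(), it.second.end());
  }
  // Hash the concatenation to produce a single config-wide digest.
  return getBufferSHA1(buffer.data(), buffer.size());
}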
@@ -12,7 +12,6 @@
 #include <random>

 #include <osquery/core.h>
-#include <osquery/hash.h>
 #include <osquery/logger.h>
 #include <osquery/packs.h>
 #include <osquery/sql.h>
@@ -71,7 +70,8 @@ size_t getMachineShard(const std::string& hostname = "", bool force = false) {
   // An optional input hostname may override hostname detection for testing.
   auto hn = (hostname.empty()) ? getHostname() : hostname;
-  auto hn_hash = hashFromBuffer(HASH_TYPE_MD5, hn.c_str(), hn.size());
+
+  auto hn_hash = getBufferSHA1(hn.c_str(), hn.size());

   if (hn_hash.size() >= 2) {
     long hn_char;
     if (safeStrtol(hn_hash.substr(0, 2), 16, hn_char)) {
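The shard logic is unaffected by the longer digest because only the first two hex characters are consumed: they are parsed as a byte via safeStrtol(..., 16, ...) and then scaled into a shard bucket. The exact scaling lives outside this hunk, so the sketch below only illustrates the parsing step:

#include <cstdlib>
#include <string>

// Parse the leading byte of a hex digest (0-255); returns -1 on a short digest.
long leadingByteOfDigest(const std::string& hex_digest) {
  if (hex_digest.size() < 2) {
    return -1;
  }
  // Comparable in spirit to safeStrtol(hex_digest.substr(0, 2), 16, out).
  return std::strtol(hex_digest.substr(0, 2).c_str(), nullptr, 16);
}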
@@ -57,7 +57,8 @@ TEST_F(TLSConfigTests, test_retrieve_config) {
   c.load();

   const auto& hashes = c.hash_;
-  EXPECT_EQ("c109cd4fc0a928dba787384a89f9d03d", hashes.at("tls_plugin"));
+  EXPECT_EQ("b7718020a76ced2eda82336bd15165009603d4fb",
+            hashes.at("tls_plugin"));

   // Configure the plugin to use the node API.
   Flag::updateValue("tls_node_api", "1");
@@ -15,7 +15,6 @@ ADD_OSQUERY_LIBRARY(TRUE osquery_core
   ${OS_CORE_SOURCE}
   tables.cpp
   flags.cpp
-  hash.cpp
   watcher.cpp
   process_shared.cpp
 )
@@ -8,12 +8,14 @@
  *
  */

+#include <iomanip>
+#include <sstream>
+
 #include <boost/algorithm/string.hpp>
-#include <boost/archive/iterators/transform_width.hpp>
-#include <boost/archive/iterators/binary_from_base64.hpp>
 #include <boost/archive/iterators/base64_from_binary.hpp>
+#include <boost/archive/iterators/binary_from_base64.hpp>
+#include <boost/archive/iterators/transform_width.hpp>
+#include <boost/uuid/sha1.hpp>

 #include "osquery/core/conversions.h"
@@ -116,4 +118,20 @@ std::vector<std::string> split(const std::string& s,
 std::string join(const std::vector<std::string>& s, const std::string& tok) {
   return boost::algorithm::join(s, tok);
 }
+
+std::string getBufferSHA1(const char* buffer, size_t size) {
+  // SHA1 produces 160-bit digests, so allocate (5 * 32) bits.
+  uint32_t digest[5] = {0};
+  boost::uuids::detail::sha1 sha1;
+  sha1.process_bytes(buffer, size);
+  sha1.get_digest(digest);
+
+  // Convert digest to desired hex string representation.
+  std::stringstream result;
+  result << std::hex << std::setfill('0');
+  for (size_t i = 0; i < 5; ++i) {
+    result << std::setw(sizeof(uint32_t) * 2) << digest[i];
+  }
+  return result.str();
+}
 }
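The helper depends only on Boost's bundled SHA1 detail implementation plus <sstream>/<iomanip>, so it is easy to exercise in isolation. A minimal usage sketch against the function exactly as added above:

#include <cassert>
#include <string>

std::string getBufferSHA1(const char* buffer, size_t size);  // as added above

int main() {
  std::string data = "test\n";
  auto digest = getBufferSHA1(data.c_str(), data.size());
  // Five uint32_t words printed as 8 hex characters each: 40 characters total.
  assert(digest.size() == 40);
  // The exact digest for "test\n" is pinned by the unit test added later in
  // this diff: 4e1243bd22c66e76c2ba9eddc1f91394e57f9f83.
  return 0;
}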
@@ -233,6 +233,15 @@ inline size_t utf8StringSize(const std::string& str) {
   return res;
 }

+/*
+ * @brief Request a SHA1 hash from the contents of a buffer.
+ *
+ * @param buffer A caller-controlled buffer (already allocated).
+ * @param size The length of the controlled buffer.
+ * @return A string (hex) representation of the hash digest.
+ */
+std::string getBufferSHA1(const char* buffer, size_t size);
+
 #ifdef DARWIN
 /**
  * @brief Convert a CFStringRef to a std::string.
@@ -94,4 +94,10 @@ TEST_F(ConversionsTests, test_split_occurences) {
   };
   EXPECT_EQ(split(content, ":", 1), expected);
 }
+
+TEST_F(ConversionsTests, test_buffer_sha1) {
+  std::string test = "test\n";
+  EXPECT_EQ("4e1243bd22c66e76c2ba9eddc1f91394e57f9f83",
+            getBufferSHA1(test.c_str(), test.size()));
+}
 }
@@ -1,49 +0,0 @@
-/*
- *  Copyright (c) 2014-present, Facebook, Inc.
- *  All rights reserved.
- *
- *  This source code is licensed under the BSD-style license found in the
- *  LICENSE file in the root directory of this source tree. An additional grant
- *  of patent rights can be found in the PATENTS file in the same directory.
- *
- */
-
-#include <gtest/gtest.h>
-
-#include <osquery/hash.h>
-
-#include "osquery/tests/test_util.h"
-
-namespace osquery {
-
-class HashTests : public testing::Test {};
-
-TEST_F(HashTests, test_algorithms) {
-  const unsigned char buffer[1] = {'0'};
-
-  auto digest = hashFromBuffer(HASH_TYPE_MD5, buffer, 1);
-  EXPECT_EQ(digest, "cfcd208495d565ef66e7dff9f98764da");
-
-  digest = hashFromBuffer(HASH_TYPE_SHA1, buffer, 1);
-  EXPECT_EQ(digest, "b6589fc6ab0dc82cf12099d1c2d40ab994e8410c");
-
-  digest = hashFromBuffer(HASH_TYPE_SHA256, buffer, 1);
-  EXPECT_EQ(digest,
-            "5feceb66ffc86f38d952786c6d696c79c2dbc239dd4e91b46729d73a27fb57e9");
-}
-
-TEST_F(HashTests, test_update) {
-  const unsigned char buffer[1] = {'0'};
-
-  Hash hash(HASH_TYPE_MD5);
-  hash.update(buffer, 1);
-  hash.update(buffer, 1);
-  auto digest = hash.digest();
-  EXPECT_EQ(digest, "b4b147bc522828731f1a016bfa72c073");
-}
-
-TEST_F(HashTests, test_file_hashing) {
-  auto digest = hashFromFile(HASH_TYPE_MD5, kTestDataPath + "test_hashing.bin");
-  EXPECT_EQ(digest, "88ee11f2aa7903f34b8b8785d92208b1");
-}
-}
@@ -9,10 +9,10 @@
  */

 #include <osquery/events.h>
-#include <osquery/hash.h>
 #include <osquery/sql.h>

 #include "osquery/tables/events/event_utils.h"
+#include "osquery/tables/system/hash.h"

 namespace osquery {
@@ -17,11 +17,11 @@
 #include <tsk/libtsk.h>

 #include <osquery/filesystem.h>
-#include <osquery/hash.h>
 #include <osquery/logger.h>
 #include <osquery/tables.h>

 #include "osquery/core/conversions.h"
+#include "osquery/tables/system/hash.h"

 namespace fs = boost::filesystem;
@@ -11,10 +11,10 @@
 #include <CoreFoundation/CoreFoundation.h>
 #include <IOKit/IOKitLib.h>

-#include <osquery/hash.h>
 #include <osquery/tables.h>

 #include "osquery/core/conversions.h"
+#include "osquery/tables/system/hash.h"

 namespace osquery {
 namespace tables {
@@ -24,14 +24,12 @@ namespace tables {

 void genACPITable(const void* key, const void* value, void* results) {
   Row r;

-  r["name"] = stringFromCFString((CFStringRef)key);
-
   auto data = (CFDataRef)value;
   auto length = CFDataGetLength(data);

+  r["name"] = stringFromCFString((CFStringRef)key);
   r["size"] = INTEGER(length);
-  r["md5"] =
-      osquery::hashFromBuffer(HASH_TYPE_MD5, CFDataGetBytePtr(data), length);
+  r["md5"] = hashFromBuffer(HASH_TYPE_MD5, CFDataGetBytePtr(data), length);

   ((QueryData*)results)->push_back(r);
 }
@@ -9,7 +9,6 @@
  */

 #include <osquery/filesystem.h>
-#include <osquery/hash.h>
 #include <osquery/sql.h>
 #include <osquery/tables.h>
@@ -12,7 +12,6 @@
 #include <boost/uuid/uuid.hpp>
 #include <boost/uuid/uuid_io.hpp>

-#include <osquery/hash.h>
 #include <osquery/logger.h>
 #include <osquery/tables.h>
@@ -14,10 +14,10 @@
 #include <boost/lexical_cast.hpp>

 #include <osquery/filesystem.h>
-#include <osquery/hash.h>
 #include <osquery/core.h>

 #include "osquery/tables/system/darwin/keychain.h"
+#include "osquery/tables/system/hash.h"

 namespace osquery {
 namespace tables {
@@ -10,7 +10,6 @@

 #include <osquery/filesystem.h>
 #include <osquery/logger.h>
-#include <osquery/hash.h>
 #include <osquery/sql.h>
 #include <osquery/tables.h>
@@ -12,9 +12,13 @@
 #include <sstream>
 #include <vector>

+#include <boost/filesystem.hpp>
+
+#include <osquery/filesystem.h>
-#include <osquery/hash.h>
 #include <osquery/logger.h>
 #include <osquery/tables.h>

+#include "osquery/tables/system/hash.h"
+
 namespace osquery {
@@ -22,8 +26,8 @@ namespace osquery {
 #import <CommonCrypto/CommonDigest.h>
 #define __HASH_API(name) CC_##name
 #else
-#include <openssl/sha.h>
 #include <openssl/md5.h>
+#include <openssl/sha.h>
 #define __HASH_API(name) name

 #define SHA1_DIGEST_LENGTH SHA_DIGEST_LENGTH
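Reordering aside, the surrounding block is the token-pasting shim that lets one call site target either CommonCrypto (Darwin) or OpenSSL: __HASH_API(MD5_Init) expands to CC_MD5_Init on Darwin and to MD5_Init elsewhere. A small sketch of the non-Darwin branch in isolation (illustrative only; the real digest loop lives elsewhere in this file):

#include <cstddef>

#include <openssl/md5.h>

#define __HASH_API(name) name  // non-Darwin branch, as in the hunk above

void md5Digest(const unsigned char* buf, size_t len, unsigned char out[16]) {
  __HASH_API(MD5_CTX) ctx;
  __HASH_API(MD5_Init)(&ctx);
  __HASH_API(MD5_Update)(&ctx, buf, len);
  __HASH_API(MD5_Final)(out, &ctx);
}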
@@ -139,4 +143,103 @@ std::string hashFromFile(HashType hash_type, const std::string& path) {
   return hashes.sha256;
 }
+
+namespace tables {
+
+void genHashForFile(const std::string& path,
+                    const std::string& dir,
+                    QueryContext& context,
+                    QueryData& results) {
+  // Must provide the path, filename, directory separate from boost path->string
+  // helpers to match any explicit (query-parsed) predicate constraints.
+  Row r;
+  if (context.isCached(path)) {
+    r = context.getCache(path);
+  } else {
+    auto hashes = hashMultiFromFile(
+        HASH_TYPE_MD5 | HASH_TYPE_SHA1 | HASH_TYPE_SHA256, path);
+
+    r["path"] = path;
+    r["directory"] = dir;
+    r["md5"] = std::move(hashes.md5);
+    r["sha1"] = std::move(hashes.sha1);
+    r["sha256"] = std::move(hashes.sha256);
+    context.setCache(path, r);
+  }
+  results.push_back(r);
+}
+
+QueryData genHash(QueryContext& context) {
+  QueryData results;
+  boost::system::error_code ec;
+
+  // The query must provide a predicate with constraints including path or
+  // directory. We search for the parsed predicate constraints with the equals
+  // operator.
+  auto paths = context.constraints["path"].getAll(EQUALS);
+  context.expandConstraints(
+      "path",
+      LIKE,
+      paths,
+      ([&](const std::string& pattern, std::set<std::string>& out) {
+        std::vector<std::string> patterns;
+        auto status =
+            resolveFilePattern(pattern, patterns, GLOB_ALL | GLOB_NO_CANON);
+        if (status.ok()) {
+          for (const auto& resolved : patterns) {
+            out.insert(resolved);
+          }
+        }
+        return status;
+      }));
+
+  // Iterate through the file paths, adding the hash results
+  for (const auto& path_string : paths) {
+    boost::filesystem::path path = path_string;
+    if (!boost::filesystem::is_regular_file(path, ec)) {
+      continue;
+    }
+
+    genHashForFile(path_string, path.parent_path().string(), context, results);
+  }
+
+  // Now loop through constraints using the directory column constraint.
+  auto directories = context.constraints["directory"].getAll(EQUALS);
+  context.expandConstraints(
+      "directory",
+      LIKE,
+      directories,
+      ([&](const std::string& pattern, std::set<std::string>& out) {
+        std::vector<std::string> patterns;
+        auto status =
+            resolveFilePattern(pattern, patterns, GLOB_FOLDERS | GLOB_NO_CANON);
+        if (status.ok()) {
+          for (const auto& resolved : patterns) {
+            out.insert(resolved);
+          }
+        }
+        return status;
+      }));
+
+  // Iterate over the directory paths
+  for (const auto& directory_string : directories) {
+    boost::filesystem::path directory = directory_string;
+    if (!boost::filesystem::is_directory(directory, ec)) {
+      continue;
+    }
+
+    // Iterate over the directory files and generate a hash for each regular
+    // file.
+    boost::filesystem::directory_iterator begin(directory), end;
+    for (; begin != end; ++begin) {
+      if (boost::filesystem::is_regular_file(begin->path(), ec)) {
+        genHashForFile(
+            begin->path().string(), directory_string, context, results);
+      }
+    }
+  }
+
+  return results;
+}
+}
 }
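Two behaviors worth calling out in the table code above: genHashForFile caches each computed row in the QueryContext, so repeated lookups of the same path within a query reuse the row, and hashMultiFromFile produces the MD5, SHA1, and SHA256 digests together (per its "simultaneously" doc comment). A small usage sketch of the multi-hash call, using only names visible in this diff and the header path this commit introduces:

#include <cstdio>
#include <string>

#include "osquery/tables/system/hash.h"

void printAllHashes(const std::string& path) {
  auto hashes = osquery::hashMultiFromFile(
      osquery::HASH_TYPE_MD5 | osquery::HASH_TYPE_SHA1 | osquery::HASH_TYPE_SHA256,
      path);
  // Each member of MultiHashes is a hex digest string.
  std::printf("md5=%s sha1=%s sha256=%s\n",
              hashes.md5.c_str(),
              hashes.sha1.c_str(),
              hashes.sha256.c_str());
}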
@@ -10,10 +10,10 @@

 #pragma once

-#include <boost/noncopyable.hpp>
-
 #include <string>

+#include <boost/noncopyable.hpp>
+
 namespace osquery {

 /**
@@ -101,26 +101,32 @@ class Hash : private boost::noncopyable {
   size_t length_;
 };

-/**
- * @brief Compute a hash digest from an already allocated buffer.
- *
- * @param hash_type The osquery-supported hash algorithm.
- * @param buffer A caller-controlled buffer.
- * @param size The length of buffer in bytes.
- * @return A string (hex) representation of the hash digest.
- */
-std::string hashFromBuffer(HashType hash_type, const void* buffer, size_t size);
-
 /**
  * @brief Compute a hash digest from the file content at a path.
  *
- *
  * @param hash_type The osquery-supported hash algorithm.
- * @param path Filesystem path, the hash target.
+ * @param path Filesystem path (the hash target).
  * @return A string (hex) representation of the hash digest.
  */
 std::string hashFromFile(HashType hash_type, const std::string& path);

-/// Get multiple hashes from a file simultaneously.
+/**
+ * @brief Compute multiple hashes from a files contents simultaneously.
+ *
+ * @param mask Bitmask specifying target osquery-supported algorithms.
+ * @param path Filesystem path (the hash target).
+ * @return A struct containing string (hex) representations
+ * of the hash digests.
+ */
 MultiHashes hashMultiFromFile(int mask, const std::string& path);

+/**
+ * @brief Compute a hash digest from the contents of a buffer.
+ *
+ * @param hash_type The osquery-supported hash algorithm.
+ * @param buffer A caller-controlled buffer (already allocated).
+ * @param size The length of buffer in bytes.
+ * @return A string (hex) representation of the hash digest.
+ */
+std::string hashFromBuffer(HashType hash_type, const void* buffer, size_t size);
 }
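With the declarations regrouped (file digest, multi-digest, then buffer digest), here is a short usage sketch of the two single-algorithm entry points. The include path assumes the tables/system location used elsewhere in this diff, and the file path is only an example:

#include <string>

#include "osquery/tables/system/hash.h"

std::string hashExamples() {
  const unsigned char buf[1] = {'0'};
  // Digest of an in-memory buffer.
  auto from_buffer =
      osquery::hashFromBuffer(osquery::HASH_TYPE_SHA256, buf, sizeof(buf));
  // Digest of a file's contents.
  auto from_file = osquery::hashFromFile(osquery::HASH_TYPE_SHA1, "/etc/hosts");
  return from_buffer + " " + from_file;
}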
@@ -12,9 +12,10 @@

 #include <osquery/core.h>
 #include <osquery/filesystem.h>
-#include <osquery/hash.h>
 #include <osquery/tables.h>

+#include "osquery/tables/system/hash.h"
+
 namespace fs = boost::filesystem;

 namespace osquery {
|
||||
r["size"] = INTEGER(-1);
|
||||
} else {
|
||||
r["size"] = INTEGER(table_content.size());
|
||||
r["md5"] = osquery::hashFromBuffer(
|
||||
r["md5"] = hashFromBuffer(
|
||||
HASH_TYPE_MD5, table_content.c_str(), table_content.length());
|
||||
}
|
||||
|
||||
|
@@ -8,9 +8,8 @@
  *
  */

-#include <osquery/hash.h>
-
 #include "osquery/tables/system/smbios_utils.h"
+#include "osquery/tables/system/hash.h"

 namespace osquery {
 namespace tables {
@@ -1,118 +0,0 @@
-/*
- *  Copyright (c) 2014-present, Facebook, Inc.
- *  All rights reserved.
- *
- *  This source code is licensed under the BSD-style license found in the
- *  LICENSE file in the root directory of this source tree. An additional grant
- *  of patent rights can be found in the PATENTS file in the same directory.
- *
- */
-
-#include <boost/filesystem.hpp>
-
-#include <osquery/filesystem.h>
-#include <osquery/hash.h>
-#include <osquery/tables.h>
-
-namespace fs = boost::filesystem;
-
-namespace osquery {
-namespace tables {
-
-void genHashForFile(const std::string& path,
-                    const std::string& dir,
-                    QueryContext& context,
-                    QueryData& results) {
-  // Must provide the path, filename, directory separate from boost path->string
-  // helpers to match any explicit (query-parsed) predicate constraints.
-  Row r;
-  if (context.isCached(path)) {
-    r = context.getCache(path);
-  } else {
-    auto hashes = hashMultiFromFile(
-        HASH_TYPE_MD5 | HASH_TYPE_SHA1 | HASH_TYPE_SHA256, path);
-
-    r["path"] = path;
-    r["directory"] = dir;
-    r["md5"] = std::move(hashes.md5);
-    r["sha1"] = std::move(hashes.sha1);
-    r["sha256"] = std::move(hashes.sha256);
-    context.setCache(path, r);
-  }
-  results.push_back(r);
-}
-
-QueryData genHash(QueryContext& context) {
-  QueryData results;
-  boost::system::error_code ec;
-
-  // The query must provide a predicate with constraints including path or
-  // directory. We search for the parsed predicate constraints with the equals
-  // operator.
-  auto paths = context.constraints["path"].getAll(EQUALS);
-  context.expandConstraints(
-      "path",
-      LIKE,
-      paths,
-      ([&](const std::string& pattern, std::set<std::string>& out) {
-        std::vector<std::string> patterns;
-        auto status =
-            resolveFilePattern(pattern, patterns, GLOB_ALL | GLOB_NO_CANON);
-        if (status.ok()) {
-          for (const auto& resolved : patterns) {
-            out.insert(resolved);
-          }
-        }
-        return status;
-      }));
-
-  // Iterate through the file paths, adding the hash results
-  for (const auto& path_string : paths) {
-    boost::filesystem::path path = path_string;
-    if (!boost::filesystem::is_regular_file(path, ec)) {
-      continue;
-    }
-
-    genHashForFile(path_string, path.parent_path().string(), context, results);
-  }
-
-  // Now loop through constraints using the directory column constraint.
-  auto directories = context.constraints["directory"].getAll(EQUALS);
-  context.expandConstraints(
-      "directory",
-      LIKE,
-      directories,
-      ([&](const std::string& pattern, std::set<std::string>& out) {
-        std::vector<std::string> patterns;
-        auto status =
-            resolveFilePattern(pattern, patterns, GLOB_FOLDERS | GLOB_NO_CANON);
-        if (status.ok()) {
-          for (const auto& resolved : patterns) {
-            out.insert(resolved);
-          }
-        }
-        return status;
-      }));
-
-  // Iterate over the directory paths
-  for (const auto& directory_string : directories) {
-    boost::filesystem::path directory = directory_string;
-    if (!boost::filesystem::is_directory(directory, ec)) {
-      continue;
-    }
-
-    // Iterate over the directory files and generate a hash for each regular
-    // file.
-    boost::filesystem::directory_iterator begin(directory), end;
-    for (; begin != end; ++begin) {
-      if (boost::filesystem::is_regular_file(begin->path(), ec)) {
-        genHashForFile(
-            begin->path().string(), directory_string, context, results);
-      }
-    }
-  }
-
-  return results;
-}
-}
-}
@@ -213,7 +213,7 @@ QueryData genOsqueryInfo(QueryContext& context) {
   r["version"] = kVersion;

   std::string hash_string;
-  auto s = Config::getInstance().getMD5(hash_string);
+  auto s = Config::getInstance().genHash(hash_string);
   r["config_hash"] = (s.ok()) ? hash_string : "";
   r["config_valid"] = Config::getInstance().isValid() ? INTEGER(1) : INTEGER(0);
   r["extensions"] =
@@ -7,8 +7,7 @@ schema([
     Column("sha1", TEXT, "SHA1 hash of provided filesystem data"),
     Column("sha256", TEXT, "SHA256 hash of provided filesystem data"),
 ])
-attributes(utility=True)
-implementation("utility/hash@genHash")
+implementation("hash@genHash")
 examples([
   "select * from hash where path = '/etc/passwd'",
   "select * from hash where directory = '/etc/'",