Pack and testing fixups

This commit is contained in:
Teddy Reed 2015-05-27 16:50:57 -07:00
parent ff9243bce1
commit 4064fa6eb5
19 changed files with 490 additions and 266 deletions

View File

@ -8,17 +8,19 @@ macro(LOG MESSAGE)
message("-- ${MESSAGE}")
endmacro(LOG)
if(NOT DEFINED ENV{CC})
set(CMAKE_C_COMPILER "clang")
else()
# If no explicit compiler override and clang++ exists, prefer clang.
find_file(CLANGXX_EXISTS "clang++")
if(DEFINED ENV{CC})
set(CMAKE_C_COMPILER "$ENV{CC}")
LOG("Overriding C compiler from clang to $ENV{CC}")
elseif(CLANGXX_EXISTS)
set(CMAKE_C_COMPILER "clang")
endif()
if(NOT DEFINED ENV{CXX})
set(CMAKE_CXX_COMPILER "clang++")
else()
if(DEFINED ENV{CXX})
set(CMAKE_CXX_COMPILER "$ENV{CXX}")
LOG("Overriding CXX compiler from clang++ to $ENV{CXX}")
elseif(CLANGXX_EXISTS)
set(CMAKE_CXX_COMPILER "clang++")
endif()
add_compile_options(
@ -108,9 +110,11 @@ endif()
# Provisioning installs non-default C++11 runtimes on build hosts.
# List those distros that include a non-system-default runtime.
set(OSQUERY_REQUIRE_RUNTIMES
"lucid"
"precise"
"centos6"
"rhel6"
"oracle6"
)
# make debug (environment variable from Makefile)

35
Vagrantfile vendored
View File

@ -40,7 +40,34 @@ targets = {
"us-west-2" => "ami-7df0bd4d"
},
"username" => "ec2-user"
}
},
"aws-ubuntu10" => {
"box" => "andytson/aws-dummy",
"regions" => {
"us-east-1" => "ami-1e6f6176",
"us-west-1" => "ami-250fe361",
"us-west-2" => "ami-1b2a1c2b"
},
"username" => "ubuntu"
},
"aws-oracle6.6" => {
"box" => "andytson/aws-dummy",
"regions" => {
"us-east-1" => "ami-20e4b748",
"us-west-1" => "ami-f3d83db7",
"us-west-2" => "ami-b34f6e83"
},
"username" => "ec2-user"
},
"aws-oracle5.11" => {
"box" => "andytson/aws-dummy",
"regions" => {
"us-east-1" => "ami-0ecd7766",
"us-west-1" => "ami-4b00150e",
"us-west-2" => "ami-6b57185b"
},
"username" => "root"
},
}
Vagrant.configure("2") do |config|
@ -105,7 +132,11 @@ Vagrant.configure("2") do |config|
aws.tags = { 'Name' => 'osquery-vagrant-' + name }
end
build.vm.synced_folder ".", "/vagrant", type: "rsync",
rsync__exclude: ["build"]
rsync__exclude: [
"build",
".git/objects",
".git/modules/third-party/objects"
]
end
if name == 'freebsd10'
# configure the NICs

View File

@ -31,6 +31,19 @@ namespace osquery {
/// The builder or invoker may change the default config plugin.
DECLARE_string(config_plugin);
/**
* @brief The osquery config is updated with named sources containing JSON.
*
* A ConfigSourceMap is a named mapping from source (the key) to a JSON blob.
* This map is generated by a ConfigPlugin and provided to the Config via an
* update call. ConfigPlugin%s may update the Config asynchronously.
*
* The osquery Config instance will perform source merging by amalgamating
* the JSON literal types (lists and maps) for well known top-level keys.
* The merging will happen in lexicographical order based on source name.
*/
typedef std::map<std::string, std::string> ConfigSourceMap;
/**
* @brief A native representation of osquery configuration data.
*
@ -90,7 +103,7 @@ class Config : private boost::noncopyable {
* @param config A map of domain or namespace to config data.
* @return If the config changes were applied.
*/
static Status update(const std::map<std::string, std::string>& config);
static Status update(const ConfigSourceMap& config);
/**
* @brief Calculate the hash of the osquery config
@ -290,18 +303,19 @@ class ConfigPlugin : public Plugin {
* ConfigPlugin::genConfig should be implemented by subclasses of
* ConfigPlugin which needs to retrieve config data in a custom way.
*
* @return a pair such that pair.first is an osquery::Status instance which
* indicates the success or failure of config retrieval. If pair.first
* indicates that config retrieval was successful, then the config data
* should be returned in pair.second.
* @param config The output ConfigSourceMap, a map of source names to JSON.
* @return A failure status will prevent the source map from merging.
*/
virtual Status genConfig(std::map<std::string, std::string>& config) = 0;
virtual Status genConfig(ConfigSourceMap& config) = 0;
Status call(const PluginRequest& request, PluginResponse& response);
};
/// Helper for a merged and parsed property tree.
typedef pt::ptree ConfigTree;
/// Helper for a map of requested keys to their merged and parsed property tree.
typedef std::map<std::string, ConfigTree> ConfigTreeMap;
/**
* @brief A pluggable configuration parser.
*
@ -349,7 +363,7 @@ class ConfigParserPlugin : public Plugin {
* @param config A JSON-parsed property tree map.
* @return Failure if the parser should no longer receive updates.
*/
virtual Status update(const std::map<std::string, ConfigTree>& config) = 0;
virtual Status update(const ConfigTreeMap& config) = 0;
protected:
/// Allow the config parser to keep some global state.

View File

@ -47,10 +47,12 @@ namespace osquery {
*/
extern const std::string kVersion;
extern const std::string kSDKVersion;
extern const std::string kSDKPlatform;
/// Use a macro for the version literal, set the kVersion symbol in the library.
#define OSQUERY_VERSION STR(OSQUERY_BUILD_VERSION)
#define OSQUERY_SDK_VERSION STR(OSQUERY_BUILD_SDK_VERSION)
#define OSQUERY_PLATFORM STR(OSQUERY_BUILD_PLATFORM)
/**
* @brief A helpful tool type to report when logging, print help, or debugging.

View File

@ -22,20 +22,6 @@ namespace pt = boost::property_tree;
namespace osquery {
typedef std::map<std::string, pt::ptree> query_pack_t;
inline pt::ptree queryPackSingleEntry(const pt::ptree& in) {
// Prepare result to be returned
pt::ptree out;
out.put("query", in.get("query", ""));
out.put("interval", in.get("interval", 0));
out.put("platform", in.get("platform", ""));
out.put("version", in.get("version", ""));
out.put("description", in.get("description", " "));
out.put("value", in.get("value", ""));
return out;
}
// Function to check if the pack is valid for this version of osquery.
// If the osquery version is greater or equal than the pack, it is good to go.
bool versionChecker(const std::string& pack, const std::string& version) {
@ -61,95 +47,88 @@ bool versionChecker(const std::string& pack, const std::string& version) {
return true;
}
query_pack_t queryPackParsePacks(const pt::ptree& raw_packs,
bool check_platform,
bool check_version) {
query_pack_t result;
// Iterate through all the pack elements
for (auto const& one_pack : raw_packs) {
// Grab query name and fields
std::string pack_query_name = one_pack.first.data();
// Get all the query fields
auto pack_query_element = raw_packs.get_child(pack_query_name);
auto single_pk = queryPackSingleEntry(pack_query_element);
// Check if pack is valid for this system
auto pk_platform = single_pk.get("platform", "");
if (check_platform) {
if (pk_platform.find(STR(OSQUERY_BUILD_PLATFORM)) == std::string::npos) {
continue;
}
}
// Check if current osquery version is equal or higher than needed
auto pk_version = single_pk.get("version", "");
if (check_version) {
if (!versionChecker(pk_version, STR(OSQUERY_VERSION))) {
continue;
}
}
result[pack_query_name] = single_pk;
}
return result;
// Perform a substring search for the actual platform within the required.
bool platformChecker(const std::string& required, const std::string& platform) {
// Match if platform is 'ubuntu12' and required is 'ubuntu'.
// Do not match if platform is 'ubuntu12' and required is 'ubuntu14'.
return (platform.find(required) == std::string::npos);
}
Status QueryPackConfigParserPlugin::update(
const std::map<std::string, ConfigTree>& config) {
Status status;
Status parsePack(const std::string& name, const pt::ptree& data) {
if (data.count("queries") == 0) {
return Status(0, "Pack contains no queries");
}
const auto& pack_config = config.at("packs");
// Check the pack-global minimum SDK version and platform.
auto version = data.get("version", "");
if (version.size() > 0 && !versionChecker(version, kSDKVersion)) {
return Status(0, "Minimum SDK version not met");
}
data_.add_child("packs", pack_config);
auto platform = data.get("platform", "");
if (platform.size() > 7 && !platformChecker(platform, kSDKPlatform)) {
return Status(0, "Platform version mismatch");
}
// Iterate through all the packs to get the configuration
for (auto const& pack_element : pack_config) {
auto pack_name = std::string(pack_element.first.data());
auto pack_path = std::string(pack_element.second.data());
// For each query in the pack's queries, check their version/platform.
for (const auto& query : data.get_child("queries")) {
auto query_string = query.second.get("query", "");
if (Config::checkScheduledQuery(query_string)) {
VLOG(1) << "Query pack " << name
<< " contains a duplicated query name: " << query.first;
continue;
}
// Check the specific query's required version.
version = query.second.get("version", "");
if (version.size() > 0 && !versionChecker(version, kSDKVersion)) {
continue;
}
// Check the specific query's required platform.
platform = query.second.get("platform", "");
if (platform.size() > 0 && !platformChecker(platform, kSDKPlatform)) {
continue;
}
// Hope there is a supplied/non-0 query interval to apply this query pack
// query to the osquery schedule.
auto query_interval = query.second.get("interval", 0);
if (query_interval > 0) {
auto query_name = "pack_" + name + "_" + query.first;
Config::addScheduledQuery(query_name, query_string, query_interval);
}
}
return Status(0, "OK");
}
Status QueryPackConfigParserPlugin::update(const ConfigTreeMap& config) {
// Iterate through all the packs to get the configuration.
for (auto const& pack : config.at("packs")) {
auto pack_name = std::string(pack.first.data());
auto pack_path = std::string(pack.second.data());
// Read each pack configuration in JSON
pt::ptree pack_tree;
status = osquery::parseJSON(pack_path, pack_tree);
pt::ptree pack_data;
auto status = osquery::parseJSON(pack_path, pack_data);
if (!status.ok()) {
LOG(WARNING) << "Error parsing Query Pack " << pack_name << ": "
<< status.getMessage();
continue;
}
// Get all the parsed elements from the pack JSON file
if (pack_tree.count(pack_name) == 0) {
continue;
// Parse the pack, meaning compare version/platform requirements and
// check the sanity of each query in the pack's queries.
status = parsePack(pack_name, pack_data);
if (!status.ok()) {
return status;
}
// Get all the valid packs and return them in a map
auto pack_file_element = pack_tree.get_child(pack_name);
auto clean_packs = queryPackParsePacks(pack_file_element, true, true);
// Iterate through the already parsed and valid packs
for (const auto& pack : clean_packs) {
// Preparing new queries to add to schedule
std::string new_query = pack.second.get("query", "");
int new_interval = pack.second.get("interval", 0);
// Adding extracted pack to the schedule, if values valid
if (!new_query.empty() && new_interval > 0) {
bool exists_in_schedule = Config::checkScheduledQuery(new_query);
// If query is already in schedule, do not add it again
if (exists_in_schedule) {
LOG(WARNING) << "Query already exist in the schedule: " << new_query;
} else {
// Adding a prefix to the pack queries, to be easily found in the
// scheduled queries
std::string pk_name = "pack_" + pack_name + "_" + pack.first;
Config::addScheduledQuery(pk_name, new_query, new_interval);
}
}
}
// Save the queries list for table-based introspection.
data_.put_child(pack_name, pack_data);
// Record the pack path.
data_.put(pack_name + ".path", pack_path);
}
return Status(0, "OK");

View File

@ -26,10 +26,6 @@ class QueryPackConfigParserPlugin : public ConfigParserPlugin {
private:
/// Store the signatures and file_paths and compile the rules.
Status update(const std::map<std::string, ConfigTree>& config);
Status update(const ConfigTreeMap& config);
};
std::map<std::string, pt::ptree> queryPackParsePacks(const pt::ptree& raw_packs,
bool check_platform,
bool check_version);
}

View File

@ -22,15 +22,11 @@ namespace osquery {
// Test the pack version checker.
bool versionChecker(const std::string& pack, const std::string& version);
std::map<std::string, pt::ptree> getQueryPacksContent() {
pt::ptree getQueryPacksContent() {
pt::ptree pack_tree;
std::string pack_path = kTestDataPath + "test_pack.conf";
Status status = osquery::parseJSON(pack_path, pack_tree);
pt::ptree pack_file_element = pack_tree.get_child("test_pack_test");
std::map<std::string, pt::ptree> result;
result = queryPackParsePacks(pack_file_element, false, false);
return result;
auto pack_path = kTestDataPath + "test_pack.conf";
auto status = osquery::parseJSON(pack_path, pack_tree);
return pack_tree.get_child("queries");
}
std::map<std::string, pt::ptree> getQueryPacksExpectedResults() {
@ -70,7 +66,7 @@ TEST_F(QueryPacksConfigTests, version_comparisons) {
TEST_F(QueryPacksConfigTests, test_query_packs_configuration) {
auto data = getQueryPacksContent();
auto expected = getQueryPacksExpectedResults();
auto& real_ld = data["launchd"];
auto& real_ld = data.get_child("launchd");
auto& expect_ld = expected["launchd"];
EXPECT_EQ(expect_ld.get("query", ""), real_ld.get("query", ""));

View File

@ -16,4 +16,5 @@ namespace osquery {
const std::string kVersion = OSQUERY_VERSION;
const std::string kSDKVersion = OSQUERY_SDK_VERSION;
const std::string kSDKPlatform = OSQUERY_PLATFORM;
}

View File

@ -21,71 +21,66 @@ namespace tables {
typedef pt::ptree::value_type tree_node;
void genQueryPack(const tree_node& pack_element, QueryData& results) {
// Find all the packs from loaded configuration
for (auto const& conf_element : pack_element.second) {
auto pack_name = std::string(conf_element.first.data());
auto pack_path = std::string(conf_element.second.data());
void genQueryPack(const tree_node& pack, QueryData& results) {
Row r;
// Packs are stored by name and contain configuration data.
r["name"] = pack.first;
r["path"] = pack.second.get("path", "");
// Read each pack configuration in JSON
pt::ptree pack_tree;
Status status = osquery::parseJSON(pack_path, pack_tree);
// There are optional restrictions on the set of queries applied pack-wide.
auto pack_wide_version = pack.second.get("version", "");
auto pack_wide_platform = pack.second.get("platform", "");
// Get all the parsed elements from the pack JSON file
if (pack_tree.count(pack_name) == 0) {
continue;
// Iterate through each query in the pack.
for (auto const& query : pack.second.get_child("queries")) {
r["query_name"] = query.first;
r["query"] = query.second.get("query", "");
r["interval"] = INTEGER(query.second.get("interval", 0));
r["description"] = query.second.get("description", "");
r["value"] = query.second.get("value", "");
// Set the version requirement based on the query-specific or pack-wide.
if (query.second.count("version") > 0) {
r["version"] = query.second.get("version", "");
} else {
r["version"] = pack_wide_platform;
}
// Get all the valid packs and return them in a map
auto pack_file_element = pack_tree.get_child(pack_name);
auto clean_packs = queryPackParsePacks(pack_file_element, false, false);
// Iterate through the already parsed and valid packs
for (const auto& pack : clean_packs) {
Row r;
// Query data to return as Row
r["name"] = pack_name;
r["path"] = pack_path;
r["query_name"] = pack.first;
r["query"] = pack.second.get("query", "");
r["interval"] = INTEGER(pack.second.get("interval", 0));
r["platform"] = pack.second.get("platform", "");
r["version"] = pack.second.get("version", "");
r["description"] = pack.second.get("description", "");
r["value"] = pack.second.get("value", "");
// Adding a prefix to the pack queries, to be easily found in the
// scheduled queries
r["scheduled_name"] = "pack_" + pack_name + "_" + pack.first;
int scheduled =
Config::checkScheduledQueryName(r.at("scheduled_name")) ? 1 : 0;
r["scheduled"] = INTEGER(scheduled);
results.push_back(r);
// Set the platform requirement based on the query-specific or pack-wide.
if (query.second.count("platform") > 0) {
r["platform"] = query.second.get("platform", "");
} else {
r["platform"] = pack_wide_platform;
}
// Adding a prefix to the pack queries to differentiate packs from schedule.
r["scheduled_name"] = "pack_" + r.at("name") + "_" + r.at("query_name");
if (Config::checkScheduledQueryName(r.at("scheduled_name"))) {
r["scheduled"] = INTEGER(1);
} else {
r["scheduled"] = INTEGER(0);
}
results.push_back(r);
}
}
QueryData genOsqueryPacks(QueryContext& context) {
QueryData results;
// Get a lock on the config instance
// Get a lock on the config instance.
ConfigDataInstance config;
// Get the loaded data tree from global JSON configuration
// Get the loaded data tree from global JSON configuration.
const auto& packs_parsed_data = config.getParsedData("packs");
if (packs_parsed_data.count("packs") == 0) {
return results;
}
// Iterate through all the packs to get the configuration
for (auto const& pack_element : packs_parsed_data) {
// Make sure the element has items
if (pack_element.second.size() == 0) {
// Iterate through all the packs to get each configuration and set of queries.
for (auto const& pack : packs_parsed_data) {
// Make sure the pack data contains queries.
if (pack.second.count("queries") == 0) {
continue;
}
genQueryPack(pack_element, results);
genQueryPack(pack, results);
}
return results;

View File

@ -9,7 +9,7 @@
function install_gcc() {
TARBALL=gcc-4.8.4.tar.gz
URL=http://www.netgull.com/gcc/releases/gcc-4.8.4/gcc-4.8.4.tar.gz
URL=https://s3.amazonaws.com/osquery-packages/deps/gcc-4.8.4.tar.gz
SOURCE=gcc-4.8.4
TARGET=/opt/osquery/gcc
@ -18,8 +18,8 @@ function install_gcc() {
TARGET_SOURCE=$SOURCE
# GCC-dependency: GMP
TARBALL=gmp-6.0.0a.tar.bz2
URL=https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2
TARBALL=gmp-6.0.0a.tar.gz
URL=https://s3.amazonaws.com/osquery-packages/deps/gmp-6.0.0a.tar.gz
SOURCE=gmp-6.0.0
if provision gmp $WORKING_DIR/$TARGET_SOURCE/gmp/README; then
log "Moving gmp sources into $TARGET_SOURCE"
@ -28,7 +28,7 @@ function install_gcc() {
# GCC-dependency: MPFR
TARBALL=mpfr-3.1.2.tar.gz
URL=http://www.mpfr.org/mpfr-current/mpfr-3.1.2.tar.gz
URL=https://s3.amazonaws.com/osquery-packages/deps/mpfr-3.1.2.tar.gz
SOURCE=mpfr-3.1.2
if provision mpfr $WORKING_DIR/$TARGET_SOURCE/mpfr/README; then
log "Moving mpfr sources into $TARGET_SOURCE"
@ -37,7 +37,7 @@ function install_gcc() {
# GCC-dependency: MPC
TARBALL=mpc-1.0.3.tar.gz
URL=http://www.multiprecision.org/mpc/download/mpc-1.0.3.tar.gz
URL=https://s3.amazonaws.com/osquery-packages/deps/mpc-1.0.3.tar.gz
SOURCE=mpc-1.0.3
if provision mpc $WORKING_DIR/$TARGET_SOURCE/mpc/README; then
log "Moving mpc sources into $TARGET_SOURCE"
@ -155,7 +155,7 @@ function install_snappy() {
function install_yara() {
TARBALL=yara-3.3.0.tar.gz
URL=https://s3.amazonaws.com/osquery-packages/deps/yara-3.3.0.tar.gz
URL=https://osquery-packages.s3.amazonaws.com/deps/yara-3.3.0.tar.gz
SOURCE=yara-3.3.0
if provision yara /usr/local/lib/libyara.a; then
@ -217,7 +217,7 @@ function install_iptables_dev() {
function install_libcryptsetup() {
TARBALL=cryptsetup-1.6.7.tar.gz
URL=https://s3.amazonaws.com/osquery-packages/deps/cryptsetup-1.6.7.tar.gz
URL=https://osquery-packages.s3.amazonaws.com/deps/cryptsetup-1.6.7.tar.gz
SOURCE=cryptsetup-1.6.7
if provision libcryptsetup /usr/local/lib/libcryptsetup.a; then
@ -286,7 +286,7 @@ function install_libtool() {
function install_pkgconfig() {
TARBALL=pkg-config-0.28.tar.gz
URL=http://pkgconfig.freedesktop.org/releases/pkg-config-0.28.tar.gz
URL=https://osquery-packages.s3.amazonaws.com/deps/pkg-config-0.28.tar.gz
SOURCE=pkg-config-0.28
if provision pkg-config /usr/bin/pkg-config; then
@ -314,7 +314,7 @@ function install_udev_devel_095() {
function install_pip() {
PYTHON_EXECUTABLE=$1
URL=https://bootstrap.pypa.io/get-pip.py
URL=https://osquery-packages.s3.amazonaws.com/deps/get-pip.py
if [[ ! -e /usr/bin/pip ]]; then
curl $URL | sudo $PYTHON_EXECUTABLE -
@ -323,7 +323,7 @@ function install_pip() {
function install_ruby() {
TARBALL=ruby-1.8.7-p370.tar.gz
URL=ftp://ftp.ruby-lang.org/pub/ruby/1.8/ruby-1.8.7-p370.tar.gz
URL=https://osquery-packages.s3.amazonaws.com/deps/ruby-1.8.7-p370.tar.gz
SOURCE=ruby-1.8.7-p370
if provision ruby-1.8.7 /usr/local/bin/ruby; then
@ -334,8 +334,8 @@ function install_ruby() {
popd
fi
TARBALL=rubygems-1.8.24.tgz
URL=http://production.cf.rubygems.org/rubygems/rubygems-1.8.24.tgz
TARBALL=rubygems-1.8.24.tar.gz
URL=https://osquery-packages.s3.amazonaws.com/deps/rubygems-1.8.24.tar.gz
SOURCE=rubygems-1.8.24
if provision rubygems-1.8.24 /usr/local/bin/gem; then
@ -345,13 +345,29 @@ function install_ruby() {
fi
}
function install_libaptpkg() {
TARBALL=apt-0.8.16-12.10.22.tar.gz
URL=https://s3.amazonaws.com/osquery-packages/deps/apt-0.8.16-12.10.22.tar.gz
SOURCE=apt-0.8.16-12.10.22
if provision libaptpkg /usr/local/lib/libapt-pkg.a; then
pushd $SOURCE
mkdir build
pushd build
../configure --prefix=/usr/local
make -j $THREADS
sudo make install
popd
popd
fi
}
function package() {
if [[ $FAMILY = "debian" ]]; then
if [[ -n "$(dpkg --get-selections | grep $1)" ]]; then
log "$1 is already installed. skipping."
else
log "installing $1"
sudo apt-get install $1 -y
sudo DEBIAN_FRONTEND=noninteractive apt-get install $1 -y
fi
elif [[ $FAMILY = "redhat" ]]; then
if [[ ! -n "$(rpm -V $1)" ]]; then

View File

@ -37,10 +37,41 @@ function main_oracle() {
if [[ $DISTRO = "oracle5" ]]; then
package gcc
install_gcc
elif [[ $DISTRO = "oracle6" ]]; then
# Install the CentOS6 Devtools-2 yum repository.
sudo cp $FILES_DIR/centos6.devtools-2.repo /etc/yum.repos.d/
package devtoolset-2-gcc
package devtoolset-2-binutils
package devtoolset-2-gcc-c++
if [[ ! -e /usr/bin/gcc ]]; then
sudo ln -s /opt/rh/devtoolset-2/root/usr/bin/gcc /usr/bin/gcc
fi
if [[ ! -e /usr/bin/g++ ]]; then
sudo ln -s /opt/rh/devtoolset-2/root/usr/bin/gcc /usr/bin/g++
fi
source /opt/rh/devtoolset-2/enable
if [[ ! -d /usr/lib/gcc ]]; then
sudo ln -s /opt/rh/devtoolset-2/root/usr/lib/gcc /usr/lib/
fi
else
package gcc
package binutils
package gcc-c++
fi
set_cc gcc
set_cxx g++
if [[ $DISTRO = "oracle5" ]]; then
set_cc gcc
set_cxx g++
else
package clang
package clang-devel
set_cc clang
set_cxx clang++
fi
install_cmake
install_boost
@ -48,6 +79,8 @@ function main_oracle() {
if [[ $DISTRO = "oracle5" ]]; then
package cryptsetup-luks-devel
install_udev_devel_095
elif [[ $DISTRO = "oracle6" ]]; then
package libudev-devel
fi
install_gflags
@ -58,10 +91,14 @@ function main_oracle() {
package flex
package bison
if [[ $DISTRO = "oracle5" ]]; then
if [[ $DISTRO = "oracle5" || $DISTRO = "oracle6" ]]; then
install_autoconf
install_automake
install_libtool
else
package autoconf
package automake
package libtool
fi
install_snappy
@ -77,6 +114,10 @@ function main_oracle() {
# Install ruby 1.8.7/gems.
install_ruby
else
package python-pip
package ruby-devel
package rubygems
fi
gem_install fpm

View File

@ -7,25 +7,39 @@
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
function add_repo() {
REPO=$1
echo "Adding repository: $REPO"
sudo add-apt-repository -y $REPO
}
function main_ubuntu() {
if [[ $DISTRO = "precise" ]]; then
sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
add_repo ppa:ubuntu-toolchain-r/test
elif [[ $DISTRO = "lucid" ]]; then
add_repo ppa:lucid-bleed/ppa
fi
sudo apt-get update -y
if [[ $DISTRO = "lucid" ]]; then
package git-core
else
package git
package autopoint
fi
package wget
package git
package unzip
package build-essential
package autopoint
package bison
package flex
package devscripts
package debhelper
package python-pip
package python-dev
package linux-headers-generic
# package linux-headers-generic
package libcurl3-dev
package ruby-dev
package gcc
package doxygen
@ -35,15 +49,18 @@ function main_ubuntu() {
package uuid-dev
package libpopt-dev
package libdpkg-dev
package libapt-pkg-dev
package libudev-dev
package libblkid-dev
package libsnappy-dev
package libbz2-dev
package libreadline-dev
if [[ $DISTRO = "precise" ]]; then
if [[ $DISTRO = "lucid" ]]; then
package libopenssl-ruby
package clang
install_gcc
elif [[ $DISTRO = "precise" ]]; then
# Need gcc 4.8 from ubuntu-toolchain-r/test to compile RocksDB/osquery.
package gcc-4.8
package g++-4.8
@ -53,6 +70,9 @@ function main_ubuntu() {
package clang-3.4
package clang-format-3.4
fi
if [[ $DISTRO = "precise" || $DISTRO = "lucid" ]]; then
package rubygems
# Temporary removes (so we can override default paths).
@ -89,8 +109,16 @@ function main_ubuntu() {
install_gflags
install_iptables_dev
set_cc clang
set_cxx clang++
if [[ $DISTRO = "lucid" ]]; then
install_snappy
install_libaptpkg
else
# No clang++ on lucid
set_cc clang
set_cxx clang++
package libsnappy-dev
package libapt-pkg-dev
fi
install_thrift
install_rocksdb

View File

@ -3,3 +3,4 @@ ADD_OSQUERY_PYTHON_TEST(test_osqueryi test_osqueryi.py)
ADD_OSQUERY_PYTHON_TEST(test_osqueryd test_osqueryd.py)
ADD_OSQUERY_PYTHON_TEST(test_modules test_modules.py)
ADD_OSQUERY_PYTHON_TEST(test_extensions test_extensions.py)
ADD_OSQUERY_PYTHON_TEST(test_additional test_additional.py)

View File

@ -17,7 +17,7 @@
"additional_monitoring" : {
"file_paths": {
"downloads": [
"/tmp/osquery-fstests-pattern/%%"
"/tmp/osquery-tests/fstests-pattern/%%"
]
}
},
@ -25,11 +25,17 @@
// New, recommended file monitoring (top-level)
"file_paths": {
"downloads2": [
"/tmp/osquery-fstests-pattern/%%"
"/tmp/osquery-tests/fstests-pattern/%%"
],
"system_binaries": [
"/tmp/osquery-fstests-pattern/%",
"/tmp/osquery-fstests-pattern/deep11/%"
"/tmp/osquery-tests/fstests-pattern/%",
"/tmp/osquery-tests/fstests-pattern/deep11/%"
]
},
// Add files containing packs of queries.
// The queries may have platform and version requirements.
"packs": {
"test_pack": "/tmp/osquery-tests/test_pack.conf"
}
}

74
tools/tests/test_additional.py Executable file
View File

@ -0,0 +1,74 @@
#!/usr/bin/env python
# Copyright (c) 2014, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import shutil
import time
import unittest
# osquery-specific testing utils
import test_base
import utils
class AdditionalFeatureTests(test_base.ProcessGenerator, unittest.TestCase):
def test_query_packs(self):
query_pack_path = test_base.CONFIG_DIR + "/test_pack.conf"
utils.write_config({
"queries": {
"simple_test": {
"query": "select * from time",
"interval": 60,
},
"simple_test2": {
"query": "select * from time",
"interval": 60,
"platform": "does_not_exist",
}
}
}, path=query_pack_path)
# Get a daemon process, loaded with the default test configuration.
# We'll add a config override (overwrite) for the "packs" key.
# This will point a single pack at the config written above.
daemon = self._run_daemon(overwrite={
"packs": {
"test_pack": query_pack_path
}
})
self.assertTrue(daemon.isAlive())
# Introspect into the daemon's query packs.
client = test_base.EXClient(daemon.options["extensions_socket"])
test_base.expectTrue(client.open)
self.assertTrue(client.open())
em = client.getEM()
# Every query from the pack(s) is added to the packs table.
result = em.query("select * from osquery_packs")
self.assertEqual(len(result.response), 2)
# Only the applicable queries are added to the schedule.
# There will be len(pack_queries) - 1 since "simple_test2" is bound
# to an unknown/non-existing platform.
result = em.query("select * from osquery_schedule")
self.assertEqual(len(result.response), 1)
daemon.kill()
if __name__ == '__main__':
module = test_base.Tester()
# Find and import the thrift-generated python interface
test_base.loadThriftFromBuild(test_base.ARGS.build)
module.run()

View File

@ -42,7 +42,17 @@ except ImportError:
print ("Cannot import argparse: pip install argparse?")
exit(1)
try:
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
except ImportError:
print ("Cannot import thrift: pip install thrift?")
exit(1)
'''Defaults that should be used in integration tests.'''
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
CONFIG_DIR = "/tmp/osquery-tests/"
CONFIG_NAME = CONFIG_DIR + "tests"
DEFAULT_CONFIG = {
@ -57,7 +67,7 @@ DEFAULT_CONFIG = {
"disable_logging": "true",
"force": "true",
},
"scheduledQueries": [],
"schedule": {},
}
# osquery-specific python tooling and utilities
@ -242,7 +252,8 @@ class ProcessGenerator(object):
shutil.rmtree(CONFIG_DIR)
os.makedirs(CONFIG_DIR)
def _run_daemon(self, options={}, silent=False, options_only={}):
def _run_daemon(self, options={}, silent=False, options_only={},
overwrite={}):
'''Spawn an osquery daemon process'''
global ARGS, CONFIG_NAME, CONFIG
config = copy.deepcopy(CONFIG)
@ -253,6 +264,8 @@ class ProcessGenerator(object):
flags = ["--%s=%s" % (k, v) for k, v in config["options"].items()]
for option in options_only.keys():
config["options"][option] = options_only[option]
for key in overwrite:
config[key] = overwrite[key]
utils.write_config(config)
binary = os.path.join(ARGS.build, "osquery", "osqueryd")
@ -296,6 +309,59 @@ class ProcessGenerator(object):
pass
class EXClient:
'''An osquery Thrift/extensions python client generator.'''
transport = None
'''The instance transport object.'''
_manager = None
'''The client class's reference to run-time discovered manager.'''
_client = None
'''The client class's reference to run-time discovered client.'''
def __init__(self, path=None, uuid=None):
global CONFIG
'''Create a extensions client to a UNIX path and optional UUID.'''
if path is None:
path = CONFIG["options"]["extensions_socket"]
self.path = path
if uuid:
self.path += ".%s" % str(uuid)
transport = TSocket.TSocket(unix_socket=self.path)
transport = TTransport.TBufferedTransport(transport)
self.protocol = TBinaryProtocol.TBinaryProtocol(transport)
self.transport = transport
@classmethod
def setUp(cls, manager, client):
'''Set the manager and client modules to generate clients from.'''
cls._manager = manager
cls._client = client
def close(self):
if self.transport:
self.transport.close()
def open(self):
'''Attempt to open the UNIX domain socket.'''
try:
self.transport.open()
except Exception as e:
return False
return True
def getEM(self):
'''Return an extension manager (osquery core) client.'''
if self._manager is None:
raise(Exception, "The EXClient must be 'setUp' with a manager")
return self._manager.Client(self.protocol)
def getEX(self):
'''Return an extension (osquery extension) client.'''
if self._client is None:
raise(Exception, "The EXClient must be 'setUp' with a client")
return self._client.Client(self.protocol)
class Autoloader(object):
'''Helper class to write a module or extension autoload file.'''
def __init__(self, autoloads=[]):
@ -405,3 +471,18 @@ def assertPermissions():
print (utils.lightred("Repository owner (%d) executer (%d) mismatch" % (
stat_info.st_uid, os.getuid())))
exit(1)
def loadThriftFromBuild(build_dir):
'''Find and import the thrift-generated python interface.'''
thrift_path = build_dir + "/generated/gen-py"
try:
sys.path.append(thrift_path)
sys.path.append(thrift_path + "/osquery")
from osquery import ExtensionManager, Extension
EXClient.setUp(ExtensionManager, Extension)
except ImportError as e:
print ("Cannot import osquery thrift API from %s" % (thrift_path))
print ("Exception: %s" % (str(e)))
print ("You must first run: make")
exit(1)

View File

@ -22,52 +22,10 @@ import time
import threading
import unittest
try:
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
except ImportError:
print ("Cannot import thrift: pip install thrift?")
exit(1)
# osquery-specific testing utils
import test_base
class EXClient:
    '''Thrift client wrapper for an osquery extensions UNIX socket.'''
    # Class-level default so close() is safe even if __init__ fails early.
    transport = None

    def __init__(self, path=None, uuid=None):
        '''Build (but do not open) a buffered transport to the socket.

        Args:
            path: extensions socket path; defaults to the test CONFIG
                option "extensions_socket" when omitted.
            uuid: optional extension UUID; appended to the socket path to
                address a specific registered extension.
        '''
        if path is None:
            path = test_base.CONFIG["options"]["extensions_socket"]
        self.path = path
        if uuid:
            self.path += ".%s" % str(uuid)
        transport = TSocket.TSocket(unix_socket=self.path)
        transport = TTransport.TBufferedTransport(transport)
        self.protocol = TBinaryProtocol.TBinaryProtocol(transport)
        self.transport = transport

    def close(self):
        '''Close the transport if one exists.'''
        if self.transport:
            self.transport.close()

    def open(self):
        '''Attempt to open the UNIX domain socket.

        Returns:
            bool: True when the transport opened, False on any error.
        '''
        try:
            self.transport.open()
        except Exception:
            # Failure to connect is an expected result while polling for
            # a socket; the exception itself is not needed.
            return False
        return True

    def getEM(self):
        '''Return an extension manager (osquery core) client.'''
        return ExtensionManager.Client(self.protocol)

    def getEX(self):
        '''Return an extension (osquery extension) client.'''
        return Extension.Client(self.protocol)
class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
def test_1_daemon_without_extensions(self):
@ -80,7 +38,7 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertTrue(daemon.isAlive())
# Now try to connect to the disabled API
client = EXClient(daemon.options["extensions_socket"])
client = test_base.EXClient(daemon.options["extensions_socket"])
self.assertFalse(client.open())
daemon.kill()
@ -89,7 +47,7 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertTrue(daemon.isAlive())
# Get a python-based thrift client
client = EXClient(daemon.options["extensions_socket"])
client = test_base.EXClient(daemon.options["extensions_socket"])
test_base.expectTrue(client.open)
self.assertTrue(client.open())
em = client.getEM()
@ -121,7 +79,7 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertTrue(daemon.isAlive())
# Get a python-based thrift client
client = EXClient(daemon.options["extensions_socket"])
client = test_base.EXClient(daemon.options["extensions_socket"])
test_base.expectTrue(client.open)
self.assertTrue(client.open())
em = client.getEM()
@ -131,7 +89,8 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertEqual(len(result), 0)
# Make sure the extension process starts
extension = self._run_extension(path=daemon.options["extensions_socket"])
extension = self._run_extension(
path=daemon.options["extensions_socket"])
self.assertTrue(extension.isAlive())
# Now that an extension has started, check extension list
@ -144,7 +103,8 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertEqual(ex_data.min_sdk_version, "0.0.0")
# Get a python-based thrift client to the extension's service
client2 = EXClient(daemon.options["extensions_socket"], uuid=ex_uuid)
client2 = test_base.EXClient(daemon.options["extensions_socket"],
uuid=ex_uuid)
client2.open()
ex = client2.getEX()
self.assertEqual(ex.ping().code, 0)
@ -182,7 +142,7 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertTrue(daemon.isAlive())
# Get a python-based thrift client
client = EXClient(daemon.options["extensions_socket"])
client = test_base.EXClient(daemon.options["extensions_socket"])
test_base.expectTrue(client.open)
self.assertTrue(client.open())
em = client.getEM()
@ -192,7 +152,8 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertEqual(len(result), 0)
# Make sure the extension process starts
extension = self._run_extension(path=daemon.options["extensions_socket"])
extension = self._run_extension(
path=daemon.options["extensions_socket"])
self.assertTrue(extension.isAlive())
# Now that an extension has started, check extension list
@ -207,7 +168,8 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertEqual(len(result), 0)
# Make sure the extension restarts
extension = self._run_extension(path=daemon.options["extensions_socket"])
extension = self._run_extension(
path=daemon.options["extensions_socket"])
self.assertTrue(extension.isAlive())
# With the reset there should be 1 extension again
@ -235,7 +197,7 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertTrue(daemon.isAlive())
# Get a python-based thrift client
client = EXClient(extension.options["extensions_socket"])
client = test_base.EXClient(extension.options["extensions_socket"])
test_base.expectTrue(client.open)
self.assertTrue(client.open())
em = client.getEM()
@ -258,7 +220,7 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertTrue(daemon.isAlive())
# Get a python-based thrift client
client = EXClient(daemon.options["extensions_socket"])
client = test_base.EXClient(daemon.options["extensions_socket"])
test_base.expectTrue(client.open)
self.assertTrue(client.open())
em = client.getEM()
@ -277,7 +239,7 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertTrue(daemon.isAlive())
# Get a python-based thrift client
client = EXClient(daemon.options["extensions_socket"])
client = test_base.EXClient(daemon.options["extensions_socket"])
test_base.expectTrue(client.open)
self.assertTrue(client.open())
em = client.getEM()
@ -300,7 +262,7 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertTrue(daemon.isAlive())
# Get a python-based thrift client
client = EXClient(daemon.options["extensions_socket"])
client = test_base.EXClient(daemon.options["extensions_socket"])
test_base.expectTrue(client.open)
self.assertTrue(client.open())
em = client.getEM()
@ -326,14 +288,15 @@ class ExtensionTests(test_base.ProcessGenerator, unittest.TestCase):
self.assertTrue(daemon.isAlive())
# Get a python-based thrift client to the manager and extension.
client = EXClient(extension.options["extensions_socket"])
client = test_base.EXClient(extension.options["extensions_socket"])
client.open()
em = client.getEM()
# Need the manager to request the extension's UUID.
result = test_base.expect(em.extensions, 1)
self.assertTrue(result is not None)
ex_uuid = result.keys()[0]
client2 = EXClient(extension.options["extensions_socket"], uuid=ex_uuid)
client2 = test_base.EXClient(extension.options["extensions_socket"],
uuid=ex_uuid)
client2.open()
ex = client2.getEX()
@ -361,15 +324,6 @@ if __name__ == "__main__":
module = test_base.Tester()
# Find and import the thrift-generated python interface
thrift_path = test_base.ARGS.build + "/generated/gen-py"
try:
sys.path.append(thrift_path)
sys.path.append(thrift_path + "/osquery")
from osquery import *
except ImportError as e:
print ("Cannot import osquery thrift API from %s" % (thrift_path))
print ("Exception: %s" % (str(e)))
print ("You must first run: make")
exit(1)
test_base.loadThriftFromBuild(test_base.ARGS.build)
module.run()

View File

@ -1,5 +1,5 @@
{
"test_pack_test": {
"queries": {
"launchd": {
"query": "select * from launchd",
"interval" : "414141",
@ -15,7 +15,10 @@
"version" : "9.9.9",
"description" : "More descriptive description",
"value" : "It is dangerous to go alone, take this"
},
"simple": {
"query": "select * from osquery_info",
"interval": "10"
}
}
}

View File

@ -41,8 +41,10 @@ def read_config(path):
return json.loads(fh.read())
def write_config(data={}):
with open(data["options"]["config_path"], "w") as fh:
def write_config(data={}, path=None):
    '''Serialize a configuration dict to JSON on disk.

    Args:
        data: configuration dictionary; when no explicit path is given it
            must contain data["options"]["config_path"]. The default is
            never mutated, so the shared-{} default is safe here.
        path: optional output path overriding the config_path option.
    '''
    if path is None:
        path = data["options"]["config_path"]
    with open(path, "w") as fh:
        # json.dump streams directly to the file handle instead of
        # building an intermediate string with json.dumps.
        json.dump(data, fh)
def queries_from_config(config_path):