Mirror of https://github.com/valitydev/osquery-1.git (synced 2024-11-07 18:08:53 +00:00)
Merge pull request #1832 from theopolis/pack_valid
Valid bool in packs for shard/platform/version checking
This commit is contained in: commit 02eb57fc47
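The change gives each Pack a validity flag that is computed once, when the pack is parsed, from its shard, platform, and version restrictions; discovery then only matters for packs that passed those checks. A minimal sketch of that idea (illustration only, not the osquery sources; PackSketch and its members are hypothetical names):

// Minimal sketch (not the osquery sources): a pack marks itself valid only
// after shard/platform/version checks pass, and execution is gated on that flag.
#include <atomic>

class PackSketch {  // hypothetical illustration class
 public:
  void initialize(bool shard_ok, bool platform_ok, bool version_ok) {
    valid_ = false;
    if (!shard_ok || !platform_ok || !version_ok) {
      return;  // leave the pack invalid; it will never execute
    }
    valid_ = true;
  }

  bool shouldExecute() const {
    // Discovery is only consulted once the pack is valid for this host.
    return valid_ && runDiscovery();
  }

 private:
  bool runDiscovery() const { return true; }  // stand-in for checkDiscovery()
  std::atomic<bool> valid_{false};
};

int main() {
  PackSketch pack;
  pack.initialize(/*shard_ok=*/true, /*platform_ok=*/true, /*version_ok=*/false);
  return pack.shouldExecute() ? 1 : 0;  // stays invalid, so it will not execute
}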
@@ -130,7 +130,7 @@ class Config : private boost::noncopyable {
   /**
    * @brief Iterate through all packs
    */
-  void packs(std::function<void(Pack& pack)> predicate);
+  void packs(std::function<void(std::shared_ptr<Pack>& pack)> predicate);

   /**
    * @brief Add a file
@@ -292,6 +292,7 @@ class Config : private boost::noncopyable {
   FRIEND_TEST(ConfigTests, test_get_parser);
   FRIEND_TEST(ConfigTests, test_add_remove_pack);
   FRIEND_TEST(ConfigTests, test_update_clear);
+  FRIEND_TEST(ConfigTests, test_pack_file_paths);
   FRIEND_TEST(ConfigTests, test_noninline_pack);

   friend class FilePathsConfigParserPluginTests;
@@ -10,11 +10,13 @@

 #pragma once

+#include <atomic>
 #include <map>
 #include <string>
 #include <vector>

 #include <boost/property_tree/ptree.hpp>
+#include <boost/noncopyable.hpp>

 #include <osquery/database.h>

@@ -33,7 +35,7 @@ struct PackStats {
  * Instantiating a new Pack object parses JSON and may throw a
  * boost::property_tree::json_parser::json_parser_error exception
  */
-class Pack {
+class Pack : private boost::noncopyable {
  public:
   Pack(const std::string& name, const boost::property_tree::ptree& tree)
       : Pack(name, "", tree) {}
@@ -122,6 +124,9 @@ class Pack {
   /// Cached time and result from previous discovery step.
   std::pair<size_t, bool> discovery_cache_;

+  /// Aggregate appropriateness of pack for this host.
+  std::atomic<bool> valid_{false};
+
   /// Pack discovery statistics.
   PackStats stats_;

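The new member is std::atomic<bool> rather than plain bool, presumably because the flag can be read on the scheduler's path while a config refresh re-initializes the pack. A tiny sketch of that access pattern (an assumption about intent, not osquery code):

// Sketch: one thread rewrites the validity flag while another reads it.
#include <atomic>
#include <thread>

int main() {
  std::atomic<bool> valid{false};

  std::thread refresher([&valid] { valid = true; });        // config update path
  std::thread scheduler([&valid] { (void)valid.load(); });  // shouldPackExecute path

  refresher.join();
  scheduler.join();
  return 0;
}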
@@ -63,6 +63,8 @@ boost::shared_mutex config_files_mutex_;
 boost::shared_mutex config_hash_mutex_;
 boost::shared_mutex config_valid_mutex_;

+using PackRef = std::shared_ptr<Pack>;
+
 /**
  * The schedule is an iterable collection of Packs. When you iterate through
  * a schedule, you only get the packs that should be running on the host that
@@ -71,7 +73,7 @@ boost::shared_mutex config_valid_mutex_;
 class Schedule : private boost::noncopyable {
  public:
   /// Under the hood, the schedule is just a list of the Pack objects
-  using container = std::list<Pack>;
+  using container = std::list<PackRef>;

   /**
    * @brief Create a schedule maintained by the configuration.
@@ -90,12 +92,12 @@ class Schedule : private boost::noncopyable {
    * next iterator element or skipped.
    */
   struct Step {
-    bool operator()(Pack& pack) { return pack.shouldPackExecute(); }
+    bool operator()(PackRef& pack) { return pack->shouldPackExecute(); }
   };

   /// Add a pack to the schedule
-  void add(const Pack& pack) {
-    remove(pack.getName(), pack.getSource());
+  void add(PackRef&& pack) {
+    remove(pack->getName(), pack->getSource());
     packs_.push_back(pack);
   }

@@ -104,10 +106,10 @@ class Schedule : private boost::noncopyable {

   /// Remove a pack by name and source.
   void remove(const std::string& pack, const std::string& source) {
-    packs_.remove_if([pack, source](Pack& p) {
-      if (p.getName() == pack && (p.getSource() == source || source == "")) {
+    packs_.remove_if([pack, source](PackRef& p) {
+      if (p->getName() == pack && (p->getSource() == source || source == "")) {
         Config::getInstance().removeFiles(source + FLAGS_pack_delimiter +
-                                          p.getName());
+                                          p->getName());
         return true;
       }
       return false;
@@ -116,10 +118,10 @@ class Schedule : private boost::noncopyable {

   /// Remove all packs by source.
   void removeAll(const std::string& source) {
-    packs_.remove_if(([source](Pack& p) {
-      if (p.getSource() == source) {
+    packs_.remove_if(([source](PackRef& p) {
+      if (p->getSource() == source) {
         Config::getInstance().removeFiles(source + FLAGS_pack_delimiter +
-                                          p.getName());
+                                          p->getName());
         return true;
       }
       return false;
@@ -132,6 +134,8 @@ class Schedule : private boost::noncopyable {
   iterator begin() { return iterator(packs_.begin(), packs_.end()); }
   iterator end() { return iterator(packs_.end(), packs_.end()); }

+  PackRef& last() { return packs_.back(); }
+
  private:
   /// Underlying storage for the packs
   container packs_;
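Taken together, the Schedule hunks above switch the container from Pack values to shared_ptr handles (PackRef), filter iteration through a predicate on each pack, and expose the most recently added pack via last(). A self-contained sketch of the same pattern (DemoPack, DemoPackRef, and forEachRunnable are illustrative names, not osquery APIs):

// Sketch: hold packs by shared_ptr so the schedule can filter on each pass
// without copying pack objects.
#include <functional>
#include <list>
#include <memory>

struct DemoPack {  // hypothetical stand-in for osquery::Pack
  bool valid{true};
  bool shouldPackExecute() const { return valid; }
};

using DemoPackRef = std::shared_ptr<DemoPack>;

void forEachRunnable(std::list<DemoPackRef>& packs,
                     const std::function<void(DemoPackRef&)>& fn) {
  for (auto& pack : packs) {
    if (pack->shouldPackExecute()) {  // the "Step" predicate
      fn(pack);
    }
  }
}

int main() {
  std::list<DemoPackRef> packs{std::make_shared<DemoPack>()};
  forEachRunnable(packs, [](DemoPackRef&) { /* run the pack's queries */ });
  return 0;
}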
@@ -217,8 +221,10 @@ void Config::addPack(const std::string& name,
                      const pt::ptree& tree) {
   WriteLock wlock(config_schedule_mutex_);
   try {
-    schedule_->add(Pack(name, source, tree));
-    applyParsers(source + FLAGS_pack_delimiter + name, tree, true);
+    schedule_->add(std::make_shared<Pack>(name, source, tree));
+    if (schedule_->last()->shouldPackExecute()) {
+      applyParsers(source + FLAGS_pack_delimiter + name, tree, true);
+    }
   } catch (const std::exception& e) {
     LOG(WARNING) << "Error adding pack: " << name << ": " << e.what();
   }
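With shared ownership, addPack can construct the pack with std::make_shared, look it up again through Schedule::last(), and skip parser and file-path registration for packs that will never execute on this host. A runnable sketch of that flow (SketchPack and the surrounding scaffolding are assumptions for illustration):

// Sketch: add a heap-allocated pack, then only do follow-up work if it applies.
#include <list>
#include <memory>
#include <string>

struct SketchPack {
  explicit SketchPack(std::string n) : name(std::move(n)) {}
  std::string name;
  bool shouldPackExecute() const { return true; }
};

int main() {
  std::list<std::shared_ptr<SketchPack>> schedule;

  schedule.push_back(std::make_shared<SketchPack>("unrestricted_pack"));
  auto& added = schedule.back();  // mirrors Schedule::last() in the diff

  if (added->shouldPackExecute()) {
    // e.g. apply config parsers / register the pack's file_paths here
  }
  return 0;
}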
@@ -246,12 +252,12 @@ void Config::removeFiles(const std::string& source) {
 void Config::scheduledQueries(std::function<
     void(const std::string& name, const ScheduledQuery& query)> predicate) {
   ReadLock rlock(config_schedule_mutex_);
-  for (const Pack& pack : *schedule_) {
-    for (const auto& it : pack.getSchedule()) {
+  for (const PackRef& pack : *schedule_) {
+    for (const auto& it : pack->getSchedule()) {
       std::string name = it.first;
       // The query name may be synthetic.
-      if (pack.getName() != "main" && pack.getName() != "legacy_main") {
-        name = "pack" + FLAGS_pack_delimiter + pack.getName() +
+      if (pack->getName() != "main" && pack->getName() != "legacy_main") {
+        name = "pack" + FLAGS_pack_delimiter + pack->getName() +
               FLAGS_pack_delimiter + it.first;
       }
       // They query may have failed and been added to the schedule's blacklist.
@@ -272,9 +278,9 @@ void Config::scheduledQueries(std::function<
   }
 }

-void Config::packs(std::function<void(Pack& pack)> predicate) {
+void Config::packs(std::function<void(PackRef& pack)> predicate) {
   ReadLock rlock(config_schedule_mutex_);
-  for (Pack& pack : schedule_->packs_) {
+  for (PackRef& pack : schedule_->packs_) {
     predicate(pack);
   }
 }
@@ -508,7 +514,7 @@ void Config::purge() {
   const auto& schedule = this->schedule_;
   auto queryExists = [&schedule](const std::string& query_name) {
     for (const auto& pack : schedule->packs_) {
-      const auto& pack_queries = pack.getSchedule();
+      const auto& pack_queries = pack->getSchedule();
       if (pack_queries.count(query_name)) {
         return true;
       }
@@ -134,12 +134,6 @@ void Pack::initialize(const std::string& name,
     return;
   }

-  schedule_.clear();
-  if (tree.count("queries") == 0) {
-    // This pack contained no queries.
-    return;
-  }
-
   discovery_queries_.clear();
   if (tree.count("discovery") > 0) {
     for (const auto& item : tree.get_child("discovery")) {
@@ -149,12 +143,19 @@ void Pack::initialize(const std::string& name,

   // Initialize a discovery cache at time 0.
   discovery_cache_ = std::make_pair<size_t, bool>(0, false);
+  valid_ = true;

   // If the splay percent is less than 1 reset to a sane estimate.
   if (FLAGS_schedule_splay_percent <= 1) {
     FLAGS_schedule_splay_percent = 10;
   }

+  schedule_.clear();
+  if (tree.count("queries") == 0) {
+    // This pack contained no queries.
+    return;
+  }
+
   // Iterate the queries (or schedule) and check platform/version/sanity.
   for (const auto& q : tree.get_child("queries")) {
     if (q.second.count("shard") > 0) {
@@ -207,7 +208,7 @@ const std::string& Pack::getPlatform() const { return platform_; }

 const std::string& Pack::getVersion() const { return version_; }

-bool Pack::shouldPackExecute() { return checkDiscovery(); }
+bool Pack::shouldPackExecute() { return (valid_ && checkDiscovery()); }

 const std::string& Pack::getName() const { return name_; }

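shouldPackExecute() now requires both the validity flag and a positive discovery result, and discovery itself is throttled through the cached (time, result) pair plus the hit/miss statistics seen earlier. A sketch of such a cache (illustrative only; DiscoveryCacheSketch and its interval handling are assumptions, not the osquery implementation):

// Sketch: reuse the last discovery result until an interval has elapsed.
#include <ctime>
#include <utility>

class DiscoveryCacheSketch {  // hypothetical illustration class
 public:
  bool check(size_t interval_seconds) {
    auto now = static_cast<size_t>(std::time(nullptr));
    if (now - cache_.first < interval_seconds) {
      hits_++;  // served from cache
      return cache_.second;
    }
    misses_++;  // cache expired: run discovery again
    cache_ = {now, runDiscovery()};
    return cache_.second;
  }

 private:
  bool runDiscovery() { return true; }  // stand-in for executing discovery SQL
  std::pair<size_t, bool> cache_{0, false};
  size_t hits_{0};
  size_t misses_{0};
};

int main() {
  DiscoveryCacheSketch cache;
  return cache.check(/*interval_seconds=*/60) ? 0 : 1;
}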
@@ -172,18 +172,18 @@ TEST_F(ConfigTests, test_parse) {
   std::map<std::string, bool> results = {
       {"unrestricted_pack", true},
       {"discovery_pack", false},
-      {"fake_version_pack", true},
+      {"fake_version_pack", false},
       // Although this is a valid discovery query, there is no SQL plugin in
       // the core tests.
       {"valid_discovery_pack", false},
-      {"restricted_pack", true},
+      {"restricted_pack", false},
   };

-  c.packs(([&results](Pack& pack) {
-    if (results[pack.getName()]) {
-      EXPECT_TRUE(pack.shouldPackExecute());
+  c.packs(([&results](std::shared_ptr<Pack>& pack) {
+    if (results[pack->getName()]) {
+      EXPECT_TRUE(pack->shouldPackExecute());
     } else {
-      EXPECT_FALSE(pack.shouldPackExecute());
+      EXPECT_FALSE(pack->shouldPackExecute());
     }
   }));
 }
@@ -193,24 +193,26 @@ TEST_F(ConfigTests, test_remove) {
   c.addPack("unrestricted_pack", "", getUnrestrictedPack());
   c.removePack("unrestricted_pack");

-  c.packs(([](Pack& pack) { EXPECT_NE("unrestricted_pack", pack.getName()); }));
+  c.packs(([](std::shared_ptr<Pack>& pack) {
+    EXPECT_NE("unrestricted_pack", pack->getName());
+  }));
 }

 TEST_F(ConfigTests, test_add_remove_pack) {
   Config c;

   size_t pack_count = 0;
-  c.packs(([&pack_count](Pack& pack) { pack_count++; }));
+  c.packs(([&pack_count](std::shared_ptr<Pack>& pack) { pack_count++; }));
   EXPECT_EQ(pack_count, 0U);

   pack_count = 0;
   c.addPack("unrestricted_pack", "", getUnrestrictedPack());
-  c.packs(([&pack_count](Pack& pack) { pack_count++; }));
+  c.packs(([&pack_count](std::shared_ptr<Pack>& pack) { pack_count++; }));
   EXPECT_EQ(pack_count, 1U);

   pack_count = 0;
   c.removePack("unrestricted_pack");
-  c.packs(([&pack_count](Pack& pack) { pack_count++; }));
+  c.packs(([&pack_count](std::shared_ptr<Pack>& pack) { pack_count++; }));
   EXPECT_EQ(pack_count, 0U);
 }

@@ -227,7 +229,7 @@ TEST_F(ConfigTests, test_update_clear) {
   Config c;
   c.update(config_data);
   size_t count = 0;
-  auto packCounter = [&count](Pack& pack) { count++; };
+  auto packCounter = [&count](std::shared_ptr<Pack>& pack) { count++; };
   c.packs(packCounter);
   EXPECT_GT(count, 0U);

@@ -244,8 +246,9 @@ TEST_F(ConfigTests, test_get_scheduled_queries) {
   std::vector<ScheduledQuery> queries;
   c.addPack("unrestricted_pack", "", getUnrestrictedPack());
   c.scheduledQueries(
-      ([&queries](const std::string&,
-                  const ScheduledQuery& query) { queries.push_back(query); }));
+      ([&queries](const std::string&, const ScheduledQuery& query) {
+        queries.push_back(query);
+      }));
   EXPECT_EQ(queries.size(), getUnrestrictedPack().get_child("queries").size());
 }

@@ -270,6 +273,30 @@ TEST_F(ConfigTests, test_get_parser) {
   EXPECT_EQ(data.count("dictionary"), 1U);
 }

+TEST_F(ConfigTests, test_pack_file_paths) {
+  Config c;
+
+  size_t count = 0;
+  auto fileCounter =
+      [&count](const std::string& c, const std::vector<std::string>& files) {
+        count += files.size();
+      };
+
+  c.addPack("unrestricted_pack", "", getUnrestrictedPack());
+  Config::getInstance().files(fileCounter);
+  EXPECT_EQ(count, 7U);
+
+  c.removePack("unrestricted_pack");
+  count = 0;
+  Config::getInstance().files(fileCounter);
+  EXPECT_EQ(count, 5U);
+
+  c.addPack("restricted_pack", "", getRestrictedPack());
+  count = 0;
+  Config::getInstance().files(fileCounter);
+  EXPECT_EQ(count, 5U);
+}
+
 TEST_F(ConfigTests, test_noninline_pack) {
   Registry::add<TestConfigPlugin>("config", "test");

@@ -284,7 +311,7 @@ TEST_F(ConfigTests, test_noninline_pack) {
   EXPECT_EQ(plugin->genPackCount, 1);

   int total_packs = 0;
-  c.packs([&total_packs](const Pack& pack) { total_packs++; });
+  c.packs([&total_packs](const std::shared_ptr<Pack>& pack) { total_packs++; });
   EXPECT_EQ(total_packs, 2);
 }

@@ -32,36 +32,36 @@ TEST_F(PacksTests, test_parse) {
 }

 TEST_F(PacksTests, test_should_pack_execute) {
-  auto kpack = Pack("unrestricted_pack", getUnrestrictedPack());
+  Pack kpack("unrestricted_pack", getUnrestrictedPack());
   EXPECT_TRUE(kpack.shouldPackExecute());

-  auto fpack = Pack("discovery_pack", getPackWithDiscovery());
+  Pack fpack("discovery_pack", getPackWithDiscovery());
   EXPECT_FALSE(fpack.shouldPackExecute());
 }

 TEST_F(PacksTests, test_get_discovery_queries) {
   std::vector<std::string> expected;

-  auto kpack = Pack("unrestricted_pack", getUnrestrictedPack());
+  Pack kpack("unrestricted_pack", getUnrestrictedPack());
   EXPECT_EQ(kpack.getDiscoveryQueries(), expected);

   expected = {"select pid from processes where name = 'foobar';"};
-  auto fpack = Pack("discovery_pack", getPackWithDiscovery());
+  Pack fpack("discovery_pack", getPackWithDiscovery());
   EXPECT_EQ(fpack.getDiscoveryQueries(), expected);
 }

 TEST_F(PacksTests, test_platform) {
-  auto fpack = Pack("discovery_pack", getPackWithDiscovery());
+  Pack fpack("discovery_pack", getPackWithDiscovery());
   EXPECT_EQ(fpack.getPlatform(), "all");
 }

 TEST_F(PacksTests, test_version) {
-  auto fpack = Pack("discovery_pack", getPackWithDiscovery());
+  Pack fpack("discovery_pack", getPackWithDiscovery());
   EXPECT_EQ(fpack.getVersion(), "1.5.0");
 }

 TEST_F(PacksTests, test_name) {
-  auto fpack = Pack("discovery_pack", getPackWithDiscovery());
+  Pack fpack("discovery_pack", getPackWithDiscovery());
   fpack.setName("also_discovery_pack");
   EXPECT_EQ(fpack.getName(), "also_discovery_pack");
 }
@@ -78,7 +78,7 @@ TEST_F(PacksTests, test_sharding) {
 }

 TEST_F(PacksTests, test_check_platform) {
-  auto fpack = Pack("discovery_pack", getPackWithDiscovery());
+  Pack fpack("discovery_pack", getPackWithDiscovery());
   EXPECT_TRUE(fpack.checkPlatform());

   // Depending on the current build platform, this check will be true or false.
@@ -99,10 +99,10 @@ TEST_F(PacksTests, test_check_platform) {
 }

 TEST_F(PacksTests, test_check_version) {
-  auto zpack = Pack("fake_version_pack", getPackWithFakeVersion());
+  Pack zpack("fake_version_pack", getPackWithFakeVersion());
   EXPECT_FALSE(zpack.checkVersion());

-  auto fpack = Pack("discovery_pack", getPackWithDiscovery());
+  Pack fpack("discovery_pack", getPackWithDiscovery());
   EXPECT_TRUE(fpack.checkVersion());
 }

@@ -110,7 +110,7 @@ TEST_F(PacksTests, test_restriction_population) {
   // Require that all potential restrictions are populated before being checked.
   auto tree = getExamplePacksConfig();
   auto packs = tree.get_child("packs");
-  auto fpack = Pack("fake_pack", packs.get_child("restricted_pack"));
+  Pack fpack("fake_pack", packs.get_child("restricted_pack"));

   ASSERT_FALSE(fpack.getPlatform().empty());
   ASSERT_FALSE(fpack.getVersion().empty());
@@ -118,7 +118,7 @@ TEST_F(PacksTests, test_restriction_population) {
 }

 TEST_F(PacksTests, test_schedule) {
-  auto fpack = Pack("discovery_pack", getPackWithDiscovery());
+  Pack fpack("discovery_pack", getPackWithDiscovery());
   // Expect a single query in the schedule since one query has an explicit
   // invalid/fake platform requirement.
   EXPECT_EQ(fpack.getSchedule().size(), 1U);
@@ -132,24 +132,26 @@ TEST_F(PacksTests, test_discovery_cache) {
   size_t query_attemts = 5U;
   for (size_t i = 0; i < query_attemts; i++) {
     c.scheduledQueries(
-        ([&query_count](const std::string& name,
-                        const ScheduledQuery& query) { query_count++; }));
+        ([&query_count](const std::string& name, const ScheduledQuery& query) {
+          query_count++;
+        }));
   }
   EXPECT_EQ(query_count, query_attemts);

   size_t pack_count = 0U;
-  c.packs(([&pack_count, query_attemts](Pack& p) {
+  c.packs(([&pack_count, query_attemts](std::shared_ptr<Pack>& p) {
     pack_count++;
-    EXPECT_EQ(p.getStats().total, query_attemts);
-    EXPECT_EQ(p.getStats().hits, query_attemts - 1);
-    EXPECT_EQ(p.getStats().misses, 1U);
+    // There is one pack without a discovery query.
+    EXPECT_EQ(p->getStats().total, query_attemts + 1);
+    EXPECT_EQ(p->getStats().hits, query_attemts);
+    EXPECT_EQ(p->getStats().misses, 1U);
   }));

   EXPECT_EQ(pack_count, 1U);
 }

 TEST_F(PacksTests, test_discovery_zero_state) {
-  auto pack = Pack("discovery_pack", getPackWithDiscovery());
+  Pack pack("discovery_pack", getPackWithDiscovery());
   auto stats = pack.getStats();
   EXPECT_EQ(stats.total, 0U);
   EXPECT_EQ(stats.hits, 0U);
@@ -110,6 +110,13 @@ pt::ptree getUnrestrictedPack() {
   return packs.get_child("unrestricted_pack");
 }

+/// several restrictions (version, platform, shard)
+pt::ptree getRestrictedPack() {
+  auto tree = getExamplePacksConfig();
+  auto packs = tree.get_child("packs");
+  return packs.get_child("restricted_pack");
+}
+
 /// 1 discovery query, darwin platform restriction
 pt::ptree getPackWithDiscovery() {
   auto tree = getExamplePacksConfig();
@@ -51,6 +51,7 @@ std::map<std::string, std::string> getTestConfigMap();

 pt::ptree getExamplePacksConfig();
 pt::ptree getUnrestrictedPack();
+pt::ptree getRestrictedPack();
 pt::ptree getPackWithDiscovery();
 pt::ptree getPackWithValidDiscovery();
 pt::ptree getPackWithFakeVersion();
@@ -78,14 +78,14 @@ QueryData genOsqueryEvents(QueryContext& context) {
 QueryData genOsqueryPacks(QueryContext& context) {
   QueryData results;

-  Config::getInstance().packs([&results](Pack& pack) {
+  Config::getInstance().packs([&results](std::shared_ptr<Pack>& pack) {
     Row r;
-    r["name"] = pack.getName();
-    r["version"] = pack.getVersion();
-    r["platform"] = pack.getPlatform();
-    r["shard"] = INTEGER(pack.getShard());
+    r["name"] = pack->getName();
+    r["version"] = pack->getVersion();
+    r["platform"] = pack->getPlatform();
+    r["shard"] = INTEGER(pack->getShard());

-    auto stats = pack.getStats();
+    auto stats = pack->getStats();
     r["discovery_cache_hits"] = INTEGER(stats.hits);
     r["discovery_executions"] = INTEGER(stats.misses);
     results.push_back(r);
@@ -12,6 +12,12 @@
         "version": "1.5.1-26",
         "removed": false
       }
-    }
+    },
+    "file_paths": {
+      "unrestricted_pack": [
+        "/unrestricted",
+        "/unrestricted/also"
+      ]
+    }
   },
   "discovery_pack": {
@@ -50,7 +56,10 @@
     "restricted_pack": {
       "version": "9.9.9",
       "platform": "none",
-      "shard": "1"
+      "shard": "1",
+      "file_paths": {
+        "restricted_pack": ["/restricted"]
+      }
     }
   },
   "schedule": {
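The two JSON hunks above give both test packs a pack-level "file_paths" section, which the new test_pack_file_paths test exercises by counting the files each pack contributes. A small boost::property_tree sketch of counting paths under the same JSON shape (standalone example, not osquery code):

// Sketch: count how many paths a pack contributes under its "file_paths" categories.
#include <iostream>
#include <sstream>
#include <boost/property_tree/json_parser.hpp>
#include <boost/property_tree/ptree.hpp>

int main() {
  std::stringstream raw(R"({
    "file_paths": {
      "unrestricted_pack": ["/unrestricted", "/unrestricted/also"]
    }
  })");

  boost::property_tree::ptree pack;
  boost::property_tree::read_json(raw, pack);

  size_t count = 0;
  if (pack.count("file_paths") > 0) {
    for (const auto& category : pack.get_child("file_paths")) {
      count += category.second.size();  // each category holds an array of paths
    }
  }
  std::cout << count << std::endl;  // prints 2 for this sample
  return 0;
}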