Mirror of https://github.com/valitydev/salt.git (synced 2024-11-09 01:36:48 +00:00)
Remove Salt Search system, this never worked
This commit is contained in:
parent 97baddaa10
commit ee3c5d0341
@@ -1,23 +0,0 @@
# -*- coding: utf-8 -*-
'''
Runner frontend to search system
'''
from __future__ import absolute_import

# Import salt libs
import salt.search


def query(term):
    '''
    Query the search system

    CLI Example:

    .. code-block:: bash

        salt-run search.query foo
    '''
    search = salt.search.Search(__opts__)
    result = search.query(term)
    return result
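The removed runner is only a thin pass-through: the loader injects __opts__, and query() delegates to salt.search.Search. As a point of reference, here is a hedged sketch of driving the same runner from Python instead of the salt-run CLI; the config path is an assumption, and RunnerClient is simply the standard runner interface rather than anything specific to this module.

# Hypothetical sketch: invoking the (now removed) search runner from Python.
# Assumes a master config at the default path; RunnerClient injects __opts__
# into runner modules the same way the salt-run command does.
import salt.config
import salt.runner

opts = salt.config.master_config('/etc/salt/master')  # assumed path
client = salt.runner.RunnerClient(opts)

# Equivalent to: salt-run search.query foo
results = client.cmd('search.query', ['foo'])
print(results)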
@@ -1,118 +0,0 @@
# -*- coding: utf-8 -*-
'''
Set up the correct search system
'''

# Import python libs
from __future__ import absolute_import
import os

# Import salt libs
import salt.minion
import salt.loader
import salt.utils

# Import 3rd-party libs
import salt.ext.six as six


def iter_ret(opts, ret):
    '''
    Yield returner data if the external job cache is enabled
    '''
    if not opts['ext_job_cache']:
        raise StopIteration
    get_load = '{0}.get_load'.format(opts['ext_job_cache'])
    get_jid = '{0}.get_jid'.format(opts['ext_job_cache'])
    get_jids = '{0}.get_jids'.format(opts['ext_job_cache'])
    if get_load not in ret:
        raise StopIteration
    else:
        get_load = ret[get_load]
    if get_jid not in ret:
        raise StopIteration
    else:
        get_jid = ret[get_jid]
    if get_jids not in ret:
        raise StopIteration
    else:
        get_jids = ret[get_jids]
    for jid in get_jids():
        jids = {}
        jids['load'] = get_load(jid)
        jids['ret'] = get_jid(jid)
        jids['jid'] = jid
        yield jids


def _iter_dir(dir_, saltenv):
    '''
    Walk a dir path looking for files and marking their content type
    '''
    ret = []
    for fn_ in os.listdir(dir_):
        path = os.path.join(dir_, fn_)
        if os.path.isdir(path):
            yield _iter_dir(path, saltenv)
        elif os.path.isfile(path):
            with salt.utils.fopen(path) as fp_:
                if salt.utils.istextfile(fp_):
                    ret.append(
                            {'path': six.text_type(path),
                             'saltenv': six.text_type(saltenv),
                             'content': six.text_type(fp_.read())}
                            )
                else:
                    ret.append(
                            {'path': six.text_type(path),
                             'saltenv': six.text_type(saltenv),
                             'content': u'bin'}
                            )
    yield ret


def iter_roots(roots):
    '''
    Accepts the file_roots or the pillar_roots structures and yields
    {'path': <path>,
     'saltenv': <saltenv>,
     'cont': <contents>}
    '''
    for saltenv, dirs in six.iteritems(roots):
        for dir_ in dirs:
            if not os.path.isdir(dir_):
                continue
            for ret in _iter_dir(dir_, saltenv):
                yield ret


class Search(object):
    '''
    Set up the object that manages search operations
    '''
    def __init__(self, opts):
        self.opts = opts
        self.mminion = salt.minion.MasterMinion(
                self.opts,
                states=False,
                rend=False,
                matcher=False)
        self.search = salt.loader.search(self.opts, self.mminion.returners)

    def index(self):
        '''
        Execute a search index run
        '''
        ifun = '{0}.index'.format(self.opts.get('search', ''))
        if ifun not in self.search:
            return
        return self.search[ifun]()

    def query(self, term):
        '''
        Search the index for the given term
        '''
        qfun = '{0}.query'.format(self.opts.get('search', ''))
        if qfun not in self.search:
            return
        return self.search[qfun](term)
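Two details in this module help explain the "never worked" in the commit message: iter_ret() stops itself with raise StopIteration, which PEP 479 turns into a RuntimeError inside generators on Python 3.7+, and _iter_dir() yields the recursive generator object itself for subdirectories, so consumers receive unflattened generators mixed in with the lists of records. Below is a minimal standalone sketch of the intended traversal, with no Salt dependencies and with the record keys assumed to match the original dicts; it is an illustration, not the project's replacement code.

# Standalone sketch of the directory traversal _iter_dir/iter_roots aimed for.
import os


def iter_dir(dir_, saltenv):
    '''Yield one {'path', 'saltenv', 'content'} record per file under dir_.'''
    for fn_ in os.listdir(dir_):
        path = os.path.join(dir_, fn_)
        if os.path.isdir(path):
            # Recurse and flatten, instead of yielding the generator object
            for record in iter_dir(path, saltenv):
                yield record
        elif os.path.isfile(path):
            try:
                with open(path) as fp_:
                    content = fp_.read()
            except UnicodeDecodeError:
                content = u'bin'  # crude binary marker, as in the original
            yield {'path': path, 'saltenv': saltenv, 'content': content}


def iter_roots(roots):
    '''Accept a {saltenv: [dirs]} mapping like file_roots/pillar_roots.'''
    if not roots:
        return  # bare return, not `raise StopIteration` (PEP 479)
    for saltenv, dirs in roots.items():
        for dir_ in dirs:
            if not os.path.isdir(dir_):
                continue
            for record in iter_dir(dir_, saltenv):
                yield record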
@@ -1,92 +0,0 @@
# -*- coding: utf-8 -*-
'''
Routines to manage interactions with the whoosh search system
'''
from __future__ import absolute_import

# Import python libs
import os

# Import salt libs
import salt.search
import salt.ext.six as six

# Import third party libs
HAS_WHOOSH = False
try:
    import whoosh.index
    import whoosh.fields
    import whoosh.store
    import whoosh.qparser
    HAS_WHOOSH = True
except ImportError:
    pass

# Define the module's virtual name
__virtualname__ = 'whoosh'


def __virtual__():
    '''
    Only load if the whoosh libs are available
    '''
    return __virtualname__ if HAS_WHOOSH else False


def index():
    '''
    Build the search index
    '''
    schema = whoosh.fields.Schema(
            path=whoosh.fields.TEXT,     # Path for sls files
            content=whoosh.fields.TEXT,  # All content is indexed here
            env=whoosh.fields.ID,        # The environment associated with a file
            fn_type=whoosh.fields.ID,    # Set to pillar or state
            minion=whoosh.fields.ID,     # The minion id associated with the content
            jid=whoosh.fields.ID,        # The job id
            load=whoosh.fields.ID,       # The load data
            )
    index_dir = os.path.join(__opts__['cachedir'], 'whoosh')
    if not os.path.isdir(index_dir):
        os.makedirs(index_dir)
    if whoosh.index.exists_in(index_dir):
        ix_ = whoosh.index.open_dir(index_dir)
    else:
        ix_ = whoosh.index.create_in(index_dir, schema)

    try:
        writer = ix_.writer()
    except whoosh.store.LockError:
        return False

    for data in salt.search.iter_roots(__opts__['file_roots']):
        for chunk in data:
            writer.add_document(fn_type=u'file', **chunk)

    for data in salt.search.iter_roots(__opts__['pillar_roots']):
        for chunk in data:
            writer.add_document(fn_type=u'pillar', **chunk)

    for data in salt.search.iter_ret(__opts__, __ret__):
        writer.add_document(jid=data['jid'], load=data['load'])
        for minion in data['ret']:
            writer.add_document(
                    jid=data['jid'],
                    minion=minion,
                    content=data['ret'][minion])
    writer.commit()


def query(qstr, limit=10):
    '''
    Execute a query
    '''
    index_dir = os.path.join(__opts__['cachedir'], 'whoosh')
    if whoosh.index.exists_in(index_dir):
        ix_ = whoosh.index.open_dir(index_dir)
    else:
        return {}
    qp_ = whoosh.qparser.QueryParser(u'content', schema=ix_.schema)
    qobj = qp_.parse(six.text_type(qstr), limit)
    with ix_.searcher() as searcher:
        return searcher.search(qobj)
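Consistent with the commit message, the indexer and the helpers it consumes do not quite line up: the schema declares an env field while the salt.search.iter_roots() records carry a saltenv key (whoosh rejects field names that are not in the schema), and query() passes limit to QueryParser.parse() even though the hit limit belongs to searcher.search(). For comparison, here is a minimal self-contained sketch of the whoosh index-and-query pattern the module was built around; the index directory and field names are illustrative, not Salt's.

# Minimal whoosh indexing/query sketch (hypothetical paths and fields).
import os

import whoosh.fields
import whoosh.index
import whoosh.qparser

INDEX_DIR = '/tmp/whoosh-demo'  # hypothetical location

schema = whoosh.fields.Schema(
    path=whoosh.fields.ID(stored=True),
    content=whoosh.fields.TEXT(stored=True),
)

# Open the index if it exists, otherwise create it
if not os.path.isdir(INDEX_DIR):
    os.makedirs(INDEX_DIR)
if whoosh.index.exists_in(INDEX_DIR):
    ix = whoosh.index.open_dir(INDEX_DIR)
else:
    ix = whoosh.index.create_in(INDEX_DIR, schema)

# Index a single document; field names must match the schema
writer = ix.writer()
writer.add_document(path=u'salt://top.sls', content=u'base: {}')
writer.commit()

# Parse the query text, then apply the hit limit in searcher.search()
parser = whoosh.qparser.QueryParser('content', schema=ix.schema)
qobj = parser.parse(u'base')
with ix.searcher() as searcher:
    hits = searcher.search(qobj, limit=10)
    # Copy stored fields out before the searcher closes
    results = [hit.fields() for hit in hits]
print(results)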