Add config flags for data source configuration
This commit adds data source options to the elastic-recheck bot configuration file. Users can now specify how to connect to an Elasticsearch server and a subunit2sql database, while everything still defaults to the openstack-infra servers so the running service keeps working unchanged.

Change-Id: I10db1a568cc01e137e5f4d8a8814b17201c4c438
parent 59545fea4f
commit 48ebc14283
@@ -11,3 +11,7 @@ user=treinish
 host=review.openstack.org
 query_file=/home/mtreinish/elasticRecheck/queries
 key=/home/mtreinish/.ssh/id_rsa
+
+[data_source]
+es_url=http://logstash.openstack.org/elasticsearch
+db_uri=mysql+pymysql://query:query@logstash.openstack.org/subunit2sql
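A deployer can read the new stanza with the standard library ConfigParser. A minimal sketch, assuming the sample file above is saved as elasticRecheck.conf next to the script (the file name and variable names here are ours, not part of the commit):

import ConfigParser  # Python 2 standard library, as used by the bot

config = ConfigParser.ConfigParser()
config.read('elasticRecheck.conf')

# The two new data-source options added by this commit.
es_url = config.get('data_source', 'es_url')
db_uri = config.get('data_source', 'db_uri')
print(es_url)
print(db_uri)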
@@ -27,6 +27,10 @@ channel_config=/path/to/yaml/config

 [gerrit]
 user=gerrit2
+
+[data_source]
+es_url=URLofELASTICSEARCH
+db_uri=SQLALCHEMY_URI_TO_SUBUNIT2SQL
 """

 # The yaml channel config should look like:
@@ -108,7 +112,8 @@ class RecheckWatchBot(irc.bot.SingleServerIRCBot):

 class RecheckWatch(threading.Thread):
     def __init__(self, ircbot, channel_config, msgs, username,
-                 queries, host, key, commenting=True):
+                 queries, host, key, commenting=True, es_url=None,
+                 db_uri=None):
         super(RecheckWatch, self).__init__()
         self.ircbot = ircbot
         self.channel_config = channel_config
@@ -124,6 +129,8 @@ class RecheckWatch(threading.Thread):
             'production',
             LPCACHEDIR,
             timeout=60)
+        self.es_url = es_url
+        self.db_uri = db_uri

     def display(self, channel, event):
         display = False
@@ -193,8 +200,8 @@ class RecheckWatch(threading.Thread):
     def run(self):
         # Import here because it needs to happen after daemonization
         import elastic_recheck.elasticRecheck as er
-        classifier = er.Classifier(self.queries)
-        stream = er.Stream(self.username, self.host, self.key)
+        classifier = er.Classifier(self.queries, self.es_url, self.db_uri)
+        stream = er.Stream(self.username, self.host, self.key, self.es_url)
         while True:
             try:
                 event = stream.get_failed_tempest()
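As the comment in run() notes, the elasticRecheck module is imported inside the thread body so it is only loaded after the process has daemonized. A tiny standalone sketch of that deferred-import pattern (heavy_module is just a stand-in, not a real dependency of the bot):

import threading


class Worker(threading.Thread):
    """Sketch of deferring a heavy import until the thread actually runs."""

    def run(self):
        # Imported here, not at module level, so no sockets, file handles,
        # or other global state get created before daemonization/fork.
        import json as heavy_module  # stand-in for a heavy dependency
        print(heavy_module.dumps({'started': True}))


Worker().start()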
@@ -308,7 +315,12 @@ def _main(args, config):
         config.get('gerrit', 'query_file'),
         config.get('gerrit', 'host', 'review.openstack.org'),
         config.get('gerrit', 'key'),
-        not args.nocomment
+        not args.nocomment,
+        config.get('data_source', 'es_url',
+                   'http://logstash.openstack.org/elasticsearch'),
+        config.get('data_source', 'db_uri',
+                   'mysql+pymysql://query:query@logstash.openstack.org/'
+                   'subunit2sql'),
     )

     recheck.start()
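The calls above follow the file's existing config.get(section, option, default) pattern. For a standalone script reading the same file with the stock Python 2 ConfigParser, one way to get the same default-to-openstack-infra behaviour is an explicit fallback; a small illustrative sketch (the helper and the DEFAULT_* constant names are ours, the default values are the ones wired in above):

import ConfigParser

# Defaults taken from the values this commit wires into _main().
DEFAULT_ES_URL = 'http://logstash.openstack.org/elasticsearch'
DEFAULT_DB_URI = ('mysql+pymysql://query:query@'
                  'logstash.openstack.org/subunit2sql')


def option_or_default(config, section, option, default):
    # Fall back when the section or option is absent from the file.
    try:
        return config.get(section, option)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        return default


config = ConfigParser.ConfigParser()
config.read('elasticRecheck.conf')
es_url = option_or_default(config, 'data_source', 'es_url', DEFAULT_ES_URL)
db_uri = option_or_default(config, 'data_source', 'db_uri', DEFAULT_DB_URI)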
@@ -195,10 +195,11 @@ class Stream(object):

     log = logging.getLogger("recheckwatchbot")

-    def __init__(self, user, host, key, thread=True):
+    def __init__(self, user, host, key, thread=True, es_url=None):
+        self.es_url = es_url or ES_URL
         port = 29418
         self.gerrit = gerritlib.gerrit.Gerrit(host, user, port, key)
-        self.es = results.SearchEngine(ES_URL)
+        self.es = results.SearchEngine(self.es_url)
         if thread:
             self.gerrit.startWatching()

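Both Stream and Classifier (below) use the same "None means use the module default" convention, which is what keeps existing callers working without changes. A tiny standalone sketch of that pattern (the class and constant here are illustrative, not part of the library):

ES_URL = 'http://logstash.openstack.org/elasticsearch'  # module-level default


class SearchClient(object):
    """Illustrative only: defaulting an optional endpoint argument."""

    def __init__(self, es_url=None):
        # None (the default) falls back to the module constant, so code
        # written before the new argument existed behaves exactly as before.
        self.es_url = es_url or ES_URL


print(SearchClient().es_url)                            # default endpoint
print(SearchClient('http://example.com:9200').es_url)   # overridden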
@@ -370,8 +371,10 @@ class Classifier(object):

     queries = None

-    def __init__(self, queries_dir):
-        self.es = results.SearchEngine(ES_URL)
+    def __init__(self, queries_dir, es_url=None, db_uri=None):
+        self.es_url = es_url or ES_URL
+        self.db_uri = db_uri or DB_URI
+        self.es = results.SearchEngine(self.es_url)
         self.queries_dir = queries_dir
         self.queries = loader.load(self.queries_dir)

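With this change a consumer of the library can point the Classifier from elastic_recheck.elasticRecheck (as imported in run() above) at a private Elasticsearch and subunit2sql database. A hedged usage sketch; the endpoint values and queries path are placeholders, and it assumes the elastic_recheck package is installed:

import elastic_recheck.elasticRecheck as er

# Placeholder endpoints for a private deployment.
classifier = er.Classifier(
    'queries',  # directory of query definitions
    es_url='http://elasticsearch.example.com:9200',
    db_uri='mysql+pymysql://query:query@db.example.com/subunit2sql',
)
# Omitting es_url/db_uri (or passing None) keeps the old behaviour of
# talking to the openstack-infra servers.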
@@ -398,7 +401,7 @@ class Classifier(object):
         # Reload each time
         self.queries = loader.load(self.queries_dir)
         bug_matches = []
-        engine = sqlalchemy.create_engine(DB_URI)
+        engine = sqlalchemy.create_engine(self.db_uri)
         Session = orm.sessionmaker(bind=engine)
         session = Session()
         for x in self.queries:
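Because the engine is now built from self.db_uri, the same SQLAlchemy session setup works against any subunit2sql-compatible database. A minimal standalone sketch of that setup; the in-memory SQLite URI is just a stand-in so the snippet runs without a MySQL server:

import sqlalchemy
from sqlalchemy import orm

db_uri = 'sqlite://'  # stand-in for the configured subunit2sql URI

engine = sqlalchemy.create_engine(db_uri)
Session = orm.sessionmaker(bind=engine)
session = Session()
# The classifier hands a session like this to the subunit2sql query API;
# here we only verify that the configurable URI yields a working connection.
print(session.execute(sqlalchemy.text('SELECT 1')).scalar())
session.close()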
|
Loading…
x
Reference in New Issue
Block a user