Optimize the - Split brain detection script - Printable Version +- Python Forum (https://python-forum.io) +-- Forum: General (https://python-forum.io/forum-1.html) +--- Forum: Code sharing (https://python-forum.io/forum-5.html) +--- Thread: Optimize the - Split brain detection script (/thread-21636.html) |
Optimize the - Split brain detection script - 0gg1 - Oct-08-2019 Hello, I'm not that good on Python, so I have a question about optimizing my code. Here is the code. It's written in Python 3.6 #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Nagios/Icinga script for split brain monitoring. Usage: The IP addresses must be in quotes and separated with # ``` ./galera_split_brain_detection.py "192.168.1.1#192.168.3.17#192.168.3.18#192.168.3.19#192.168.3.2" ``` """ import sys from time import sleep import mysql.connector MYSQL_USER = "database_username" MYSQL_PASS = "database_password" MYSQL_DB = "database_name" nodes = sys.argv[1:][0].split('#') def check_dbnodes(host): """ Check if all DB nodes are alive """ try: connection = mysql.connector.connect(host=host, database=MYSQL_DB, user=MYSQL_USER, password=MYSQL_PASS, connection_timeout=10) if connection.is_connected(): connection.close() return True except mysql.connector.Error as e: print(e.msg) def get_max_id(host): """ Get max id from a database """ try: connection = mysql.connector.connect(host=host, database=MYSQL_DB, user=MYSQL_USER, password=MYSQL_PASS, connection_timeout=10) sql_maxid_Query = "SELECT max(id) FROM processing_logs" cursor = connection.cursor() cursor.execute(sql_maxid_Query) max_id = cursor.fetchone() return max_id[0] except mysql.connector.Error as e: print("Error while taking max_id!", e) finally: if connection.is_connected(): connection.close() cursor.close() def get_md5_of_max_id(host, max_id): """ Select query in a DB where the record was return from get_max_id(). Hash the record to md5. """ try: connection = mysql.connector.connect(host=host, database=MYSQL_DB, user=MYSQL_USER, password=MYSQL_PASS) sql_md5_Query = """ SELECT md5( concat( id, ifnull(unique_id,'0'), ifnull(title,'EMPTY'), ifnull(message,'EMPTY'), ifnull(merchant_ip,'0'), ifnull(level,'0'), created_at, updated_at)) FROM gw_logs_production.processing_logs WHERE id = {} """.format(max_id) cursor = connection.cursor() cursor.execute(sql_md5_Query) results = cursor.fetchone() return results[0] except mysql.connector.Error as e: print("Error while comparing the max_id with all nodes!", e) finally: if connection.is_connected(): connection.close() cursor.close() def wsrep_cluster_status(host): """ `show global status like 'wsrep_cluster_status'` -> This must return `Primary` as result """ try: connection = mysql.connector.connect(host=host, database=MYSQL_DB, user=MYSQL_USER, password=MYSQL_PASS) cursor_wsrep_Query = connection.cursor() sql_show_wsrep_Query = "show global status like 'wsrep_cluster_status'" cursor_wsrep_Query.execute(sql_show_wsrep_Query) result_wsrep_Query = cursor_wsrep_Query.fetchone()[1] return result_wsrep_Query except mysql.connector.Error as e: print("Error while checking the `wsrep_cluster_status`!", e) finally: if connection.is_connected(): connection.close() cursor_wsrep_Query.close() def auto_increment_increment(host): """ `show global variables like 'auto_increment_increment'` -> Must return the number of nodes with `auto_increment_increment`. This is a number of nodes in the DB cluster. The results count from wsrep_cluster_status() must be equal to results count from this function """ try: connection = mysql.connector.connect(host=host, database=MYSQL_DB, user=MYSQL_USER, password=MYSQL_PASS) cursor_auto_increment = connection.cursor(buffered=True) sql_show_auto_increment = "show global variables like 'auto_increment_increment'" cursor_auto_increment.execute(sql_show_auto_increment) result_auto_increment = cursor_auto_increment.fetchone()[1] return result_auto_increment except mysql.connector.Error as e: print("Error while checking the `auto_increment_increment`!", e) finally: if connection.is_connected(): connection.close() cursor_auto_increment.close() aliveDBnodes = [node for node in nodes if check_dbnodes(node)] if not aliveDBnodes: print("There are no alive DB nodes. All DB nodes are down!") exit() max_id_to_check = get_max_id(aliveDBnodes[0]) primary_nodes = [wsrep_cluster_status(node) for node in aliveDBnodes if wsrep_cluster_status(node) == 'Primary'] sleep(5) md5set = set() for node in aliveDBnodes: md5set.add(get_md5_of_max_id(node, max_id_to_check)) if len(md5set) == 1 and len(primary_nodes) == int(auto_increment_increment(aliveDBnodes[0])): print("OK") sys.exit(0) elif len(md5set) > 1 or len(primary_nodes) != int(auto_increment_increment(aliveDBnodes[0])): print("Split Brain detected") sys.exit(2) else: print("UNKNOWN - Missing data") sys.exit(3)My question here is how to optimize the MySQL connection. This part of the code was repeated a couple of times: connection = mysql.connector.connect(host=host, database=MYSQL_DB, user=MYSQL_USER, password=MYSQL_PASS, connection_timeout=10)Let me know please how to optimize this and if you see anything else. RE: Optimize the - Split brain detection script - DeaD_EyE - Oct-08-2019 Just make a class: class Foo: def __init__(self, mysql_host, mysql_db, mysql_user, mysql_password, mysql_timeout): self.mysql_host = mysql_host self.mysql_db = mysql_db self.mysql_user = mysql_user self.mysql_password = mysql_password def get_connection(self): return mysql.connector.connect( host=self.mysql_host, database=self.mysql_db, user=self.mysql_user, password=self.mysql_password, connection_timeout=self.mysql_timeout, ) RE: Optimize the - Split brain detection script - 0gg1 - Oct-08-2019 DeaD_EyE I thought about this, but I wondering if there is another way without using class -es
|