veza/ansible/roles/haproxy/files/haproxy_stat.py
2025-12-03 22:56:50 +01:00

93 lines
3.2 KiB
Python

#!/usr/bin/python3
import argparse
import datetime
import os
import subprocess
###########################################################################
# BEGIN FUNCTIONS
###########################################################################
def parse_haproxy_stats(line, field_names=None):
    """Turn one CSV line of haproxy "show stat" output into a dict.

    Args:
        line: a raw stats line, comma-separated fields with a trailing
            comma and newline (as emitted by "show stat").
        field_names: ordered column names to key the dict with; defaults
            to the module-level ``field_name_list`` parsed from the
            header line of the cache file (backward-compatible default).

    Returns:
        dict mapping field name -> raw string value.
    """
    if field_names is None:
        # Preserve the original behaviour: fall back to the global header.
        field_names = field_name_list
    values = line.rstrip(",\n").split(",")
    # zip pairs each value with its column name, replacing the manual
    # index counter of the original implementation.
    return dict(zip(field_names, values))
def refresh_cache_file():
    """Regenerate the stats cache by querying the haproxy admin socket.

    Sends "show stat" to the monitoring socket via socat, writes the
    output to a temp file, then renames it over the cache file so that
    readers never observe a partially written file.

    Raises:
        subprocess.CalledProcessError: if socat exits non-zero (matches
            the original check_output behaviour).
    """
    tmp_file = stat_file + ".tmp"
    # Argument list + shell=False avoids shell-injection and quoting
    # pitfalls of the previous 'echo ... | socat ... > file' string;
    # the command is fed on stdin instead of piping echo.
    with open(tmp_file, "w") as tmp:  # noqa: PTH123
        subprocess.run(
            ["socat", "/run/haproxy/monitoring.sock", "stdio"],
            input="show stat\n",
            stdout=tmp,
            text=True,
            check=True,
        )
    os.rename(tmp_file, stat_file)  # noqa: PTH104
###########################################################################
# END FUNCTIONS - BEGIN PARSER
###########################################################################
# Command-line interface: all three selectors are mandatory string flags.
parser = argparse.ArgumentParser(
    description=(
        "return a specific stat for a specific pxname and svname, it uses a "
        "cache file that is automatically refreshed if it is older than 1 minute"
    )
)
# Register the flags from a small table instead of repeating add_argument
# boilerplate three times; behaviour is identical.
_ARG_SPECS = (
    (
        "--pxname",
        "this is the name of the backend, frontend or server exactly as it appears in the configuration",
    ),
    (
        "--svname",
        "the type of the service, either FRONTEND, BACKEND or the name of the server",
    ),
    ("--stat", "the stat wanted"),
)
for _flag, _help in _ARG_SPECS:
    parser.add_argument(_flag, help=_help, type=str, required=True)
args = parser.parse_args()
###########################################################################
# END PARSER - BEGIN PROGRAM
###########################################################################
stat_file = "/dev/shm/haproxy_stat.txt"
# Rebuild the cache when it is missing or older than 60 seconds; a single
# freshness flag replaces the duplicated refresh call in both branches.
cache_is_fresh = False
if os.path.isfile(stat_file):  # noqa: PTH113
    mtime = datetime.datetime.fromtimestamp(os.path.getmtime(stat_file))  # noqa: PTH204
    cache_is_fresh = datetime.datetime.now() - mtime <= datetime.timedelta(seconds=60)
if not cache_is_fresh:
    refresh_cache_file()
# transform the stat file into a list of dicts, one per haproxy stats line
with open(stat_file) as stat:  # noqa: PTH123
    stat_lines = stat.readlines()
# the first line is the header ("# pxname,svname,...,"); strip the leading
# "# " marker and the trailing ",\n" to obtain the column names
field_name_list = stat_lines[0].rstrip(",\n").lstrip("# ").split(",")
# parse every remaining non-blank line; skipping blanks is safer than the
# previous unconditional `del stat_by_line[-1]`, which silently dropped a
# real data row whenever the trailing empty line was absent
STATS_AS_LIST = [parse_haproxy_stats(row) for row in stat_lines[1:] if row.strip()]
# locate the first row matching both selectors, or None if absent
CORRECT_LINE = next(
    (row for row in STATS_AS_LIST if row["pxname"] == args.pxname and row["svname"] == args.svname),
    None,
)
# Print the stat if a matching element is found, otherwise print "non-existent"
# (an unknown --stat name on a matched row still raises KeyError, as before)
print(CORRECT_LINE[args.stat] if CORRECT_LINE else "non-existent")