From 7f83be22c69b23b4620bffc6a2e0db65332cd6b9 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 26 Sep 2025 15:01:01 +0200 Subject: [PATCH 1/9] tools/hss_Async_client: fix parsing config twice The config is already getting parsed a few lines above, don't do it twice. --- tools/hss_Async_client.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tools/hss_Async_client.py b/tools/hss_Async_client.py index c540a73f..644214c0 100644 --- a/tools/hss_Async_client.py +++ b/tools/hss_Async_client.py @@ -12,9 +12,6 @@ import redis import diameter import time -import yaml -with open("config.yaml", 'r') as stream: - yaml_config = (yaml.safe_load(stream)) #Values to change / tweak recv_ip = yaml_config['hss']['bind_ip'] #IP of this Machine From d462970f9432d99e22bcf2174a19218256f0acc3 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 9 Oct 2025 09:06:08 +0200 Subject: [PATCH 2/9] lib/S6a_crypt: remove unused config loading The config is not used here, so remove code for loading it. Remove commented out alternative logging initializing code while at it too. --- lib/S6a_crypt.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/lib/S6a_crypt.py b/lib/S6a_crypt.py index f8e30477..95e53b4d 100755 --- a/lib/S6a_crypt.py +++ b/lib/S6a_crypt.py @@ -5,18 +5,7 @@ import logging import os import sys -sys.path.append(os.path.realpath('../')) -import yaml - -try: - with open("../config.yaml", 'r') as stream: - config = (yaml.safe_load(stream)) -except: - with open("config.yaml", 'r') as stream: - config = (yaml.safe_load(stream)) - -# logtool = logtool.LogTool() -# logtool.setup_logger('CryptoLogger', yaml_config['logging']['logfiles']['database_logging_file'], level=yaml_config['logging']['level']) + CryptoLogger = logging.getLogger('CryptoLogger') CryptoLogger.info("Initialised Diameter Logger, importing database") From cb9e8d0dabe975ea431cc5c888001476bfdea640 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 9 Oct 2025 09:52:04 +0200 Subject: [PATCH 3/9] tools/MIB_generator: remove unused config loading --- tools/MIB_generator.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tools/MIB_generator.py b/tools/MIB_generator.py index 4ada2126..78acd1bc 100644 --- a/tools/MIB_generator.py +++ b/tools/MIB_generator.py @@ -4,11 +4,7 @@ import sys import re import os - -import yaml import sys -with open(os.path.dirname(__file__) + '/../config.yaml') as stream: - yaml_config = (yaml.safe_load(stream)) global generic_counter global oid_dict From a3343fb4998dfb5e402676e077028e6f215d2eda Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 9 Oct 2025 09:57:17 +0200 Subject: [PATCH 4/9] tools/databaseUpgrade/alembic/env: config -> alembic_config Rename config to alembic_config, so we can use config as the pyhss config (as it is used in most other files). --- tools/databaseUpgrade/alembic/env.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tools/databaseUpgrade/alembic/env.py b/tools/databaseUpgrade/alembic/env.py index 4cf83b41..1e4658b8 100644 --- a/tools/databaseUpgrade/alembic/env.py +++ b/tools/databaseUpgrade/alembic/env.py @@ -9,12 +9,12 @@ # this is the Alembic Config object, which provides # access to the values within the .ini file in use. -config = context.config +alembic_config = context.config # Interpret the config file for Python logging. # This line sets up loggers basically. 
-if config.config_file_name is not None: - fileConfig(config.config_file_name) +if alembic_config.config_file_name is not None: + fileConfig(alembic_config.config_file_name) # add your model's MetaData object here # for 'autogenerate' support @@ -52,7 +52,7 @@ def run_migrations_offline() -> None: script output. """ - url = config.get_main_option("sqlalchemy.url") + url = alembic_config.get_main_option("sqlalchemy.url") context.configure( url=url, target_metadata=target_metadata, From c71b0c756db96aef8c55ca3102acf879e1d83b84 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 9 Oct 2025 10:06:20 +0200 Subject: [PATCH 5/9] Rework loading of config.yaml Move the config file loading to a new lib/pyhss_config.py file, instead of having it duplicated in all files that load the config. This allows replacing code like: import yaml try: with open("../config.yaml", 'r') as stream: config = (yaml.safe_load(stream)) except: with open("config.yaml", 'r') as stream: config = (yaml.safe_load(stream)) with the following in all files: from pyhss_config import config Replace code that loaded the config to self.config in various classes with just using the global config variable. Adjust the few scripts that had named it yaml_config instead of config to just config, so it is consistent. Let the new code not only load the config from the top dir of the git repository, but also from a PYHSS_CONFIG env var, /etc/pyhss/config.yaml and /usr/share/pyhss/config.yaml. This is useful for being able to still load configs after installing PyHSS into a different location, for example with pip or apt. It also allows running tests against PyHSS with a config file in a separate directory (e.g. in osmo-ttcn3-hacks). --- README.md | 6 +- lib/database.py | 98 ++++++++++++---------------- lib/diameter.py | 70 ++++++++++---------- lib/diameterAsync.py | 15 ++--- lib/pyhss_config.py | 54 +++++++++++++++ services/apiService.py | 4 +- services/databaseService.py | 33 ++++------ services/diameterService.py | 56 +++++++--------- services/georedService.py | 35 +++++----- services/gsupService.py | 13 +--- services/hssService.py | 36 +++++----- services/logService.py | 24 +++---- services/metricService.py | 27 ++++---- tools/Diameter_client.py | 16 ++--- tools/databaseUpgrade/alembic/env.py | 16 ++--- tools/hss_Async_client.py | 19 +++--- tools/snmp_service.py | 10 ++- tools/view_stats.py | 8 +-- 18 files changed, 267 insertions(+), 273 deletions(-) create mode 100644 lib/pyhss_config.py diff --git a/README.md b/README.md index 1679c011..81b346ac 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,11 @@ The underlying library - ``diameter.py`` can be easily worked with to add suppor ## Usage -Basic configuration is set in the ``config.yaml`` file, +Basic configuration is set in the `config.yaml` file, which gets loaded from: +* The path in the `PYHSS_CONFIG` environment variable (if set) +* `/etc/pyhss/config.yaml` +* `/usr/share/pyhss/config.yaml` +* The same directory as this `README.md` You will need to set the IP address to bind to (IPv4 or IPv6), the Diameter hostname, realm, your PLMN and transport type to use (SCTP or TCP). 
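
[Editor's note: a minimal usage sketch of the new shared loader described above. The service snippet, the /tmp path and the print are hypothetical; PYHSS_CONFIG, the fallback locations, the sys.path handling and the config keys are taken from the patches in this series.]

    import os, sys

    # pyhss_config lives in lib/, which the services put on sys.path.
    sys.path.append(os.path.realpath('../lib'))

    # Optionally point PyHSS at a config outside the source tree, e.g. when
    # running tests from a separate directory. This must happen before the
    # first import, because load_config() runs at import time.
    os.environ.setdefault("PYHSS_CONFIG", "/tmp/pyhss-test/config.yaml")

    from pyhss_config import config  # parsed config.yaml as a dict

    # Values are then read the same way the reworked services do:
    origin_host = config['hss']['OriginHost']
    redis_host = config.get('redis', {}).get('host', 'localhost')
    print(origin_host, redis_host)

If no config file is found (or PYHSS_CONFIG points at a missing file), load_config() prints the paths it tried and exits, so a misconfiguration fails fast instead of silently falling back to another location.
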
diff --git a/lib/database.py b/lib/database.py index ae622c71..877815ba 100755 --- a/lib/database.py +++ b/lib/database.py @@ -19,17 +19,11 @@ import S6a_crypt from gsup.protocol.ipa_peer import IPAPeerRole from messaging import RedisMessaging -import yaml import json import socket import traceback +from pyhss_config import config -try: - with open("../config.yaml", 'r') as stream: - config = (yaml.safe_load(stream)) -except: - with open("config.yaml", 'r') as stream: - config = (yaml.safe_load(stream)) Base = declarative_base() class APN(Base): @@ -357,23 +351,17 @@ class SUBSCRIBER_ATTRIBUTES_OPERATION_LOG(OPERATION_LOG_BASE): class Database: def __init__(self, logTool, redisMessaging=None): - try: - with open("../config.yaml", 'r') as stream: - self.config = (yaml.safe_load(stream)) - except: - with open("config.yaml", 'r') as stream: - self.config = (yaml.safe_load(stream)) - - self.redisUseUnixSocket = self.config.get('redis', {}).get('useUnixSocket', False) - self.redisUnixSocketPath = self.config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') - self.redisHost = self.config.get('redis', {}).get('host', 'localhost') - self.redisPort = self.config.get('redis', {}).get('port', 6379) - self.tacDatabasePath = self.config.get('eir', {}).get('tac_database_csv', None) - self.imsiImeiLogging = self.config.get('eir', {}).get('imsi_imei_logging', True) - self.simSwapNotificationEnabled = self.config.get('eir', {}).get('simSwapNotification', False) - self.georedEnabled = self.config.get('geored', {}).get('enabled', True) - self.eirNoMatchResponse = int(self.config.get('eir', {}).get('no_match_response', 2)) - self.eirStoreOffnetImsi = self.config.get('eir', {}).get('store_offnet_imsi', False) + + self.redisUseUnixSocket = config.get('redis', {}).get('useUnixSocket', False) + self.redisUnixSocketPath = config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') + self.redisHost = config.get('redis', {}).get('host', 'localhost') + self.redisPort = config.get('redis', {}).get('port', 6379) + self.tacDatabasePath = config.get('eir', {}).get('tac_database_csv', None) + self.imsiImeiLogging = config.get('eir', {}).get('imsi_imei_logging', True) + self.simSwapNotificationEnabled = config.get('eir', {}).get('simSwapNotification', False) + self.georedEnabled = config.get('geored', {}).get('enabled', True) + self.eirNoMatchResponse = int(config.get('eir', {}).get('no_match_response', 2)) + self.eirStoreOffnetImsi = config.get('eir', {}).get('store_offnet_imsi', False) self.logTool = logTool if redisMessaging: @@ -381,14 +369,14 @@ def __init__(self, logTool, redisMessaging=None): else: self.redisMessaging = RedisMessaging(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) - db_type = str(self.config['database']['db_type']) + db_type = str(config['database']['db_type']) if db_type == 'postgresql': - db_string = 'postgresql+psycopg2://' + str(self.config['database']['username']) + ':' + str(self.config['database']['password']) + '@' + str(self.config['database']['server']) + '/' + str(self.config['database']['database']) + db_string = 'postgresql+psycopg2://' + str(config['database']['username']) + ':' + str(config['database']['password']) + '@' + str(config['database']['server']) + '/' + str(config['database']['database']) elif db_type == 'mysql': - db_string = 'mysql://' + str(self.config['database']['username']) + ':' + str(self.config['database']['password']) + '@' + 
str(self.config['database']['server']) + '/' + str(self.config['database']['database'] + "?autocommit=true") + db_string = 'mysql://' + str(config['database']['username']) + ':' + str(config['database']['password']) + '@' + str(config['database']['server']) + '/' + str(config['database']['database'] + "?autocommit=true") elif db_type == 'sqlite': - db_string = "sqlite:///" + str(self.config['database']['database']) + db_string = "sqlite:///" + str(config['database']['database']) else: raise RuntimeError(f'Invalid database.db_type set "{db_type}"') @@ -396,10 +384,10 @@ def __init__(self, logTool, redisMessaging=None): self.engine = create_engine( db_string, - echo = self.config['logging'].get('sqlalchemy_sql_echo', False), - pool_recycle=self.config['logging'].get('sqlalchemy_pool_recycle', 5), - pool_size=self.config['logging'].get('sqlalchemy_pool_size', 30), - max_overflow=self.config['logging'].get('sqlalchemy_max_overflow', 0)) + echo = config['logging'].get('sqlalchemy_sql_echo', False), + pool_recycle=config['logging'].get('sqlalchemy_pool_recycle', 5), + pool_size=config['logging'].get('sqlalchemy_pool_size', 30), + max_overflow=config['logging'].get('sqlalchemy_max_overflow', 0)) # Create database if it does not exist. if not database_exists(self.engine.url): @@ -975,9 +963,9 @@ def handleGeored(self, jsonData, operation: str="PATCH", asymmetric: bool=False, self.logTool.log(service='Database', level='warning', message="Failed to send Geored message invalid operation type, received: " + str(operation), redisClient=self.redisMessaging) return georedDict = {} - if self.config.get('geored', {}).get('enabled', False): - if self.config.get('geored', {}).get('endpoints', []) is not None: - if len(self.config.get('geored', {}).get('endpoints', [])) > 0: + if config.get('geored', {}).get('enabled', False): + if config.get('geored', {}).get('endpoints', []) is not None: + if len(config.get('geored', {}).get('endpoints', [])) > 0: georedDict['body'] = jsonData georedDict['operation'] = operation georedDict['timestamp'] = time.time_ns() @@ -996,8 +984,8 @@ def handleGeored(self, jsonData, operation: str="PATCH", asymmetric: bool=False, return False def handleWebhook(self, objectData, operation: str="PATCH"): - webhooksEnabled = self.config.get('webhooks', {}).get('enabled', False) - endpointList = self.config.get('webhooks', {}).get('endpoints', []) + webhooksEnabled = config.get('webhooks', {}).get('enabled', False) + endpointList = config.get('webhooks', {}).get('endpoints', []) webhook = {} if not webhooksEnabled: @@ -1478,8 +1466,8 @@ def Get_Served_Subscribers(self, get_local_users_only=False): self.logTool.log(service='Database', level='debug', message="Filtering to locally served IMS Subs only", redisClient=self.redisMessaging) try: serving_hss = result['serving_mme_peer'].split(';')[1] - self.logTool.log(service='Database', level='debug', message="Serving HSS: " + str(serving_hss) + " and this is: " + str(self.config['hss']['OriginHost']), redisClient=self.redisMessaging) - if serving_hss == self.config['hss']['OriginHost']: + self.logTool.log(service='Database', level='debug', message="Serving HSS: " + str(serving_hss) + " and this is: " + str(config['hss']['OriginHost']), redisClient=self.redisMessaging) + if serving_hss == config['hss']['OriginHost']: self.logTool.log(service='Database', level='debug', message="Serving HSS matches local HSS", redisClient=self.redisMessaging) Served_Subs[result['imsi']] = {} Served_Subs[result['imsi']] = result @@ -1522,8 +1510,8 @@ def 
Get_Served_IMS_Subscribers(self, get_local_users_only=False): self.logTool.log(service='Database', level='debug', message="Filtering Get_Served_IMS_Subscribers to locally served IMS Subs only", redisClient=self.redisMessaging) try: serving_ims_hss = result['scscf_peer'].split(';')[1] - self.logTool.log(service='Database', level='debug', message="Serving IMS-HSS: " + str(serving_ims_hss) + " and this is: " + str(self.config['hss']['OriginHost']), redisClient=self.redisMessaging) - if serving_ims_hss == self.config['hss']['OriginHost']: + self.logTool.log(service='Database', level='debug', message="Serving IMS-HSS: " + str(serving_ims_hss) + " and this is: " + str(config['hss']['OriginHost']), redisClient=self.redisMessaging) + if serving_ims_hss == config['hss']['OriginHost']: self.logTool.log(service='Database', level='debug', message="Serving IMS-HSS matches local HSS for " + str(result['imsi']), redisClient=self.redisMessaging) Served_Subs[result['imsi']] = {} Served_Subs[result['imsi']] = result @@ -1563,8 +1551,8 @@ def Get_Served_PCRF_Subscribers(self, get_local_users_only=False): self.logTool.log(service='Database', level='debug', message="Filtering to locally served IMS Subs only", redisClient=self.redisMessaging) try: serving_pcrf = result['serving_pgw_peer'].split(';')[1] - self.logTool.log(service='Database', level='debug', message="Serving PCRF: " + str(serving_pcrf) + " and this is: " + str(self.config['hss']['OriginHost']), redisClient=self.redisMessaging) - if serving_pcrf == self.config['hss']['OriginHost']: + self.logTool.log(service='Database', level='debug', message="Serving PCRF: " + str(serving_pcrf) + " and this is: " + str(config['hss']['OriginHost']), redisClient=self.redisMessaging) + if serving_pcrf == config['hss']['OriginHost']: self.logTool.log(service='Database', level='debug', message="Serving PCRF matches local PCRF", redisClient=self.redisMessaging) self.logTool.log(service='Database', level='debug', message="Processed result", redisClient=self.redisMessaging) @@ -1718,7 +1706,7 @@ def Update_AuC(self, auc_id, sqn=1, propagate=True): self.logTool.log(service='Database', level='debug', message=self.UpdateObj(AUC, {'sqn': sqn}, auc_id, True), redisClient=self.redisMessaging) if propagate: - if self.config['geored'].get('enabled', False) == True: + if config['geored'].get('enabled', False) == True: aucBody = { "auc_id": auc_id, "sqn": sqn, @@ -1785,7 +1773,7 @@ def update_subscriber_location(self, imsi: str, last_seen_eci=None, last_seen_en pass if propagate == True: - if 'HSS' in self.config['geored'].get('sync_actions', []) and self.config['geored'].get('enabled', False) == True: + if 'HSS' in config['geored'].get('sync_actions', []) and config['geored'].get('enabled', False) == True: self.logTool.log(service='Database', level='debug', message="Propagate Subscriber Location changes to Geographic PyHSS instances", redisClient=self.redisMessaging) self.handleGeored({"imsi": str(imsi), "last_seen_eci": last_seen_eci, "last_seen_enodeb_id": last_seen_enodeb_id, "last_seen_cell_id": last_seen_cell_id, "last_seen_tac": last_seen_tac, "last_seen_mcc": last_seen_mcc, @@ -1805,7 +1793,7 @@ def Update_Serving_MME(self, imsi, serving_mme, serving_mme_realm=None, serving_ session = Session() try: result = session.query(SUBSCRIBER).filter_by(imsi=imsi).one() - if self.config['hss']['CancelLocationRequest_Enabled'] == True: + if config['hss']['CancelLocationRequest_Enabled'] == True: self.logTool.log(service='Database', level='debug', message="Evaluating if we should 
trigger sending a CLR.", redisClient=self.redisMessaging) if result.serving_mme != None: serving_hss = str(result.serving_mme_peer).split(';',1)[1] @@ -1817,13 +1805,13 @@ def Update_Serving_MME(self, imsi, serving_mme, serving_mme_realm=None, serving_ self.logTool.log(service='Database', level='debug', message="This MME is unchanged (" + str(serving_mme) + ") - so no need to send a CLR", redisClient=self.redisMessaging) elif (str(result.serving_mme) != str(serving_mme)): self.logTool.log(service='Database', level='debug', message="There is a difference in serving MME, old MME is '" + str(result.serving_mme) + "' new MME is '" + str(serving_mme) + "' - We need to trigger sending a CLR", redisClient=self.redisMessaging) - if serving_hss != self.config['hss']['OriginHost']: + if serving_hss != config['hss']['OriginHost']: self.logTool.log(service='Database', level='debug', message="This subscriber is not served by this HSS it is served by HSS at " + serving_hss + " - We need to trigger sending a CLR on " + str(serving_hss), redisClient=self.redisMessaging) - URL = 'http://' + serving_hss + '.' + self.config['hss']['OriginRealm'] + ':8080/push/clr/' + str(imsi) + URL = 'http://' + serving_hss + '.' + config['hss']['OriginRealm'] + ':8080/push/clr/' + str(imsi) else: self.logTool.log(service='Database', level='debug', message="This subscriber is served by this HSS we need to send a CLR to old MME from this HSS", redisClient=self.redisMessaging) - URL = 'http://' + serving_hss + '.' + self.config['hss']['OriginRealm'] + ':8080/push/clr/' + str(imsi) + URL = 'http://' + serving_hss + '.' + config['hss']['OriginRealm'] + ':8080/push/clr/' + str(imsi) self.logTool.log(service='Database', level='debug', message="Sending CLR to API at " + str(URL), redisClient=self.redisMessaging) clrBody = { @@ -1872,7 +1860,7 @@ def Update_Serving_MME(self, imsi, serving_mme, serving_mme_realm=None, serving_ #Sync state change with geored if propagate == True: - if 'HSS' in self.config['geored'].get('sync_actions', []) and self.config['geored'].get('enabled', False) == True: + if 'HSS' in config['geored'].get('sync_actions', []) and config['geored'].get('enabled', False) == True: self.logTool.log(service='Database', level='debug', message="Propagate MME changes to Geographic PyHSS instances", redisClient=self.redisMessaging) self.handleGeored({ "imsi": str(imsi), @@ -1932,7 +1920,7 @@ def Update_Proxy_CSCF(self, imsi, proxy_cscf, pcscf_realm=None, pcscf_peer=None, #Sync state change with geored if propagate == True: - if 'IMS' in self.config['geored']['sync_actions'] and self.georedEnabled == True: + if 'IMS' in config['geored']['sync_actions'] and self.georedEnabled == True: self.logTool.log(service='Database', level='debug', message="Propagate IMS changes to Geographic PyHSS instances", redisClient=self.redisMessaging) self.handleGeored({"imsi": str(imsi), "pcscf": result.pcscf, "pcscf_realm": result.pcscf_realm, "pcscf_timestamp": pcscf_timestamp_string, "pcscf_peer": result.pcscf_peer, "pcscf_active_session": pcscf_active_session}) else: @@ -1986,7 +1974,7 @@ def Update_Serving_CSCF(self, imsi, serving_cscf, scscf_realm=None, scscf_peer=N #Sync state change with geored if propagate == True: - if 'IMS' in self.config['geored']['sync_actions'] and self.georedEnabled == True: + if 'IMS' in config['geored']['sync_actions'] and self.georedEnabled == True: self.logTool.log(service='Database', level='debug', message="Propagate IMS changes to Geographic PyHSS instances", redisClient=self.redisMessaging) 
self.handleGeored({"imsi": str(imsi), "scscf": result.scscf, "scscf_realm": result.scscf_realm, "scscf_timestamp": scscf_timestamp_string, "scscf_peer": result.scscf_peer}) else: @@ -2138,7 +2126,7 @@ def Update_Serving_APN(self, imsi, apn, pcrf_session_id, serving_pgw, subscriber #Sync state change with geored if propagate == True: try: - if 'PCRF' in self.config['geored']['sync_actions'] and self.georedEnabled == True: + if 'PCRF' in config['geored']['sync_actions'] and self.georedEnabled == True: self.logTool.log(service='Database', level='debug', message="Propagate PCRF changes to Geographic PyHSS instances", redisClient=self.redisMessaging) self.handleGeored({"imsi": str(imsi), 'serving_apn' : apn, @@ -2617,7 +2605,7 @@ def Store_IMSI_IMEI_Binding(self, imsi, imei, match_response_code, propagate=Tru #Sync state change with geored if propagate == True: try: - if 'EIR' in self.config['geored']['sync_actions'] and self.georedEnabled == True: + if 'EIR' in config['geored']['sync_actions'] and self.georedEnabled == True: self.logTool.log(service='Database', level='debug', message="Propagate EIR changes to Geographic PyHSS instances", redisClient=self.redisMessaging) self.handleGeored( {"imsi": str(imsi), @@ -2725,7 +2713,7 @@ def Check_EIR(self, imsi, imei): except Exception as e: self.logTool.log(service='Database', level='error', message=f"Error Storing IMSI / IMEI Binding: {traceback.format_exc()}", redisClient=self.redisMessaging) self.safe_close(session) - return self.config['eir']['no_match_response'] + return config['eir']['no_match_response'] def Get_EIR_Rules(self): self.logTool.log(service='Database', level='debug', message="Getting all EIR Rules", redisClient=self.redisMessaging) diff --git a/lib/diameter.py b/lib/diameter.py index c1497b17..f0e4b508 100755 --- a/lib/diameter.py +++ b/lib/diameter.py @@ -11,7 +11,6 @@ from messaging import RedisMessaging from redis import Redis import datetime -import yaml import json import time import socket @@ -21,13 +20,12 @@ from baseModels import Peer, OutboundData import pydantic_core import xml.etree.ElementTree as ET +from pyhss_config import config + class Diameter: def __init__(self, logTool, originHost: str="hss01", originRealm: str="epc.mnc999.mcc999.3gppnetwork.org", productName: str="PyHSS", mcc: str="999", mnc: str="999", redisMessaging=None): - with open("../config.yaml", 'r') as stream: - self.config = (yaml.safe_load(stream)) - self.OriginHost = self.string_to_hex(originHost) self.OriginRealm = self.string_to_hex(originRealm) self.ProductName = self.string_to_hex(productName) @@ -35,11 +33,11 @@ def __init__(self, logTool, originHost: str="hss01", originRealm: str="epc.mnc99 self.MCC = str(mcc) self.logTool = logTool - self.redisUseUnixSocket = self.config.get('redis', {}).get('useUnixSocket', False) - self.redisUnixSocketPath = self.config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') - self.redisHost = self.config.get('redis', {}).get('host', 'localhost') - self.redisPort = self.config.get('redis', {}).get('port', 6379) - self.redisAdditionalPeers = self.config.get('redis', {}).get('additionalPeers', []) + self.redisUseUnixSocket = config.get('redis', {}).get('useUnixSocket', False) + self.redisUnixSocketPath = config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') + self.redisHost = config.get('redis', {}).get('host', 'localhost') + self.redisPort = config.get('redis', {}).get('port', 6379) + self.redisAdditionalPeers = config.get('redis', {}).get('additionalPeers', 
[]) if redisMessaging: self.redisMessaging = redisMessaging else: @@ -48,13 +46,13 @@ def __init__(self, logTool, originHost: str="hss01", originRealm: str="epc.mnc99 self.hostname = socket.gethostname() self.database = Database(logTool=logTool) - self.diameterRequestTimeout = int(self.config.get('hss', {}).get('diameter_request_timeout', 10)) - self.diameterPeerKey = self.config.get('hss', {}).get('diameter_peer_key', 'diameterPeers') - self.useDraFallback = self.config.get('hss', {}).get('use_dra_fallback', False) - self.emergency_subscriber_expiry = self.config.get('hss', {}).get('emergency_subscriber_expiry', 3600) - self.sendDsrOnMmeChange = self.config.get('hss', {}).get('send_dsr_on_mme_change', False) - self.dsrExternalIdentifier = self.config.get('hss', {}).get('dsr_external_identifier', "subscriber") - self.ignorePurgeUeRequest = self.config.get('hss', {}).get('ignore_purge_ue_request', False) + self.diameterRequestTimeout = int(config.get('hss', {}).get('diameter_request_timeout', 10)) + self.diameterPeerKey = config.get('hss', {}).get('diameter_peer_key', 'diameterPeers') + self.useDraFallback = config.get('hss', {}).get('use_dra_fallback', False) + self.emergency_subscriber_expiry = config.get('hss', {}).get('emergency_subscriber_expiry', 3600) + self.sendDsrOnMmeChange = config.get('hss', {}).get('send_dsr_on_mme_change', False) + self.dsrExternalIdentifier = config.get('hss', {}).get('dsr_external_identifier', "subscriber") + self.ignorePurgeUeRequest = config.get('hss', {}).get('ignore_purge_ue_request', False) self.templateLoader = jinja2.FileSystemLoader(searchpath="../") self.templateEnv = jinja2.Environment(loader=self.templateLoader) @@ -910,7 +908,7 @@ def update_stored_peer(self, peerKey: str, peer: dict) -> bool: } """ try: - self.diameterPeerKey = self.config.get('hss', {}).get('diameter_peer_key', 'diameterPeers') + self.diameterPeerKey = config.get('hss', {}).get('diameter_peer_key', 'diameterPeers') existing_peer = self.redisMessaging.getHashValue( name=self.diameterPeerKey, @@ -1429,7 +1427,7 @@ def validateOutboundRoamingNetwork(self, assignedRoamingRules: str, mcc: str, mn If the subscriber doesn't have any roaming rules applied, the network roaming rule is applied as a rule of last resort. 
""" - allowUndefinedNetworks = self.config.get('roaming', {}).get('outbound', {}).get('allow_undefined_networks', True) + allowUndefinedNetworks = config.get('roaming', {}).get('outbound', {}).get('allow_undefined_networks', True) roamingRules = self.database.GetAll(ROAMING_RULE) subscriberRoamingRules = [] if assignedRoamingRules: @@ -1724,7 +1722,7 @@ def Answer_257(self, packet_vars, avps): for avps_to_check in avps: #Only include AVP 278 (Origin State) if inital request included it if avps_to_check['avp_code'] == 278: avp += self.generate_avp(278, 40, self.AVP_278_Origin_State_Incriment(avps)) #Origin State (Has to be incrimented (Handled by AVP_278_Origin_State_Incriment)) - for host in self.config['hss']['bind_ip']: #Loop through all IPs from Config and add to response + for host in config['hss']['bind_ip']: #Loop through all IPs from Config and add to response avp += self.generate_avp(257, 40, self.ip_to_hex(host)) #Host-IP-Address (For this to work on Linux this is the IP defined in the hostsfile for localhost) avp += self.generate_avp(266, 40, "00000000") #Vendor-Id avp += self.generate_avp(269, "00", self.ProductName) #Product-Name @@ -1751,7 +1749,7 @@ def Answer_257(self, packet_vars, avps): avp += self.generate_avp(265, 40, format(int(13019),"x").zfill(8)) #Supported-Vendor-ID 13019 (ETSI) try: - external_socket_service_enabled = self.config.get('hss', {}).get('use_external_socket_service', False) + external_socket_service_enabled = config.get('hss', {}).get('use_external_socket_service', False) if external_socket_service_enabled == True: originHost = binascii.unhexlify(self.get_avp_data(avps, 264)[0]).decode() originRealm = binascii.unhexlify(self.get_avp_data(avps, 296)[0]).decode() @@ -1919,7 +1917,7 @@ def Answer_16777251_316(self, packet_vars, avps): remote_peer = binascii.unhexlify(remote_peer).decode('utf-8') #Format it except: #If we don't have a record-route set, we'll send the response to the OriginHost remote_peer = OriginHost - remote_peer = remote_peer + ";" + str(self.config['hss']['OriginHost']) + remote_peer = remote_peer + ";" + str(config['hss']['OriginHost']) self.logTool.log(service='HSS', level='debug', message="[diameter.py] [Answer_16777251_316] [ULA] Remote Peer is " + str(remote_peer), redisClient=self.redisMessaging) self.database.Update_Serving_MME(imsi=imsi, serving_mme=OriginHost, serving_mme_peer=remote_peer, serving_mme_realm=OriginRealm) @@ -2431,7 +2429,7 @@ def Answer_16777238_272(self, packet_vars, avps): except: #If we don't have a record-route set, we'll send the response to the OriginHost remote_peer = OriginHost self.logTool.log(service='HSS', level='debug', message="[diameter.py] [Answer_16777238_272] [CCA] Remote Peer is " + str(remote_peer), redisClient=self.redisMessaging) - remote_peer = remote_peer + ";" + str(self.config['hss']['OriginHost']) + remote_peer = remote_peer + ";" + str(config['hss']['OriginHost']) avp = '' #Initiate empty var AVP session_id = self.get_avp_data(avps, 263)[0] #Get Session-ID @@ -2676,7 +2674,7 @@ def Answer_16777238_272(self, packet_vars, avps): ue_ip = 'Failed to Decode / Get UE IP' #Store PGW location into Database - remote_peer = remote_peer + ";" + str(self.config['hss']['OriginHost']) + remote_peer = remote_peer + ";" + str(config['hss']['OriginHost']) self.database.Update_Serving_APN(imsi=imsi, apn=apn, pcrf_session_id=binascii.unhexlify(session_id).decode(), serving_pgw=OriginHost, subscriber_routing=str(ue_ip), serving_pgw_realm=OriginRealm, serving_pgw_peer=remote_peer) # Update Subscriber 
location information @@ -2941,9 +2939,9 @@ def Answer_16777216_300(self, packet_vars, avps): avp += self.generate_avp(297, 40, experimental_avp) #Expermental-Result else: self.logTool.log(service='HSS', level='debug', message="No SCSCF Assigned from DB", redisClient=self.redisMessaging) - if 'scscf_pool' in self.config['hss']: + if 'scscf_pool' in config['hss']: try: - scscf = random.choice(self.config['hss']['scscf_pool']) + scscf = random.choice(config['hss']['scscf_pool']) self.logTool.log(service='HSS', level='debug', message="Randomly picked SCSCF address " + str(scscf) + " from pool", redisClient=self.redisMessaging) avp += self.generate_vendor_avp(602, "c0", 10415, str(binascii.hexlify(str.encode(scscf)),'ascii')) except Exception as E: @@ -3033,7 +3031,7 @@ def Answer_16777216_301(self, packet_vars, avps): self.logTool.log(service='HSS', level='debug', message="Subscriber is served by S-CSCF " + str(ServingCSCF), redisClient=self.redisMessaging) if (Server_Assignment_Type == 1) or (Server_Assignment_Type == 2): self.logTool.log(service='HSS', level='debug', message="SAR is Register / Re-Register", redisClient=self.redisMessaging) - remote_peer = remote_peer + ";" + str(self.config['hss']['OriginHost']) + remote_peer = remote_peer + ";" + str(config['hss']['OriginHost']) self.database.Update_Serving_CSCF(imsi, serving_cscf=ServingCSCF, scscf_realm=OriginRealm, scscf_peer=remote_peer) else: self.logTool.log(service='HSS', level='debug', message="SAR is not Register", redisClient=self.redisMessaging) @@ -3073,9 +3071,9 @@ def Answer_16777216_302(self, packet_vars, avps): avp += self.generate_vendor_avp(602, "c0", 10415, str(binascii.hexlify(str.encode(str(ims_subscriber_details['scscf']))),'ascii')) else: self.logTool.log(service='HSS', level='debug', message="No SCSF assigned - Using SCSCF Pool", redisClient=self.redisMessaging) - if 'scscf_pool' in self.config['hss']: + if 'scscf_pool' in config['hss']: try: - scscf = random.choice(self.config['hss']['scscf_pool']) + scscf = random.choice(config['hss']['scscf_pool']) self.logTool.log(service='HSS', level='debug', message="Randomly picked SCSCF address " + str(scscf) + " from pool", redisClient=self.redisMessaging) avp += self.generate_vendor_avp(602, "c0", 10415, str(binascii.hexlify(str.encode(scscf)),'ascii')) except Exception as E: @@ -3396,7 +3394,7 @@ def Answer_16777217_306(self, packet_vars, avps): #Sh-User-Data (XML) #This loads a Jinja XML template containing the Sh-User-Data - sh_userdata_template = self.config['hss']['Default_Sh_UserData'] + sh_userdata_template = config['hss']['Default_Sh_UserData'] self.logTool.log(service='HSS', level='debug', message="Using template " + str(sh_userdata_template) + " for SH user data", redisClient=self.redisMessaging) template = self.templateEnv.get_template(sh_userdata_template) #These variables are passed to the template for use @@ -3589,7 +3587,7 @@ def Answer_16777236_265(self, packet_vars, avps): ipApnName = ipApnName.get('apn', None) else: #If we didn't find a serving APN for the IP, try the other local HSS'. 
- localGeoredEndpoints = self.config.get('geored', {}).get('local_endpoints', []) + localGeoredEndpoints = config.get('geored', {}).get('local_endpoints', []) for localGeoredEndpoint in localGeoredEndpoints: endpointUrl = f"{localGeoredEndpoint}/pcrf/pcrf_serving_apn_ip/{ueIp}" self.logTool.log(service='HSS', level='debug', message=f"[diameter.py] [Answer_16777236_265] [AAA] Searching remote HSS for serving apn: {endpointUrl}", redisClient=self.redisMessaging) @@ -3680,7 +3678,7 @@ def Answer_16777236_265(self, packet_vars, avps): #If we don't have a record-route set, we'll send the response to the OriginHost remotePeer = aarOriginHost - remotePeer = f"{remotePeer};{self.config['hss']['OriginHost']}" + remotePeer = f"{remotePeer};{config['hss']['OriginHost']}" self.database.Update_Proxy_CSCF(imsi=imsi, proxy_cscf=aarOriginHost, pcscf_realm=aarOriginRealm, pcscf_peer=remotePeer, pcscf_active_session=None) self.logTool.log(service='HSS', level='info', message=f"[diameter.py] [Answer_16777236_265] [AAA] Updated Serving P-CSCF in database OK", redisClient=self.redisMessaging) @@ -4072,7 +4070,7 @@ def Answer_16777236_275(self, packet_vars, avps): try: if not servingApn or servingApn == None or servingApn == 'None': #If we didn't find a serving APN for the Subscriber, try the other local HSS'. - localGeoredEndpoints = self.config.get('geored', {}).get('local_endpoints', []) + localGeoredEndpoints = config.get('geored', {}).get('local_endpoints', []) for localGeoredEndpoint in localGeoredEndpoints: endpointUrl = f"{localGeoredEndpoint}/pcrf/pcrf_subscriber_imsi/{imsi}" self.logTool.log(service='HSS', level='debug', message=f"[diameter.py] [Answer_16777236_275] [STA] Searching remote HSS for serving apn: {endpointUrl}", redisClient=self.redisMessaging) @@ -4347,7 +4345,7 @@ def Answer_16777291_8388622(self, packet_vars, avps): avp_serving_node = '' avp_serving_node += self.generate_vendor_avp(2402, "c0", 10415, self.string_to_hex(subscriber_details['serving_mme'])) #MME-Name avp_serving_node += self.generate_vendor_avp(2408, "c0", 10415, self.OriginRealm) #MME-Realm - avp_serving_node += self.generate_vendor_avp(2405, "c0", 10415, self.ip_to_hex(self.config['hss']['bind_ip'][0])) #GMLC-Address + avp_serving_node += self.generate_vendor_avp(2405, "c0", 10415, self.ip_to_hex(config['hss']['bind_ip'][0])) #GMLC-Address avp += self.generate_vendor_avp(2401, "c0", 10415, avp_serving_node) #Serving-Node AVP #Set Result-Code @@ -4426,7 +4424,7 @@ def Request_16777251_316(self, imsi, DestinationRealm): sessionid = str(bytes.fromhex(self.OriginHost).decode('ascii')) + ';' + self.generate_id(5) + ';1;app_s6a' #Session state generate avp += self.generate_avp(263, 40, str(binascii.hexlify(str.encode(sessionid)),'ascii')) #Session State set AVP avp += self.generate_avp(277, 40, "00000001") #Auth-Session-State - avp += self.generate_avp(264, 40, str(binascii.hexlify(str.encode("testclient." + self.config['hss']['OriginHost'])),'ascii')) + avp += self.generate_avp(264, 40, str(binascii.hexlify(str.encode("testclient." 
+ config['hss']['OriginHost'])),'ascii')) avp += self.generate_avp(296, 40, self.OriginRealm) #Origin Realm avp += self.generate_avp(283, 40, self.string_to_hex(DestinationRealm)) #Destination Realm avp += self.generate_avp(1, 40, self.string_to_hex(imsi)) #Username (IMSI) @@ -4743,7 +4741,7 @@ def Request_16777216_301(self, imsi, domain, server_assignment_type): avp = '' #Initiate empty var AVP #Session-ID sessionid = str(bytes.fromhex(self.OriginHost).decode('ascii')) + ';' + self.generate_id(5) + ';1;app_cx' #Session state generate avp += self.generate_avp(263, 40, str(binascii.hexlify(str.encode(sessionid)),'ascii')) #Session Session ID - avp += self.generate_avp(264, 40, str(binascii.hexlify(str.encode("testclient." + self.config['hss']['OriginHost'])),'ascii')) #Origin Host + avp += self.generate_avp(264, 40, str(binascii.hexlify(str.encode("testclient." + config['hss']['OriginHost'])),'ascii')) #Origin Host avp += self.generate_avp(296, 40, self.OriginRealm) #Origin Realm avp += self.generate_avp(283, 40, str(binascii.hexlify(b'localdomain'),'ascii')) #Destination Realm avp += self.generate_avp(260, 40, "0000010a4000000c000028af000001024000000c01000000") #Vendor-Specific-Application-ID for Cx @@ -5085,7 +5083,7 @@ def Request_16777217_307(self, msisdn): #This loads a Jinja XML template containing the Sh-User-Data templateLoader = jinja2.FileSystemLoader(searchpath="./") templateEnv = jinja2.Environment(loader=templateLoader) - sh_userdata_template = self.config['hss']['Default_Sh_UserData'] + sh_userdata_template = config['hss']['Default_Sh_UserData'] self.logTool.log(service='HSS', level='debug', message="Using template " + str(sh_userdata_template) + " for SH user data", redisClient=self.redisMessaging) template = templateEnv.get_template(sh_userdata_template) #These variables are passed to the template for use diff --git a/lib/diameterAsync.py b/lib/diameterAsync.py index 93376e50..e47794a0 100755 --- a/lib/diameterAsync.py +++ b/lib/diameterAsync.py @@ -1,12 +1,12 @@ #Diameter Packet Decoder / Encoder & Tools import math import asyncio -import yaml import uuid import socket import traceback import binascii from messagingAsync import RedisMessagingAsync +from pyhss_config import config class DiameterAsync: @@ -50,13 +50,10 @@ def __init__(self, logTool): {"commandCode": 8388622, "applicationId": 16777291, "responseMethod": self.Answer_16777291_8388622, "failureResultCode": 4100 ,"requestAcronym": "LRR", "responseAcronym": "LRA", "requestName": "LCS Routing Info Request", "responseName": "LCS Routing Info Answer"}, ] - with open("../config.yaml", 'r') as stream: - self.config = (yaml.safe_load(stream)) - - self.redisUseUnixSocket = self.config.get('redis', {}).get('useUnixSocket', False) - self.redisUnixSocketPath = self.config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') - self.redisHost = self.config.get('redis', {}).get('host', 'localhost') - self.redisPort = self.config.get('redis', {}).get('port', 6379) + self.redisUseUnixSocket = config.get('redis', {}).get('useUnixSocket', False) + self.redisUnixSocketPath = config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') + self.redisHost = config.get('redis', {}).get('host', 'localhost') + self.redisPort = config.get('redis', {}).get('port', 6379) self.redisMessaging = RedisMessagingAsync(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) self.logTool = logTool @@ -469,4 +466,4 @@ async def 
Answer_16777236_274(self): pass async def Answer_16777238_258(self): - pass \ No newline at end of file + pass diff --git a/lib/pyhss_config.py b/lib/pyhss_config.py new file mode 100644 index 00000000..d4258726 --- /dev/null +++ b/lib/pyhss_config.py @@ -0,0 +1,54 @@ +""" + Copyright (C) 2025 sysmocom - s.f.m.c. GmbH + + SPDX-License-Identifier: AGPL-3.0-or-later + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . +""" +import os +import sys +import yaml +from pathlib import Path + +config = None + + +def load_config(): + global config + + if "PYHSS_CONFIG" in os.environ: + paths = [os.environ["PYHSS_CONFIG"]] + if not os.path.exists(paths[0]): + print(f"ERROR: PYHSS_CONFIG is set, but file does not exist: {paths[0]}") + sys.exit(1) + else: + paths = [ + "/etc/pyhss/config.yaml", + "/usr/share/pyhss/config.yaml", + Path(__file__).resolve().parent.parent / "config.yaml", + ] + + for path in paths: + if os.path.exists(path): + with open(path, "r") as stream: + config = yaml.safe_load(stream) + return + + print("ERROR: failed to find PyHSS config, tried these paths:") + for path in paths: + print(f" * {path}") + sys.exit(1) + + +load_config() diff --git a/services/apiService.py b/services/apiService.py index 840405b2..238050a9 100755 --- a/services/apiService.py +++ b/services/apiService.py @@ -15,10 +15,8 @@ from diameter import Diameter from messaging import RedisMessaging import database -import yaml +from pyhss_config import config -with open("../config.yaml", 'r') as stream: - config = (yaml.safe_load(stream)) siteName = config.get("hss", {}).get("site_name", "") originHostname = socket.gethostname() diff --git a/services/databaseService.py b/services/databaseService.py index 8fc79f16..a8f0e6d0 100644 --- a/services/databaseService.py +++ b/services/databaseService.py @@ -1,4 +1,4 @@ -import os, sys, json, yaml +import os, sys, json import uuid, time import asyncio import socket @@ -11,6 +11,7 @@ from sqlalchemy import create_engine, inspect from sqlalchemy.orm import sessionmaker from sqlalchemy import MetaData, Table +from pyhss_config import config class DatabaseService: """ @@ -20,35 +21,29 @@ class DatabaseService: """ def __init__(self, redisHost: str='127.0.0.1', redisPort: int=6379): - try: - with open("../config.yaml", "r") as self.configFile: - self.config = yaml.safe_load(self.configFile) - except: - print(f"[Database] Fatal Error - config.yaml not found, exiting.") - quit() - self.logTool = LogTool(self.config) + self.logTool = LogTool(config) self.banners = Banners() - self.redisUseUnixSocket = self.config.get('redis', {}).get('useUnixSocket', False) - self.redisUnixSocketPath = self.config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') - self.redisHost = self.config.get('redis', {}).get('host', 'localhost') - self.redisPort = self.config.get('redis', {}).get('port', 6379) + self.redisUseUnixSocket = config.get('redis', {}).get('useUnixSocket', False) + self.redisUnixSocketPath = 
config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') + self.redisHost = config.get('redis', {}).get('host', 'localhost') + self.redisPort = config.get('redis', {}).get('port', 6379) self.redisDatabaseReadMessaging = RedisMessagingAsync(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) self.redisLogMessaging = RedisMessagingAsync(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) self.hostname = socket.gethostname() supportedDatabaseTypes = ["mysql", "postgresql"] - self.databaseType = self.config.get('database', {}).get('db_type', 'mysql').lower() + self.databaseType = config.get('database', {}).get('db_type', 'mysql').lower() if not self.databaseType in supportedDatabaseTypes: print(f"[Database] Fatal Error - unsupported database type: {self.databaseType}. Supported database types are: {supportedDatabaseTypes}, exiting.") quit() - self.databaseHost = self.config.get('database', {}).get('server', '') - self.databaseUsername = self.config.get('database', {}).get('username', '') - self.databasePassword = self.config.get('database', {}).get('password', '') - self.database = self.config.get('database', {}).get('database', '') - self.readCacheEnabled = self.config.get('database', {}).get('readCacheEnabled', True) - self.cacheReadInterval = int(self.config.get('database', {}).get('cacheReadInterval', 60)) + self.databaseHost = config.get('database', {}).get('server', '') + self.databaseUsername = config.get('database', {}).get('username', '') + self.databasePassword = config.get('database', {}).get('password', '') + self.database = config.get('database', {}).get('database', '') + self.readCacheEnabled = config.get('database', {}).get('readCacheEnabled', True) + self.cacheReadInterval = int(config.get('database', {}).get('cacheReadInterval', 60)) self.sqlAlchemyEngine = create_engine( f"{self.databaseType}://{self.databaseUsername}:{self.databasePassword}@{self.databaseHost}/{self.database}" diff --git a/services/diameterService.py b/services/diameterService.py index 4191e5d9..c17f66c7 100755 --- a/services/diameterService.py +++ b/services/diameterService.py @@ -1,6 +1,6 @@ import asyncio import sys, os, json -import time, yaml, uuid +import time, uuid from datetime import datetime from tzlocal import get_localzone import sctp, socket @@ -12,6 +12,7 @@ from baseModels import Peer, InboundData, OutboundData import pydantic_core import traceback +from pyhss_config import config class DiameterService: """ @@ -21,17 +22,10 @@ class DiameterService: """ def __init__(self): - try: - with open("../config.yaml", "r") as self.configFile: - self.config = yaml.safe_load(self.configFile) - except: - print(f"[Diameter] [__init__] Fatal Error - config.yaml not found, exiting.") - quit() - - self.redisUseUnixSocket = self.config.get('redis', {}).get('useUnixSocket', False) - self.redisUnixSocketPath = self.config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') - self.redisHost = self.config.get('redis', {}).get('host', 'localhost') - self.redisPort = self.config.get('redis', {}).get('port', 6379) + self.redisUseUnixSocket = config.get('redis', {}).get('useUnixSocket', False) + self.redisUnixSocketPath = config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') + self.redisHost = config.get('redis', {}).get('host', 'localhost') + self.redisPort = config.get('redis', {}).get('port', 6379) 
self.redisReaderMessaging = RedisMessagingAsync(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) self.redisWriterMessaging = RedisMessagingAsync(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) self.redisPeerMessaging = RedisMessagingAsync(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) @@ -39,22 +33,22 @@ def __init__(self): self.redisMetricMessaging = RedisMessagingAsync(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) self.redisDwrMessaging = RedisMessagingAsync(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) self.banners = Banners() - self.logTool = LogTool(config=self.config) + self.logTool = LogTool(config=config) self.diameterLibrary = DiameterAsync(logTool=self.logTool) self.activePeers = {} - self.enableOutboundDwr = self.config.get('hss', {}).get('send_dwr', False) - self.outboundDwrInterval = int(self.config.get('hss', {}).get('send_dwr_interval', 5)) - self.originHost = self.config.get('hss', {}).get('OriginHost', 'hss01') - self.originRealm = self.config.get('hss', {}).get('OriginRealm', "epc.mnc001.mcc001.3gppnetwork.org") - self.diameterRequestTimeout = int(self.config.get('hss', {}).get('diameter_request_timeout', 10)) - self.benchmarking = self.config.get('benchmarking', {}).get('enabled', False) - self.benchmarkingInterval = self.config.get('benchmarking', {}).get('reporting_interval', 3600) + self.enableOutboundDwr = config.get('hss', {}).get('send_dwr', False) + self.outboundDwrInterval = int(config.get('hss', {}).get('send_dwr_interval', 5)) + self.originHost = config.get('hss', {}).get('OriginHost', 'hss01') + self.originRealm = config.get('hss', {}).get('OriginRealm', "epc.mnc001.mcc001.3gppnetwork.org") + self.diameterRequestTimeout = int(config.get('hss', {}).get('diameter_request_timeout', 10)) + self.benchmarking = config.get('benchmarking', {}).get('enabled', False) + self.benchmarkingInterval = config.get('benchmarking', {}).get('reporting_interval', 3600) self.diameterRequests = 0 self.diameterResponses = 0 - self.workerPoolSize = int(self.config.get('hss', {}).get('diameter_service_workers', 10)) + self.workerPoolSize = int(config.get('hss', {}).get('diameter_service_workers', 10)) self.hostname = socket.gethostname() - self.useExternalSocketService = self.config.get('hss', {}).get('use_external_socket_service', False) - self.diameterPeerKey = self.config.get('hss', {}).get('diameter_peer_key', 'diameterPeers') + self.useExternalSocketService = config.get('hss', {}).get('use_external_socket_service', False) + self.diameterPeerKey = config.get('hss', {}).get('diameter_peer_key', 'diameterPeers') async def validateDiameterInbound(self, clientAddress: str, clientPort: str, inboundData) -> bool: """ @@ -121,7 +115,7 @@ async def handleActiveDiameterPeers(self): await(asyncio.sleep(1)) continue - activeDiameterPeersTimeout = self.config.get('hss', {}).get('active_diameter_peers_timeout', 3600) + activeDiameterPeersTimeout = config.get('hss', {}).get('active_diameter_peers_timeout', 3600) activePeers = self.activePeers stalePeers = [] @@ -408,15 +402,15 @@ async def startServer(self, host: str=None, port: int=None, type: str=None): 
asyncio.create_task(self.inboundDataWorker(coroutineUuid=f'inboundDataWorker-{i}')) if host is None: - host=str(self.config.get('hss', {}).get('bind_ip', '0.0.0.0')[0]) + host=str(config.get('hss', {}).get('bind_ip', '0.0.0.0')[0]) if port is None: - port=int(self.config.get('hss', {}).get('bind_port', 3868)) + port=int(config.get('hss', {}).get('bind_port', 3868)) if type is None: - type=str(self.config.get('hss', {}).get('transport', 'TCP')) + type=str(config.get('hss', {}).get('transport', 'TCP')) - self.socketTimeout = int(self.config.get('hss', {}).get('client_socket_timeout', 300)) + self.socketTimeout = int(config.get('hss', {}).get('client_socket_timeout', 300)) if self.benchmarking: logProcessedMessagesTask = asyncio.create_task(self.logProcessedMessages()) @@ -429,9 +423,9 @@ async def startServer(self, host: str=None, port: int=None, type: str=None): self.sctpSocket.events.clear() self.sctpSocket.bind((host, port)) self.sctpRtoInfo = self.sctpSocket.get_rtoinfo() - self.sctpRtoMin = self.config.get('hss', {}).get('sctp', {}).get('rtoMin', 500) - self.sctpRtoMax = self.config.get('hss', {}).get('sctp', {}).get('rtoMax', 5000) - self.sctpRtoInitial = self.config.get('hss', {}).get('sctp', {}).get('rtoInitial', 1000) + self.sctpRtoMin = config.get('hss', {}).get('sctp', {}).get('rtoMin', 500) + self.sctpRtoMax = config.get('hss', {}).get('sctp', {}).get('rtoMax', 5000) + self.sctpRtoInitial = config.get('hss', {}).get('sctp', {}).get('rtoInitial', 1000) self.sctpRtoInfo.initial = int(self.sctpRtoInitial) self.sctpRtoInfo.max = int(self.sctpRtoMax) self.sctpRtoInfo.min = int(self.sctpRtoMin) diff --git a/services/georedService.py b/services/georedService.py index c421a3da..0087b57b 100755 --- a/services/georedService.py +++ b/services/georedService.py @@ -1,4 +1,4 @@ -import os, sys, json, yaml +import os, sys, json import uuid, time import asyncio, aiohttp import socket @@ -7,6 +7,7 @@ from messagingAsync import RedisMessagingAsync from banners import Banners from logtool import LogTool +from pyhss_config import config class GeoredService: """ @@ -15,30 +16,24 @@ class GeoredService: """ def __init__(self, redisHost: str='127.0.0.1', redisPort: int=6379): - try: - with open("../config.yaml", "r") as self.configFile: - self.config = yaml.safe_load(self.configFile) - except: - print(f"[Geored] Fatal Error - config.yaml not found, exiting.") - quit() - self.logTool = LogTool(self.config) + self.logTool = LogTool(config) self.banners = Banners() - self.redisUseUnixSocket = self.config.get('redis', {}).get('useUnixSocket', False) - self.redisUnixSocketPath = self.config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') - self.redisHost = self.config.get('redis', {}).get('host', 'localhost') - self.redisPort = self.config.get('redis', {}).get('port', 6379) + self.redisUseUnixSocket = config.get('redis', {}).get('useUnixSocket', False) + self.redisUnixSocketPath = config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') + self.redisHost = config.get('redis', {}).get('host', 'localhost') + self.redisPort = config.get('redis', {}).get('port', 6379) self.redisGeoredMessaging = RedisMessagingAsync(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) self.redisWebhookMessaging = RedisMessagingAsync(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) - self.georedPeers = self.config.get('geored', 
{}).get('endpoints', []) - self.webhookPeers = self.config.get('webhooks', {}).get('endpoints', []) - self.ocsPeers = self.config.get('ocs', {}).get('endpoints', []) - self.ocsNotificationsEnabled = self.config.get('ocs', {}).get('enabled', False) - self.benchmarking = self.config.get('hss').get('enable_benchmarking', False) + self.georedPeers = config.get('geored', {}).get('endpoints', []) + self.webhookPeers = config.get('webhooks', {}).get('endpoints', []) + self.ocsPeers = config.get('ocs', {}).get('endpoints', []) + self.ocsNotificationsEnabled = config.get('ocs', {}).get('enabled', False) + self.benchmarking = config.get('hss').get('enable_benchmarking', False) self.hostname = socket.gethostname() - if not self.config.get('geored', {}).get('enabled'): + if not config.get('geored', {}).get('enabled'): self.logger.error("[Geored] Fatal Error - geored not enabled under geored.enabled, exiting.") quit() if self.georedPeers is not None: @@ -405,8 +400,8 @@ async def startService(self): await(self.logTool.logAsync(service='Geored', level='info', message=f"{self.banners.georedService()}")) while True: - georedEnabled = self.config.get('geored', {}).get('enabled', False) - webhooksEnabled = self.config.get('webhooks', {}).get('enabled', False) + georedEnabled = config.get('geored', {}).get('enabled', False) + webhooksEnabled = config.get('webhooks', {}).get('enabled', False) if self.georedPeers is not None: if not len(self.georedPeers) > 0: diff --git a/services/gsupService.py b/services/gsupService.py index 68cc0d96..38610b22 100644 --- a/services/gsupService.py +++ b/services/gsupService.py @@ -24,22 +24,15 @@ import sys sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../lib")) -import yaml from gsup.server import GsupServer from logtool import LogTool +from pyhss_config import config -if __name__ == '__main__': - config = None - try: - with open("../config.yaml", "r") as configFile: - config = yaml.safe_load(configFile) - except: # noqa - print("Error reading configuration file") - exit(1) +if __name__ == '__main__': bind_ip = config['hss']['gsup']['bind_ip'] bind_port = config['hss']['gsup']['bind_port'] gsup_server = GsupServer(bind_ip, bind_port, 60, LogTool(config)) - asyncio.run(gsup_server.start_server()) \ No newline at end of file + asyncio.run(gsup_server.start_server()) diff --git a/services/hssService.py b/services/hssService.py index eda61764..0878b196 100755 --- a/services/hssService.py +++ b/services/hssService.py @@ -1,4 +1,4 @@ -import os, sys, json, yaml, time, traceback, socket +import os, sys, json, time, traceback, socket sys.path.append(os.path.realpath('../lib')) from messaging import RedisMessaging from diameter import Diameter @@ -6,35 +6,29 @@ from logtool import LogTool from baseModels import Peer, InboundData, OutboundData import pydantic_core +from pyhss_config import config class HssService: def __init__(self): - - try: - with open("../config.yaml", "r") as self.configFile: - self.config = yaml.safe_load(self.configFile) - except: - print(f"[HSS] Fatal Error - config.yaml not found, exiting.") - quit() - self.redisUseUnixSocket = self.config.get('redis', {}).get('useUnixSocket', False) - self.redisUnixSocketPath = self.config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') - self.redisHost = self.config.get('redis', {}).get('host', 'localhost') - self.redisPort = self.config.get('redis', {}).get('port', 6379) + self.redisUseUnixSocket = config.get('redis', {}).get('useUnixSocket', False) + self.redisUnixSocketPath = 
config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') + self.redisHost = config.get('redis', {}).get('host', 'localhost') + self.redisPort = config.get('redis', {}).get('port', 6379) self.redisMessaging = RedisMessaging(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) - self.logTool = LogTool(config=self.config) + self.logTool = LogTool(config=config) self.banners = Banners() - self.mnc = self.config.get('hss', {}).get('MNC', '999') - self.mcc = self.config.get('hss', {}).get('MCC', '999') - self.originRealm = self.config.get('hss', {}).get('OriginRealm', f'mnc{self.mnc}.mcc{self.mcc}.3gppnetwork.org') - self.originHost = self.config.get('hss', {}).get('OriginHost', f'hss01') - self.productName = self.config.get('hss', {}).get('ProductName', f'PyHSS') + self.mnc = config.get('hss', {}).get('MNC', '999') + self.mcc = config.get('hss', {}).get('MCC', '999') + self.originRealm = config.get('hss', {}).get('OriginRealm', f'mnc{self.mnc}.mcc{self.mcc}.3gppnetwork.org') + self.originHost = config.get('hss', {}).get('OriginHost', f'hss01') + self.productName = config.get('hss', {}).get('ProductName', f'PyHSS') self.logTool.log(service='HSS', level='info', message=f"{self.banners.hssService()}", redisClient=self.redisMessaging) self.diameterLibrary = Diameter(logTool=self.logTool, originHost=self.originHost, originRealm=self.originRealm, productName=self.productName, mcc=self.mcc, mnc=self.mnc) - self.benchmarking = self.config.get('hss').get('enable_benchmarking', False) + self.benchmarking = config.get('hss').get('enable_benchmarking', False) self.hostname = socket.gethostname() - self.diameterPeerKey = self.config.get('hss', {}).get('diameter_peer_key', 'diameterPeers') + self.diameterPeerKey = config.get('hss', {}).get('diameter_peer_key', 'diameterPeers') def handleQueue(self): """ @@ -153,4 +147,4 @@ def handleQueue(self): if __name__ == '__main__': hssService = HssService() - hssService.handleQueue() \ No newline at end of file + hssService.handleQueue() diff --git a/services/logService.py b/services/logService.py index 827662ae..fd5b36b6 100755 --- a/services/logService.py +++ b/services/logService.py @@ -1,4 +1,4 @@ -import os, sys, json, yaml, socket +import os, sys, json, socket from datetime import datetime import time import logging @@ -6,6 +6,8 @@ from messaging import RedisMessaging from banners import Banners from logtool import LogTool +from pyhss_config import config + class LogService: """ @@ -15,20 +17,14 @@ class LogService: """ def __init__(self): - try: - with open("../config.yaml", "r") as self.configFile: - self.config = yaml.safe_load(self.configFile) - except: - print(f"[Log] Fatal Error - config.yaml not found, exiting.") - quit() - self.logTool = LogTool(config=self.config) + self.logTool = LogTool(config=config) self.banners = Banners() - self.redisUseUnixSocket = self.config.get('redis', {}).get('useUnixSocket', False) - self.redisUnixSocketPath = self.config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') - self.redisHost = self.config.get('redis', {}).get('host', 'localhost') - self.redisPort = self.config.get('redis', {}).get('port', 6379) + self.redisUseUnixSocket = config.get('redis', {}).get('useUnixSocket', False) + self.redisUnixSocketPath = config.get('redis', {}).get('unixSocketPath', '/var/run/redis/redis-server.sock') + self.redisHost = config.get('redis', {}).get('host', 'localhost') + self.redisPort = config.get('redis', 
{}).get('port', 6379) self.redisMessaging = RedisMessaging(host=self.redisHost, port=self.redisPort, useUnixSocket=self.redisUseUnixSocket, unixSocketPath=self.redisUnixSocketPath) - self.logFilePaths = self.config.get('logging', {}).get('logfiles', {}) + self.logFilePaths = config.get('logging', {}).get('logfiles', {}) self.logLevels = { 'CRITICAL': {'verbosity': 1, 'logging': logging.CRITICAL}, 'ERROR': {'verbosity': 2, 'logging': logging.ERROR}, @@ -76,4 +72,4 @@ def handleLogs(self): if __name__ == '__main__': logService = LogService() - logService.handleLogs() \ No newline at end of file + logService.handleLogs() diff --git a/services/metricService.py b/services/metricService.py index f144581a..4371a7f8 100755 --- a/services/metricService.py +++ b/services/metricService.py @@ -1,6 +1,6 @@ import asyncio import sys, os, json -import time, json, yaml +import time, json import socket from prometheus_client import make_wsgi_app, start_http_server, Counter, Gauge, Summary, Histogram, CollectorRegistry from werkzeug.middleware.dispatcher import DispatcherMiddleware @@ -12,29 +12,24 @@ from messaging import RedisMessaging from banners import Banners from logtool import LogTool +from pyhss_config import config + class MetricService: def __init__(self, redisHost: str='127.0.0.1', redisPort: int=6379): - try: - with open("../config.yaml", "r") as self.configFile: - self.config = yaml.safe_load(self.configFile) - except: - print(f"[Metric] Fatal Error - config.yaml not found, exiting.") - quit() - self.redisMessaging = RedisMessaging(host=redisHost, port=redisPort) self.banners = Banners() - self.logTool = LogTool(config=self.config) + self.logTool = LogTool(config=config) self.registry = CollectorRegistry(auto_describe=True) self.logTool.log(service='Metric', level='info', message=f"{self.banners.metricService()}", redisClient=self.redisMessaging) self.hostname = socket.gethostname() - self.influxEnabled = self.config.get('influxdb', {}).get('enabled', None) - self.influxDatabase = self.config.get('influxdb', {}).get('database', None) - self.influxUser = self.config.get('influxdb', {}).get('username', None) - self.influxPassword = self.config.get('influxdb', {}).get('password', None) - self.influxHost = self.config.get('influxdb', {}).get('host', None) - self.influxPort = self.config.get('influxdb', {}).get('port', None) + self.influxEnabled = config.get('influxdb', {}).get('enabled', None) + self.influxDatabase = config.get('influxdb', {}).get('database', None) + self.influxUser = config.get('influxdb', {}).get('username', None) + self.influxPassword = config.get('influxdb', {}).get('password', None) + self.influxHost = config.get('influxdb', {}).get('host', None) + self.influxPort = config.get('influxdb', {}).get('port', None) def processInfluxdb(self, influxData: dict) -> bool: """ @@ -139,4 +134,4 @@ def getMetrics(self): '/metrics': make_wsgi_app(registry=metricService.registry) }) - prometheusWebClient.run(host='0.0.0.0', port=9191) \ No newline at end of file + prometheusWebClient.run(host='0.0.0.0', port=9191) diff --git a/tools/Diameter_client.py b/tools/Diameter_client.py index 62cbee21..4e0c50a9 100644 --- a/tools/Diameter_client.py +++ b/tools/Diameter_client.py @@ -6,20 +6,18 @@ import time import _thread global recv_ip -import yaml -with open("config.yaml", 'r') as stream: - yaml_config = (yaml.safe_load(stream)) +from pyhss_config import config #Values to change / tweak -recv_ip = yaml_config['hss']['bind_ip'] #IP of this Machine -diameter_host = yaml_config['hss']['OriginHost'] 
#Diameter Host of this Machine -realm = yaml_config['hss']['OriginRealm'] #Diameter Realm of this machine +recv_ip = config['hss']['bind_ip'] #IP of this Machine +diameter_host = config['hss']['OriginHost'] #Diameter Host of this Machine +realm = config['hss']['OriginRealm'] #Diameter Realm of this machine DestinationHost = "" #Diameter Host of Destination DestinationRealm = input("Enter Diameter Realm: ") #Diameter Realm of Destination hostname = input("Enter IP of Diameter Peer to connect to: ") #IP of Remote Diameter Host -mcc = yaml_config['hss']['MCC'] #Mobile Country Code -mnc = yaml_config['hss']['MNC'] #Mobile Network Code -transport = yaml_config['hss']['transport'] #Transport Type - TCP or SCTP (SCTP Support is basic) +mcc = config['hss']['MCC'] #Mobile Country Code +mnc = config['hss']['MNC'] #Mobile Network Code +transport = config['hss']['transport'] #Transport Type - TCP or SCTP (SCTP Support is basic) diameter = diameter.Diameter(diameter_host, realm, 'PyHSS-client', str(mcc), str(mnc)) sessionid = str(diameter_host) + ';' + diameter.generate_id(5) + ';1;app_gy' diff --git a/tools/databaseUpgrade/alembic/env.py b/tools/databaseUpgrade/alembic/env.py index 1e4658b8..2063d60d 100644 --- a/tools/databaseUpgrade/alembic/env.py +++ b/tools/databaseUpgrade/alembic/env.py @@ -1,11 +1,11 @@ from logging.config import fileConfig from sqlalchemy import create_engine from alembic import context -import yaml import sys import os sys.path.append(os.path.realpath('lib')) from database import Base +from pyhss_config import config # this is the Alembic Config object, which provides # access to the values within the .ini file in use. @@ -29,15 +29,13 @@ def get_url_from_config() -> str: """ - Reads config.yaml and returns the database url. + Returns the database url from the PyHSS config. 
""" - with open("../../config.yaml", 'r') as stream: - try: - config = yaml.safe_load(stream) - db_string = 'mysql://' + str(config['database']['username']) + ':' + str(config['database']['password']) + '@' + str(config['database']['server']) + '/' + str(config['database']['database']) - return db_string - except Exception as e: - print(e) + try: + db_string = 'mysql://' + str(config['database']['username']) + ':' + str(config['database']['password']) + '@' + str(config['database']['server']) + '/' + str(config['database']['database']) + return db_string + except Exception as e: + print(e) def run_migrations_offline() -> None: diff --git a/tools/hss_Async_client.py b/tools/hss_Async_client.py index 644214c0..50deccd8 100644 --- a/tools/hss_Async_client.py +++ b/tools/hss_Async_client.py @@ -5,30 +5,29 @@ currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) parentdir = os.path.dirname(currentdir) sys.path.insert(0, parentdir) -import yaml -with open('config.yaml') as stream: - yaml_config = (yaml.safe_load(stream)) import json import redis import diameter import time +from pyhss_config import config + #Values to change / tweak -recv_ip = yaml_config['hss']['bind_ip'] #IP of this Machine -diameter_host = yaml_config['hss']['OriginHost'] #Diameter Host of this Machine -realm = yaml_config['hss']['OriginRealm'] #Diameter Realm of this machine +recv_ip = config['hss']['bind_ip'] #IP of this Machine +diameter_host = config['hss']['OriginHost'] #Diameter Host of this Machine +realm = config['hss']['OriginRealm'] #Diameter Realm of this machine DestinationHost = "" #Diameter Host of Destination DestinationRealm = input("Enter Diameter Realm: ") #Diameter Realm of Destination hostname = input("Enter IP of Diameter Peer to connect to: ") #IP of Remote Diameter Host -mcc = yaml_config['hss']['MCC'] #Mobile Country Code -mnc = yaml_config['hss']['MNC'] #Mobile Network Code -transport = yaml_config['hss']['transport'] #Transport Type - TCP or SCTP (SCTP Support is basic) +mcc = config['hss']['MCC'] #Mobile Country Code +mnc = config['hss']['MNC'] #Mobile Network Code +transport = config['hss']['transport'] #Transport Type - TCP or SCTP (SCTP Support is basic) diameter = diameter.Diameter(diameter_host, realm, 'PyHSS-client', str(mcc), str(mnc)) supported_calls = ["CER", "DWR", "AIR", "ULR", "UAR", "PUR", "SAR", "MAR", "MCR", "LIR", "RIR", "CLR", "NOR", "DEP", "UDR"] -r = redis.Redis(host=str(yaml_config['redis']['host']), port=str(yaml_config['redis']['port']), db=0) +r = redis.Redis(host=str(config['redis']['host']), port=str(config['redis']['port']), db=0) print("\n\nDiameter Peers:") ActivePeerDict = r.get('ActivePeerDict') diff --git a/tools/snmp_service.py b/tools/snmp_service.py index f0ac3230..848183dc 100644 --- a/tools/snmp_service.py +++ b/tools/snmp_service.py @@ -1,8 +1,5 @@ #This SNMP service pulls stats written to Redis by PyHSS and presents them as SNMP -import yaml import sys -with open(sys.path[0] + '/../config.yaml') as stream: - yaml_config = (yaml.safe_load(stream)) #Pulled from https://stackoverflow.com/questions/58909285/how-to-add-variable-in-the-mib-tree @@ -13,10 +10,11 @@ from pysnmp.proto.api import v2c import datetime import redis +from pyhss_config import config import redis -redis_store = redis.Redis(host=str(yaml_config['redis']['host']), port=str(yaml_config['redis']['port']), db=0) +redis_store = redis.Redis(host=str(config['redis']['host']), port=str(config['redis']['port']), db=0) # Create SNMP engine snmpEngine = engine.SnmpEngine() 
@@ -26,7 +24,7 @@ config.addTransport( snmpEngine, udp.domainName, - udp.UdpTransport().openServerMode((str(yaml_config['snmp']['listen_address']), int(yaml_config['snmp']['port']))) + udp.UdpTransport().openServerMode((str(config['snmp']['listen_address']), int(config['snmp']['port']))) ) # SNMPv3/USM setup @@ -481,4 +479,4 @@ def getValue(self, name, idx): except: snmpEngine.transportDispatcher.closeDispatcher() - raise \ No newline at end of file + raise diff --git a/tools/view_stats.py b/tools/view_stats.py index defe4ee4..1762fbc8 100644 --- a/tools/view_stats.py +++ b/tools/view_stats.py @@ -1,11 +1,11 @@ # This utility prints PyHSS stats stored in Redis -import yaml import sys -with open(sys.path[0] + '/../config.yaml') as stream: - yaml_config = (yaml.safe_load(stream)) import json import redis -r = redis.Redis(host=str(yaml_config['redis']['host']), port=str(yaml_config['redis']['port']), db=0) +from pyhss_config import config + + +r = redis.Redis(host=str(config['redis']['host']), port=str(config['redis']['port']), db=0) keys = r.keys() for key in sorted(keys): if key != b'ActivePeerDict': From 66bf3485e8d64a24f60a044a8f9ab936361d97bc Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 9 Oct 2025 11:13:27 +0200 Subject: [PATCH 6/9] lib/milenage: remove unused sys.path.append This script doesn't import anything from lib, so this is not needed. --- lib/milenage.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/milenage.py b/lib/milenage.py index 0172fd6e..2d0e9372 100755 --- a/lib/milenage.py +++ b/lib/milenage.py @@ -16,7 +16,6 @@ import logging import os import sys -sys.path.append(os.path.realpath('../')) CryptoLogger = logging.getLogger('CryptoLogger') From 31a67195733e302e64bc40280b8c739603f38c13 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Thu, 9 Oct 2025 11:17:16 +0200 Subject: [PATCH 7/9] Fix running services outside of source tree To run services outside of the source tree, the lib path must be added correctly to sys.path. Fix this by determining it relative to the current script's path instead of assuming that it is in ../lib (relative to the current work dir). 
--- lib/logtool.py | 6 ++++-- services/apiService.py | 4 +++- services/databaseService.py | 4 +++- services/diameterService.py | 4 +++- services/georedService.py | 4 +++- services/hssService.py | 4 +++- services/logService.py | 4 +++- services/metricService.py | 4 +++- tools/databaseUpgrade/alembic/env.py | 4 +++- 9 files changed, 28 insertions(+), 10 deletions(-) diff --git a/lib/logtool.py b/lib/logtool.py index b5877aa3..92df358b 100755 --- a/lib/logtool.py +++ b/lib/logtool.py @@ -3,7 +3,9 @@ import os, sys, time import socket from datetime import datetime -sys.path.append(os.path.realpath('../')) + +sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../lib")) + import asyncio from messagingAsync import RedisMessagingAsync from messaging import RedisMessaging @@ -97,4 +99,4 @@ def setupFileLogger(self, loggerName: str, logFilePath: str): rolloverHandler.setFormatter(formatter) fileLogger.addHandler(rolloverHandler) fileLogger.setLevel(logging.DEBUG) - return fileLogger \ No newline at end of file + return fileLogger diff --git a/services/apiService.py b/services/apiService.py index 238050a9..7c6192ec 100755 --- a/services/apiService.py +++ b/services/apiService.py @@ -5,7 +5,9 @@ from werkzeug.middleware.proxy_fix import ProxyFix from functools import wraps import os -sys.path.append(os.path.realpath('../lib')) + +sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../lib")) + import time import requests import traceback diff --git a/services/databaseService.py b/services/databaseService.py index a8f0e6d0..d2136177 100644 --- a/services/databaseService.py +++ b/services/databaseService.py @@ -4,7 +4,9 @@ import socket import datetime import traceback -sys.path.append(os.path.realpath('../lib')) + +sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../lib")) + from messagingAsync import RedisMessagingAsync from banners import Banners from logtool import LogTool diff --git a/services/diameterService.py b/services/diameterService.py index c17f66c7..d38f7dba 100755 --- a/services/diameterService.py +++ b/services/diameterService.py @@ -4,7 +4,9 @@ from datetime import datetime from tzlocal import get_localzone import sctp, socket -sys.path.append(os.path.realpath('../lib')) + +sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../lib")) + from messagingAsync import RedisMessagingAsync from diameterAsync import DiameterAsync from banners import Banners diff --git a/services/georedService.py b/services/georedService.py index 0087b57b..07df1f35 100755 --- a/services/georedService.py +++ b/services/georedService.py @@ -3,7 +3,9 @@ import asyncio, aiohttp import socket import traceback -sys.path.append(os.path.realpath('../lib')) + +sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../lib")) + from messagingAsync import RedisMessagingAsync from banners import Banners from logtool import LogTool diff --git a/services/hssService.py b/services/hssService.py index 0878b196..61823b9a 100755 --- a/services/hssService.py +++ b/services/hssService.py @@ -1,5 +1,7 @@ import os, sys, json, time, traceback, socket -sys.path.append(os.path.realpath('../lib')) + +sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../lib")) + from messaging import RedisMessaging from diameter import Diameter from banners import Banners diff --git a/services/logService.py b/services/logService.py index fd5b36b6..198084c4 100755 --- a/services/logService.py +++ b/services/logService.py @@ -2,7 +2,9 @@ from datetime import datetime import time import 
logging -sys.path.append(os.path.realpath('../lib')) + +sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../lib")) + from messaging import RedisMessaging from banners import Banners from logtool import LogTool diff --git a/services/metricService.py b/services/metricService.py index 4371a7f8..ccb4be90 100755 --- a/services/metricService.py +++ b/services/metricService.py @@ -8,7 +8,9 @@ from influxdb import InfluxDBClient import threading import traceback -sys.path.append(os.path.realpath('../lib')) + +sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../lib")) + from messaging import RedisMessaging from banners import Banners from logtool import LogTool diff --git a/tools/databaseUpgrade/alembic/env.py b/tools/databaseUpgrade/alembic/env.py index 2063d60d..2a6db80b 100644 --- a/tools/databaseUpgrade/alembic/env.py +++ b/tools/databaseUpgrade/alembic/env.py @@ -3,7 +3,9 @@ from alembic import context import sys import os -sys.path.append(os.path.realpath('lib')) + +sys.path.append(os.path.realpath(os.path.dirname(__file__) + "/../../../lib")) + from database import Base from pyhss_config import config From 33ee7a314b246cc877bec7b46ae22addec3eec6b Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 26 Sep 2025 15:21:16 +0200 Subject: [PATCH 8/9] services: add main() functions Add main() functions, so pyproject.toml can create launcher scripts for the services. --- services/apiService.py | 6 +++++- services/databaseService.py | 6 +++++- services/diameterService.py | 6 +++++- services/georedService.py | 6 +++++- services/gsupService.py | 6 +++++- services/hssService.py | 7 +++++-- services/logService.py | 7 ++++++- services/metricService.py | 7 +++++-- 8 files changed, 41 insertions(+), 10 deletions(-) diff --git a/services/apiService.py b/services/apiService.py index 7c6192ec..b72507c0 100755 --- a/services/apiService.py +++ b/services/apiService.py @@ -2372,6 +2372,10 @@ def put(self, imsi): response_json = {'result': 'Failed', 'Reason' : "Unable to send CLR: " + str(E)} return response_json -if __name__ == '__main__': + +def main(): apiService.run(debug=False, host='0.0.0.0', port=8080) + +if __name__ == '__main__': + main() diff --git a/services/databaseService.py b/services/databaseService.py index d2136177..98dd75f1 100644 --- a/services/databaseService.py +++ b/services/databaseService.py @@ -136,6 +136,10 @@ async def startService(self): pass -if __name__ == '__main__': +def main(): databaseService = DatabaseService() asyncio.run(databaseService.startService()) + + +if __name__ == '__main__': + main() diff --git a/services/diameterService.py b/services/diameterService.py index d38f7dba..cec1ebc5 100755 --- a/services/diameterService.py +++ b/services/diameterService.py @@ -450,6 +450,10 @@ async def startServer(self, host: str=None, port: int=None, type: str=None): await(server.serve_forever()) -if __name__ == '__main__': +def main(): diameterService = DiameterService() asyncio.run(diameterService.startServer()) + + +if __name__ == '__main__': + main() diff --git a/services/georedService.py b/services/georedService.py index 07df1f35..36f95859 100755 --- a/services/georedService.py +++ b/services/georedService.py @@ -436,6 +436,10 @@ async def startService(self): pass -if __name__ == '__main__': +def main(): georedService = GeoredService() asyncio.run(georedService.startService()) + + +if __name__ == '__main__': + main() diff --git a/services/gsupService.py b/services/gsupService.py index 38610b22..b102e3ba 100644 --- a/services/gsupService.py +++ 
b/services/gsupService.py @@ -30,9 +30,13 @@ from pyhss_config import config -if __name__ == '__main__': +def main(): bind_ip = config['hss']['gsup']['bind_ip'] bind_port = config['hss']['gsup']['bind_port'] gsup_server = GsupServer(bind_ip, bind_port, 60, LogTool(config)) asyncio.run(gsup_server.start_server()) + + +if __name__ == '__main__': + main() diff --git a/services/hssService.py b/services/hssService.py index 61823b9a..12abe147 100755 --- a/services/hssService.py +++ b/services/hssService.py @@ -144,9 +144,12 @@ def handleQueue(self): except Exception as e: self.logTool.log(service='HSS', level='error', message=f"[HSS] [handleQueue] Exception: {traceback.format_exc()}", redisClient=self.redisMessaging) continue - -if __name__ == '__main__': +def main(): hssService = HssService() hssService.handleQueue() + + +if __name__ == '__main__': + main() diff --git a/services/logService.py b/services/logService.py index 198084c4..60e02a1d 100755 --- a/services/logService.py +++ b/services/logService.py @@ -72,6 +72,11 @@ def handleLogs(self): self.logTool.log(service='Log', level='error', message=f"[Log] Error: {e}", redisClient=self.redisMessaging) continue -if __name__ == '__main__': + +def main(): logService = LogService() logService.handleLogs() + + +if __name__ == '__main__': + main() diff --git a/services/metricService.py b/services/metricService.py index ccb4be90..78cb1cc4 100755 --- a/services/metricService.py +++ b/services/metricService.py @@ -125,8 +125,7 @@ def getMetrics(self): self.handleMetrics() -if __name__ == '__main__': - +def main(): metricService = MetricService() metricServiceThread = threading.Thread(target=metricService.getMetrics) metricServiceThread.start() @@ -137,3 +136,7 @@ def getMetrics(self): }) prometheusWebClient.run(host='0.0.0.0', port=9191) + + +if __name__ == '__main__': + main() From 43622274a2d2b84f669c4724e6fea342d0a33142 Mon Sep 17 00:00:00 2001 From: Oliver Smith Date: Fri, 26 Sep 2025 14:18:08 +0200 Subject: [PATCH 9/9] pyproject.toml: new file Add a pyproject.toml file, so it becomes possible to build python whl packages with: $ python3 -m build With this change, Linux distributions could package PyHSS similar to other python projects, it should be possible to install it via pip (from the git repo), and the whl can be used to directly install PyHSS into a venv. 
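For example (the exact wheel filename depends on the version set in
pyproject.toml and on the local setup):

    $ python3 -m venv venv && . venv/bin/activate
    $ python3 -m build
    $ pip install dist/pyhss-*.whl

The [project.scripts] entries then provide launcher commands such as
pyhss_hss and pyhss_diameter, which call the main() functions added in
the previous patch.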
--- .gitignore | 1 + pyproject.toml | 30 ++++++++++++++++++++++++++++++ 2 files changed, 31 insertions(+) create mode 100644 pyproject.toml diff --git a/.gitignore b/.gitignore index b26aca27..09d1ccd1 100755 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,4 @@ lib/__pycache__/milenage.cpython-36.pyc .gitignore .vscode/settings.json .cspell/custom-dictionary-workspace.txt +pyhss.egg-info diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..ca10e221 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,30 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "pyhss" +version = "1.0.2" + +[project.scripts] +pyhss_api = "pyhss.services.apiService:main" +pyhss_database = "pyhss.services.databaseService:main" +pyhss_diameter = "pyhss.services.diameterService:main" +pyhss_geored = "pyhss.services.georedService:main" +pyhss_gsup = "pyhss.services.gsupService:main" +pyhss_hss = "pyhss.services.hssService:main" +pyhss_log = "pyhss.services.logService:main" +pyhss_metric = "pyhss.services.metricService:main" + +[tool.setuptools] +packages = [ + "pyhss.lib", + "pyhss.lib.gsup", + "pyhss.lib.gsup.controller", + "pyhss.lib.gsup.protocol", + "pyhss.services", + "pyhss.tools", +] + +[tool.setuptools.package-dir] +pyhss = ""