add self-termination in case the processes freeze
parent 3263194b52 · commit 349a76d4e6
@@ -17,7 +17,6 @@ import coloredlogs
 from proxy import proxy
 from redis import Redis
 
-from . import suicide
 from .background import validate_proxies
 
 coloredlogs.install(level='INFO')
@@ -1,12 +1,15 @@
 import concurrent
 import logging
+import os
 import random
+import signal
 import time
 
 import requests
 from redis import Redis
 
 from .config import PROXY_POOL, SMARTPROXY_POOL, IP_CHECKER, MAX_PROXY_CHECKERS
+from .pid import zombie_slayer
 from .redis_cycle import add_backend_cycler
 from .smartproxy import transform_smartproxy
 
@@ -21,6 +24,18 @@ def validate_proxies():
     logger.info('Doing initial backend check, please wait...')
     started = False
     while True:
+        # Health checks. If one of these fails, the process is killed to be restarted by systemd.
+        if int(redis.get('balancer_online')):
+            zombie_slayer()
+            try:
+                response = requests.get('http://localhost:9000', headers={'User-Agent': 'HEALTHCHECK'}, timeout=10)
+                if response.status_code != 404:
+                    logger.critical(f"Frontend HTTP check failed with status code: {response.status_code}")
+                    os.kill(os.getpid(), signal.SIGKILL)
+            except requests.exceptions.RequestException as e:
+                logger.critical(f"Frontend HTTP check failed with exception: {e}")
+                os.kill(os.getpid(), signal.SIGKILL)
+
         our_online_backends = {}
         smartproxy_online_backends = {}
         ip_addresses = set()
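The new check works only because something restarts the process after it SIGKILLs itself; per the comment in the added code, that job belongs to systemd. A minimal sketch of a unit that provides the restart behavior — the unit description, ExecStart, and module name are illustrative assumptions, not taken from this repo:

[Unit]
Description=proxy load balancer (hypothetical unit)
After=network-online.target redis.service

[Service]
# Assumed entry point; the real ExecStart is not shown in this diff.
ExecStart=/usr/bin/python3 -m loadbalancer
# Bring the service back up after the self-inflicted SIGKILL.
Restart=always
RestartSec=5

[Install]
WantedBy=multi-user.target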
@@ -40,10 +55,6 @@ def validate_proxies():
                 logger.debug(f'PROXY TEST failed - {pxy} - got code {r.status_code}')
                 return
 
-            # if r_test.status_code != 200:
-            #     logger.debug(f'PROXY TEST failed - {pxy} - test download got code {r_test.status_code}')
-            #     return
-
             ip = r.text
             if ip not in ip_addresses:
                 proxy_dict = our_online_backends if not smartproxy else smartproxy_online_backends
@@ -76,7 +87,3 @@ def validate_proxies():
 
         redis.set('balancer_online', 1)
         time.sleep(10)
-
-        # if int(redis.get('suicide_online')) == 1 and not suicide.SUICIDE_PACT.pact.is_alive():
-        #     logger.critical('Suicide thread not running!')
-        #     os.kill(os.getpid(), signal.SIGTERM)
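For orientation: `r` in the hunk above presumably comes from fetching IP_CHECKER through the proxy under test, with the response body treated as that proxy's exit IP — that is what `ip = r.text` and the `ip_addresses` de-duplication imply. A rough sketch of that pattern; the function name, checker URL, and timeout are illustrative assumptions:

import requests

# Assumed stand-in for the real value imported from .config.
IP_CHECKER = 'https://checkip.amazonaws.com'


def check_proxy(pxy: str, timeout: int = 10):
    """Return the exit IP seen through `pxy`, or None if the proxy fails."""
    try:
        r = requests.get(IP_CHECKER, proxies={'http': pxy, 'https': pxy}, timeout=timeout)
    except requests.exceptions.RequestException:
        return None
    if r.status_code != 200:
        return None
    return r.text.strip()  # the checker answers with the caller's IP address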
@@ -27,6 +27,9 @@ def zombie_slayer():
             zombies.append(child_pid)
 
     if zombies:
-        print(f"Zombie processes detected: {zombies}")
-        print("Killing parent process to reap zombies...")
+        import logging
+        logger = logging.getLogger(__name__)
+        logger.setLevel(logging.INFO)
+        logger.critical(f"Zombie processes detected: {zombies}")
+        logger.critical("Killing parent process to reap zombies...")
         os.kill(pid, signal.SIGKILL)
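Only the tail of zombie_slayer() is visible here, but the `zombies` list it logs is evidently a set of child PIDs stuck in the zombie state, and `os.kill(pid, signal.SIGKILL)` kills the parent so that init can reap them. One way such a list can be collected is with psutil — a sketch under that assumption, not necessarily how pid.py actually does it:

import os

import psutil


def find_zombie_children():
    """Return PIDs of child processes currently in the zombie state."""
    zombies = []
    for child in psutil.Process(os.getpid()).children(recursive=True):
        try:
            if child.status() == psutil.STATUS_ZOMBIE:
                zombies.append(child.pid)
        except psutil.NoSuchProcess:
            continue  # the child was reaped while we were scanning
    return zombies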
@@ -1,35 +0,0 @@
-import logging
-import os
-import signal
-import threading
-import time
-
-import requests
-from redis import Redis
-
-
-def check_url_thread(url: str):
-    redis = Redis(host='localhost', port=6379, decode_responses=True)
-    redis.set('suicide_online', 1)
-    logger = logging.getLogger(__name__)
-    logger.setLevel(logging.INFO)
-    time.sleep(30)  # give the server some time to start up
-    logger.info('Created a suicide pact.')
-    while True:
-        try:
-            response = requests.get(url, timeout=10)
-            if response.status_code != 404:
-                logger.critical(f"Fetch failed with status code: {response.status_code}")
-                os.kill(os.getpid(), signal.SIGTERM)
-        except requests.exceptions.RequestException as e:
-            logger.critical(f"Fetch failed with exception: {e}")
-            os.kill(os.getpid(), signal.SIGTERM)
-        time.sleep(10)
-
-
-class SuicidePact:
-    def __init__(self):
-        self.pact = threading.Thread()
-
-
-SUICIDE_PACT = SuicidePact()
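Note that the deleted module never started its own watchdog: SuicidePact.__init__ only stores an empty threading.Thread, and the (also removed) `suicide.SUICIDE_PACT.pact.is_alive()` check earlier implies the caller replaced and started it at startup, roughly as below. This is a reconstruction; the URL is assumed from the new inline frontend check:

import threading

from . import suicide

# Swap the placeholder thread for a live watchdog, then start it.
suicide.SUICIDE_PACT.pact = threading.Thread(
    target=suicide.check_url_thread,
    args=('http://localhost:9000',),  # assumed: same endpoint as the new frontend check
    daemon=True,
)
suicide.SUICIDE_PACT.pact.start()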