Browse Source

Async fix

Fabrizio Furnari 1 month ago
parent
commit
5a467de644

+ 1 - 1
Makefile

@@ -4,7 +4,7 @@ install:
 	pip install -e .
 
 test:
-	httphound -l debug --test-dir example_tests
+	httphound -l debug example_tests
 
 lint:
 	flake8 httphound/* example_tests/*

+ 6 - 6
example_tests/01-simple.py

@@ -19,9 +19,9 @@ class BasicPassTest(BaseProxyTest):
             binary_path=Path.home() / "bin/haproxy"
         )
 
-    # def run_test(self):
-    #     """This is the bare minimum to run the test
-    #     """
-    #     self.make_request()
-    #     # Returning True as this must always pass
-    #     return True
+    async def run_test(self):
+        """This is the bare minimum to run the test
+        """
+        await self.make_request()
+        # Returning True as this must always pass
+        return True

+ 9 - 9
example_tests/02-basic_header_check.py

@@ -23,9 +23,9 @@ class FailHeaderCheck(BaseProxyTest):
             binary_path=Path.home() / "bin/haproxy"
         )
 
-    # def run_test(self):
-    #     self.make_request()
-    #     return True
+    async def run_test(self):
+        await self.make_request()
+        return True
 
 
 class PassHeaderCheck(BaseProxyTest):
@@ -54,9 +54,9 @@ class PassHeaderCheck(BaseProxyTest):
             response_headers={"X-Test": "1234"},
         )
 
-    # def run_test(self):
-    #     """The run_test() method is always the same as all logic
-    #     is defined entirely in the classes configuration.
-    #     """
-    #     self.make_request()
-    #     return True
+    async def run_test(self):
+        """The run_test() method is always the same as all logic
+        is defined entirely in the classes configuration.
+        """
+        await self.make_request()
+        return True

+ 1 - 1
example_tests/08-errors.py

@@ -14,7 +14,7 @@ class ErroredTest(BaseProxyTest):
         )
 
     async def run_test(self):
-        await self.make_request()
+        await self.make_request()
         # This will fail with the standard error message
         return False
 

+ 3 - 3
example_tests/09-basic-logic.py

@@ -13,15 +13,15 @@ class ExampleLogicTest(BaseProxyTest):
             binary_path=Path.home() / "bin/haproxy"
         )
 
-    def run_test(self):
+    async def run_test(self):
         # Perform 2 requests with different headers
         self.headers = {"X-Test": "1234"}
-        res1 = self.make_request()
+        res1 = await self.make_request()
         print(f"Response 1 {res1}")
         # Check headers received by the backend
         print(self.backend.received_headers)
         self.headers = {"X-Another": "abcd"}
-        res2 = self.make_request()
+        res2 = await self.make_request()
         print(f"Response 2 {res2}")
         print(self.backend.received_headers)
         return True

+ 80 - 34
httphound/backend.py

@@ -1,5 +1,6 @@
 """
-Manages backend lifecycle
+Async Backend Module - HttpHound
+Manages backend lifecycle using pure async/await patterns
 """
 
 import logging
@@ -23,72 +24,117 @@ class BackendConfig:
 
 
 class DummyBackend:
-    """Dummy backend server using aiohttp"""
+    """Dummy backend server using aiohttp - Pure Async Implementation
+    
+    This backend runs entirely within the main event loop, eliminating
+    the need for threading or multiple event loops.
+    """
 
     def __init__(self, config: BackendConfig):
         self.config = config
-        self.app = None
         self.runner = None
         self.site = None
         self.received_headers = {}
         self.received_body = ""
         self.request_count = 0
-
-        # async control
-        self.stop_event = None
-        self.server_task = None
-
+        
     async def request_handler(self, request):
         """Handle incoming requests and validate them"""
         self.request_count += 1
-        logger.debug(f"Requests count: {self.request_count}")
+        logger.debug(f"Backend received request #{self.request_count}")
+        logger.debug(f"Request path: {request.path}")
+        logger.debug(f"Request method: {request.method}")
+        
+        # Store request data for test validation
         self.received_headers = dict(request.headers)
         self.received_body = await request.text()
+        
+        logger.debug(f"Request headers: {self.received_headers}")
+        if self.received_body:
+            logger.debug(f"Request body: {self.received_body}")
 
+        # Return configured response
         return web.Response(
             status=self.config.response_status,
             headers=self.config.response_headers,
             text=self.config.response_body
         )
 
-    async def _run_server(self):
-        """Start the dummy backend server"""
-
+    async def start(self):
+        """Start the backend server asynchronously
+        
+        This runs the server within the current event loop, so it stays
+        alive as long as the event loop is running.
+        """
+        logger.debug(f"Starting backend on {self.config.host}:{self.config.port}")
+        
+        # Create aiohttp application
         app = web.Application()
         app.router.add_route('*', '/{path:.*}', self.request_handler)
 
+        # Setup runner
         self.runner = web.AppRunner(app)
         await self.runner.setup()
 
+        # Start TCP site
         self.site = web.TCPSite(
             self.runner,
             self.config.host,
             self.config.port
         )
         await self.site.start()
-        logger.debug(f"Backend listening on {self.config.host}:{self.config.port}")
-        logger.debug(f"Site is {self.site}")
-        self.stop_event = asyncio.Event()
-        await self.stop_event.wait()
-
-    async def _start_async(self):
-        self.server_task = asyncio.create_task(self._run_server())
-        await asyncio.sleep(0.1)
-
-    async def _stop_async(self):
-        if self.stop_event:
-            self.stop_event.set()
-        if self.server_task:
-            await self.server_task
+        
+        logger.info(f"Backend listening on {self.config.host}:{self.config.port}")
+        
+        # Small delay to ensure the socket is fully bound
+        await asyncio.sleep(0.05)
+
+    async def stop(self):
+        """Stop the backend server gracefully"""
+        logger.debug("Stopping backend")
+        
         if self.site:
             await self.site.stop()
+            logger.debug("Backend site stopped")
+            
         if self.runner:
             await self.runner.cleanup()
-
-    def start(self):
-        """Synchronous interface to start server"""
-        asyncio.run(self._start_async(), debug=True)
-
-    def stop(self):
-        """Synchronous interface to stop server"""
-        asyncio.run(self._stop_async())
+            logger.debug("Backend runner cleaned up")
+            
+        logger.info("Backend stopped successfully")
+    
+    async def wait_until_ready(self, timeout: float = 5.0):
+        """Wait until the backend is ready to accept connections
+        
+        Args:
+            timeout: Maximum time to wait in seconds
+            
+        Raises:
+            TimeoutError: If backend doesn't become ready within timeout
+        """
+        loop = asyncio.get_running_loop()
+        start_time = loop.time()
+        
+        while True:
+            # Check if we've exceeded timeout
+            if loop.time() - start_time > timeout:
+                raise TimeoutError(
+                    f"Backend did not become ready within {timeout} seconds"
+                )
+            
+            # Try to connect to the backend without blocking the event loop
+            try:
+                _, writer = await asyncio.open_connection(
+                    self.config.host, self.config.port
+                )
+                writer.close()
+                await writer.wait_closed()
+                logger.debug("Backend is ready to accept connections")
+                return
+            except OSError as e:
+                logger.debug(f"Backend not ready yet: {e}")
+            
+            # Wait a bit before retrying
+            await asyncio.sleep(0.05)

+ 214 - 123
httphound/main.py

@@ -1,7 +1,6 @@
 #!/usr/bin/env python3
 
 import argparse
-
 import glob
 import importlib.util
 import logging
@@ -44,49 +43,29 @@ class TestResult:
 
 
 class BaseProxyTest(ABC):
-    """Base class for all HTTP tests
-
-    All test classes must inherit from this class and eventually
-    override the required parameters.
-
+    """Base class for all HTTP tests - ASYNC VERSION
+    
+    All test classes must inherit from this class and implement the async run_test() method.
+    
     Attributes:
         description (str): Description for the test that will be printed in overall summary
         url (str): URL to run the test against (default: 'http://localhost:4242/')
         method (str): HTTP method to use (default: GET)
         headers (dict): A dictionary of headers that will be sent by the client
-            while performing the request.
         body (str): A string that will be sent as request body (if applicable)
-        expected_status (int): The HTTP status that will be compared with the received
-            one. The test fails if doesnt' match (default: 200)
-        expected_headers (dict): A dictionary (header_name: header_value) of headers
-            that needs to be present in the response. The test fails if the header_name
-            is not present in the response or if header_value doesn't match.
-        expected_body_pattern (str): A regex used to match the body content. The test
-            fails if the match isn't found.
-        expected_header_patterns (dict): a dictionary (header_name: header_value_pattern)
-            that will be checked against received headers. If header_name is not present
-            in the response headers or header_value_pattern doesn't match the header
-            content, the test will fail.
-        forbidden_client_headers (list): A list of header names that needs to be *absent*
-            from the header list sent to the client. If any of the header name is found in
-            the response the test will fail.
-        expected_backend_headers (list): A list of headers that needs to be present in the
-            request sent to the backend. This is used to check if the reverse proxy deletes
-            one or more headers before forwarding the request to the backend. The test will
-            fail if one or more header are not present.
-        forbidden_backend_headers (list): Similar to the above parameter but the test will
-            fail instead if any of the header is found among the headers received by the
-            backend.
-        backend_header_patterns (dict): A dictionary (header_name: header_value_pattern)
-            that will be checked against the headers received by the backend. If the header
-            name is not present in the backend headers or header_value_pattern doesn't match
-            the header value received by the backend, the test will fail.
+        expected_status (int): The HTTP status that will be compared with the received one
+        expected_headers (dict): Headers that must be present in the response
+        expected_body_pattern (str): A regex used to match the body content
+        expected_header_patterns (dict): Header name to regex pattern mapping
+        forbidden_client_headers (list): Headers that must NOT be in the response
+        expected_backend_headers (list): Headers that must be present in backend request
+        forbidden_backend_headers (list): Headers that must NOT be in backend request
+        backend_header_patterns (dict): Backend header name to regex pattern mapping
     """
 
     def __init__(self):
         self.test_id = self.__class__.__name__
-        self.description = getattr(self, 'description',
-                                   'No description provided')
+        self.description = getattr(self, 'description', 'No description provided')
         self.url = getattr(self, 'url', 'http://localhost:4242/')
         self.method = getattr(self, 'method', 'GET')
         self.headers = getattr(self, 'headers', {})
@@ -113,51 +92,68 @@ class BaseProxyTest(ABC):
         self.response_headers = {}
         self.response_body = ""
 
-    def setup(self):
+    async def setup(self):
+        """Async setup - start backend and proxy"""
         logger.debug("Setting up test environment")
+        
         # Start backend
         logger.debug(f"Instantiating backend with configuration: {self.backend_config}")
         self.backend = DummyBackend(self.backend_config)
         logger.debug("Starting backend")
-        self.backend.start()
-        time.sleep(999)
+        await self.backend.start()
+        
+        # Wait for backend to be ready
+        await self.backend.wait_until_ready()
 
         # Start proxy
         logger.debug(f"Instantiating reverse proxy with configuration: {self.proxy_config}")
         self.proxy = ProxyManager(self.proxy_config)
         logger.debug(f"Starting reverse proxy with configuration: {self.backend_config}")
         self.proxy.start(self.backend_config)
-        logger.debug("Sleeping for 0.1s before proceeding")
-        time.sleep(0.1)
-
-    def teardown(self):
+        
+        # Wait for proxy to be ready
+        logger.debug("Waiting for proxy to be ready")
+        await self.proxy.wait_until_ready(self.url)
+        logger.debug("Setup complete")
+
+    async def teardown(self):
+        """Async teardown - stop backend and proxy"""
         logger.debug("Cleaning up test environment")
         if self.backend:
             logger.debug("Stopping backend")
-            asyncio.run(self.backend.stop())
+            await self.backend.stop()
         if self.proxy:
             logger.debug("Stopping reverse proxy")
             self.proxy.stop()
 
-    def make_request(self):
-        """Make HTTP request through the proxy"""
-
-        request = httpx.Request(
-            method=self.method,
-            url=self.url,
-            headers=self.headers,
-            content=self.body)
-        logger.debug(f"Performing HTTP request: {request}")
-        with httpx.Client(http2=True) as client:
-            response = client.send(request=request)
-            logger.debug(f"Response: {response}")
+    async def make_request(self):
+        """Make async HTTP request through the proxy"""
+        logger.debug(f"Making {self.method} request to {self.url}")
+        logger.debug(f"Request headers: {self.headers}")
+        if self.body:
+            logger.debug(f"Request body: {self.body}")
+        
+        async with httpx.AsyncClient(http2=True, timeout=10.0) as client:
+            response = await client.request(
+                method=self.method,
+                url=self.url,
+                headers=self.headers,
+                content=self.body
+            )
+            
+            logger.debug(f"Response status: {response.status_code}")
+            logger.debug(f"Response headers: {dict(response.headers)}")
+            
             self.response = response
-
-        return response
+            self.response_headers = dict(response.headers)
+            self.response_body = response.text
+            
+            return response
 
     def validate_response(self) -> Tuple[TestStatus, Optional[List[str]]]:
         """Validate the HTTP response"""
         validation_errors = []
+        
         # Check status code
         if self.response.status_code != self.expected_status:
             logger.info(f"Expected status {self.expected_status}, got {self.response.status_code}")
@@ -182,8 +178,7 @@ class BaseProxyTest(ABC):
             if header not in self.response_headers:
                 logger.info(f"Header '{header}' for pattern matching not found")
                 validation_errors.append(f"Header '{header}' for pattern matching not found")
-
-            if not re.match(pattern, self.response_headers[header]):
+            elif not re.match(pattern, self.response_headers[header]):
                 logger.info(f"Header '{header}' value doesn't match pattern '{pattern}'")
                 validation_errors.append(
                     f"Header '{header}' value doesn't match pattern '{pattern}'")
@@ -218,8 +213,7 @@ class BaseProxyTest(ABC):
             if header not in self.backend.received_headers:
                 logger.info(f"Expected backend header '{header}' not found")
                 validation_errors.append(f"Expected backend header '{header}' not found")
-
-            if not re.match(pattern, self.backend.received_headers[header]):
+            elif not re.match(pattern, self.backend.received_headers[header]):
                 logger.info(f"Backend header '{header}' doesn't match pattern '{pattern}'")
                 validation_errors.append(
                     f"Backend header '{header}' doesn't match pattern '{pattern}'")
@@ -229,19 +223,22 @@ class BaseProxyTest(ABC):
             return TestStatus.FAIL, validation_errors
         return TestStatus.PASS, None
 
-    #@abstractmethod
-    def run_test(self) -> bool:
-        """Run the actual test logic. Must be implemented by subclasses"""
-        self.make_request()
+    @abstractmethod
+    async def run_test(self) -> bool:
+        """Run the actual test logic. Must be implemented by subclasses
+        
+        This method MUST be async and should use await for any async operations.
+        """
+        await self.make_request()
         return True
 
-    def execute(self) -> TestResult:
-        """Execute the complete test"""
+    async def execute(self) -> TestResult:
+        """Execute the complete test asynchronously"""
         start_time = time.time()
 
         try:
-            self.setup()
-            success = self.run_test()
+            await self.setup()
+            success = await self.run_test()
 
             if success:
                 logger.debug("Validating response...")
@@ -255,6 +252,14 @@ class BaseProxyTest(ABC):
                         time.time() - start_time,
                         errors
                     )
+                else:
+                    return TestResult(
+                        self.test_id,
+                        self.description,
+                        TestStatus.PASS,
+                        time.time() - start_time,
+                        None
+                    )
             else:
                 return TestResult(
                     self.test_id,
@@ -274,55 +279,104 @@ class BaseProxyTest(ABC):
                 [str(e)]
             )
         finally:
-            self.teardown()
+            await self.teardown()
 
 
 class TestRunner:
-    """Main test runner"""
+    """Async test runner"""
 
     def __init__(self):
         self.results = []
 
-    def run_all_tests(self, tests: List[BaseProxyTest]) -> TestResult:
-        """Run all tests"""
+    async def run_all_tests(self, tests: List[BaseProxyTest]):
+        """Run all tests sequentially (async)"""
         logger.debug("Running all tests")
         for test in tests:
             logger.info(colored(f"Running test: {test.test_id}", "blue"))
-            result = test.execute()
+            result = await test.execute()
             logger.debug(f"Test result for {test.test_id}: {result}")
             self.results.append(result)
-        return result
 
-    def print_summary(self):
+    async def run_all_tests_parallel(
+        self, 
+        tests: List[BaseProxyTest], 
+        max_concurrent: int = 3
+    ):
+        """Run tests in parallel with concurrency limit
+        
+        Args:
+            tests: List of tests to run
+            max_concurrent: Maximum number of tests to run concurrently
+        """
+        logger.debug(f"Running tests in parallel (max {max_concurrent} concurrent)")
+        
+        semaphore = asyncio.Semaphore(max_concurrent)
+        
+        async def run_with_semaphore(test):
+            async with semaphore:
+                logger.info(colored(f"Running test: {test.test_id}", "blue"))
+                result = await test.execute()
+                logger.debug(f"Test result for {test.test_id}: {result}")
+                return result
+        
+        # Run all tests concurrently with limit
+        results = await asyncio.gather(
+            *[run_with_semaphore(test) for test in tests],
+            return_exceptions=False
+        )
+        
+        self.results.extend(results)
 
+    def print_summary(self):
+        """Print test summary"""
         total_tests = len(self.results)
         passed_tests = sum(1 for r in self.results if r.status == TestStatus.PASS)
         failed_tests = total_tests - passed_tests
         total_time = sum(r.duration for r in self.results)
 
-        print("TEST EXECUTION SUMMARY")
-        print()
+        print("\nTEST EXECUTION SUMMARY")
+        print("=" * 80)
+        
+        # Print failures first
         for result in self.results:
             if result.status != TestStatus.PASS:
-                print(f"Test {colored(result.test_id, 'blue')} failed with following errors:")
-                for msg in result.error_messages:
-                    print(colored(f"\t{msg}", result.status.value))
-                print()
+                print(f"\nTest {colored(result.test_id, 'blue')} - {result.description}")
+                print(f"Status: {colored(result.status.name, result.status.value)}")
+                if result.error_messages:
+                    print("Errors:")
+                    for msg in result.error_messages:
+                        print(colored(f"    {msg}", result.status.value))
 
-        result_table = [["Status", "Name", "Description", "Duration"]]
+        # Print results table
+        print("\n" + "=" * 80)
+        result_table = [["Status", "Name", "Description", "Duration (s)"]]
 
         for result in self.results:
             status_color = colored(result.status.name, result.status.value)
-
-            result_table.append([status_color,
-                                 result.test_id,
-                                 result.description,
-                                 result.duration])
+            result_table.append([
+                status_color,
+                result.test_id,
+                result.description,
+                f"{result.duration:.3f}"
+            ])
 
         print(tabulate(result_table, headers="firstrow"))
-        print()
-        print(tabulate([["Total tests", "Passed", "Failed", "Total duration"],
-                        [total_tests, passed_tests, failed_tests, f"{total_time:.2f}"]]))
+        
+        # Print summary stats
+        print("\n" + "=" * 80)
+        summary_table = [[
+            "Total Tests",
+            "Passed",
+            "Failed",
+            "Total Duration (s)"
+        ], [
+            total_tests,
+            colored(str(passed_tests), "green"),
+            colored(str(failed_tests), "red" if failed_tests > 0 else "green"),
+            f"{total_time:.3f}"
+        ]]
+        print(tabulate(summary_table, headers="firstrow"))
+        print("=" * 80 + "\n")
 
 
 def discover_tests(paths: List[str]) -> List[BaseProxyTest]:
@@ -336,7 +390,7 @@ def discover_tests(paths: List[str]) -> List[BaseProxyTest]:
         test_path = Path(p)
 
         if not test_path.exists():
-            logger.error(f"{test_path} does not exists")
+            logger.error(f"{test_path} does not exist")
             continue
 
         logger.debug(f"Searching for test files in {test_path}")
@@ -345,18 +399,18 @@ def discover_tests(paths: List[str]) -> List[BaseProxyTest]:
         elif test_path.is_file():
             if test_path.suffix == ".py":
                 all_files.append(test_path)
-        elif '*' in test_path or '?' in test_path or '[' in test_path:
-            globbed_files = glob.glob(test_path)
+        elif '*' in str(test_path) or '?' in str(test_path) or '[' in str(test_path):
+            globbed_files = glob.glob(str(test_path))
             for f in globbed_files:
                 if os.path.isfile(f) and f.endswith(".py"):
-                    all_files.append(f)
+                    all_files.append(Path(f))
         else:
             logger.error(f"Cannot find test files in {test_path}")
             raise RuntimeError(f"Cannot find test files in {test_path}")
 
     if not all_files:
         logger.error(f"No test file to import from {paths}")
-        raise RuntimeError(f"Not test files to import from {paths}")
+        raise RuntimeError(f"No test files to import from {paths}")
 
     for test_file in all_files:
         try:
@@ -372,11 +426,12 @@ def discover_tests(paths: List[str]) -> List[BaseProxyTest]:
                 attr = getattr(module, attr_name)
                 if (isinstance(attr, type) and
                     issubclass(attr, BaseProxyTest) and
-                        attr != BaseProxyTest):
+                    attr != BaseProxyTest):
                     tests.append(attr())
-            logger.debug(f"Test classes: {tests}")
+            logger.debug(f"Loaded tests from {test_file}: {[t.test_id for t in tests]}")
         except Exception as e:
             logger.error(f"Failed to load test file {test_file}: {e}")
+            raise
 
     return tests
 
@@ -388,59 +443,95 @@ def setup_logging(level):
         format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
     )
 
-def main():
-    parser = argparse.ArgumentParser(description="Reverse roxy test tool")
-    parser.add_argument(
-        '--log-level', '-l',
-        type=str.upper,
-        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
-        default='INFO', help='Set logging level (Default: INFO)')
-    parser.add_argument(
-        'paths',
-        nargs='+',
-        help='Directories, file patterns, or individual Python files (.py)')
-    args = parser.parse_args()
-
-    log_level = getattr(logging, args.log_level.upper())
-    setup_logging(log_level)
-    logger = logging.getLogger("httphound")
-
-    logger.info("Starting httphound")
-    # a little Ascii art only in debug mode
 
-    hound = '''
+async def async_main(args):
+    """Async main function - runs all tests"""
+    logger.info("Starting httphound (async version)")
+    
+    # ASCII art only in debug mode
+    if args.log_level == 'DEBUG':
+        hound = '''
           __
  \ ______/ V`-,
   }        /~~
  /_)^ --,r'
 |b      |b
 '''
-    logger.debug(hound)
+        logger.debug(hound)
 
     try:
+        # Discover tests (sync operation)
         tests = discover_tests(args.paths)
         logger.info(f"Discovered {len(tests)} tests in {args.paths}")
 
         if not tests:
             logger.info("No tests found")
-            sys.exit(0)
+            return 0
 
         # Run tests
         runner = TestRunner()
-        runner.run_all_tests(tests)
+        
+        if args.parallel:
+            logger.info(f"Running tests in parallel (max {args.parallel} concurrent)")
+            await runner.run_all_tests_parallel(tests, max_concurrent=args.parallel)
+        else:
+            logger.info("Running tests sequentially")
+            await runner.run_all_tests(tests)
 
         # Print summary
         runner.print_summary()
 
         # Exit with error code if any tests failed
-        failed_count = sum(1 for r in runner.results if not r.status == TestStatus.PASS)
+        failed_count = sum(1 for r in runner.results if r.status != TestStatus.PASS)
         if failed_count > 0:
-            raise RuntimeError(f"{failed_count} over {len(runner.results)} test failed")
+            logger.error(f"{failed_count} out of {len(runner.results)} tests failed")
+            return 1
+        
+        logger.info("All tests passed!")
+        return 0
 
     except Exception as e:
         logger.error(f"Test execution failed: {e}")
-        sys.exit(1)
+        logger.exception("Full traceback:")
+        return 1
+
+
+def main():
+    """Synchronous entry point that launches async main"""
+    parser = argparse.ArgumentParser(description="Reverse proxy test tool (async)")
+    parser.add_argument(
+        '--log-level', '-l',
+        type=str.upper,
+        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
+        default='INFO',
+        help='Set logging level (Default: INFO)'
+    )
+    parser.add_argument(
+        '--parallel', '-p',
+        type=int,
+        default=None,
+        metavar='N',
+        help='Run tests in parallel with max N concurrent tests'
+    )
+    parser.add_argument(
+        'paths',
+        nargs='+',
+        help='Directories, file patterns, or individual Python files (.py)'
+    )
+    args = parser.parse_args()
+
+    log_level = getattr(logging, args.log_level.upper())
+    setup_logging(log_level)
+
+    # Run async main
+    try:
+        exit_code = asyncio.run(async_main(args))
+        sys.exit(exit_code)
+    except KeyboardInterrupt:
+        logger.info("\nInterrupted by user")
+        sys.exit(130)
 
 
 def start():
+    """Console script entry point"""
     main()

+ 60 - 14
httphound/proxy.py

@@ -1,10 +1,11 @@
 """
 Manages all proxy-related stuff
+Enhanced with async health checking
 """
 import logging
 import os
 import subprocess
-import time
+import asyncio
 
 from typing import Dict, Any
 from dataclasses import dataclass, field
@@ -48,11 +49,12 @@ class ProxyManager:
             with open(self.config.template_path, 'r', encoding="utf-8") as f:
                 template_content = f.read()
         except FileNotFoundError:
-            logger.error(f"Cannot finf template {self.config.template_path}")
+            logger.error(f"Cannot find template {self.config.template_path}")
             raise
+            
         template = Template(template_content)
         rendered_template = template.render(**template_vars)
-        logger.debug(f"Rendered template: \n{rendered_template}\n")
+        logger.debug(f"Rendered template:\n{rendered_template}\n")
         return rendered_template
 
     def start(self, backend_config: BackendConfig):
@@ -60,7 +62,7 @@ class ProxyManager:
 
         # Create working directory
         logger.debug(
-            f"Creating reverese proxy working dir: {self.config.working_dir}")
+            f"Creating reverse proxy working dir: {self.config.working_dir}")
         os.makedirs(self.config.working_dir, exist_ok=True)
 
         # Render and write config file
@@ -76,12 +78,12 @@ class ProxyManager:
             f.write(config_content)
 
         # Start proxy process
-        # TODO: customize
-        cmd = [self.config.binary_path,
-               '-V',
-               '-db',
-               '-f', self.config_file,
-               ]
+        cmd = [
+            str(self.config.binary_path),
+            '-V',
+            '-db',
+            '-f', self.config_file,
+        ]
         logger.debug(f"Running proxy cmd: {cmd}")
 
         try:
@@ -93,14 +95,15 @@ class ProxyManager:
             )
 
             # Give proxy time to start
-            logger.debug("Waiting 0.1s for proxy to start")
+            logger.debug("Waiting for proxy to start")
+            import time
             time.sleep(0.1)
 
             if self.process.poll() is not None:
                 _, stderr = self.process.communicate()
                 raise RuntimeError(f"Proxy failed to start: {stderr.decode()}")
 
-            logger.debug(f"Proxy started with PID {self.process.pid}")
+            logger.info(f"Proxy started with PID {self.process.pid}")
 
         except FileNotFoundError as e:
             raise RuntimeError(
@@ -114,11 +117,54 @@ class ProxyManager:
             try:
                 self.process.wait(timeout=5)
             except subprocess.TimeoutExpired:
+                logger.warning("Proxy did not terminate, killing")
                 self.process.kill()
                 self.process.wait()
-            logger.debug("Proxy stopped")
+            logger.info("Proxy stopped")
 
         # Cleanup config file
         if self.config_file and os.path.exists(self.config_file):
             logger.debug(f"Removing config file {self.config_file}")
-            #os.remove(self.config_file)
+            os.remove(self.config_file)
+    
+    async def wait_until_ready(self, url: str, timeout: float = 5.0):
+        """Wait until proxy is ready to accept connections
+        
+        Args:
+            url: URL to test (e.g., 'http://localhost:4242/')
+            timeout: Maximum time to wait in seconds
+            
+        Raises:
+            TimeoutError: If proxy doesn't become ready within timeout
+        """
+        import httpx
+        
+        loop = asyncio.get_running_loop()
+        start_time = loop.time()
+        
+        while True:
+            # Check if we've exceeded timeout
+            if loop.time() - start_time > timeout:
+                raise TimeoutError(
+                    f"Proxy did not become ready within {timeout} seconds"
+                )
+            
+            # Check if process is still running
+            if self.process and self.process.poll() is not None:
+                _, stderr = self.process.communicate()
+                raise RuntimeError(f"Proxy process died: {stderr.decode()}")
+            
+            # Try to connect to the proxy
+            try:
+                async with httpx.AsyncClient() as client:
+                    # We don't care about the response, just that we can connect
+                    await client.get(url, timeout=0.5)
+                    logger.debug("Proxy is ready to accept connections")
+                    return
+            except (httpx.ConnectError, httpx.TimeoutException, httpx.ReadTimeout):
+                # Expected - proxy might not be ready yet
+                pass
+            except Exception as e:
+                logger.debug(f"Proxy health check error: {e}")
+            
+            # Wait a bit before retrying
+            await asyncio.sleep(0.1)

+ 13 - 14
requirements.txt

@@ -1,24 +1,23 @@
 aiohappyeyeballs==2.6.1
-aiohttp==3.12.15
+aiohttp==3.13.3
 aiosignal==1.4.0
-anyio==4.10.0
-attrs==25.3.0
-certifi==2025.8.3
-frozenlist==1.7.0
+anyio==4.12.1
+attrs==26.1.0
+certifi==2026.2.25
+frozenlist==1.8.0
 h11==0.16.0
-h2==4.2.0
+h2==4.3.0
 hpack==4.1.0
 httpcore==1.0.9
--e git+ssh://git@gitlab.wikimedia.org/repos/sre/httphound.git@c36118b3fdd1d35d2697b1fd42a95229b13c8d47#egg=httphound
+-e git+ssh://git@git.fabfur.it/fabfur/httphound.git@daca17eb5f8e21290517ca823d13e932230e2054#egg=httphound
 httpx==0.28.1
 hyperframe==6.1.0
-idna==3.10
+idna==3.11
 Jinja2==3.1.6
-MarkupSafe==3.0.2
-multidict==6.6.3
-propcache==0.3.2
-sniffio==1.3.1
+MarkupSafe==3.0.3
+multidict==6.7.1
+propcache==0.4.1
 tabulate==0.9.0
 termcolor==3.1.0
-typing_extensions==4.14.1
-yarl==1.20.1
+typing_extensions==4.15.0
+yarl==1.23.0