diff --git a/.github/workflows/version_updater.yaml b/.github/workflows/version_updater.yaml
index 4ffebef359..d2911bcba5 100644
--- a/.github/workflows/version_updater.yaml
+++ b/.github/workflows/version_updater.yaml
@@ -28,8 +28,9 @@ jobs:
           response = requests.get('https://api.github.com/repos/projectdiscovery/nuclei/releases/latest')
           version = response.json()['tag_name'].lstrip('v')
           release_notes = response.json()['body']
-          os.system(f"echo 'latest_version={version}' >> $GITHUB_ENV")
-          os.system(f"echo 'release_notes={release_notes}' >> $GITHUB_ENV")
+          with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
+              env_file.write(f"latest_version={version}\n")
+              env_file.write(f"release_notes<<EOF\n{release_notes}\nEOF\n")
@@ -78,8 +79,9 @@ jobs:
           response = requests.get('https://api.github.com/repos/trufflesecurity/trufflehog/releases/latest')
           version = response.json()['tag_name'].lstrip('v')
           release_notes = response.json()['body']
-          os.system(f"echo 'latest_version={version}' >> $GITHUB_ENV")
-          os.system(f"echo 'release_notes={release_notes}' >> $GITHUB_ENV")
+          with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
+              env_file.write(f"latest_version={version}\n")
+              env_file.write(f"release_notes<<EOF\n{release_notes}\nEOF\n")
diff --git a/README.md b/README.md
index 51e7a53003..ad2ad61d08 100644
--- a/README.md
+++ b/README.md
@@ -29,6 +29,9 @@ Passive API sources plus a recursive DNS brute-force with target-specific subdom
 ```bash
 # find subdomains of evilcorp.com
 bbot -t evilcorp.com -p subdomain-enum
+
+# passive sources only
+bbot -t evilcorp.com -p subdomain-enum -rf passive
 ```
diff --git a/bbot/core/engine.py b/bbot/core/engine.py
index 70652d456e..9d42c9719f 100644
--- a/bbot/core/engine.py
+++ b/bbot/core/engine.py
@@ -40,9 +40,10 @@ class EngineBase:
 
     ERROR_CLASS = BBOTEngineError
 
-    def __init__(self):
+    def __init__(self, debug=False):
         self._shutdown_status = False
         self.log = logging.getLogger(f"bbot.core.{self.__class__.__name__.lower()}")
+        self._engine_debug = debug
 
     def pickle(self, obj):
         try:
@@ -62,10 +63,10 @@ def unpickle(self, binary):
             return error_sentinel
 
     async def _infinite_retry(self, callback, *args, **kwargs):
-        interval = kwargs.pop("_interval", 15)
+        interval = kwargs.pop("_interval", 300)
         context = kwargs.pop("_context", "")
-        # default overall timeout of 5 minutes (15 second interval * 20 iterations)
-        max_retries = kwargs.pop("_max_retries", 4 * 5)
+        # default overall timeout of 10 minutes (300 second interval * 2 iterations)
+        max_retries = kwargs.pop("_max_retries", 1)
         if not context:
             context = f"{callback.__name__}({args}, {kwargs})"
         retries = 0
@@ -73,10 +74,14 @@ async def _infinite_retry(self, callback, *args, **kwargs):
             try:
                 return await asyncio.wait_for(callback(*args, **kwargs), timeout=interval)
             except (TimeoutError, asyncio.exceptions.TimeoutError):
-                self.log.debug(f"{self.name}: Timeout after {interval:,} seconds{context}, retrying...")
+                self.log.debug(f"{self.name}: Timeout after {interval:,} seconds {context}, retrying...")
                 retries += 1
                 if max_retries is not None and retries > max_retries:
-                    raise TimeoutError(f"Timed out after {max_retries*interval:,} seconds {context}")
+                    raise TimeoutError(f"Timed out after {(max_retries+1)*interval:,} seconds {context}")
+
+    def engine_debug(self, *args, **kwargs):
+        if self._engine_debug:
+            self.log.trace(*args, **kwargs)
 
 
 class EngineClient(EngineBase):
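The retry arithmetic above is worth spelling out: the raised error now reports `(max_retries+1)*interval` because the initial attempt consumes one interval before any retry happens. A minimal standalone sketch of the same pattern (illustrative only, not BBOT's actual helper, which also accepts a `_context` label):

```python
import asyncio

async def retry_with_timeout(callback, *args, interval=300, max_retries=1, **kwargs):
    # each attempt gets `interval` seconds; total budget = (max_retries + 1) * interval
    retries = 0
    while True:
        try:
            return await asyncio.wait_for(callback(*args, **kwargs), timeout=interval)
        except (TimeoutError, asyncio.TimeoutError):
            retries += 1
            if max_retries is not None and retries > max_retries:
                raise TimeoutError(f"Timed out after {(max_retries + 1) * interval:,} seconds")
```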
@@ -114,9 +119,9 @@ class EngineClient(EngineBase):
 
     SERVER_CLASS = None
 
-    def __init__(self, **kwargs):
-        super().__init__()
+    def __init__(self, debug=False, **kwargs):
         self.name = f"EngineClient {self.__class__.__name__}"
+        super().__init__(debug=debug)
         self.process = None
         if self.SERVER_CLASS is None:
             raise ValueError(f"Must set EngineClient SERVER_CLASS, {self.SERVER_CLASS}")
@@ -133,15 +138,17 @@ def __init__(self, **kwargs):
 
     def check_error(self, message):
         if isinstance(message, dict) and len(message) == 1 and "_e" in message:
+            self.engine_debug(f"{self.name}: got error message: {message}")
             error, trace = message["_e"]
             error = self.ERROR_CLASS(error)
             error.engine_traceback = trace
+            self.engine_debug(f"{self.name}: raising {error.__class__.__name__}")
             raise error
         return False
 
     async def run_and_return(self, command, *args, **kwargs):
         fn_str = f"{command}({args}, {kwargs})"
-        self.log.debug(f"{self.name}: executing run-and-return {fn_str}")
+        self.engine_debug(f"{self.name}: executing run-and-return {fn_str}")
         if self._shutdown_status and not command == "_shutdown":
             self.log.verbose(f"{self.name} has been shut down and is not accepting new tasks")
             return
@@ -150,7 +157,7 @@ async def run_and_return(self, command, *args, **kwargs):
             message = self.make_message(command, args=args, kwargs=kwargs)
             if message is error_sentinel:
                 return
-            await self._infinite_retry(socket.send, message)
+            await socket.send(message)
             binary = await self._infinite_retry(socket.recv, _context=f"waiting for return value from {fn_str}")
         except BaseException:
             try:
@@ -161,7 +168,7 @@ async def run_and_return(self, command, *args, **kwargs):
             raise
         # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}")
         message = self.unpickle(binary)
-        self.log.debug(f"{self.name}: {fn_str} got return value: {message}")
+        self.engine_debug(f"{self.name}: {fn_str} got return value: {message}")
         # error handling
         if self.check_error(message):
             return
@@ -169,7 +176,7 @@ async def run_and_return(self, command, *args, **kwargs):
 
     async def run_and_yield(self, command, *args, **kwargs):
         fn_str = f"{command}({args}, {kwargs})"
-        self.log.debug(f"{self.name}: executing run-and-yield {fn_str}")
+        self.engine_debug(f"{self.name}: executing run-and-yield {fn_str}")
         if self._shutdown_status:
             self.log.verbose("Engine has been shut down and is not accepting new tasks")
             return
@@ -188,18 +195,18 @@ async def run_and_yield(self, command, *args, **kwargs):
                 )
                 # self.log.debug(f"{self.name}.{command}({kwargs}) got binary: {binary}")
                 message = self.unpickle(binary)
-                self.log.debug(f"{self.name} {command} got iteration: {message}")
+                self.engine_debug(f"{self.name}: {fn_str} got iteration: {message}")
                 # error handling
                 if self.check_error(message) or self.check_stop(message):
                     break
                 yield message
             except (StopAsyncIteration, GeneratorExit) as e:
                 exc_name = e.__class__.__name__
-                self.log.debug(f"{self.name}.{command} got {exc_name}")
+                self.engine_debug(f"{self.name}.{command} got {exc_name}")
                 try:
                     await self.send_cancel_message(socket, fn_str)
                 except Exception:
-                    self.log.debug(f"{self.name}.{command} failed to send cancel message after {exc_name}")
+                    self.engine_debug(f"{self.name}.{command} failed to send cancel message after {exc_name}")
                     self.log.trace(traceback.format_exc())
                 break
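For context, client and server exchange pickled dicts over ZMQ. The exact request keys live in `make_message()`, which is not shown in this diff, so the request shape below is an assumption; the reply sentinels (`_e` for errors, `_s` for StopIteration) and the special `-1`/`-99` commands are taken directly from the handlers in this patch:

```python
import pickle

# Assumed request shape (real field names are defined in make_message(), not shown here)
request = pickle.dumps({"c": 0, "a": ("evilcorp.com",), "k": {}})  # command id, args, kwargs

# Reply envelopes, as handled by check_error() / check_stop():
error_reply = {"_e": ("error message", "traceback string")}  # re-raised client-side
stop_reply = {"_s": None}                                    # ends a run-and-yield stream

# Special command values, as handled by the server worker():
CANCEL_TASK = -1  # acknowledged with {"m": "CANCEL_OK"}
SHUTDOWN = -99    # acknowledged with {"m": "SHUTDOWN_OK"}
```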
@@ -266,6 +273,7 @@ def start_server(self):
                 # this allows us to more easily mock http, etc.
                 if os.environ.get("BBOT_TESTING", "") == "True":
                     kwargs["_loop"] = get_event_loop()
+                kwargs["debug"] = self._engine_debug
                 self.process = CORE.create_process(
                     target=self.server_process,
                     args=(
@@ -305,10 +313,12 @@ async def new_socket(self):
         if self._server_process is None:
             self._server_process = self.start_server()
             while not self.socket_path.exists():
-                self.log.debug(f"{self.name}: waiting for server process to start...")
+                self.engine_debug(f"{self.name}: waiting for server process to start...")
                 await asyncio.sleep(0.1)
         socket = self.context.socket(zmq.DEALER)
-        socket.setsockopt(zmq.LINGER, 0)
+        socket.setsockopt(zmq.LINGER, 0)  # Discard pending messages immediately on disconnect() or close()
+        socket.setsockopt(zmq.SNDHWM, 0)  # Unlimited send buffer
+        socket.setsockopt(zmq.RCVHWM, 0)  # Unlimited receive buffer
         socket.connect(f"ipc://{self.socket_path}")
         self.sockets.add(socket)
         try:
@@ -366,24 +376,28 @@ class EngineServer(EngineBase):
 
     CMDS = {}
 
-    def __init__(self, socket_path):
-        super().__init__()
+    def __init__(self, socket_path, debug=False):
         self.name = f"EngineServer {self.__class__.__name__}"
+        super().__init__(debug=debug)
+        self.engine_debug(f"{self.name}: finished setup 1 (_debug={self._engine_debug})")
         self.socket_path = socket_path
         self.client_id_var = contextvars.ContextVar("client_id", default=None)
         # task <--> client id mapping
         self.tasks = {}
         # child tasks spawned by main tasks
         self.child_tasks = {}
+        self.engine_debug(f"{self.name}: finished setup 2 (_debug={self._engine_debug})")
         if self.socket_path is not None:
             # create ZeroMQ context
             self.context = zmq.asyncio.Context()
-            self.context.setsockopt(zmq.LINGER, 0)
             # ROUTER socket can handle multiple concurrent requests
             self.socket = self.context.socket(zmq.ROUTER)
-            self.socket.setsockopt(zmq.LINGER, 0)
+            self.socket.setsockopt(zmq.LINGER, 0)  # Discard pending messages immediately on disconnect() or close()
+            self.socket.setsockopt(zmq.SNDHWM, 0)  # Unlimited send buffer
+            self.socket.setsockopt(zmq.RCVHWM, 0)  # Unlimited receive buffer
             # create socket file
             self.socket.bind(f"ipc://{self.socket_path}")
+            self.engine_debug(f"{self.name}: finished setup 3 (_debug={self._engine_debug})")
 
     @contextlib.contextmanager
     def client_id_context(self, value):
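The same three socket options now appear on both ends of the connection. A toy ROUTER/DEALER pair showing their effect (assumes pyzmq; the socket path is made up):

```python
import zmq

context = zmq.Context()

server = context.socket(zmq.ROUTER)   # one ROUTER serves many concurrent clients
server.setsockopt(zmq.LINGER, 0)      # drop unsent messages on close instead of blocking
server.setsockopt(zmq.SNDHWM, 0)      # 0 disables the high-water mark (unbounded queue)
server.setsockopt(zmq.RCVHWM, 0)
server.bind("ipc:///tmp/example.sock")

client = context.socket(zmq.DEALER)   # DEALER pairs with ROUTER for async request/reply
client.setsockopt(zmq.LINGER, 0)
client.connect("ipc:///tmp/example.sock")
```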
@@ -395,51 +409,56 @@ def client_id_context(self, value):
 
     async def run_and_return(self, client_id, command_fn, *args, **kwargs):
         fn_str = f"{command_fn.__name__}({args}, {kwargs})"
+        self.engine_debug(fn_str)
         with self.client_id_context(client_id):
             try:
-                self.log.debug(f"{self.name} run-and-return {fn_str}")
-                result = error_sentinel
+                self.engine_debug(f"{self.name}: starting run-and-return {fn_str}")
                 try:
                     result = await command_fn(*args, **kwargs)
                 except BaseException as e:
-                    if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
-                        error = f"Error in {self.name}.{fn_str}: {e}"
-                        self.log.debug(error)
-                        trace = traceback.format_exc()
-                        self.log.debug(trace)
-                        result = {"_e": (error, trace)}
+                    if in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
+                        log_fn = self.log.debug
+                    else:
+                        log_fn = self.log.error
+                    error = f"{self.name}: error in {fn_str}: {e}"
+                    trace = traceback.format_exc()
+                    log_fn(error)
+                    self.log.trace(trace)
+                    result = {"_e": (error, trace)}
                 finally:
                     self.tasks.pop(client_id, None)
-                    if result is not error_sentinel:
-                        self.log.debug(f"{self.name}: Sending response to {fn_str}: {result}")
-                        await self.send_socket_multipart(client_id, result)
+                    self.engine_debug(f"{self.name}: sending response to {fn_str}: {result}")
+                    await self.send_socket_multipart(client_id, result)
             except BaseException as e:
                 self.log.critical(
                     f"Unhandled exception in {self.name}.run_and_return({client_id}, {command_fn}, {args}, {kwargs}): {e}"
                 )
                 self.log.critical(traceback.format_exc())
             finally:
-                self.log.debug(f"{self.name} finished run-and-return {command_fn.__name__}({args}, {kwargs})")
+                self.engine_debug(f"{self.name} finished run-and-return {fn_str}")
 
     async def run_and_yield(self, client_id, command_fn, *args, **kwargs):
         fn_str = f"{command_fn.__name__}({args}, {kwargs})"
         with self.client_id_context(client_id):
             try:
-                self.log.debug(f"{self.name} run-and-yield {fn_str}")
+                self.engine_debug(f"{self.name}: starting run-and-yield {fn_str}")
                 try:
                     async for _ in command_fn(*args, **kwargs):
-                        self.log.debug(f"{self.name}: sending iteration for {command_fn.__name__}(): {_}")
+                        self.engine_debug(f"{self.name}: sending iteration for {fn_str}: {_}")
                         await self.send_socket_multipart(client_id, _)
                 except BaseException as e:
-                    if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
-                        error = f"Error in {self.name}.{fn_str}: {e}"
-                        trace = traceback.format_exc()
-                        self.log.debug(error)
-                        self.log.debug(trace)
-                        result = {"_e": (error, trace)}
-                        await self.send_socket_multipart(client_id, result)
+                    if in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
+                        log_fn = self.log.debug
+                    else:
+                        log_fn = self.log.error
+                    error = f"{self.name}: error in {fn_str}: {e}"
+                    trace = traceback.format_exc()
+                    log_fn(error)
+                    self.log.trace(trace)
+                    result = {"_e": (error, trace)}
+                    await self.send_socket_multipart(client_id, result)
                 finally:
-                    self.log.debug(f"{self.name} reached end of run-and-yield iteration for {command_fn.__name__}()")
+                    self.engine_debug(f"{self.name}: reached end of run-and-yield iteration for {fn_str}")
                     # _s == special signal that means StopIteration
                     await self.send_socket_multipart(client_id, {"_s": None})
                     self.tasks.pop(client_id, None)
@@ -449,14 +468,14 @@ async def run_and_yield(self, client_id, command_fn, *args, **kwargs):
                 )
                 self.log.critical(traceback.format_exc())
             finally:
-                self.log.debug(f"{self.name} finished run-and-yield {command_fn.__name__}()")
+                self.engine_debug(f"{self.name}: finished run-and-yield {fn_str}")
 
     async def send_socket_multipart(self, client_id, message):
         try:
             message = pickle.dumps(message)
             await self._infinite_retry(self.socket.send_multipart, [client_id, message])
         except Exception as e:
-            self.log.verbose(f"Error sending ZMQ message: {e}")
+            self.log.verbose(f"{self.name}: error sending ZMQ message: {e}")
             self.log.trace(traceback.format_exc())
 
     def check_error(self, message):
@@ -464,12 +483,12 @@ def check_error(self, message):
             return True
 
     async def worker(self):
-        self.log.debug(f"{self.name}: starting worker")
+        self.engine_debug(f"{self.name}: starting worker")
         try:
             while 1:
                 client_id, binary = await self.socket.recv_multipart()
                 message = self.unpickle(binary)
-                # self.log.debug(f"{self.name} got message: {message}")
+                self.engine_debug(f"{self.name} got message: {message}")
                 if self.check_error(message):
                     continue
 
@@ -480,14 +499,14 @@ async def worker(self):
 
                 # -1 == cancel task
                 if cmd == -1:
-                    self.log.debug(f"{self.name} got cancel signal")
+                    self.engine_debug(f"{self.name} got cancel signal")
                     await self.send_socket_multipart(client_id, {"m": "CANCEL_OK"})
                     await self.cancel_task(client_id)
                     continue
 
                 # -99 == shutdown task
                 if cmd == -99:
-                    self.log.debug(f"{self.name} got shutdown signal")
+                    self.log.verbose(f"{self.name} got shutdown signal")
                     await self.send_socket_multipart(client_id, {"m": "SHUTDOWN_OK"})
                     await self._shutdown()
                     return
@@ -509,75 +528,138 @@ async def worker(self):
                     continue
 
                 if inspect.isasyncgenfunction(command_fn):
-                    # self.log.debug(f"{self.name}: creating run-and-yield coroutine for {command_name}()")
+                    self.engine_debug(f"{self.name}: creating run-and-yield coroutine for {command_name}()")
                     coroutine = self.run_and_yield(client_id, command_fn, *args, **kwargs)
                 else:
-                    # self.log.debug(f"{self.name}: creating run-and-return coroutine for {command_name}()")
+                    self.engine_debug(f"{self.name}: creating run-and-return coroutine for {command_name}()")
                     coroutine = self.run_and_return(client_id, command_fn, *args, **kwargs)
 
-                # self.log.debug(f"{self.name}: creating task for {command_name}() coroutine")
+                self.engine_debug(f"{self.name}: creating task for {command_name}() coroutine")
                 task = asyncio.create_task(coroutine)
                 self.tasks[client_id] = task, command_fn, args, kwargs
-                # self.log.debug(f"{self.name}: finished creating task for {command_name}() coroutine")
+                self.engine_debug(f"{self.name}: finished creating task for {command_name}() coroutine")
         except BaseException as e:
             await self._shutdown()
             if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
                 self.log.error(f"{self.name}: error in EngineServer worker: {e}")
                 self.log.trace(traceback.format_exc())
         finally:
-            self.log.debug(f"{self.name}: finished worker()")
+            self.engine_debug(f"{self.name}: finished worker()")
 
     async def _shutdown(self):
         if not self._shutdown_status:
             self.log.verbose(f"{self.name}: shutting down...")
             self._shutdown_status = True
             await self.cancel_all_tasks()
-            try:
-                self.context.destroy(linger=0)
-            except Exception:
-                self.log.trace(traceback.format_exc())
-            try:
-                self.context.term()
-            except Exception:
-                self.log.trace(traceback.format_exc())
-            self.log.debug(f"{self.name}: finished shutting down")
+            context = getattr(self, "context", None)
+            if context is not None:
+                try:
+                    context.destroy(linger=0)
+                except Exception:
+                    self.log.trace(traceback.format_exc())
+                try:
+                    context.term()
+                except Exception:
+                    self.log.trace(traceback.format_exc())
+            self.log.verbose(f"{self.name}: finished shutting down")
+
+    async def task_pool(self, fn, args_kwargs, threads=10, timeout=300, global_kwargs=None):
+        if global_kwargs is None:
+            global_kwargs = {}
+
+        tasks = {}
+        args_kwargs = list(args_kwargs)
+
+        def new_task():
+            if args_kwargs:
+                kwargs = {}
+                tracker = None
+                args = args_kwargs.pop(0)
+                if isinstance(args, (list, tuple)):
+                    # you can specify a custom tracker value if you want
+                    # this helps with correlating results
+                    with suppress(ValueError):
+                        args, kwargs, tracker = args
+                    # or you can just specify args/kwargs
+                    with suppress(ValueError):
+                        args, kwargs = args
 
-    def new_child_task(self, client_id, coro):
+                if not isinstance(kwargs, dict):
+                    raise ValueError(f"kwargs must be dict (got: {kwargs})")
+                if not isinstance(args, (list, tuple)):
+                    args = [args]
+
+                task = self.new_child_task(fn(*args, **kwargs, **global_kwargs))
+                tasks[task] = (args, kwargs, tracker)
+
+        for _ in range(threads):  # Start initial batch of tasks
+            new_task()
+
+        while tasks:  # While there are tasks pending
+            # Wait for the first task to complete
+            finished = await self.finished_tasks(tasks, timeout=timeout)
+            for task in finished:
+                result = task.result()
+                (args, kwargs, tracker) = tasks.pop(task)
+                yield (args, kwargs, tracker), result
+                new_task()
+
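The new `task_pool()` is the generic replacement for the hand-rolled batching loops removed later in this diff. A hypothetical usage, following the argument shapes accepted by `new_task()` above (each `args_kwargs` entry may be bare args, `(args, kwargs)`, or `(args, kwargs, tracker)`):

```python
async def fetch(url, timeout=10):
    ...  # any coroutine function

async def example(server):
    urls = ["https://evilcorp.com", "https://www.evilcorp.com"]
    # global_kwargs is merged into every call; tracker comes back unchanged
    async for (args, kwargs, tracker), result in server.task_pool(
        fetch, args_kwargs=urls, threads=10, global_kwargs={"timeout": 5}
    ):
        print(args[0], result)
```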
+    def new_child_task(self, coro):
+        """
+        Create a new asyncio task, making sure to track it based on the client id.
+
+        This allows the task to be automatically cancelled if its parent is cancelled.
+        """
+        client_id = self.client_id_var.get()
         task = asyncio.create_task(coro)
-        try:
-            self.child_tasks[client_id].add(task)
-        except KeyError:
-            self.child_tasks[client_id] = {task}
+
+        if client_id:
+
+            def remove_task(t):
+                tasks = self.child_tasks.get(client_id, set())
+                tasks.discard(t)
+                if not tasks:
+                    self.child_tasks.pop(client_id, None)
+
+            task.add_done_callback(remove_task)
+
+            try:
+                self.child_tasks[client_id].add(task)
+            except KeyError:
+                self.child_tasks[client_id] = {task}
+
         return task
 
-    async def finished_tasks(self, client_id, timeout=None):
-        child_tasks = self.child_tasks.get(client_id, set())
-        try:
-            done, pending = await asyncio.wait(child_tasks, return_when=asyncio.FIRST_COMPLETED, timeout=timeout)
-        except BaseException as e:
-            if isinstance(e, (TimeoutError, asyncio.exceptions.TimeoutError)):
-                done = set()
-                self.log.warning(f"{self.name}: Timeout after {timeout:,} seconds in finished_tasks({child_tasks})")
-                for task in child_tasks:
-                    task.cancel()
-            else:
-                if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
-                    self.log.error(f"{self.name}: Unhandled exception in finished_tasks({child_tasks}): {e}")
-                    self.log.trace(traceback.format_exc())
-                raise
-        self.child_tasks[client_id] = pending
-        return done
+    async def finished_tasks(self, tasks, timeout=None):
+        """
+        Given a list of asyncio tasks, return the ones that are finished with an optional timeout
+        """
+        if tasks:
+            try:
+                done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED, timeout=timeout)
+                return done
+            except BaseException as e:
+                if isinstance(e, (TimeoutError, asyncio.exceptions.TimeoutError)):
+                    self.log.warning(f"{self.name}: Timeout after {timeout:,} seconds in finished_tasks({tasks})")
+                    for task in tasks:
+                        task.cancel()
+                else:
+                    if not in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)):
+                        self.log.error(f"{self.name}: Unhandled exception in finished_tasks({tasks}): {e}")
+                        self.log.trace(traceback.format_exc())
+                    raise
+        return set()
 
     async def cancel_task(self, client_id):
         parent_task = self.tasks.pop(client_id, None)
         if parent_task is None:
             return
         parent_task, _cmd, _args, _kwargs = parent_task
-        self.log.debug(f"{self.name}: Cancelling client id {client_id} (task: {parent_task})")
+        self.engine_debug(f"{self.name}: Cancelling client id {client_id} (task: {parent_task})")
         parent_task.cancel()
         child_tasks = self.child_tasks.pop(client_id, set())
         if child_tasks:
-            self.log.debug(f"{self.name}: Cancelling {len(child_tasks):,} child tasks for client id {client_id}")
+            self.engine_debug(f"{self.name}: Cancelling {len(child_tasks):,} child tasks for client id {client_id}")
             for child_task in child_tasks:
                 child_task.cancel()
@@ -588,7 +670,7 @@ async def _cancel_task(self, task):
         try:
             await asyncio.wait_for(task, timeout=10)
         except (TimeoutError, asyncio.exceptions.TimeoutError):
-            self.log.debug(f"{self.name}: Timeout cancelling task")
+            self.log.trace(f"{self.name}: Timeout cancelling task: {task}")
             return
         except (KeyboardInterrupt, asyncio.CancelledError):
             return
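The done-callback in `new_child_task()` is what keeps `child_tasks` from leaking: finished or cancelled children now remove themselves from the registry. The same pattern in isolation:

```python
import asyncio

child_tasks = {}

def track(client_id, coro):
    task = asyncio.ensure_future(coro)
    child_tasks.setdefault(client_id, set()).add(task)

    def remove_task(t):
        tasks = child_tasks.get(client_id, set())
        tasks.discard(t)
        if not tasks:  # drop the key once the last child finishes
            child_tasks.pop(client_id, None)

    task.add_done_callback(remove_task)
    return task
```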
diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py
index 9a5a9b8690..3eb10625f7 100644
--- a/bbot/core/event/base.py
+++ b/bbot/core/event/base.py
@@ -127,7 +127,7 @@ def __init__(
         scan=None,
         scans=None,
         tags=None,
-        confidence=5,
+        confidence=100,
         timestamp=None,
         _dummy=False,
         _internal=None,
@@ -146,7 +146,7 @@ def __init__(
             scan (Scan, optional): BBOT Scan object. Required unless _dummy is True. Defaults to None.
             scans (list of Scan, optional): BBOT Scan objects, used primarily when unserializing an Event from the database. Defaults to None.
             tags (list of str, optional): Descriptive tags for the event. Defaults to None.
-            confidence (int, optional): Confidence level for the event, on a scale of 1-10. Defaults to 5.
+            confidence (int, optional): Confidence level for the event, on a scale of 1-100. Defaults to 100.
             timestamp (datetime, optional): Time of event discovery. Defaults to current UTC time.
             _dummy (bool, optional): If True, disables certain data validations. Defaults to False.
             _internal (Any, optional): If specified, makes the event internal. Defaults to None.
@@ -237,6 +237,27 @@ def __init__(
     def data(self):
         return self._data
 
+    @property
+    def confidence(self):
+        return self._confidence
+
+    @confidence.setter
+    def confidence(self, confidence):
+        self._confidence = min(100, max(1, int(confidence)))
+
+    @property
+    def cumulative_confidence(self):
+        """
+        Considers the confidence of parent events. This is useful for filtering out speculative/unreliable events.
+
+        E.g. an event with a confidence of 50 whose parent is also 50 would have a cumulative confidence of 25.
+
+        A confidence of 100 will reset the cumulative confidence to 100.
+        """
+        if self._confidence == 100 or self.parent is None or self.parent is self:
+            return self._confidence
+        return int(self._confidence * self.parent.cumulative_confidence / 100)
+
     @property
     def resolved_hosts(self):
         if is_ip(self.host):
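To make the `cumulative_confidence` math concrete, here is the recurrence applied along a parent chain (root first); a 100 anywhere in the chain resets the running value, per the docstring above:

```python
def cumulative(chain):
    total = 100
    for confidence in chain:
        total = 100 if confidence == 100 else int(confidence * total / 100)
    return total

assert cumulative([50, 50]) == 25       # the docstring's example
assert cumulative([50, 100, 80]) == 80  # a 100 resets the chain
```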
@@ -359,7 +380,7 @@ def discovery_path(self):
         This event's full discovery context, including those of all its parents
         """
         parent_path = []
-        if self.parent is not None and self != self.parent:
+        if self.parent is not None and self.parent is not self:
             parent_path = self.parent.discovery_path
         return parent_path + [[self.id, self.discovery_context]]
 
@@ -387,6 +408,10 @@ def tags(self, tags):
     def add_tag(self, tag):
         self._tags.add(tagify(tag))
 
+    def add_tags(self, tags):
+        for tag in set(tags):
+            self.add_tag(tag)
+
     def remove_tag(self, tag):
         with suppress(KeyError):
             self._tags.remove(tagify(tag))
@@ -461,10 +486,10 @@ def scope_distance(self, scope_distance):
                 self.remove_tag("in-scope")
                 self.add_tag(f"distance-{new_scope_distance}")
             self._scope_distance = new_scope_distance
-            # apply recursively to parent events
-            parent_scope_distance = getattr(self.parent, "scope_distance", None)
-            if parent_scope_distance is not None and self != self.parent:
-                self.parent.scope_distance = scope_distance + 1
+            # apply recursively to parent events
+            parent_scope_distance = getattr(self.parent, "scope_distance", None)
+            if parent_scope_distance is not None and self.parent is not self:
+                self.parent.scope_distance = new_scope_distance + 1
 
     @property
     def scope_description(self):
@@ -869,7 +894,7 @@ def __hash__(self):
 
     def __str__(self):
         max_event_len = 80
-        d = str(self.data)
+        d = str(self.data).replace("\n", "\\n")
         return f'{self.type}("{d[:max_event_len]}{("..." if len(d) > max_event_len else "")}", module={self.module}, tags={self.tags})'
 
     def __repr__(self):
@@ -923,19 +948,40 @@ def _host(self):
             return make_ip_type(parsed.hostname)
 
 
-class DictPathEvent(DictEvent):
-    _path_keywords = ["path", "filename"]
+class ClosestHostEvent(DictHostEvent):
+    # if a host/path/url isn't specified, this event type grabs it from the closest parent
+    # inherited by FINDING and VULNERABILITY
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if not self.host:
+            for parent in self.get_parents(include_self=True):
+                # inherit closest URL
+                if not "url" in self.data:
+                    parent_url = getattr(parent, "parsed_url", None)
+                    if parent_url is not None:
+                        self.data["url"] = parent_url.geturl()
+                # inherit closest path
+                if not "path" in self.data and isinstance(parent.data, dict):
+                    parent_path = parent.data.get("path", None)
+                    if parent_path is not None:
+                        self.data["path"] = parent_path
+                # inherit closest host
+                if parent.host:
+                    self.data["host"] = str(parent.host)
+                    break
+            # die if we still haven't found a host
+            if not self.host:
+                raise ValueError("No host was found in event parents. Host must be specified!")
+
 
+class DictPathEvent(DictEvent):
     def sanitize_data(self, data):
         new_data = dict(data)
         file_blobs = getattr(self.scan, "_file_blobs", False)
         folder_blobs = getattr(self.scan, "_folder_blobs", False)
-        for path_keyword in self._path_keywords:
-            blob = None
-            try:
-                data_path = Path(data[path_keyword])
-            except KeyError:
-                continue
+        blob = None
+        try:
+            data_path = Path(data["path"])
             if data_path.is_file():
                 self.add_tag("file")
                 if file_blobs:
@@ -945,10 +991,10 @@ def sanitize_data(self, data):
                 self.add_tag("folder")
                 if folder_blobs:
                     blob = self._tar_directory(data_path)
-            else:
-                continue
-            if blob:
-                new_data["blob"] = base64.b64encode(blob).decode("utf-8")
+        except KeyError:
+            pass
+        if blob:
+            new_data["blob"] = base64.b64encode(blob).decode("utf-8")
 
         return new_data
 
@@ -1300,7 +1346,7 @@ def redirect_location(self):
         return location
 
 
-class VULNERABILITY(DictHostEvent):
+class VULNERABILITY(ClosestHostEvent):
     _always_emit = True
     _quick_emit = True
     severity_colors = {
@@ -1316,10 +1362,11 @@ def sanitize_data(self, data):
         return data
 
     class _data_validator(BaseModel):
-        host: str
+        host: Optional[str] = None
         severity: str
         description: str
         url: Optional[str] = None
+        path: Optional[str] = None
         _validate_url = field_validator("url")(validators.validate_url)
         _validate_host = field_validator("host")(validators.validate_host)
         _validate_severity = field_validator("severity")(validators.validate_severity)
@@ -1328,14 +1375,15 @@ def _pretty_string(self):
         return f'[{self.data["severity"]}] {self.data["description"]}'
 
 
-class FINDING(DictHostEvent):
+class FINDING(ClosestHostEvent):
     _always_emit = True
     _quick_emit = True
 
     class _data_validator(BaseModel):
-        host: str
+        host: Optional[str] = None
         description: str
         url: Optional[str] = None
+        path: Optional[str] = None
         _validate_url = field_validator("url")(validators.validate_url)
         _validate_host = field_validator("host")(validators.validate_host)
@@ -1464,7 +1512,7 @@ def make_event(
     scan=None,
    scans=None,
     tags=None,
-    confidence=5,
+    confidence=100,
     dummy=False,
     internal=None,
 ):
@@ -1484,7 +1532,7 @@ def make_event(
         scan (Scan, optional): BBOT Scan object associated with the event.
         scans (List[Scan], optional): Multiple BBOT Scan objects, primarily used for unserialization.
         tags (Union[str, List[str]], optional): Descriptive tags for the event, as a list or a single string.
-        confidence (int, optional): Confidence level for the event, on a scale of 1-10. Defaults to 5.
+        confidence (int, optional): Confidence level for the event, on a scale of 1-100. Defaults to 100.
         dummy (bool, optional): Disables data validations if set to True. Defaults to False.
         internal (Any, optional): Makes the event internal if set to True. Defaults to None.
@@ -1613,7 +1661,7 @@ def event_from_json(j, siem_friendly=False):
         "event_type": event_type,
         "scans": j.get("scans", []),
         "tags": j.get("tags", []),
-        "confidence": j.get("confidence", 5),
+        "confidence": j.get("confidence", 100),
         "context": j.get("discovery_context", None),
         "dummy": True,
     }
diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py
index 7283291fc8..6f43a401db 100644
--- a/bbot/core/helpers/command.py
+++ b/bbot/core/helpers/command.py
@@ -3,9 +3,9 @@
 import logging
 import traceback
 from signal import SIGINT
-from subprocess import CompletedProcess, CalledProcessError
+from subprocess import CompletedProcess, CalledProcessError, SubprocessError
 
-from .misc import smart_decode, smart_encode
+from .misc import smart_decode, smart_encode, which
 
 log = logging.getLogger("bbot.core.helpers.command")
 
@@ -182,7 +182,11 @@ async def _spawn_proc(self, *command, **kwargs):
             >>> _spawn_proc("ls", "-l", input="data")
             (<process>, "data", ["ls", "-l"])
         """
-        command, kwargs = self._prepare_command_kwargs(command, kwargs)
+        try:
+            command, kwargs = self._prepare_command_kwargs(command, kwargs)
+        except SubprocessError as e:
+            log.warning(e)
+            return None, None, None
         _input = kwargs.pop("input", None)
         if _input is not None:
             if kwargs.get("stdin") is not None:
@@ -276,6 +280,17 @@ def _prepare_command_kwargs(self, command, kwargs):
             command = command[0]
         command = [str(s) for s in command]
 
+        if not command:
+            raise SubprocessError("Must specify a command")
+
+        # use full path of binary, if not already specified
+        binary = command[0]
+        if not "/" in binary:
+            binary_full_path = which(binary)
+            if binary_full_path is None:
+                raise SubprocessError(f'Command "{binary}" was not found')
+            command[0] = binary_full_path
+
         env = kwargs.get("env", os.environ)
         if sudo and os.geteuid() != 0:
             self.depsinstaller.ensure_root()
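The new `_prepare_command_kwargs` logic resolves bare binary names to absolute paths and fails early when a binary is missing. A rough standard-library equivalent (BBOT uses its own `which` helper from `.misc`):

```python
import shutil
from subprocess import SubprocessError

def resolve_binary_path(command):
    if not command:
        raise SubprocessError("Must specify a command")
    binary = command[0]
    if "/" not in binary:
        full_path = shutil.which(binary)
        if full_path is None:
            raise SubprocessError(f'Command "{binary}" was not found')
        command[0] = full_path
    return command

resolve_binary_path(["ls", "-l"])  # -> ["/usr/bin/ls", "-l"] on most systems
```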
diff --git a/bbot/core/helpers/dns/dns.py b/bbot/core/helpers/dns/dns.py
index 2f77ce081e..07f5621323 100644
--- a/bbot/core/helpers/dns/dns.py
+++ b/bbot/core/helpers/dns/dns.py
@@ -56,7 +56,8 @@ def __init__(self, parent_helper):
         self.parent_helper = parent_helper
         self.config = self.parent_helper.config
         self.dns_config = self.config.get("dns", {})
-        super().__init__(server_kwargs={"config": self.config})
+        engine_debug = self.config.get("engine", {}).get("debug", False)
+        super().__init__(server_kwargs={"config": self.config}, debug=engine_debug)
 
         # resolver
         self.timeout = self.dns_config.get("timeout", 5)
diff --git a/bbot/core/helpers/dns/engine.py b/bbot/core/helpers/dns/engine.py
index 6840d5506f..8a41c7c8ea 100644
--- a/bbot/core/helpers/dns/engine.py
+++ b/bbot/core/helpers/dns/engine.py
@@ -37,8 +37,8 @@ class DNSEngine(EngineServer):
         99: "_mock_dns",
     }
 
-    def __init__(self, socket_path, config={}):
-        super().__init__(socket_path)
+    def __init__(self, socket_path, config={}, debug=False):
+        super().__init__(socket_path, debug=debug)
 
         self.config = config
         self.dns_config = self.config.get("dns", {})
@@ -349,57 +349,20 @@ async def resolve_batch(self, queries, threads=10, **kwargs):
             ('www.evilcorp.com', {'1.1.1.1'})
             ('evilcorp.com', {'2.2.2.2'})
         """
-        tasks = {}
-        client_id = self.client_id_var.get()
-
-        def new_task(query):
-            task = self.new_child_task(client_id, self.resolve(query, **kwargs))
-            tasks[task] = query
-
-        queries = list(queries)
-        for _ in range(threads):  # Start initial batch of tasks
-            if queries:  # Ensure there are args to process
-                new_task(queries.pop(0))
-
-        while tasks:  # While there are tasks pending
-            # Wait for the first task to complete
-            finished = await self.finished_tasks(client_id, timeout=120)
-
-            for task in finished:
-                results = task.result()
-                query = tasks.pop(task)
-
-                if results:
-                    yield (query, results)
-
-                if queries:  # Start a new task for each one completed, if URLs remain
-                    new_task(queries.pop(0))
+        async for (args, _, _), responses in self.task_pool(
+            self.resolve, args_kwargs=queries, threads=threads, global_kwargs=kwargs
+        ):
+            yield args[0], responses
 
     async def resolve_raw_batch(self, queries, threads=10, **kwargs):
-        tasks = {}
-        client_id = self.client_id_var.get()
-
-        def new_task(query, rdtype):
-            task = self.new_child_task(client_id, self.resolve_raw(query, type=rdtype, **kwargs))
-            tasks[task] = (query, rdtype)
-
-        queries = list(queries)
-        for _ in range(threads):  # Start initial batch of tasks
-            if queries:  # Ensure there are args to process
-                new_task(*queries.pop(0))
-
-        while tasks:  # While there are tasks pending
-            # Wait for the first task to complete
-            finished = await self.finished_tasks(client_id, timeout=120)
-
-            for task in finished:
-                answers, errors = task.result()
-                query, rdtype = tasks.pop(task)
-                for answer in answers:
-                    yield ((query, rdtype), (answer, errors))
-
-                if queries:  # Start a new task for each one completed, if URLs remain
-                    new_task(*queries.pop(0))
+        queries_kwargs = [[q[0], {"type": q[1]}] for q in queries]
+        async for (args, kwargs, _), (answers, errors) in self.task_pool(
+            self.resolve_raw, args_kwargs=queries_kwargs, threads=threads, global_kwargs=kwargs
+        ):
+            query = args[0]
+            rdtype = kwargs["type"]
+            for answer in answers:
+                yield ((query, rdtype), (answer, errors))
 
     async def _catch(self, callback, *args, **kwargs):
         """
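Both batch methods now delegate to `task_pool()`. A hypothetical call showing the `(query, rdtype)` tuples that `resolve_raw_batch()` repackages into per-query kwargs:

```python
async def example(dns_engine):
    queries = [("evilcorp.com", "A"), ("evilcorp.com", "MX"), ("www.evilcorp.com", "CNAME")]
    async for (query, rdtype), (answer, errors) in dns_engine.resolve_raw_batch(queries, threads=5):
        print(query, rdtype, answer, errors)
```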
diff --git a/bbot/core/helpers/dns/helpers.py b/bbot/core/helpers/dns/helpers.py
index 061ed829ca..c18a2c1620 100644
--- a/bbot/core/helpers/dns/helpers.py
+++ b/bbot/core/helpers/dns/helpers.py
@@ -6,6 +6,154 @@
 log = logging.getLogger("bbot.core.helpers.dns")
 
 
+# the following are the result of a 1-day internet survey to find the top SRV records
+# the scan resulted in 36,282 SRV records. the count for each one is shown.
+common_srvs = [
+    "_sipfederationtls._tcp",  # 6909
+    "_sip._tls",  # 6853
+    "_autodiscover._tcp",  # 4268
+    "_xmpp-server._tcp",  # 1437
+    "_sip._tcp",  # 1193
+    "_sips._tcp",  # 1183
+    "_caldavs._tcp",  # 1179
+    "_carddavs._tcp",  # 1132
+    "_caldav._tcp",  # 1035
+    "_carddav._tcp",  # 1024
+    "_sip._udp",  # 1007
+    "_imaps._tcp",  # 1007
+    "_submission._tcp",  # 906
+    "_h323cs._tcp",  # 846
+    "_h323ls._udp",  # 782
+    "_xmpp-client._tcp",  # 689
+    "_pop3s._tcp",  # 394
+    "_jabber._tcp",  # 277
+    "_imap._tcp",  # 267
+    "_turn._udp",  # 256
+    "_pop3._tcp",  # 221
+    "_ldap._tcp",  # 213
+    "_smtps._tcp",  # 195
+    "_sipinternaltls._tcp",  # 192
+    "_vlmcs._tcp",  # 165
+    "_kerberos._udp",  # 163
+    "_kerberos._tcp",  # 148
+    "_kpasswd._udp",  # 128
+    "_kpasswd._tcp",  # 100
+    "_ntp._udp",  # 90
+    "_gc._tcp",  # 73
+    "_kerberos-master._udp",  # 66
+    "_ldap._tcp.dc._msdcs",  # 63
+    "_matrix._tcp",  # 62
+    "_smtp._tcp",  # 61
+    "_stun._udp",  # 57
+    "_kerberos._tcp.dc._msdcs",  # 54
+    "_ldap._tcp.gc._msdcs",  # 49
+    "_kerberos-adm._tcp",  # 44
+    "_ldap._tcp.pdc._msdcs",  # 43
+    "_kerberos-master._tcp",  # 43
+    "_http._tcp",  # 37
+    "_h323rs._tcp",  # 36
+    "_sipinternal._tcp",  # 35
+    "_turn._tcp",  # 33
+    "_stun._tcp",  # 33
+    "_h323ls._tcp",  # 33
+    "_x-puppet._tcp",  # 30
+    "_h323cs._udp",  # 27
+    "_stuns._tcp",  # 26
+    "_jabber-client._tcp",  # 25
+    "_x-puppet-ca._tcp",  # 22
+    "_ts3._udp",  # 22
+    "_minecraft._tcp",  # 22
+    "_turns._tcp",  # 21
+    "_ldaps._tcp",  # 21
+    "_xmpps-client._tcp",  # 20
+    "_https._tcp",  # 19
+    "_ftp._tcp",  # 19
+    "_xmpp-server._udp",  # 18
+    "_xmpp-client._udp",  # 17
+    "_jabber._udp",  # 17
+    "_jabber-client._udp",  # 17
+    "_xmpps-server._tcp",  # 15
+    "_finger._tcp",  # 14
+    "_stuns._udp",  # 12
+    "_hkp._tcp",  # 12
+    "_vlmcs._udp",  # 11
+    "_turns._udp",  # 11
+    "_tftp._udp",  # 11
+    "_ssh._tcp",  # 11
+    "_rtps._udp",  # 11
+    "_mysqlsrv._tcp",  # 11
+    "_hkps._tcp",  # 11
+    "_h323be._udp",  # 11
+    "_dns._tcp",  # 11
+    "_wss._tcp",  # 10
+    "_wpad._tcp",  # 10
+    "_whois._tcp",  # 10
+    "_webexconnect._tcp",  # 10
+    "_webexconnects._tcp",  # 10
+    "_vnc._tcp",  # 10
+    "_test._tcp",  # 10
+    "_telnet._tcp",  # 10
+    "_telnets._tcp",  # 10
+    "_teamspeak._tcp",  # 10
+    "_svns._tcp",  # 10
+    "_svcp._tcp",  # 10
+    "_smb._tcp",  # 10
+    "_sip-tls._tcp",  # 10
+    "_sftp._tcp",  # 10
+    "_secure-pop3._tcp",  # 10
+    "_secure-imap._tcp",  # 10
+    "_rtsp._tcp",  # 10
+    "_rtps._tcp",  # 10
+    "_rpc._tcp",  # 10
+    "_rfb._tcp",  # 10
+    "_raop._tcp",  # 10
+    "_pstn._tcp",  # 10
+    "_presence._tcp",  # 10
+    "_pkixrep._tcp",  # 10
+    "_pgprevokations._tcp",  # 10
+    "_pgpkeys._tcp",  # 10
+    "_ocsp._tcp",  # 10
+    "_nntp._tcp",  # 10
+    "_nfs._tcp",  # 10
+    "_netbios-ssn._tcp",  # 10
+    "_netbios-ns._tcp",  # 10
+    "_netbios-dgm._tcp",  # 10
+    "_mumble._tcp",  # 10
+    "_msrpc._tcp",  # 10
+    "_mqtts._tcp",  # 10
+    "_minecraft._udp",  # 10
+    "_iscsi._tcp",  # 10
+    "_ircs._tcp",  # 10
+    "_ipp._tcp",  # 10
+    "_ipps._tcp",  # 10
+    "_h323be._tcp",  # 10
+    "_gits._tcp",  # 10
+    "_ftps._tcp",  # 10
+    "_ftpes._tcp",  # 10
+    "_dnss._udp",  # 10
+    "_dnss._tcp",  # 10
+    "_diameter._tcp",  # 10
+    "_crl._tcp",  # 10
+    "_crls._tcp",  # 10
+    "_cmp._tcp",  # 10
+    "_certificates._tcp",  # 10
+    "_aix._tcp",  # 10
+    "_afpovertcp._tcp",  # 10
+    "_collab-edge._tls",  # 6
+    "_tcp",  # 5
+    "_client._smtp",  # 3
+    "_udp",  # 2
+    "_tls",  # 2
+    "_msdcs",  # 2
+    "_gc._msdcs",  # 2
+    "_ldaps._tcp.dc._msdcs",  # 1
+    "_kerberos._tcp.kdc._msdcs",  # 1
+    "_kerberos.tcp.dc._msdcs",  # 1
+    "_imap",  # 1
+    "_iax",  # 1
+]
+
+
 def extract_targets(record):
     """
     Extracts hostnames or IP addresses from a given DNS record.
@@ -59,3 +207,76 @@ def add_result(rdtype, _record):
         else:
             log.warning(f'Unknown DNS record type "{rdtype}"')
     return results
+
+
+def service_record(host, rdtype=None):
+    """
+    Indicates that the provided host name and optional rdtype is an SRV or related service record.
+
+    These types of records do/should not have A/AAAA/CNAME or similar records, and are simply used to advertise configuration information and/or policy information for different Internet facing services.
+
+    This function exists to provide a consistent way in which to perform this test, rather than having duplicated code in multiple places in different modules.
+
+    The response provides a way for modules to quickly test whether a host name is relevant and worth inspecting or using in context of what the module does.
+
+    NOTE: While underscores are technically not supposed to exist in DNS names as per RFC's, they can be used, so we can't assume that a name that contains or starts with an underscore is a service record and so must check for specific strings.
+
+    Args:
+        host (string): A DNS host name
+
+    Returns:
+        bool: A boolean, True indicates that the host is an SRV or similar record, False indicates that it is not.
+
+    Examples:
+        >>> service_record('_xmpp._tcp.example.com')
+        True
+
+        >>> service_record('_custom._service.example.com', 'SRV')
+        True
+
+        >>> service_record('_dmarc.example.com')
+        True
+
+        >>> service_record('www.example.com')
+        False
+    """
+
+    # if we were provided an rdtype, check if it is SRV
+    # NOTE: we don't care what the name is if rdtype == SRV
+    if rdtype and str(rdtype).upper() == "SRV":
+        return True
+
+    # we did not receive rdtype, so we'll have to inspect host name parts
+    parts = str(host).split(".")
+
+    if not parts:
+        return False
+
+    # DMARC TXT records, e.g. _dmarc.example.com
+    if parts[0] == "_dmarc":
+        return True
+
+    # MTA-STS TXT records, e.g. _mta-sts.example.com
+    if parts[0] == "_mta-sts":
+        return True
+
+    if len(parts) < 2:
+        return False
+
+    # classic SRV record names, e.g. _ldap._tcp.example.com
+    if parts[1] == "_udp" or parts[1] == "_tcp":
+        return True
+
+    # TLS indicating records, used by SMTP TLS-RPT etc, e.g. _smtp._tls.example.com
+    if parts[1] == "_tls":
+        return True
+
+    # BIMI TXT records, e.g. selector._bimi.example.com
+    if parts[1] == "_bimi":
+        return True
+
+    # DKIM TXT records, e.g. selector._domainkey.example.com
+    if parts[1] == "_domainkey":
+        return True
+
+    return False
diff --git a/bbot/core/helpers/web/client.py b/bbot/core/helpers/web/client.py
index cd925730d8..c09a0e4856 100644
--- a/bbot/core/helpers/web/client.py
+++ b/bbot/core/helpers/web/client.py
@@ -72,6 +72,7 @@ def __init__(self, *args, **kwargs):
 
         proxies = self._web_config.get("http_proxy", None)
         kwargs["proxies"] = proxies
+        log.verbose(f"Creating httpx.AsyncClient({args}, {kwargs})")
         super().__init__(*args, **kwargs)
         if not self._persist_cookies:
             self._cookies = DummyCookies()
@@ -91,3 +92,7 @@ def _merge_cookies(self, cookies):
         if self._persist_cookies:
             return super()._merge_cookies(cookies)
         return cookies
+
+    @property
+    def retries(self):
+        return self._transport._pool._retries
diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py
index 30e037e6cb..60e7038aa9 100644
--- a/bbot/core/helpers/web/engine.py
+++ b/bbot/core/helpers/web/engine.py
@@ -27,19 +27,28 @@ class HTTPEngine(EngineServer):
         "max_redirects",
     )
 
-    def __init__(self, socket_path, target, config={}):
-        super().__init__(socket_path)
+    def __init__(self, socket_path, target, config={}, debug=False):
+        super().__init__(socket_path, debug=debug)
         self.target = target
         self.config = config
         self.web_config = self.config.get("web", {})
         self.http_debug = self.web_config.get("debug", False)
         self._ssl_context_noverify = None
+        self.web_clients = {}
         self.web_client = self.AsyncClient(persist_cookies=False)
 
     def AsyncClient(self, *args, **kwargs):
-        from .client import BBOTAsyncClient
+        # cache by retries to prevent unwanted accumulation of clients
+        # (they are not garbage-collected)
+        retries = kwargs.get("retries", 1)
+        try:
+            return self.web_clients[retries]
+        except KeyError:
+            from .client import BBOTAsyncClient
 
-        return BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs)
+            client = BBOTAsyncClient.from_config(self.config, self.target, *args, **kwargs)
+            self.web_clients[client.retries] = client
+            return client
 
     async def request(self, *args, **kwargs):
         raise_error = kwargs.pop("raise_error", False)
@@ -74,8 +83,7 @@ async def request(self, *args, **kwargs):
 
         async with self._acatch(url, raise_error):
             if self.http_debug:
-                logstr = f"Web request: {str(args)}, {str(kwargs)}"
-                log.trace(logstr)
+                log.trace(f"Web request: {str(args)}, {str(kwargs)}")
             response = await client.request(*args, **kwargs)
             if self.http_debug:
                 log.trace(
@@ -83,54 +91,17 @@ async def request(self, *args, **kwargs):
                 )
             return response
 
-    async def request_batch(self, urls, *args, threads=10, **kwargs):
-        tasks = {}
-        client_id = self.client_id_var.get()
-
-        urls = list(urls)
-
-        def new_task():
-            if urls:
-                url = urls.pop(0)
-                task = self.new_child_task(client_id, self.request(url, *args, **kwargs))
-                tasks[task] = url
-
-        for _ in range(threads):  # Start initial batch of tasks
-            new_task()
-
-        while tasks:  # While there are tasks pending
-            # Wait for the first task to complete
-            finished = await self.finished_tasks(client_id, timeout=120)
-
-            for task in finished:
-                response = task.result()
-                url = tasks.pop(task)
-                yield (url, response)
-                new_task()
-
-    async def request_custom_batch(self, urls_and_kwargs, threads=10):
-        tasks = {}
-        client_id = self.client_id_var.get()
-        urls_and_kwargs = list(urls_and_kwargs)
-
-        def new_task():
-            if urls_and_kwargs:  # Ensure there are args to process
-                url, kwargs, custom_tracker = urls_and_kwargs.pop(0)
-                task = self.new_child_task(client_id, self.request(url, **kwargs))
-                tasks[task] = (url, kwargs, custom_tracker)
-
-        for _ in range(threads):  # Start initial batch of tasks
-            new_task()
-
-        while tasks:  # While there are tasks pending
-            # Wait for the first task to complete
-            done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
-
-            for task in done:
-                response = task.result()
-                url, kwargs, custom_tracker = tasks.pop(task)
-                yield (url, kwargs, custom_tracker, response)
-                new_task()
+    async def request_batch(self, urls, threads=10, **kwargs):
+        async for (args, _, _), response in self.task_pool(
+            self.request, args_kwargs=urls, threads=threads, global_kwargs=kwargs
+        ):
+            yield args[0], response
+
+    async def request_custom_batch(self, urls_and_kwargs, threads=10, **kwargs):
+        async for (args, kwargs, tracker), response in self.task_pool(
+            self.request, args_kwargs=urls_and_kwargs, threads=threads, global_kwargs=kwargs
+        ):
+            yield args[0], kwargs, tracker, response
 
     async def download(self, url, **kwargs):
         warn = kwargs.pop("warn", True)
diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py
index 1e8ca3c616..66be930c17 100644
--- a/bbot/core/helpers/web/web.py
+++ b/bbot/core/helpers/web/web.py
@@ -55,14 +55,27 @@ def __init__(self, parent_helper):
         self.web_config = self.config.get("web", {})
         self.web_spider_depth = self.web_config.get("spider_depth", 1)
         self.web_spider_distance = self.web_config.get("spider_distance", 0)
+        self.web_clients = {}
         self.target = self.preset.target
         self.ssl_verify = self.config.get("ssl_verify", False)
-        super().__init__(server_kwargs={"config": self.config, "target": self.parent_helper.preset.target.radix_only})
+        engine_debug = self.config.get("engine", {}).get("debug", False)
+        super().__init__(
+            server_kwargs={"config": self.config, "target": self.parent_helper.preset.target.radix_only},
+            debug=engine_debug,
+        )
 
     def AsyncClient(self, *args, **kwargs):
-        from .client import BBOTAsyncClient
+        # cache by retries to prevent unwanted accumulation of clients
+        # (they are not garbage-collected)
+        retries = kwargs.get("retries", 1)
+        try:
+            return self.web_clients[retries]
+        except KeyError:
+            from .client import BBOTAsyncClient
 
-        return BBOTAsyncClient.from_config(self.config, self.target, *args, persist_cookies=False, **kwargs)
+            client = BBOTAsyncClient.from_config(self.config, self.target, *args, persist_cookies=False, **kwargs)
+            self.web_clients[client.retries] = client
+            return client
 
     async def request(self, *args, **kwargs):
         """
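Both `AsyncClient()` factories now memoize clients keyed on their retry count, since the clients are never garbage-collected. The cache pattern in isolation (`make_client` stands in for `BBOTAsyncClient.from_config`):

```python
web_clients = {}

def get_client(make_client, retries=1):
    try:
        return web_clients[retries]
    except KeyError:
        client = make_client(retries=retries)
        web_clients[retries] = client  # reuse one client per retry setting
        return client
```

Note that the new `retries` property reaches into httpx private internals (`_transport._pool._retries`), so it may need revisiting across httpx versions.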
@@ -498,6 +511,8 @@ def beautifulsoup(
 
     def is_login_page(self, html):
         """
+        TODO: convert this into an excavate YARA rule
+
         Determines if the provided HTML content contains a login page.
 
         This function parses the HTML to search for forms with input fields typically used for
diff --git a/bbot/defaults.yml b/bbot/defaults.yml
index 62f1788984..2ce8d42086 100644
--- a/bbot/defaults.yml
+++ b/bbot/defaults.yml
@@ -97,6 +97,11 @@ web:
   # Whether to verify SSL certificates
   ssl_verify: false
 
+### ENGINE ###
+
+engine:
+  debug: false
+
 # Tool dependencies
 deps:
   ffuf:
diff --git a/bbot/modules/baddns.py b/bbot/modules/baddns.py
index 6dbc7c1419..7425a985eb 100644
--- a/bbot/modules/baddns.py
+++ b/bbot/modules/baddns.py
@@ -22,7 +22,7 @@ class baddns(BaseModule):
         "enable_references": "Enable the references module (off by default)",
     }
     module_threads = 8
-    deps_pip = ["baddns~=1.1.798"]
+    deps_pip = ["baddns~=1.1.815"]
 
     def select_modules(self):
@@ -49,13 +49,20 @@ async def handle_event(self, event):
 
         tasks = []
         for ModuleClass in self.select_modules():
-            module_instance = ModuleClass(
-                event.data,
-                http_client_class=self.scan.helpers.web.AsyncClient,
-                dns_client=self.scan.helpers.dns.resolver,
-                custom_nameservers=self.custom_nameservers,
-                signatures=self.signatures,
-            )
+            kwargs = {
+                "http_client_class": self.scan.helpers.web.AsyncClient,
+                "dns_client": self.scan.helpers.dns.resolver,
+                "custom_nameservers": self.custom_nameservers,
+                "signatures": self.signatures,
+            }
+
+            if ModuleClass.name == "NS":
+                kwargs["raw_query_max_retries"] = 1
+                kwargs["raw_query_timeout"] = 5.0
+                kwargs["raw_query_retry_wait"] = 0
+
+            module_instance = ModuleClass(event.data, **kwargs)
+
             tasks.append((module_instance, asyncio.create_task(module_instance.dispatch())))
 
         for module_instance, task in tasks:
diff --git a/bbot/modules/baddns_zone.py b/bbot/modules/baddns_zone.py
index 5cb916ad85..a356f61b3b 100644
--- a/bbot/modules/baddns_zone.py
+++ b/bbot/modules/baddns_zone.py
@@ -17,7 +17,7 @@ class baddns_zone(baddns_module):
         "only_high_confidence": "Do not emit low-confidence or generic detections",
     }
     module_threads = 8
-    deps_pip = ["baddns~=1.1.798"]
+    deps_pip = ["baddns~=1.1.815"]
 
     def select_modules(self):
         selected_modules = []
diff --git a/bbot/modules/bucket_azure.py b/bbot/modules/bucket_azure.py
index 032e409b45..dcf90eb346 100644
--- a/bbot/modules/bucket_azure.py
+++ b/bbot/modules/bucket_azure.py
@@ -24,7 +24,7 @@ class bucket_azure(bucket_template):
     def build_bucket_request(self, bucket_name, base_domain, region):
         url = self.build_url(bucket_name, base_domain, region)
         url = url.strip("/") + f"/{bucket_name}?restype=container"
-        return url, {"retries": 0}
+        return url, {}
 
     def check_bucket_exists(self, bucket_name, response):
         status_code = getattr(response, "status_code", 0)
diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py
index e0e88fbe8f..4643c98260 100644
--- a/bbot/modules/deadly/ffuf.py
+++ b/bbot/modules/deadly/ffuf.py
@@ -28,18 +28,18 @@ class ffuf(BaseModule):
 
     deps_common = ["ffuf"]
 
-    banned_characters = [" "]
-
+    banned_characters = set([" "])
     blacklist = ["images", "css", "image"]
 
     in_scope_only = True
 
     async def setup(self):
+        self.proxy = self.scan.web_config.get("http_proxy", "")
         self.canary = "".join(random.choice(string.ascii_lowercase) for i in range(10))
         wordlist_url = self.config.get("wordlist", "")
         self.debug(f"Using wordlist [{wordlist_url}]")
         self.wordlist = await self.helpers.wordlist(wordlist_url)
-        self.wordlist_lines = list(self.helpers.read_file(self.wordlist))
+        self.wordlist_lines = self.generate_wordlist(self.wordlist)
         self.tempfile, tempfile_len = self.generate_templist()
         self.verbose(f"Generated dynamic wordlist with length [{str(tempfile_len)}]")
         try:
@@ -243,6 +243,9 @@ async def execute_ffuf(
             self.debug("invalid mode specified, aborting")
             return
 
+        if self.proxy:
+            command += ["-x", self.proxy]
+
         if apply_filters:
             if ext in filters.keys():
                 if filters[ext][0] == ("ABORT"):
@@ -311,19 +314,30 @@ async def execute_ffuf(
                     self.debug("Received invalid JSON from FFUF")
 
     def generate_templist(self, prefix=None):
-        line_count = 0
-        virtual_file = []
-        for idx, val in enumerate(self.wordlist_lines):
-            if idx > self.config.get("lines"):
-                break
-            if len(val) > 0:
-                if val.strip().lower() in self.blacklist:
-                    self.debug(f"Skipping adding [{val.strip()}] to wordlist because it was in the blacklist")
-                else:
-                    if not prefix or val.strip().lower().startswith(prefix.strip().lower()):
-                        if not any(char in val.strip().lower() for char in self.banned_characters):
-                            line_count += 1
-                            virtual_file.append(f"{val.strip().lower()}")
+        virtual_file = []
+
+        if prefix:
+            prefix = prefix.strip().lower()
+
+        max_lines = self.config.get("lines")
+
+        for line in self.wordlist_lines[:max_lines]:
+            # Check if it starts with the given prefix (if any)
+            if (not prefix) or line.lower().startswith(prefix):
+                virtual_file.append(line)
+
         virtual_file.append(self.canary)
-        return self.helpers.tempfile(virtual_file, pipe=False), line_count
+        return self.helpers.tempfile(virtual_file, pipe=False), len(virtual_file)
+
+    def generate_wordlist(self, wordlist_file):
+        wordlist = []
+        for line in self.helpers.read_file(wordlist_file):
+            line = line.strip()
+            if not line:
+                continue
+            if line in self.blacklist:
+                self.debug(f"Skipping adding [{line}] to wordlist because it was in the blacklist")
+                continue
+            if any(x in line for x in self.banned_characters):
+                self.debug(f"Skipping adding [{line}] to wordlist because it has a banned character")
+                continue
+            wordlist.append(line)
+        return wordlist
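The blacklist/banned-character filtering now happens once in `generate_wordlist()` at setup, instead of on every `generate_templist()` call. A worked example of the filtering rules (sample words are made up):

```python
blacklist = ["images", "css", "image"]
banned_characters = {" "}

lines = ["admin", "images", "a b", "", "login"]
wordlist = [
    line.strip()
    for line in lines
    if line.strip()
    and line.strip() not in blacklist
    and not any(c in line for c in banned_characters)
]
# wordlist == ["admin", "login"]; generate_templist() then slices it and appends the canary
```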
diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py
index 9eeae91090..1af628827e 100644
--- a/bbot/modules/deadly/nuclei.py
+++ b/bbot/modules/deadly/nuclei.py
@@ -15,7 +15,7 @@ class nuclei(BaseModule):
     }
 
     options = {
-        "version": "3.2.0",
+        "version": "3.3.0",
        "tags": "",
         "templates": "",
         "severity": "",
@@ -75,7 +75,7 @@ async def setup(self):
                 self.warning(f"Failure while updating nuclei templates: {update_results.stderr}")
         else:
             self.warning("Error running nuclei template update command")
-        self.proxy = self.scan.config.get("http_proxy", "")
+        self.proxy = self.scan.web_config.get("http_proxy", "")
         self.mode = self.config.get("mode", "severe").lower()
         self.ratelimit = int(self.config.get("ratelimit", 150))
         self.concurrency = int(self.config.get("concurrency", 25))
diff --git a/bbot/modules/dnsbrute.py b/bbot/modules/dnsbrute.py
index 76e2d18047..3b847933c4 100644
--- a/bbot/modules/dnsbrute.py
+++ b/bbot/modules/dnsbrute.py
@@ -2,7 +2,7 @@
 
 class dnsbrute(subdomain_enum):
-    flags = ["subdomain-enum", "passive", "aggressive"]
+    flags = ["subdomain-enum", "active", "aggressive"]
     watched_events = ["DNS_NAME"]
     produced_events = ["DNS_NAME"]
     meta = {
diff --git a/bbot/modules/dnsbrute_mutations.py b/bbot/modules/dnsbrute_mutations.py
index 78513fc2d5..ef0b7a0337 100644
--- a/bbot/modules/dnsbrute_mutations.py
+++ b/bbot/modules/dnsbrute_mutations.py
@@ -2,7 +2,7 @@
 
 class dnsbrute_mutations(BaseModule):
-    flags = ["subdomain-enum", "passive", "aggressive", "slow"]
+    flags = ["subdomain-enum", "active", "aggressive", "slow"]
     watched_events = ["DNS_NAME"]
     produced_events = ["DNS_NAME"]
     meta = {
@@ -45,6 +45,10 @@ def get_parent_event(self, subdomain):
         return self.parent_events[parent_host]
 
     async def finish(self):
+        """
+        TODO: speed up this loop.
+        We should see if we can combine multiple runs together instead of running them each individually.
+        """
         found = sorted(self.found.items(), key=lambda x: len(x[-1]), reverse=True)
         # if we have a lot of rounds to make, don't try mutations on less-populated domains
         trimmed_found = []
diff --git a/bbot/modules/dnscommonsrv.py b/bbot/modules/dnscommonsrv.py
index 819e4967b9..f6dc263cb6 100644
--- a/bbot/modules/dnscommonsrv.py
+++ b/bbot/modules/dnscommonsrv.py
@@ -1,159 +1,11 @@
+from bbot.core.helpers.dns.helpers import common_srvs
 from bbot.modules.templates.subdomain_enum import subdomain_enum
 
-# the following are the result of a 1-day internet survey to find the top SRV records
-# the scan resulted in 36,282 SRV records. the count for each one is shown.
-common_srvs = [
-    "_sipfederationtls._tcp",  # 6909
-    "_sip._tls",  # 6853
-    "_autodiscover._tcp",  # 4268
-    "_xmpp-server._tcp",  # 1437
-    "_sip._tcp",  # 1193
-    "_sips._tcp",  # 1183
-    "_caldavs._tcp",  # 1179
-    "_carddavs._tcp",  # 1132
-    "_caldav._tcp",  # 1035
-    "_carddav._tcp",  # 1024
-    "_sip._udp",  # 1007
-    "_imaps._tcp",  # 1007
-    "_submission._tcp",  # 906
-    "_h323cs._tcp",  # 846
-    "_h323ls._udp",  # 782
-    "_xmpp-client._tcp",  # 689
-    "_pop3s._tcp",  # 394
-    "_jabber._tcp",  # 277
-    "_imap._tcp",  # 267
-    "_turn._udp",  # 256
-    "_pop3._tcp",  # 221
-    "_ldap._tcp",  # 213
-    "_smtps._tcp",  # 195
-    "_sipinternaltls._tcp",  # 192
-    "_vlmcs._tcp",  # 165
-    "_kerberos._udp",  # 163
-    "_kerberos._tcp",  # 148
-    "_kpasswd._udp",  # 128
-    "_kpasswd._tcp",  # 100
-    "_ntp._udp",  # 90
-    "_gc._tcp",  # 73
-    "_kerberos-master._udp",  # 66
-    "_ldap._tcp.dc._msdcs",  # 63
-    "_matrix._tcp",  # 62
-    "_smtp._tcp",  # 61
-    "_stun._udp",  # 57
-    "_kerberos._tcp.dc._msdcs",  # 54
-    "_ldap._tcp.gc._msdcs",  # 49
-    "_kerberos-adm._tcp",  # 44
-    "_ldap._tcp.pdc._msdcs",  # 43
-    "_kerberos-master._tcp",  # 43
-    "_http._tcp",  # 37
-    "_h323rs._tcp",  # 36
-    "_sipinternal._tcp",  # 35
-    "_turn._tcp",  # 33
-    "_stun._tcp",  # 33
-    "_h323ls._tcp",  # 33
-    "_x-puppet._tcp",  # 30
-    "_h323cs._udp",  # 27
-    "_stuns._tcp",  # 26
-    "_jabber-client._tcp",  # 25
-    "_x-puppet-ca._tcp",  # 22
-    "_ts3._udp",  # 22
-    "_minecraft._tcp",  # 22
-    "_turns._tcp",  # 21
-    "_ldaps._tcp",  # 21
-    "_xmpps-client._tcp",  # 20
-    "_https._tcp",  # 19
-    "_ftp._tcp",  # 19
-    "_xmpp-server._udp",  # 18
-    "_xmpp-client._udp",  # 17
-    "_jabber._udp",  # 17
-    "_jabber-client._udp",  # 17
-    "_xmpps-server._tcp",  # 15
-    "_finger._tcp",  # 14
-    "_stuns._udp",  # 12
-    "_hkp._tcp",  # 12
-    "_vlmcs._udp",  # 11
-    "_turns._udp",  # 11
-    "_tftp._udp",  # 11
-    "_ssh._tcp",  # 11
-    "_rtps._udp",  # 11
-    "_mysqlsrv._tcp",  # 11
-    "_hkps._tcp",  # 11
-    "_h323be._udp",  # 11
-    "_dns._tcp",  # 11
-    "_wss._tcp",  # 10
-    "_wpad._tcp",  # 10
-    "_whois._tcp",  # 10
-    "_webexconnect._tcp",  # 10
-    "_webexconnects._tcp",  # 10
-    "_vnc._tcp",  # 10
-    "_test._tcp",  # 10
-    "_telnet._tcp",  # 10
-    "_telnets._tcp",  # 10
-    "_teamspeak._tcp",  # 10
-    "_svns._tcp",  # 10
-    "_svcp._tcp",  # 10
-    "_smb._tcp",  # 10
-    "_sip-tls._tcp",  # 10
-    "_sftp._tcp",  # 10
-    "_secure-pop3._tcp",  # 10
-    "_secure-imap._tcp",  # 10
-    "_rtsp._tcp",  # 10
-    "_rtps._tcp",  # 10
-    "_rpc._tcp",  # 10
-    "_rfb._tcp",  # 10
-    "_raop._tcp",  # 10
-    "_pstn._tcp",  # 10
-    "_presence._tcp",  # 10
-    "_pkixrep._tcp",  # 10
-    "_pgprevokations._tcp",  # 10
-    "_pgpkeys._tcp",  # 10
-    "_ocsp._tcp",  # 10
-    "_nntp._tcp",  # 10
-    "_nfs._tcp",  # 10
-    "_netbios-ssn._tcp",  # 10
-    "_netbios-ns._tcp",  # 10
-    "_netbios-dgm._tcp",  # 10
-    "_mumble._tcp",  # 10
-    "_msrpc._tcp",  # 10
-    "_mqtts._tcp",  # 10
-    "_minecraft._udp",  # 10
-    "_iscsi._tcp",  # 10
-    "_ircs._tcp",  # 10
-    "_ipp._tcp",  # 10
-    "_ipps._tcp",  # 10
-    "_h323be._tcp",  # 10
-    "_gits._tcp",  # 10
-    "_ftps._tcp",  # 10
-    "_ftpes._tcp",  # 10
-    "_dnss._udp",  # 10
-    "_dnss._tcp",  # 10
-    "_diameter._tcp",  # 10
-    "_crl._tcp",  # 10
-    "_crls._tcp",  # 10
-    "_cmp._tcp",  # 10
-    "_certificates._tcp",  # 10
-    "_aix._tcp",  # 10
-    "_afpovertcp._tcp",  # 10
-    "_collab-edge._tls",  # 6
-    "_tcp",  # 5
-    "_wildcard",  # 3
-    "_client._smtp",  # 3
-    "_udp",  # 2
-    "_tls",  # 2
-    "_msdcs",  # 2
-    "_gc._msdcs",  # 2
-    "_ldaps._tcp.dc._msdcs",  # 1
-    "_kerberos._tcp.kdc._msdcs",  # 1
-    "_kerberos.tcp.dc._msdcs",  # 1
-    "_imap",  # 1
-    "_iax",  # 1
-]
-num_srvs = len(common_srvs)
-
 
 class dnscommonsrv(subdomain_enum):
     watched_events = ["DNS_NAME"]
     produced_events = ["DNS_NAME"]
-    flags = ["subdomain-enum", "passive", "safe"]
+    flags = ["subdomain-enum", "active", "safe"]
     meta = {"description": "Check for common SRV records", "created_date": "2022-05-15", "author": "@TheTechromancer"}
 
     dedup_strategy = "lowest_parent"
@@ -162,6 +14,7 @@ class dnscommonsrv(subdomain_enum):
 
     async def setup(self):
         self.max_subdomain_depth = self.config.get("max_depth", 2)
+        self.num_srvs = len(common_srvs)
         return True
 
     async def filter_event(self, event):
@@ -172,11 +25,11 @@ async def filter_event(self, event):
 
     async def handle_event(self, event):
         query = self.make_query(event)
-        self.verbose(f'Brute-forcing {num_srvs:,} SRV records for "{query}"')
+        self.verbose(f'Brute-forcing {self.num_srvs:,} SRV records for "{query}"')
         for hostname in await self.helpers.dns.brute(self, query, common_srvs, type="SRV"):
             await self.emit_event(
                 hostname,
                 "DNS_NAME",
                 parent=event,
-                context=f'{{module}} tried {num_srvs:,} common SRV records against "{query}" and found {{event.type}}: {{event.data}}',
+                context=f'{{module}} tried {self.num_srvs:,} common SRV records against "{query}" and found {{event.type}}: {{event.data}}',
             )
diff --git a/bbot/modules/docker_pull.py b/bbot/modules/docker_pull.py
index 987651fcd8..0d1f63c29d 100644
--- a/bbot/modules/docker_pull.py
+++ b/bbot/modules/docker_pull.py
@@ -8,7 +8,7 @@ class docker_pull(BaseModule):
 
     watched_events = ["CODE_REPOSITORY"]
     produced_events = ["FILESYSTEM"]
-    flags = ["passive", "safe", "slow"]
+    flags = ["passive", "safe", "slow", "code-enum"]
     meta = {
         "description": "Download images from a docker repository",
         "created_date": "2024-03-24",
diff --git a/bbot/modules/ffuf_shortnames.py b/bbot/modules/ffuf_shortnames.py
index 76e36de036..fa6f03ada2 100644
--- a/bbot/modules/ffuf_shortnames.py
+++ b/bbot/modules/ffuf_shortnames.py
@@ -67,13 +67,14 @@ class ffuf_shortnames(ffuf):
     in_scope_only = True
 
     async def setup(self):
+        self.proxy = self.scan.web_config.get("http_proxy", "")
         self.canary = "".join(random.choice(string.ascii_lowercase) for i in range(10))
         wordlist = self.config.get("wordlist", "")
         if not wordlist:
             wordlist = f"{self.helpers.wordlist_dir}/ffuf_shortname_candidates.txt"
         self.debug(f"Using [{wordlist}] for shortname candidate list")
         self.wordlist = await self.helpers.wordlist(wordlist)
-        self.wordlist_lines = list(self.helpers.read_file(self.wordlist))
+        self.wordlist_lines = self.generate_wordlist(self.wordlist)
 
         wordlist_extensions = self.config.get("wordlist_extensions", "")
         if not wordlist_extensions:
git_clone(github):
     watched_events = ["CODE_REPOSITORY"]
     produced_events = ["FILESYSTEM"]
-    flags = ["passive", "safe", "slow"]
+    flags = ["passive", "safe", "slow", "code-enum"]
     meta = {
         "description": "Clone code github repositories",
         "created_date": "2024-03-08",
@@ -46,11 +46,14 @@ async def handle_event(self, event):
             )
 
     async def clone_git_repository(self, repository_url):
+        owner = repository_url.split("/")[-2]
+        folder = self.output_dir / owner
+        self.helpers.mkdir(folder)
         if self.api_key:
             url = repository_url.replace("https://github.com", f"https://user:{self.api_key}@github.com")
         else:
             url = repository_url
-        command = ["git", "-C", self.output_dir, "clone", url]
+        command = ["git", "-C", folder, "clone", url]
         try:
             output = await self.run_process(command, env={"GIT_TERMINAL_PROMPT": "0"}, check=True)
         except CalledProcessError as e:
@@ -58,4 +61,4 @@ async def handle_event(self, event):
             return
         folder_name = output.stderr.split("Cloning into '")[1].split("'")[0]
-        return self.output_dir / folder_name
+        return folder / folder_name
diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py
index 15767ca63f..df46f155c8 100644
--- a/bbot/modules/github_workflows.py
+++ b/bbot/modules/github_workflows.py
@@ -7,9 +7,9 @@ class github_workflows(github):
     watched_events = ["CODE_REPOSITORY"]
     produced_events = ["FILESYSTEM"]
-    flags = ["passive", "safe"]
+    flags = ["passive", "safe", "code-enum"]
     meta = {
-        "description": "Download a github repositories workflow logs",
+        "description": "Download a github repository's workflow logs and workflow artifacts",
         "created_date": "2024-04-29",
         "author": "@domwhewell-sage",
     }
@@ -46,9 +46,9 @@ async def handle_event(self, event):
                 self.log.debug(f"Looking up runs for {workflow_name} in {owner}/{repo}")
                 for run in await self.get_workflow_runs(owner, repo, workflow_id):
                     run_id = run.get("id")
+                    workflow_url = f"https://github.com/{owner}/{repo}/actions/runs/{run_id}"
                     self.log.debug(f"Downloading logs for {workflow_name}/{run_id} in {owner}/{repo}")
                     for log in await self.download_run_logs(owner, repo, run_id):
-                        workflow_url = f"https://github.com/{owner}/{repo}/actions/runs/{run_id}"
                         logfile_event = self.make_event(
                             {
                                 "path": str(log),
@@ -62,6 +62,28 @@ async def handle_event(self, event):
                             logfile_event,
                             context=f"{{module}} downloaded workflow run logs from {workflow_url} to {{event.type}}: {log}",
                         )
+                    artifacts = await self.get_run_artifacts(owner, repo, run_id)
+                    if artifacts:
+                        for artifact in artifacts:
+                            artifact_id = artifact.get("id")
+                            artifact_name = artifact.get("name")
+                            expired = artifact.get("expired")
+                            if not expired:
+                                filepath = await self.download_run_artifacts(owner, repo, artifact_id, artifact_name)
+                                if filepath:
+                                    artifact_event = self.make_event(
+                                        {
+                                            "path": str(filepath),
+                                            "description": f"Workflow run artifact from {workflow_url}",
+                                        },
+                                        "FILESYSTEM",
+                                        tags=["zipfile"],
+                                        parent=event,
+                                    )
+                                    await self.emit_event(
+                                        artifact_event,
+                                        context=f"{{module}} downloaded workflow run artifact from {workflow_url} to {{event.type}}: {filepath}",
+                                    )
 
     async def get_workflows(self, owner, repo):
         workflows = []
@@ -150,3 +172,51 @@ async def download_run_logs(self, owner, repo, run_id):
             return main_logs
         else:
             return []
+
+    async def get_run_artifacts(self, owner, repo, run_id):
+        artifacts = []
+        url = 
f"{self.base_url}/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + r = await self.helpers.request(url, headers=self.headers) + if r is None: + return artifacts + status_code = getattr(r, "status_code", 0) + if status_code == 403: + self.warning("Github is rate-limiting us (HTTP status: 403)") + return artifacts + if status_code != 200: + return artifacts + try: + j = r.json().get("artifacts", []) + except Exception as e: + self.warning(f"Failed to decode JSON for {r.url} (HTTP status: {status_code}): {e}") + return artifacts + if not j: + return artifacts + for item in j: + artifacts.append(item) + return artifacts + + async def download_run_artifacts(self, owner, repo, artifact_id, artifact_name): + folder = self.output_dir / owner / repo + self.helpers.mkdir(folder) + file_destination = folder / artifact_name + try: + await self.helpers.download( + f"{self.base_url}/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/zip", + filename=file_destination, + headers=self.headers, + raise_error=True, + warn=False, + ) + self.info( + f"Downloaded workflow artifact {owner}/{repo}/{artifact_id}/{artifact_name} to {file_destination}" + ) + except Exception as e: + file_destination = None + response = getattr(e, "response", None) + status_code = getattr(response, "status_code", 0) + if status_code == 403: + self.warning( + f"The current access key does not have access to workflow artifacts {owner}/{repo}/{artifact_id} (status: {status_code})" + ) + return file_destination diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 93950e340c..9d6d57483f 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -56,7 +56,7 @@ async def setup(self): self.threads = self.config.get("threads", 0) if not self.threads: self.threads = default_thread_count - self.proxy = self.scan.config.get("http_proxy", "") + self.proxy = self.scan.web_config.get("http_proxy", "") self.resolution_x = self.config.get("resolution_x") self.resolution_y = self.config.get("resolution_y") self.visit_social = self.config.get("social", True) @@ -140,7 +140,7 @@ async def handle_batch(self, *events): url = screenshot["url"] final_url = screenshot["final_url"] filename = self.screenshot_path / screenshot["filename"] - webscreenshot_data = {"filename": str(filename), "url": final_url} + webscreenshot_data = {"path": str(filename), "url": final_url} parent_event = event_dict[url] await self.emit_event( webscreenshot_data, diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index 0877c3aa7a..42ec8cf941 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -109,6 +109,8 @@ async def handle_event(self, event, **kwargs): main_host_event.scope_distance = 0 await self.handle_wildcard_event(main_host_event) + in_dns_scope = -1 < main_host_event.scope_distance < self._dns_search_distance + if event != main_host_event: await self.emit_event(main_host_event) for raw_record_event in raw_record_events: diff --git a/bbot/modules/internal/excavate.py b/bbot/modules/internal/excavate.py index f0286fd6bb..794adae255 100644 --- a/bbot/modules/internal/excavate.py +++ b/bbot/modules/internal/excavate.py @@ -181,8 +181,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte """ for identifier, results in yara_results.items(): for result in results: - event_data = {"host": str(event.host), "url": event.data.get("url", "")} - event_data["description"] = f"{discovery_context} {yara_rule_settings.description}" + 
event_data = {"description": f"{discovery_context} {yara_rule_settings.description}"} if yara_rule_settings.emit_match: event_data["description"] += f" [{result}]" await self.report(event_data, event, yara_rule_settings, discovery_context) @@ -212,7 +211,7 @@ async def report_prep(self, event_data, event_type, event, tags): event_draft = self.excavate.make_event(event_data, event_type, parent=event) if not event_draft: return None - event_draft.tags = tags + event_draft.add_tags(tags) return event_draft async def report( @@ -268,7 +267,7 @@ def __init__(self, excavate): async def process(self, yara_results, event, yara_rule_settings, discovery_context): for identifier, results in yara_results.items(): for result in results: - event_data = {"host": str(event.host), "url": event.data.get("url", "")} + event_data = {} description_string = ( f" with description: [{yara_rule_settings.description}]" if yara_rule_settings.description else "" ) @@ -291,7 +290,7 @@ class excavateTestRule(ExcavateRule): } """ - watched_events = ["HTTP_RESPONSE"] + watched_events = ["HTTP_RESPONSE", "RAW_TEXT"] produced_events = ["URL_UNVERIFIED", "WEB_PARAMETER"] flags = ["passive"] meta = { @@ -583,9 +582,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte for identifier in yara_results.keys(): for findings in yara_results[identifier]: event_data = { - "host": str(event.host), - "url": event.data.get("url", ""), - "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})", + "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})" } await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") @@ -615,9 +612,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte for identifier in yara_results.keys(): for findings in yara_results[identifier]: event_data = { - "host": str(event.host), - "url": event.data.get("url", ""), - "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})", + "description": f"{discovery_context} {yara_rule_settings.description} ({identifier})" } await self.report(event_data, event, yara_rule_settings, discovery_context, event_type="FINDING") @@ -686,6 +681,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte for identifier, results in yara_results.items(): urls_found = 0 + final_url = "" for url_str in results: if identifier == "url_full": if not await self.helpers.re.search(self.full_url_regex, url_str): @@ -696,7 +692,7 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte final_url = url_str self.excavate.debug(f"Discovered Full URL [{final_url}]") - elif identifier == "url_attr": + elif identifier == "url_attr" and hasattr(event, "parsed_url"): m = await self.helpers.re.search(self.tag_attribute_regex, url_str) if not m: self.excavate.debug( @@ -715,17 +711,18 @@ async def process(self, yara_results, event, yara_rule_settings, discovery_conte f"Reconstructed Full URL [{final_url}] from extracted relative URL [{unescaped_url}] " ) - if self.excavate.scan.in_scope(final_url): - urls_found += 1 - - await self.report( - final_url, - event, - yara_rule_settings, - discovery_context, - event_type="URL_UNVERIFIED", - urls_found=urls_found, - ) + if final_url: + if self.excavate.scan.in_scope(final_url): + urls_found += 1 + + await self.report( + final_url, + event, + yara_rule_settings, + discovery_context, + event_type="URL_UNVERIFIED", + 
urls_found=urls_found, + ) async def report_prep(self, event_data, event_type, event, tags, **kwargs): event_draft = self.excavate.make_event(event_data, event_type, parent=event) @@ -737,7 +734,7 @@ async def report_prep(self, event_data, event_type, event, tags, **kwargs): exceeds_max_links = urls_found > self.excavate.scan.web_spider_links_per_page and url_in_scope if exceeds_max_links: tags.append("spider-max") - event_draft.tags = tags + event_draft.add_tags(tags) return event_draft class HostnameExtractor(ExcavateRule): @@ -860,33 +857,35 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon decoded_data = await self.helpers.re.recursive_decode(data) - content_type_lower = content_type.lower() if content_type else "" - extraction_map = { - "json": self.helpers.extract_params_json, - "xml": self.helpers.extract_params_xml, - } - - for source_type, extract_func in extraction_map.items(): - if source_type in content_type_lower: - results = extract_func(data) - if results: - for parameter_name, original_value in results: - description = ( - f"HTTP Extracted Parameter (speculative from {source_type} content) [{parameter_name}]" - ) - data = { - "host": str(event.host), - "type": "SPECULATIVE", - "name": parameter_name, - "original_value": original_value, - "url": str(event.data["url"]), - "additional_params": {}, - "assigned_cookies": self.assigned_cookies, - "description": description, - } - context = f"excavate's Parameter extractor found a speculative WEB_PARAMETER: {parameter_name} by parsing {source_type} data from {str(event.host)}" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) - return + if self.parameter_extraction: + + content_type_lower = content_type.lower() if content_type else "" + extraction_map = { + "json": self.helpers.extract_params_json, + "xml": self.helpers.extract_params_xml, + } + + for source_type, extract_func in extraction_map.items(): + if source_type in content_type_lower: + results = extract_func(data) + if results: + for parameter_name, original_value in results: + description = ( + f"HTTP Extracted Parameter (speculative from {source_type} content) [{parameter_name}]" + ) + data = { + "host": str(event.host), + "type": "SPECULATIVE", + "name": parameter_name, + "original_value": original_value, + "url": str(event.data["url"]), + "additional_params": {}, + "assigned_cookies": self.assigned_cookies, + "description": description, + } + context = f"excavate's Parameter extractor found a speculative WEB_PARAMETER: {parameter_name} by parsing {source_type} data from {str(event.host)}" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + return for result in self.yara_rules.match(data=f"{data}\n{decoded_data}"): rule_name = result.rule @@ -896,137 +895,146 @@ async def search(self, data, event, content_type, discovery_context="HTTP respon self.hugewarning(f"YARA Rule {rule_name} not found in pre-compiled rules") async def handle_event(self, event): - # Harvest GET parameters from URL, if it came directly from the target, and parameter extraction is enabled - if ( - self.parameter_extraction == True - and self.url_querystring_remove == False - and str(event.parent.parent.module) == "TARGET" - ): - self.debug(f"Processing target URL [{urlunparse(event.parsed_url)}] for GET parameters") - for ( - method, - parsed_url, - parameter_name, - original_value, - regex_name, - additional_params, - ) in extract_params_url(event.parsed_url): - if self.in_bl(parameter_name) == False: - description = 
f"HTTP Extracted Parameter [{parameter_name}] (Target URL)" - data = { - "host": parsed_url.hostname, - "type": "GETPARAM", - "name": parameter_name, - "original_value": original_value, - "url": self.url_unparse("GETPARAM", parsed_url), - "description": description, - "additional_params": additional_params, - } - context = f"Excavate parsed a URL directly from the scan target for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) - - data = event.data - - # process response data - body = event.data.get("body", "") - headers = event.data.get("header-dict", {}) - if body == "" and headers == {}: - return - - self.assigned_cookies = {} - content_type = None - reported_location_header = False - - for header, header_values in headers.items(): - for header_value in header_values: - if header.lower() == "set-cookie": - if "=" not in header_value: - self.debug(f"Cookie found without '=': {header_value}") - continue - else: - cookie_name = header_value.split("=")[0] - cookie_value = header_value.split("=")[1].split(";")[0] - if self.in_bl(cookie_value) == False: - self.assigned_cookies[cookie_name] = cookie_value - description = f"Set-Cookie Assigned Cookie [{cookie_name}]" - data = { - "host": str(event.host), - "type": "COOKIE", - "name": cookie_name, - "original_value": cookie_value, - "url": self.url_unparse("COOKIE", event.parsed_url), - "description": description, - } - context = f"Excavate noticed a set-cookie header for cookie [{cookie_name}] and emitted a WEB_PARAMETER for it" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) + if event.type == "HTTP_RESPONSE": + # Harvest GET parameters from URL, if it came directly from the target, and parameter extraction is enabled + if ( + self.parameter_extraction == True + and self.url_querystring_remove == False + and str(event.parent.parent.module) == "TARGET" + ): + self.debug(f"Processing target URL [{urlunparse(event.parsed_url)}] for GET parameters") + for ( + method, + parsed_url, + parameter_name, + original_value, + regex_name, + additional_params, + ) in extract_params_url(event.parsed_url): + if self.in_bl(parameter_name) == False: + description = f"HTTP Extracted Parameter [{parameter_name}] (Target URL)" + data = { + "host": parsed_url.hostname, + "type": "GETPARAM", + "name": parameter_name, + "original_value": original_value, + "url": self.url_unparse("GETPARAM", parsed_url), + "description": description, + "additional_params": additional_params, + } + context = f"Excavate parsed a URL directly from the scan target for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + + data = event.data + + # process response data + body = event.data.get("body", "") + headers = event.data.get("header-dict", {}) + if body == "" and headers == {}: + return + + self.assigned_cookies = {} + content_type = None + reported_location_header = False + + for header, header_values in headers.items(): + for header_value in header_values: + if header.lower() == "set-cookie" and self.parameter_extraction: + if "=" not in header_value: + self.debug(f"Cookie found without '=': {header_value}") + continue else: - self.debug(f"blocked cookie parameter [{cookie_name}] due to BL match") - if header.lower() == "location": - redirect_location = getattr(event, "redirect_location", "") - if redirect_location: 
- scheme = self.helpers.is_uri(redirect_location, return_scheme=True) - if scheme in ("http", "https"): - web_spider_distance = getattr(event, "web_spider_distance", 0) - num_redirects = max(getattr(event, "num_redirects", 0), web_spider_distance) - if num_redirects <= self.scan.web_max_redirects: - # we do not want to allow the web_spider_distance to be incremented on redirects, so we do not add spider-danger tag - url_event = self.make_event( - redirect_location, "URL_UNVERIFIED", event, tags="affiliate" - ) - if url_event is not None: - reported_location_header = True - await self.emit_event( - url_event, - context=f'excavate looked in "Location" header and found {url_event.type}: {url_event.data}', + cookie_name = header_value.split("=")[0] + cookie_value = header_value.split("=")[1].split(";")[0] + + if self.in_bl(cookie_value) == False: + self.assigned_cookies[cookie_name] = cookie_value + description = f"Set-Cookie Assigned Cookie [{cookie_name}]" + data = { + "host": str(event.host), + "type": "COOKIE", + "name": cookie_name, + "original_value": cookie_value, + "url": self.url_unparse("COOKIE", event.parsed_url), + "description": description, + } + context = f"Excavate noticed a set-cookie header for cookie [{cookie_name}] and emitted a WEB_PARAMETER for it" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + else: + self.debug(f"blocked cookie parameter [{cookie_name}] due to BL match") + if header.lower() == "location": + redirect_location = getattr(event, "redirect_location", "") + if redirect_location: + scheme = self.helpers.is_uri(redirect_location, return_scheme=True) + if scheme in ("http", "https"): + web_spider_distance = getattr(event, "web_spider_distance", 0) + num_redirects = max(getattr(event, "num_redirects", 0), web_spider_distance) + if num_redirects <= self.scan.web_max_redirects: + # we do not want to allow the web_spider_distance to be incremented on redirects, so we do not add spider-danger tag + url_event = self.make_event( + redirect_location, "URL_UNVERIFIED", event, tags="affiliate" ) + if url_event is not None: + reported_location_header = True + await self.emit_event( + url_event, + context=f'excavate looked in "Location" header and found {url_event.type}: {url_event.data}', + ) + + # Try to extract parameters from the redirect URL + if self.parameter_extraction: + + for ( + method, + parsed_url, + parameter_name, + original_value, + regex_name, + additional_params, + ) in extract_params_location(header_value, event.parsed_url): + if self.in_bl(parameter_name) == False: + description = f"HTTP Extracted Parameter [{parameter_name}] (Location Header)" + data = { + "host": parsed_url.hostname, + "type": "GETPARAM", + "name": parameter_name, + "original_value": original_value, + "url": self.url_unparse("GETPARAM", parsed_url), + "description": description, + "additional_params": additional_params, + } + context = f"Excavate parsed a location header for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" + await self.emit_event(data, "WEB_PARAMETER", event, context=context) + else: + self.warning("location header found but missing redirect_location in HTTP_RESPONSE") + if header.lower() == "content-type": + content_type = headers["content-type"][0] + + await self.search( + body, + event, + content_type, + discovery_context="HTTP response (body)", + ) - # Try to extract parameters from the redirect URL - if self.parameter_extraction: - - for ( - method, - parsed_url, - parameter_name, - 
original_value, - regex_name, - additional_params, - ) in extract_params_location(header_value, event.parsed_url): - if self.in_bl(parameter_name) == False: - description = f"HTTP Extracted Parameter [{parameter_name}] (Location Header)" - data = { - "host": parsed_url.hostname, - "type": "GETPARAM", - "name": parameter_name, - "original_value": original_value, - "url": self.url_unparse("GETPARAM", parsed_url), - "description": description, - "additional_params": additional_params, - } - context = f"Excavate parsed a location header for parameters and found [GETPARAM] Parameter Name: [{parameter_name}] and emitted a WEB_PARAMETER for it" - await self.emit_event(data, "WEB_PARAMETER", event, context=context) - else: - self.warning("location header found but missing redirect_location in HTTP_RESPONSE") - if header.lower() == "content-type": - content_type = headers["content-type"][0] - - await self.search( - body, - event, - content_type, - discovery_context="HTTP response (body)", - ) - - if reported_location_header: - # Location header should be removed if we already found and emitted a result. - # Failure to do so results in a race against the same URL extracted by the URLExtractor submodule - # If the extracted URL wins, it will cause the manual one to be a dupe, but it will have a higher web_spider_distance. - headers.pop("location") - headers_str = "\n".join(f"{k}: {v}" for k, values in headers.items() for v in values) - - await self.search( - headers_str, - event, - content_type, - discovery_context="HTTP response (headers)", - ) + if reported_location_header: + # Location header should be removed if we already found and emitted a result. + # Failure to do so results in a race against the same URL extracted by the URLExtractor submodule + # If the extracted URL wins, it will cause the manual one to be a dupe, but it will have a higher web_spider_distance. 
+ headers.pop("location") + headers_str = "\n".join(f"{k}: {v}" for k, values in headers.items() for v in values) + + await self.search( + headers_str, + event, + content_type, + discovery_context="HTTP response (headers)", + ) + else: + await self.search( + event.data, + event, + content_type="", + discovery_context="Parsed file content", + ) diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index 61e51a7255..ba5e1e39a4 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -18,6 +18,7 @@ class asn(BaseReportModule): async def setup(self): self.asn_counts = {} self.asn_cache = {} + self.ripe_cache = {} self.sources = ["bgpview", "ripe"] self.unknown_asn = { "asn": "UNKNOWN", @@ -144,38 +145,42 @@ async def get_asn_ripe(self, ip): return asns async def get_asn_metadata_ripe(self, asn_number): - metadata_keys = { - "name": ["ASName", "OrgId"], - "description": ["OrgName", "OrgTechName", "RTechName"], - "country": ["Country"], - } - url = f"https://stat.ripe.net/data/whois/data.json?resource={asn_number}" - response = await self.get_url(url, "ASN Metadata", cache=True) - if response == False: - return False - data = response.get("data", {}) - if not data: - data = {} - records = data.get("records", []) - if not records: - records = [] - emails = set() - asn = {k: "" for k in metadata_keys.keys()} - for record in records: - for item in record: - key = item.get("key", "") - value = item.get("value", "") - for email in await self.helpers.re.extract_emails(value): - emails.add(email.lower()) - if not key: - continue - if value: - for keyname, keyvals in metadata_keys.items(): - if key in keyvals and not asn.get(keyname, ""): - asn[keyname] = value - asn["emails"] = list(emails) - asn["asn"] = str(asn_number) - return asn + try: + return self.ripe_cache[asn_number] + except KeyError: + metadata_keys = { + "name": ["ASName", "OrgId"], + "description": ["OrgName", "OrgTechName", "RTechName"], + "country": ["Country"], + } + url = f"https://stat.ripe.net/data/whois/data.json?resource={asn_number}" + response = await self.get_url(url, "ASN Metadata", cache=True) + if response == False: + return False + data = response.get("data", {}) + if not data: + data = {} + records = data.get("records", []) + if not records: + records = [] + emails = set() + asn = {k: "" for k in metadata_keys.keys()} + for record in records: + for item in record: + key = item.get("key", "") + value = item.get("value", "") + for email in await self.helpers.re.extract_emails(value): + emails.add(email.lower()) + if not key: + continue + if value: + for keyname, keyvals in metadata_keys.items(): + if key in keyvals and not asn.get(keyname, ""): + asn[keyname] = value + asn["emails"] = list(emails) + asn["asn"] = str(asn_number) + self.ripe_cache[asn_number] = asn + return asn async def get_asn_bgpview(self, ip): url = f"https://api.bgpview.io/ip/{ip}" diff --git a/bbot/modules/templates/bucket.py b/bbot/modules/templates/bucket.py index 3b7bde7898..845ae0e9a4 100644 --- a/bbot/modules/templates/bucket.py +++ b/bbot/modules/templates/bucket.py @@ -89,7 +89,6 @@ async def handle_storage_bucket(self, event): async def emit_storage_bucket(self, event_data, event_type, parent, tags, context): event_data["url"] = self.clean_bucket_url(event_data["url"]) - self.hugewarning(event_data) await self.emit_event( event_data, event_type, diff --git a/bbot/modules/trufflehog.py 
b/bbot/modules/trufflehog.py index e0ff0fd2af..092e105de8 100644 --- a/bbot/modules/trufflehog.py +++ b/bbot/modules/trufflehog.py @@ -1,9 +1,10 @@ import json +from pathlib import Path from bbot.modules.base import BaseModule class trufflehog(BaseModule): - watched_events = ["FILESYSTEM"] + watched_events = ["CODE_REPOSITORY", "FILESYSTEM"] produced_events = ["FINDING", "VULNERABILITY"] flags = ["passive", "safe", "code-enum"] meta = { @@ -13,14 +14,18 @@ class trufflehog(BaseModule): } options = { - "version": "3.75.1", + "version": "3.81.9", + "config": "", "only_verified": True, "concurrency": 8, + "deleted_forks": False, } options_desc = { "version": "trufflehog version", + "config": "File path or URL to YAML trufflehog config", "only_verified": "Only report credentials that have been verified", "concurrency": "Number of concurrent workers", + "deleted_forks": "Scan for deleted github forks. WARNING: This is SLOW. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours.", } deps_ansible = [ { @@ -38,29 +43,87 @@ class trufflehog(BaseModule): async def setup(self): self.verified = self.config.get("only_verified", True) + self.config_file = self.config.get("config", "") + if self.config_file: + self.config_file = await self.helpers.wordlist(self.config_file) self.concurrency = int(self.config.get("concurrency", 8)) + + self.deleted_forks = self.config.get("deleted_forks", False) + self.github_token = "" + if self.deleted_forks: + self.warning( + f"Deleted forks is enabled. Scanning for deleted forks is slooooooowwwww. For a smaller repository, this process can take 20 minutes. For a larger repository, it could take hours." + ) + for module_name in ("github", "github_codesearch", "github_org", "git_clone"): + module_config = self.scan.config.get("modules", {}).get(module_name, {}) + api_key = module_config.get("api_key", "") + if api_key: + self.github_token = api_key + break + + # soft-fail if we don't have a github token as well + if not self.github_token: + self.deleted_forks = False + return None, "A github api_key must be provided to the github modules for deleted forks to be scanned" + self.processed = set() + return True + + async def filter_event(self, event): + if event.type == "CODE_REPOSITORY": + if self.deleted_forks: + if "git" not in event.tags: + return False, "Module only accepts git CODE_REPOSITORY events" + if "github" not in event.data["url"]: + return False, "Module only accepts github CODE_REPOSITORY events" + else: + return False, "Deleted forks is not enabled" + else: + path = event.data["path"] + for processed in self.processed: + processed_path = Path(processed) + new_path = Path(path) + if new_path.is_relative_to(processed_path): + return False, "Parent folder has already been processed" return True async def handle_event(self, event): - path = event.data["path"] description = event.data.get("description", "") - if "git" in event.tags: - module = "git" - elif "docker" in event.tags: - module = "docker" + if event.type == "CODE_REPOSITORY": + path = event.data["url"] + if "git" in event.tags: + module = "github-experimental" + else: + path = event.data["path"] + self.processed.add(path) + if "git" in event.tags: + module = "git" + elif "docker" in event.tags: + module = "docker" + else: + module = "filesystem" + if event.type == "CODE_REPOSITORY": + host = event.host else: - module = "filesystem" - async for decoder_name, detector_name, raw_result, verified, source_metadata in self.execute_trufflehog( - module, 
path - ): + host = str(event.parent.host) + async for ( + decoder_name, + detector_name, + raw_result, + rawv2_result, + verified, + source_metadata, + ) in self.execute_trufflehog(module, path): if verified: data = { "severity": "High", - "description": f"Verified Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Secret: [{raw_result}] Details: [{source_metadata}]", - "host": str(event.parent.host), + "description": f"Verified Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Details: [{source_metadata}]", + "host": host, } if description: data["description"] += f" Description: [{description}]" + data["description"] += f" Raw result: [{raw_result}]" + if rawv2_result: + data["description"] += f" RawV2 result: [{rawv2_result}]" await self.emit_event( data, "VULNERABILITY", @@ -69,11 +132,14 @@ async def handle_event(self, event): ) else: data = { - "description": f"Potential Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Secret: [{raw_result}] Details: [{source_metadata}]", - "host": str(event.parent.host), + "description": f"Potential Secret Found. Detector Type: [{detector_name}] Decoder Type: [{decoder_name}] Details: [{source_metadata}]", + "host": host, } if description: data["description"] += f" Description: [{description}]" + data["description"] += f" Raw result: [{raw_result}]" + if rawv2_result: + data["description"] += f" RawV2 result: [{rawv2_result}]" await self.emit_event( data, "FINDING", @@ -89,6 +155,8 @@ async def execute_trufflehog(self, module, path): ] if self.verified: command.append("--only-verified") + if self.config_file: + command.append("--config=" + str(self.config_file)) command.append("--concurrency=" + str(self.concurrency)) if module == "git": command.append("git") @@ -99,6 +167,12 @@ async def execute_trufflehog(self, module, path): elif module == "filesystem": command.append("filesystem") command.append(path) + elif module == "github-experimental": + command.append("github-experimental") + command.append("--repo=" + path) + command.append("--object-discovery") + command.append("--delete-cached-data") + command.append("--token=" + self.github_token) stats_file = self.helpers.tempfile_tail(callback=self.log_trufflehog_status) try: @@ -116,11 +190,13 @@ async def execute_trufflehog(self, module, path): raw_result = j.get("Raw", "") + rawv2_result = j.get("RawV2", "") + verified = j.get("Verified", False) source_metadata = j.get("SourceMetadata", {}) - yield (decoder_name, detector_name, raw_result, verified, source_metadata) + yield (decoder_name, detector_name, raw_result, rawv2_result, verified, source_metadata) finally: stats_file.unlink() diff --git a/bbot/modules/unstructured.py b/bbot/modules/unstructured.py index 4143ea2fde..9c5e58996f 100644 --- a/bbot/modules/unstructured.py +++ b/bbot/modules/unstructured.py @@ -67,7 +67,9 @@ class unstructured(BaseModule): } deps_apt = ["libmagic-dev", "poppler-utils", "tesseract-ocr", "libreoffice", "pandoc"] - deps_pip = ["unstructured[all-docs]"] + deps_pip = ["unstructured[all-docs]>=0.15.7,<1.0", "nltk>=3.9.0,<4.0"] + + scope_distance_modifier = 1 async def setup(self): self.extensions = list(set([e.lower().strip(".") for e in self.config.get("extensions", [])])) @@ -102,6 +104,7 @@ async def handle_event(self, event): raw_text_event = self.make_event( content, "RAW_TEXT", + context=f"Extracted text from {file_path}", parent=event, ) await self.emit_event(raw_text_event) @@ -149,8 +152,12 @@ def extract_text(file_path): # If 
the file can be extracted with unstructured use its partition function or try and read it if any(file_path.lower().endswith(file_type) for file_type in unstructured_file_types): - elements = partition(filename=file_path) - return "\n\n".join(element.text for element in elements) + try: + elements = partition(filename=file_path) + return "\n\n".join(element.text for element in elements) + except ValueError: + with open(file_path, "rb") as file: + return file.read().decode("utf-8", errors="ignore") else: with open(file_path, "rb") as file: return file.read().decode("utf-8", errors="ignore") diff --git a/bbot/modules/wpscan.py b/bbot/modules/wpscan.py index 60f247af47..10dade4382 100644 --- a/bbot/modules/wpscan.py +++ b/bbot/modules/wpscan.py @@ -62,7 +62,7 @@ async def setup(self): self.ignore_events = ["xmlrpc", "readme"] self.api_key = self.config.get("api_key", "") self.enumerate = self.config.get("enumerate", "vp,vt,tt,cb,dbe,u,m") - self.proxy = self.scan.config.get("http_proxy", "") + self.proxy = self.scan.web_config.get("http_proxy", "") self.threads = self.config.get("threads", 5) self.request_timeout = self.config.get("request_timeout", 60) self.connection_timeout = self.config.get("connection_timeout", 30) diff --git a/bbot/scanner/preset/environ.py b/bbot/scanner/preset/environ.py index c4c2b8f5b7..4b7121e3cf 100644 --- a/bbot/scanner/preset/environ.py +++ b/bbot/scanner/preset/environ.py @@ -42,7 +42,7 @@ def add_to_path(v, k="PATH", environ=None): if _ != v and _ not in deduped_var_list: deduped_var_list.append(_) deduped_var_list = [v] + deduped_var_list - new_var_str = ":".join(deduped_var_list) + new_var_str = ":".join(deduped_var_list).strip(":") environ[k] = new_var_str @@ -107,7 +107,7 @@ def prepare(self): environ.update(bbot_environ) # handle HTTP proxy - http_proxy = self.preset.config.get("http_proxy", "") + http_proxy = self.preset.config.get("web", {}).get("http_proxy", "") if http_proxy: environ["HTTP_PROXY"] = http_proxy environ["HTTPS_PROXY"] = http_proxy diff --git a/bbot/scanner/preset/preset.py b/bbot/scanner/preset/preset.py index 9e8242ea9a..99ad86db5e 100644 --- a/bbot/scanner/preset/preset.py +++ b/bbot/scanner/preset/preset.py @@ -74,7 +74,7 @@ class Preset: "1.2.3.0/24", flags=["subdomain-enum"], modules=["nuclei"], - config={"http_proxy": "http://127.0.0.1"} + config={"web": {"http_proxy": "http://127.0.0.1"}} ) >>> scan = Scanner(preset=preset) diff --git a/bbot/scanner/scanner.py b/bbot/scanner/scanner.py index 4e1d5a1045..ba550f2177 100644 --- a/bbot/scanner/scanner.py +++ b/bbot/scanner/scanner.py @@ -656,14 +656,14 @@ def modules_status(self, _log=False): scan_active_status.append(f"scan.modules_finished: {self.modules_finished}") for m in sorted_modules: running = m.running - scan_active_status.append(f" {m}.finished: {m.finished}") - scan_active_status.append(f" running: {running}") + scan_active_status.append(f" {m}:") + # scan_active_status.append(f" running: {running}") if running: - scan_active_status.append(f" tasks:") + # scan_active_status.append(f" tasks:") for task in list(m._task_counter.tasks.values()): - scan_active_status.append(f" - {task}:") - scan_active_status.append(f" incoming_queue_size: {m.num_incoming_events}") - scan_active_status.append(f" outgoing_queue_size: {m.outgoing_event_queue.qsize()}") + scan_active_status.append(f" - {task}:") + # scan_active_status.append(f" incoming_queue_size: {m.num_incoming_events}") + # scan_active_status.append(f" 
outgoing_queue_size: {m.outgoing_event_queue.qsize()}") for line in scan_active_status: self.debug(line) @@ -740,11 +740,11 @@ def _drain_queues(self): for module in self.modules.values(): with contextlib.suppress(asyncio.queues.QueueEmpty): while 1: - if module.incoming_event_queue: + if module.incoming_event_queue not in (None, False): module.incoming_event_queue.get_nowait() with contextlib.suppress(asyncio.queues.QueueEmpty): while 1: - if module.outgoing_event_queue: + if module.outgoing_event_queue not in (None, False): module.outgoing_event_queue.get_nowait() self.debug("Finished draining queues") diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index 8b88882ce7..aff8b3227f 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -277,8 +277,8 @@ def __init__(self, *targets, strict_scope=False, scan=None, acl_mode=False): self.strict_scope = strict_scope self.acl_mode = acl_mode self.special_event_types = { - "ORG_STUB": re.compile(r"^ORG:(.*)", re.IGNORECASE), - "ASN": re.compile(r"^ASN:(.*)", re.IGNORECASE), + "ORG_STUB": re.compile(r"^(?:ORG|ORG_STUB):(.*)", re.IGNORECASE), + "USERNAME": re.compile(r"^(?:USER|USERNAME):(.*)", re.IGNORECASE), } self._events = set() self._radix = RadixTarget() diff --git a/bbot/test/test.conf b/bbot/test/test.conf index 8ae91bcf31..63914fe655 100644 --- a/bbot/test/test.conf +++ b/bbot/test/test.conf @@ -36,6 +36,8 @@ dns: - example.com - evilcorp.com - one +engine: + debug: true agent_url: ws://127.0.0.1:8765 agent_token: test speculate: false diff --git a/bbot/test/test_step_1/test_cli.py b/bbot/test/test_step_1/test_cli.py index 52b3867fe8..be861b8a29 100644 --- a/bbot/test/test_step_1/test_cli.py +++ b/bbot/test/test_step_1/test_cli.py @@ -268,7 +268,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): result = await cli._main() out, err = capsys.readouterr() assert result == None - assert "| dnsbrute " in out + assert "| chaos " in out assert not "| httpx " in out # list modules by flag + excluded flag @@ -276,7 +276,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): result = await cli._main() out, err = capsys.readouterr() assert result == None - assert "| dnsbrute " in out + assert "| chaos " in out assert not "| httpx " in out # list modules by flag + excluded module diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py index 5f98f49394..b10fcc5440 100644 --- a/bbot/test/test_step_1/test_dns.py +++ b/bbot/test/test_step_1/test_dns.py @@ -1,6 +1,6 @@ from ..bbot_fixtures import * -from bbot.core.helpers.dns.helpers import extract_targets +from bbot.core.helpers.dns.helpers import extract_targets, service_record, common_srvs mock_records = { @@ -464,3 +464,16 @@ async def test_dns_graph_structure(bbot_scanner): assert str(events_by_data["www.evilcorp.com"].module) == "CNAME" assert events_by_data["evilcorp.com"].parent.data == "https://evilcorp.com/" assert str(events_by_data["evilcorp.com"].module) == "host" + + +def test_dns_helpers(): + assert service_record("") == False + assert service_record("localhost") == False + assert service_record("www.example.com") == False + assert service_record("www.example.com", "SRV") == True + assert service_record("_custom._service.example.com", "SRV") == True + assert service_record("_custom._service.example.com", "A") == False + # top 100 most common SRV records + for srv_record in common_srvs[:100]: + hostname = f"{srv_record}.example.com" + assert 
service_record(hostname) == True diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index c319559d44..913035d66f 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -771,3 +771,77 @@ async def test_event_web_spider_distance(bbot_scanner): assert url_event_5.web_spider_distance == 1 assert "spider-danger" in url_event_5.tags assert not "spider-max" in url_event_5.tags + + +def test_event_confidence(): + scan = Scanner() + # default 100 + event1 = scan.make_event("evilcorp.com", "DNS_NAME", dummy=True) + assert event1.confidence == 100 + assert event1.cumulative_confidence == 100 + # custom confidence + event2 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=90, dummy=True) + assert event2.confidence == 90 + assert event2.cumulative_confidence == 90 + # max 100 + event3 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=999, dummy=True) + assert event3.confidence == 100 + assert event3.cumulative_confidence == 100 + # min 1 + event4 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=0, dummy=True) + assert event4.confidence == 1 + assert event4.cumulative_confidence == 1 + # first event in chain + event5 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=90, parent=scan.root_event) + assert event5.confidence == 90 + assert event5.cumulative_confidence == 90 + # compounding confidence + event6 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=50, parent=event5) + assert event6.confidence == 50 + assert event6.cumulative_confidence == 45 + event7 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=50, parent=event6) + assert event7.confidence == 50 + assert event7.cumulative_confidence == 22 + # 100 confidence resets + event8 = scan.make_event("evilcorp.com", "DNS_NAME", confidence=100, parent=event7) + assert event8.confidence == 100 + assert event8.cumulative_confidence == 100 + + +def test_event_closest_host(): + scan = Scanner() + # first event has a host + event1 = scan.make_event("evilcorp.com", "DNS_NAME", parent=scan.root_event) + assert event1.host == "evilcorp.com" + # second event has a host + url + event2 = scan.make_event( + {"method": "GET", "url": "http://www.evilcorp.com/asdf", "hash": {"header_mmh3": "1", "body_mmh3": "2"}}, + "HTTP_RESPONSE", + parent=event1, + ) + assert event2.host == "www.evilcorp.com" + # third event has a path + event3 = scan.make_event({"path": "/tmp/asdf.txt"}, "FILESYSTEM", parent=event2) + assert not event3.host + # finding automatically uses the host from the second event + finding = scan.make_event({"description": "test"}, "FINDING", parent=event3) + assert finding.data["host"] == "www.evilcorp.com" + assert finding.data["url"] == "http://www.evilcorp.com/asdf" + assert finding.data["path"] == "/tmp/asdf.txt" + assert finding.host == "www.evilcorp.com" + # same with vuln + vuln = scan.make_event({"description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event3) + assert vuln.data["host"] == "www.evilcorp.com" + assert vuln.data["url"] == "http://www.evilcorp.com/asdf" + assert vuln.data["path"] == "/tmp/asdf.txt" + assert vuln.host == "www.evilcorp.com" + + # no host == not allowed + event3 = scan.make_event("wat", "ASDF", parent=scan.root_event) + assert not event3.host + with pytest.raises(ValueError): + finding = scan.make_event({"path": "/tmp/asdf.txt", "description": "test"}, "FINDING", parent=event3) + with 
pytest.raises(ValueError): + vuln = scan.make_event( + {"path": "/tmp/asdf.txt", "description": "test", "severity": "HIGH"}, "VULNERABILITY", parent=event3 + ) diff --git a/bbot/test/test_step_1/test_presets.py b/bbot/test/test_step_1/test_presets.py index 768ee34298..4488318b64 100644 --- a/bbot/test/test_step_1/test_presets.py +++ b/bbot/test/test_step_1/test_presets.py @@ -826,8 +826,8 @@ def get_module_flags(p): module_flags = list(get_module_flags(preset)) dnsbrute_flags = preset.preloaded_module("dnsbrute").get("flags", []) assert "subdomain-enum" in dnsbrute_flags - assert "passive" in dnsbrute_flags - assert not "active" in dnsbrute_flags + assert "active" in dnsbrute_flags + assert not "passive" in dnsbrute_flags assert "aggressive" in dnsbrute_flags assert not "safe" in dnsbrute_flags assert "dnsbrute" in [x[0] for x in module_flags] @@ -842,7 +842,8 @@ def get_module_flags(p): preset = Preset(flags=["subdomain-enum"], require_flags=["passive"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert "dnsbrute" in [x[0] for x in module_flags] + assert "chaos" in [x[0] for x in module_flags] + assert not "httpx" in [x[0] for x in module_flags] assert all("passive" in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) @@ -852,7 +853,8 @@ def get_module_flags(p): preset = Preset(flags=["subdomain-enum"], exclude_flags=["active"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert "dnsbrute" in [x[0] for x in module_flags] + assert "chaos" in [x[0] for x in module_flags] + assert not "httpx" in [x[0] for x in module_flags] assert all("passive" in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) @@ -863,6 +865,7 @@ def get_module_flags(p): assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) assert not "dnsbrute" in [x[0] for x in module_flags] + assert "httpx" in [x[0] for x in module_flags] assert any("passive" in flags for module, flags in module_flags) assert any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) diff --git a/bbot/test/test_step_1/test_target.py b/bbot/test/test_step_1/test_target.py index 23175607f4..efdf089d33 100644 --- a/bbot/test/test_step_1/test_target.py +++ b/bbot/test/test_step_1/test_target.py @@ -178,10 +178,19 @@ async def test_target(bbot_scanner): assert list(bbottarget.whitelist) == ["evilcorp.net"] assert list(bbottarget.blacklist) == ["evilcorp.org"] - scan = bbot_scanner("ORG:evilcorp") - events = [e async for e in scan.async_start()] - assert len(events) == 2 - assert set([e.type for e in events]) == {"SCAN", "ORG_STUB"} + # test org stub as target + for org_target in ("ORG:evilcorp", "ORG_STUB:evilcorp"): + scan = bbot_scanner(org_target) + events = [e async for e in scan.async_start()] + assert len(events) == 2 + assert set([e.type for e in events]) == {"SCAN", "ORG_STUB"} + + # test username as target + for user_target in ("USER:vancerefrigeration", "USERNAME:vancerefrigeration"): + scan = bbot_scanner(user_target) + events = [e async for e in scan.async_start()] + assert len(events) == 2 + assert set([e.type for e in events]) == {"SCAN", "USERNAME"} # verify hash values bbottarget = BBOTTarget( diff --git 
a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index 2e00eacd60..e5655f1568 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -1,7 +1,9 @@ from bbot.modules.base import BaseModule from .base import ModuleTestBase, tempwordlist + from bbot.modules.internal.excavate import ExcavateRule +from pathlib import Path import yara @@ -84,6 +86,10 @@ def check(self, module_test, events): e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/relative.html" and "spider-max" not in e.tags + and "endpoint" in e.tags + and "extension-html" in e.tags + and "in-scope" in e.tags + and e.scope_distance == 0 for e in events ) @@ -854,7 +860,7 @@ def check(self, module_test, events): class TestExcavateHeaders(ModuleTestBase): targets = ["http://127.0.0.1:8888/"] - modules_overrides = ["excavate", "httpx"] + modules_overrides = ["excavate", "httpx", "hunt"] config_overrides = {"web": {"spider_distance": 1, "spider_depth": 1}} async def setup_before_prep(self, module_test): @@ -884,3 +890,143 @@ def check(self, module_test, events): assert found_first_cookie == True assert found_second_cookie == True + + +class TestExcavateRAWTEXT(ModuleTestBase): + targets = ["http://127.0.0.1:8888/", "test.notreal"] + modules_overrides = ["excavate", "httpx", "filedownload", "unstructured"] + config_overrides = {"scope": {"report_distance": 1}, "web": {"spider_distance": 2, "spider_depth": 2}} + + pdf_data = r"""%PDF-1.3 +%���� ReportLab Generated PDF document http://www.reportlab.com +1 0 obj +<< +/F1 2 0 R +>> +endobj +2 0 obj +<< +/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font +>> +endobj +3 0 obj +<< +/Contents 7 0 R /MediaBox [ 0 0 595.2756 841.8898 ] /Parent 6 0 R /Resources << +/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ] +>> /Rotate 0 /Trans << + +>> + /Type /Page +>> +endobj +4 0 obj +<< +/PageMode /UseNone /Pages 6 0 R /Type /Catalog +>> +endobj +5 0 obj +<< +/Author (anonymous) /CreationDate (D:20240807182842+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240807182842+00'00') /Producer (ReportLab PDF Library - www.reportlab.com) + /Subject (unspecified) /Title (untitled) /Trapped /False +>> +endobj +6 0 obj +<< +/Count 1 /Kids [ 3 0 R ] /Type /Pages +>> +endobj +7 0 obj +<< +/Filter [ /ASCII85Decode /FlateDecode ] /Length 742 +>> +stream +Gas2F;0/Hc'SYHA/+V9II1V!>b>-epMEjN4$Udfu3WXha!?H`crq_UNGP5IS$'WT'SF]Hm/eEhd_JY>@!1knV$j`L/E!kN:0EQJ+FF:uKph>GV#ju48hu\;DS#c\h,:/udaV^[@;X>;"'ep>>)(B?I-n?2pLTEZKb$BFgKRF(b#Pc?SYeqN_Q<+X%64E)"g-fPCbq][OcNlQLW_hs%Z%g83]3b]0V$sluS:l]fd*^-UdD=#bCpInTen.cfe189iIh6\.p.U0GF:oK9b'->\lOqObp&ppaGMoCcp"4SVDq!<>6ZV]FD>,rrdc't<[N2!Ai12-2bU`S*gNOt?NS4WgtN@KuL)HOb>`9L>S$_ert"UNW*,("+*>]m)4`k"8SUOCpM7`cEe!(7?`JV*GMajff(^atd&EX#qdMBmI'Q(YYb&m.O>0MYJ4XfJH@("`jPF^W5.*84$HY?2JY[WU48,IqkD_]b:_615)BA3RM*]q4>2Gf_1aMGFGu.Zt]!p5h;`XYO/FCmQ4/3ZX09kH$X+QI/JJh`lb\dBu:d$%Ld1=H=-UbKXP_&26H00T.?":f@40#m]NM5JYq@VFSk+#OR+sc4eX`Oq]N([T/;kQ>>WZOJNWnM"#msq:#?Km~>endstream +endobj +xref +0 8 +0000000000 65535 f +0000000073 00000 n +0000000104 00000 n +0000000211 00000 n +0000000414 00000 n +0000000482 00000 n +0000000778 00000 n +0000000837 00000 n +trailer +<< +/ID 
+[<3c7340500fa2fe72523c5e6f07511599><3c7340500fa2fe72523c5e6f07511599>]
+% ReportLab generated PDF document -- digest (http://www.reportlab.com)
+
+/Info 5 0 R
+/Root 4 0 R
+/Size 8
+>>
+startxref
+1669
+%%EOF"""
+    unstructured_response = """This is an email example@blacklanternsecurity.notreal
+
+An example JWT eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c
+
+A serialized DOTNET object AAEAAAD/////AQAAAAAAAAAMAgAAAFJTeXN0ZW0uQ29sbGVjdGlvbnMuR2VuZXJpYy5MaXN0YDFbW1N5c3RlbS5TdHJpbmddXSwgU3lzdGVtLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49YjAzZjVmN2YxMWQ1MGFlMwEAAAAIQ29tcGFyZXIQSXRlbUNvdW50AQMAAAAJAwAAAAlTeXN0ZW0uU3RyaW5nW10FAAAACQIAAAAJBAAAAAkFAAAACRcAAAAJCgAAAAkLAAAACQwAAAAJDQAAAAkOAAAACQ8AAAAJEAAAAAkRAAAACRIAAAAJEwAAAA==
+
+A full url https://www.test.notreal/about
+
+A href <a href="/donot_detect.js">Click me</a>"""
+
+    async def setup_after_prep(self, module_test):
+        module_test.set_expect_requests(
+            dict(uri="/"),
+            dict(response_data='<a href="/Test_PDF"/>'),
+        )
+        module_test.set_expect_requests(
+            dict(uri="/Test_PDF"),
+            dict(response_data=self.pdf_data, headers={"Content-Type": "application/pdf"}),
+        )
+
+    def check(self, module_test, events):
+        filesystem_events = [e for e in events if e.type == "FILESYSTEM"]
+        assert 1 == len(filesystem_events), filesystem_events
+        filesystem_event = filesystem_events[0]
+        file = Path(filesystem_event.data["path"])
+        assert file.is_file(), "Destination file doesn't exist"
+        assert open(file).read() == self.pdf_data, f"File at {file} does not contain the correct content"
+        raw_text_events = [e for e in events if e.type == "RAW_TEXT"]
+        assert 1 == len(raw_text_events), "Failed to emit RAW_TEXT event"
+        assert (
+            raw_text_events[0].data == self.unstructured_response
+        ), f"Text extracted from PDF is incorrect, got {raw_text_events[0].data}"
+        email_events = [e for e in events if e.type == "EMAIL_ADDRESS"]
+        assert 1 == len(email_events), "Failed to emit EMAIL_ADDRESS event"
+        assert (
+            email_events[0].data == "example@blacklanternsecurity.notreal"
+        ), f"Email extracted from unstructured text is incorrect, got {email_events[0].data}"
+        finding_events = [e for e in events if e.type == "FINDING"]
+        assert 2 == len(finding_events), "Failed to emit FINDING events"
+        assert any(
+            e.type == "FINDING"
+            and "JWT" in e.data["description"]
+            and e.data["url"] == "http://127.0.0.1:8888/Test_PDF"
+            and e.data["host"] == "127.0.0.1"
+            and e.data["path"].endswith("http-127-0-0-1-8888-test-pdf.pdf")
+            and str(e.host) == "127.0.0.1"
+            for e in finding_events
+        ), f"Failed to emit JWT event got {finding_events}"
+        assert any(
+            e.type == "FINDING"
+            and "DOTNET" in e.data["description"]
+            and e.data["url"] == "http://127.0.0.1:8888/Test_PDF"
+            and e.data["host"] == "127.0.0.1"
+            and e.data["path"].endswith("http-127-0-0-1-8888-test-pdf.pdf")
+            and str(e.host) == "127.0.0.1"
+            for e in finding_events
+        ), f"Failed to emit serialized event got {finding_events}"
+        assert finding_events[0].data["path"] == str(file), "File path not included in finding event"
+        url_events = [e.data for e in events if e.type == "URL_UNVERIFIED"]
+        assert (
+            "https://www.test.notreal/about" in url_events
+        ), f"URL extracted from unstructured text is incorrect, got {url_events}"
+        assert (
+            "/donot_detect.js" not in url_events
+        
), f"URL extracted from unstructured text is incorrect, got {url_events}" diff --git a/bbot/test/test_step_2/module_tests/test_module_git_clone.py b/bbot/test/test_step_2/module_tests/test_module_git_clone.py index cd59f5dc26..15bc54fb37 100644 --- a/bbot/test/test_step_2/module_tests/test_module_git_clone.py +++ b/bbot/test/test_step_2/module_tests/test_module_git_clone.py @@ -196,7 +196,7 @@ def check(self, module_test, events): e for e in events if e.type == "FILESYSTEM" - and "git_repos/test_keys" in e.data["path"] + and "git_repos/.bbot_test/test_keys" in e.data["path"] and "git" in e.tags and e.scope_distance == 1 ] diff --git a/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py b/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py index ad3c5eae71..03c519a8cf 100644 --- a/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py +++ b/bbot/test/test_step_2/module_tests/test_module_github_codesearch.py @@ -13,10 +13,11 @@ class TestGithub_Codesearch(ModuleTestBase): "/projectdiscovery/nuclei/06f242e5fce3439b7418877676810cbf57934875/v2/cmd/cve-annotate/main.go" ) github_file_url = f"http://127.0.0.1:8888{github_file_endpoint}" + github_file_content = "-----BEGIN PGP PRIVATE KEY BLOCK-----" async def setup_before_prep(self, module_test): expect_args = {"method": "GET", "uri": self.github_file_endpoint} - respond_args = {"response_data": "-----BEGIN PGP PRIVATE KEY BLOCK-----"} + respond_args = {"response_data": self.github_file_content} module_test.set_expect_requests(expect_args=expect_args, respond_args=respond_args) module_test.httpx_mock.add_response(url="https://api.github.com/zen") diff --git a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py index 7d73409471..f3c4a2cf54 100644 --- a/bbot/test/test_step_2/module_tests/test_module_github_workflows.py +++ b/bbot/test/test_step_2/module_tests/test_module_github_workflows.py @@ -437,9 +437,47 @@ async def setup_before_prep(self, module_test): url="https://productionresultssa10.blob.core.windows.net/actions-results/7beb304e-f42c-4830-a027-4f5dec53107d/workflow-job-run-3a559e2a-952e-58d2-b8db-2e604a9266d7/logs/steps/step-logs-0e34a19a-18b0-4208-b27a-f8c031db2d17.txt?rsct=text%2Fplain&se=2024-04-26T16%3A25%3A39Z&sig=a%2FiN8dOw0e3tiBQZAfr80veI8OYChb9edJ1eFY136B4%3D&sp=r&spr=https&sr=b&st=2024-04-26T16%3A15%3A34Z&sv=2021-12-02", content=self.zip_content, ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/runs/8839360698/artifacts", + json={ + "total_count": 1, + "artifacts": [ + { + "id": 1829832535, + "node_id": "MDg6QXJ0aWZhY3QxODI5ODMyNTM1", + "name": "build.tar.gz", + "size_in_bytes": 245770648, + "url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535", + "archive_download_url": "https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535/zip", + "expired": False, + "created_at": "2024-08-19T22:32:17Z", + "updated_at": "2024-08-19T22:32:18Z", + "expires_at": "2024-09-02T22:21:59Z", + "workflow_run": { + "id": 10461468466, + "repository_id": 89290483, + "head_repository_id": 799444840, + "head_branch": "not-a-real-branch", + "head_sha": "1eeb5354ab7b1e4141b8a6473846e2a5ea0dd2c6", + 
}, + } + ], + }, + ) + module_test.httpx_mock.add_response( + url="https://api.github.com/repos/blacklanternsecurity/bbot/actions/artifacts/1829832535/zip", + headers={ + "location": "https://pipelinesghubeus22.actions.githubusercontent.com/uYHz4cw2WwYcB2EU57uoCs3MaEDiz8veiVlAtReP3xevBriD1h/_apis/pipelines/1/runs/214601/signedartifactscontent?artifactName=build.tar.gz&urlExpires=2024-08-20T14%3A41%3A41.8000556Z&urlSigningMethod=HMACV2&urlSignature=OOBxLx4eE5A8uHjxOIvQtn3cLFQOBW927mg0hcTHO6U%3D" + }, + status_code=302, + ) + module_test.httpx_mock.add_response( + url="https://pipelinesghubeus22.actions.githubusercontent.com/uYHz4cw2WwYcB2EU57uoCs3MaEDiz8veiVlAtReP3xevBriD1h/_apis/pipelines/1/runs/214601/signedartifactscontent?artifactName=build.tar.gz&urlExpires=2024-08-20T14%3A41%3A41.8000556Z&urlSigningMethod=HMACV2&urlSignature=OOBxLx4eE5A8uHjxOIvQtn3cLFQOBW927mg0hcTHO6U%3D", + content=self.zip_content, + ) def check(self, module_test, events): - assert len(events) == 7 + assert len(events) == 8 assert 1 == len( [ e @@ -473,7 +511,7 @@ def check(self, module_test, events): ] ), "Failed to find blacklanternsecurity github repo" filesystem_events = [e for e in events if e.type == "FILESYSTEM"] - assert 2 == len(filesystem_events), filesystem_events + assert 3 == len(filesystem_events), filesystem_events for filesystem_event in filesystem_events: file = Path(filesystem_event.data["path"]) assert file.is_file(), "Destination file does not exist" diff --git a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py index 9cd5be6015..68285a0011 100644 --- a/bbot/test/test_step_2/module_tests/test_module_trufflehog.py +++ b/bbot/test/test_step_2/module_tests/test_module_trufflehog.py @@ -13,6 +13,7 @@ class TestTrufflehog(ModuleTestBase): "github_org", "speculate", "git_clone", + "unstructured", "github_workflows", "dockerhub", "docker_pull", @@ -851,8 +852,10 @@ def check(self, module_test, events): if e.type == "VULNERABILITY" and (e.data["host"] == "hub.docker.com" or e.data["host"] == "github.com") and "Verified Secret Found." in e.data["description"] - and "Secret: [https://admin:admin@the-internet.herokuapp.com]" in e.data["description"] + and "Raw result: [https://admin:admin@the-internet.herokuapp.com]" in e.data["description"] + and "RawV2 result: [https://admin:admin@the-internet.herokuapp.com/basic_auth]" in e.data["description"] ] + # Trufflehog should find 3 verifiable secrets, 1 from the github, 1 from the workflow log and 1 from the docker image. 
Unstructured will extract the text file but trufflehog should reject it as it's already scanned the containing folder
        assert 3 == len(vuln_events), "Failed to find secret in events"
        github_repo_event = [e for e in vuln_events if "test_keys" in e.data["description"]][0].parent
        folder = Path(github_repo_event.data["path"])
@@ -867,7 +870,7 @@ def check(self, module_test, events):
             [
                 e
                 for e in filesystem_events
-                if e.data["path"].endswith("/git_repos/test_keys") and Path(e.data["path"]).is_dir()
+                if e.data["path"].endswith("/git_repos/.bbot_test/test_keys") and Path(e.data["path"]).is_dir()
             ]
         ), "Test keys repo dir does not exist"
         assert 1 == len(
@@ -898,8 +901,9 @@ def check(self, module_test, events):
             if e.type == e.type == "FINDING"
             and (e.data["host"] == "hub.docker.com" or e.data["host"] == "github.com")
             and "Potential Secret Found." in e.data["description"]
-            and "Secret: [https://admin:admin@internal.host.com]" in e.data["description"]
+            and "Raw result: [https://admin:admin@internal.host.com]" in e.data["description"]
         ]
+        # Trufflehog should find 3 unverifiable secrets, 1 from the github, 1 from the workflow log and 1 from the docker image. Unstructured will extract the text file but trufflehog should reject it as it's already scanned the containing folder
         assert 3 == len(finding_events), "Failed to find secret in events"
         github_repo_event = [e for e in finding_events if "test_keys" in e.data["description"]][0].parent
         folder = Path(github_repo_event.data["path"])
@@ -914,7 +918,7 @@ def check(self, module_test, events):
             [
                 e
                 for e in filesystem_events
-                if e.data["path"].endswith("/git_repos/test_keys") and Path(e.data["path"]).is_dir()
+                if e.data["path"].endswith("/git_repos/.bbot_test/test_keys") and Path(e.data["path"]).is_dir()
             ]
         ), "Test keys repo dir does not exist"
         assert 1 == len(
diff --git a/bbot/test/test_step_2/module_tests/test_module_unstructured.py b/bbot/test/test_step_2/module_tests/test_module_unstructured.py
index 7acb24ad46..9d289377e2 100644
--- a/bbot/test/test_step_2/module_tests/test_module_unstructured.py
+++ b/bbot/test/test_step_2/module_tests/test_module_unstructured.py
@@ -96,7 +96,7 @@ def check(self, module_test, events):
         assert file.is_file(), "Destination file doesn't exist"
         assert open(file).read() == self.pdf_data, f"File at {file} does not contain the correct content"
         raw_text_events = [e for e in events if e.type == "RAW_TEXT"]
-        assert 1 == len(raw_text_events), "Failed to emmit RAW_TEXT event"
+        assert 1 == len(raw_text_events), "Failed to emit RAW_TEXT event"
         assert (
             raw_text_events[0].data == self.unstructured_response
         ), f"Text extracted from PDF is incorrect, got {raw_text_events[0].data}"
diff --git a/docs/dev/helpers/index.md b/docs/dev/helpers/index.md
index d34a10d4a9..60d64f793d 100644
--- a/docs/dev/helpers/index.md
+++ b/docs/dev/helpers/index.md
@@ -18,7 +18,7 @@ class MyModule(BaseModule):
         self.hugesuccess(str(ip))
 
         # Execute shell command
-        completed_process = self.run_process("ls", "-l")
+        completed_process = await self.run_process("ls", "-l")
         self.hugesuccess(completed_process.stdout)
 
         # Split a DNS name into subdomain / domain
diff --git a/docs/javascripts/tablesort.min.js b/docs/javascripts/tablesort.min.js
new file mode 100644
index 0000000000..65a83b1138
--- /dev/null
+++ b/docs/javascripts/tablesort.min.js
@@ -0,0 +1,6 @@
+/*!
+ * tablesort v5.2.1 (2021-10-30)
+ * http://tristen.ca/tablesort/demo/
+ * Copyright (c) 2021 ; Licensed MIT
+*/
+/* minified tablesort source omitted */
=0.15.2)"]
 [[package]]
 name = "cachetools"
-version = "5.3.3"
+version = "5.5.0"
 description = "Extensible memoizing collections and decorators"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"},
-    {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"},
+    {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"},
+    {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"},
 ]
 
 [[package]]
@@ -387,13 +387,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
 
 [[package]]
 name = "cloudcheck"
-version = "5.0.1.415"
+version = "5.0.1.515"
 description = "Check whether an IP address belongs to a cloud provider"
 optional = false
 python-versions = "<4.0,>=3.9"
 files = [
-    {file = "cloudcheck-5.0.1.415-py3-none-any.whl", hash = "sha256:e5f728106ddc2cdf43ee5a654d6ec069572ea925d30daec913c9a5a07209a52e"},
-    {file = "cloudcheck-5.0.1.415.tar.gz", hash = "sha256:ef3f7351dde77c298d46d48dd69919c6c6d2563aeece46aa35ecd5281cbff0dd"},
+    {file = "cloudcheck-5.0.1.515-py3-none-any.whl", hash = "sha256:427ee423b9abca9f742f21300c3968dde8784cbfdd99ba69b336a0a6723fe677"},
+    {file = "cloudcheck-5.0.1.515.tar.gz", hash = "sha256:64c7c22567a3ae14731b4826c631585a9714858cc9dd70fafbfa40b5cef37049"},
 ]
 
 [package.dependencies]
@@ -673,13 +673,13 @@ dev = ["flake8", "markdown", "twine", "wheel"]
 
 [[package]]
 name = "griffe"
-version = "0.45.3"
+version = "1.1.0"
 description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.45.3-py3-none-any.whl", hash = "sha256:ed1481a680ae3e28f91a06e0d8a51a5c9b97555aa2527abc2664447cc22337d6"}, - {file = "griffe-0.45.3.tar.gz", hash = "sha256:02ee71cc1a5035864b97bd0dbfff65c33f6f2c8854d3bd48a791905c2b8a44b9"}, + {file = "griffe-1.1.0-py3-none-any.whl", hash = "sha256:38ccc5721571c95ae427123074cf0dc0d36bce7c9701ab2ada9fe0566ff50c10"}, + {file = "griffe-1.1.0.tar.gz", hash = "sha256:c6328cbdec0d449549c1cc332f59227cd5603f903479d73e4425d828b782ffc3"}, ] [package.dependencies] @@ -757,13 +757,13 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] @@ -871,153 +871,149 @@ files = [ [[package]] name = "lxml" -version = "5.2.2" +version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, - {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, - {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, - {file = 
"lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, - {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, - {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, - {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, - {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, - {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, - {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, - {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, - {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, - {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, - {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", 
hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, - {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, - {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, - {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = 
"sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, - {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, - {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, - {file = 
"lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, - {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + 
{file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = 
"lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = 
"sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + 
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, ] [package.extras] @@ -1025,7 +1021,7 @@ cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.10)"] +source = ["Cython (>=3.0.11)"] [[package]] name = "markdown" @@ -1138,13 +1134,13 @@ files = [ [[package]] name = "mike" -version = "2.1.2" +version = "2.1.3" description = "Manage multiple versions of your MkDocs-powered documentation" optional = false python-versions = "*" files = [ - {file = "mike-2.1.2-py3-none-any.whl", hash = "sha256:d61d9b423ab412d634ca2bd520136d5114e3cc73f4bbd1aa6a0c6625c04918c0"}, - {file = "mike-2.1.2.tar.gz", hash = "sha256:d59cc8054c50f9c8a046cfd47f9b700cf9ff1b2b19f420bd8812ca6f94fa8bd3"}, + {file = "mike-2.1.3-py3-none-any.whl", hash = "sha256:d90c64077e84f06272437b464735130d380703a76a5738b152932884c60c062a"}, + {file = "mike-2.1.3.tar.gz", hash = "sha256:abd79b8ea483fb0275b7972825d3082e5ae67a41820f8d8a0dc7a3f49944e810"}, ] [package.dependencies] @@ -1243,13 +1239,13 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.5.26" +version = "9.5.33" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.26-py3-none-any.whl", hash = "sha256:5d01fb0aa1c7946a1e3ae8689aa2b11a030621ecb54894e35aabb74c21016312"}, - {file = "mkdocs_material-9.5.26.tar.gz", hash = "sha256:56aeb91d94cffa43b6296fa4fbf0eb7c840136e563eecfd12c2d9e92e50ba326"}, + {file = "mkdocs_material-9.5.33-py3-none-any.whl", hash = "sha256:dbc79cf0fdc6e2c366aa987de8b0c9d4e2bb9f156e7466786ba2fd0f9bf7ffca"}, + {file = "mkdocs_material-9.5.33.tar.gz", hash = "sha256:d23a8b5e3243c9b2f29cdfe83051104a8024b767312dc8fde05ebe91ad55d89d"}, ] [package.dependencies] @@ -1311,17 +1307,17 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] [[package]] name = "mkdocstrings-python" -version = "1.10.3" +version = "1.10.8" description = "A Python handler for mkdocstrings." 
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "mkdocstrings_python-1.10.3-py3-none-any.whl", hash = "sha256:11ff6d21d3818fb03af82c3ea6225b1534837e17f790aa5f09626524171f949b"},
-    {file = "mkdocstrings_python-1.10.3.tar.gz", hash = "sha256:321cf9c732907ab2b1fedaafa28765eaa089d89320f35f7206d00ea266889d03"},
+    {file = "mkdocstrings_python-1.10.8-py3-none-any.whl", hash = "sha256:bb12e76c8b071686617f824029cb1dfe0e9afe89f27fb3ad9a27f95f054dcd89"},
+    {file = "mkdocstrings_python-1.10.8.tar.gz", hash = "sha256:5856a59cbebbb8deb133224a540de1ff60bded25e54d8beacc375bb133d39016"},
 ]

 [package.dependencies]
-griffe = ">=0.44"
+griffe = ">=0.49"
 mkdocstrings = ">=0.25"

 [[package]]
@@ -1820,19 +1816,19 @@ windows-terminal = ["colorama (>=0.4.6)"]

 [[package]]
 name = "pyjwt"
-version = "2.8.0"
+version = "2.9.0"
 description = "JSON Web Token implementation in Python"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"},
-    {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"},
+    {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"},
+    {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"},
 ]

 [package.extras]
 crypto = ["cryptography (>=3.4.0)"]
-dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
-docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
+dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
+docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
 tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]

 [[package]]
@@ -1891,17 +1887,17 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments

 [[package]]
 name = "pytest-asyncio"
-version = "0.23.8"
+version = "0.24.0"
 description = "Pytest support for asyncio"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"},
-    {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"},
+    {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"},
+    {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"},
 ]

 [package.dependencies]
-pytest = ">=7.0.0,<9"
+pytest = ">=8.2,<9"

 [package.extras]
 docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
@@ -1945,13 +1941,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "pytest-mock (>=3.12)"]

 [[package]]
 name = "pytest-httpserver"
-version = "1.0.11"
+version = "1.1.0"
 description = "pytest-httpserver is a httpserver for pytest"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pytest_httpserver-1.0.11-py3-none-any.whl", hash = "sha256:87e0017aa563b4fb9688822827495dd84d5dd0cae189ef0fa533ba693471fe6e"},
-    {file = "pytest_httpserver-1.0.11.tar.gz", hash = "sha256:834531f278e99a22cf3920bc30fe06784518b742a189628b3b34953604ad83f6"},
+    {file = "pytest_httpserver-1.1.0-py3-none-any.whl", hash = "sha256:7ef88be8ed3354b6784daa3daa75a422370327c634053cefb124903fa8d73a41"},
+    {file = "pytest_httpserver-1.1.0.tar.gz", hash = "sha256:6b1cb0199e2ed551b1b94d43f096863bbf6ae5bcd7c75c2c06845e5ce2dc8701"},
 ]

 [package.dependencies]
@@ -2114,99 +2110,120 @@ pyyaml = "*"

 [[package]]
 name = "pyzmq"
-version = "26.0.3"
+version = "26.2.0"
 description = "Python bindings for 0MQ"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"},
-    {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"},
-    {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"},
-    {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"},
-    {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"},
-    {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"},
-    {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"},
-    {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"},
-    {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"},
-    {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"},
-    {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"},
-    {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"},
-    {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"},
-    {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"},
-    {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"},
-    {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"},
-    {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"},
-    {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"},
-    {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"},
-    {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"},
-    {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"},
-    {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"},
-    {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"},
-    {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"},
-    {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"},
-    {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"},
-    {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"},
-    {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"},
-    {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"},
-    {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"},
-    {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"},
-    {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"},
-    {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"},
-    {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"},
-    {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"},
-    {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"},
-    {file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"},
-    {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"},
-    {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"},
-    {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"},
-    {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"},
-    {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"},
-    {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"},
-    {file = "pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"},
-    {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"},
-    {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"},
-    {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"},
-    {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"},
-    {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"},
-    {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"},
-    {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"},
-    {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"},
-    {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"},
-    {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = "sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"},
-    {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"},
-    {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"},
-    {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"},
-    {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"},
-    {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"},
-    {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"},
-    {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"},
-    {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"},
-    {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"},
-    {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"},
-    {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"},
-    {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"},
-    {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"},
-    {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"},
-    {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"},
-    {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"},
-    {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"},
-    {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"},
-    {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"},
-    {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"},
-    {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"},
-    {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"},
-    {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"},
-    {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"},
-    {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"},
-    {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"},
-    {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"},
-    {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"},
-    {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"},
-    {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"},
-    {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"},
-    {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"},
-    {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"},
-    {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"},
+    {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"},
+    {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"},
+    {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"},
+    {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"},
+    {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"},
+    {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"},
+    {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"},
+    {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"},
+    {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"},
+    {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"},
+    {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"},
+    {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"},
+    {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"},
+    {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"},
+    {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"},
+    {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"},
+    {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"},
+    {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"},
+    {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"},
+    {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"},
+    {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"},
+    {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"},
+    {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"},
+    {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"},
+    {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"},
+    {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"},
+    {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"},
+    {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"},
+    {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"},
+    {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"},
+    {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"},
+    {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"},
+    {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"},
+    {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"},
+    {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"},
+    {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"},
+    {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"},
+    {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"},
+    {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"},
+    {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"},
+    {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"},
+    {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"},
+    {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"},
+    {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"},
+    {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"},
+    {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"},
+    {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"},
+    {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"},
+    {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"},
+    {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"},
+    {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"},
+    {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"},
+    {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"},
+    {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"},
+    {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"},
+    {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"},
+    {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"},
+    {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"},
+    {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"},
+    {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"},
+    {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"},
+    {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"},
+    {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"},
+    {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"},
+    {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = "sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"},
+    {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"},
+    {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"},
+    {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"},
+    {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"},
+    {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"},
+    {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"},
+    {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"},
+    {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"},
+    {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"},
+    {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"},
+    {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"},
+    {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"},
+    {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"},
+    {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"},
+    {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"},
+    {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"},
+    {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"},
+    {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"},
+    {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"},
+    {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"},
+    {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"},
+    {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"},
+    {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"},
+    {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"},
+    {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"},
+    {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"},
+    {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"},
+    {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"},
+    {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"},
+    {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"},
+    {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"},
+    {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"},
+    {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"},
+    {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"},
+    {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"},
+    {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"},
+    {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"},
+    {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"},
+    {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"},
+    {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"},
+    {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"},
+    {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"},
+    {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"},
+    {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"},
 ]

 [package.dependencies]
@@ -2799,13 +2816,13 @@ files = [

 [[package]]
 name = "werkzeug"
-version = "3.0.3"
+version = "3.0.4"
 description = "The comprehensive WSGI web application library."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"},
-    {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"},
+    {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"},
+    {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"},
 ]

 [package.dependencies]
@@ -2956,4 +2973,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.9"
-content-hash = "d7c83d3aede8138e801e2936d5d63b5d60f4f9ab630581a1b0831c3cc9190bcc"
+content-hash = "d5a58b845248d60d5cfc5111d6e611486e9137479952f180e2e01d719e440746"
diff --git a/pyproject.toml b/pyproject.toml
index feac446ec4..1bcfe4540c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "bbot"
-version = "2.0.0"
+version = "2.0.1"
 description = "OSINT automation for hackers."
 authors = [
     "TheTechromancer",
@@ -70,7 +70,7 @@ pytest-timeout = "^2.3.1"
 pytest-httpx = "^0.30.0"
 pytest-httpserver = "^1.0.11"
 pytest = "^8.3.1"
-pytest-asyncio = "^0.23.8"
+pytest-asyncio = ">=0.23.8,<0.25.0"

 [tool.poetry.group.docs.dependencies]
 mkdocs = "^1.5.2"
@@ -98,7 +98,7 @@ extend-exclude = "(test_step_1/test_manager_*)"
 [tool.poetry-dynamic-versioning]
 enable = true
 metadata = false
-format-jinja = 'v2.0.0{% if branch == "dev" %}.{{ distance }}rc{% endif %}'
+format-jinja = 'v2.0.1{% if branch == "dev" %}.{{ distance }}rc{% endif %}'

 [tool.poetry-dynamic-versioning.substitution]
 files = ["*/__init__.py"]
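The `format-jinja` template in the final hunk is what poetry-dynamic-versioning uses to derive the published version string from git state. A minimal sketch of its behavior, assuming plain Jinja2 rendering is a fair stand-in for the plugin (which supplies template variables such as `branch` and `distance` from the repository):

```python
# Minimal sketch, not the plugin itself: poetry-dynamic-versioning feeds
# variables like `branch` (current git branch) and `distance` (commits since
# the last tag) into the format-jinja template at build time.
from jinja2 import Template

template = Template('v2.0.1{% if branch == "dev" %}.{{ distance }}rc{% endif %}')

# On the dev branch, the commit distance is baked into a pre-release version:
print(template.render(branch="dev", distance=7))   # v2.0.1.7rc
# On any other branch, the plain release version is produced:
print(template.render(branch="main", distance=0))  # v2.0.1
```

This is why dev-branch builds show up as versions like `v2.0.1.7rc` while tagged releases stay at `v2.0.1`, and why this template's hard-coded base version must be bumped in lockstep with `version` in `[tool.poetry]`.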