Merge branch 'main' into noise-arch-change

Manu Sheel Gupta authored on 2025-09-05 02:55:55 +05:30 (committed by GitHub)
14 changed files with 1347 additions and 66 deletions

View File

@@ -41,6 +41,7 @@ from libp2p.tools.async_service import (
from libp2p.tools.utils import (
info_from_p2p_addr,
)
from libp2p.utils.paths import get_script_dir, join_paths
# Configure logging
logging.basicConfig(
@@ -53,8 +54,8 @@ logger = logging.getLogger("kademlia-example")
# Configure DHT module loggers to inherit from the parent logger
# This ensures all kademlia-example.* loggers use the same configuration
# Get the directory where this script is located
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SERVER_ADDR_LOG = os.path.join(SCRIPT_DIR, "server_node_addr.txt")
SCRIPT_DIR = get_script_dir(__file__)
SERVER_ADDR_LOG = join_paths(SCRIPT_DIR, "server_node_addr.txt")
# Set the level for all child loggers
for module in [

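For reference, this migration is behavior-preserving: the new helpers return pathlib.Path objects that compare equal to Path-wrapped results of the os.path calls they replace (mirroring the backward-compatibility tests added later in this commit). A minimal check, assuming libp2p.utils.paths is importable:

    import os
    from pathlib import Path

    from libp2p.utils.paths import get_script_dir, join_paths

    # Path-based helpers replace the os.path calls one-for-one.
    assert get_script_dir(__file__) == Path(os.path.dirname(os.path.abspath(__file__)))
    assert join_paths("a", "b", "c") == Path(os.path.join("a", "b", "c"))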
View File

@@ -37,3 +37,4 @@ SyncValidatorFn = Callable[[ID, rpc_pb2.Message], bool]
AsyncValidatorFn = Callable[[ID, rpc_pb2.Message], Awaitable[bool]]
ValidatorFn = Union[SyncValidatorFn, AsyncValidatorFn]
UnsubscribeFn = Callable[[], Awaitable[None]]
MessageID = NewType("MessageID", str)

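MessageID is a typing.NewType, so it gives type checkers a distinct name for gossip message IDs without adding any runtime wrapper. A small illustrative sketch:

    from typing import NewType

    MessageID = NewType("MessageID", str)

    # At runtime MessageID(...) returns the plain string unchanged;
    # the distinct type exists only for static type checkers.
    msg_id = MessageID("example-message-id")
    assert msg_id == "example-message-id"
    assert isinstance(msg_id, str)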
View File

@@ -2,15 +2,20 @@ import logging
from multiaddr import Multiaddr
from multiaddr.resolvers import DNSResolver
import trio
from libp2p.abc import ID, INetworkService, PeerInfo
from libp2p.discovery.bootstrap.utils import validate_bootstrap_addresses
from libp2p.discovery.events.peerDiscovery import peerDiscovery
from libp2p.network.exceptions import SwarmException
from libp2p.peer.peerinfo import info_from_p2p_addr
from libp2p.peer.peerstore import PERMANENT_ADDR_TTL
logger = logging.getLogger("libp2p.discovery.bootstrap")
resolver = DNSResolver()
DEFAULT_CONNECTION_TIMEOUT = 10
class BootstrapDiscovery:
"""
@@ -19,68 +24,147 @@ class BootstrapDiscovery:
"""
def __init__(self, swarm: INetworkService, bootstrap_addrs: list[str]):
"""
Initialize BootstrapDiscovery.
Args:
swarm: The network service (swarm) instance
bootstrap_addrs: List of bootstrap peer multiaddresses
"""
self.swarm = swarm
self.peerstore = swarm.peerstore
self.bootstrap_addrs = bootstrap_addrs or []
self.discovered_peers: set[str] = set()
self.connection_timeout: int = DEFAULT_CONNECTION_TIMEOUT
async def start(self) -> None:
"""Process bootstrap addresses and emit peer discovery events."""
logger.debug(
"""Process bootstrap addresses and emit peer discovery events in parallel."""
logger.info(
f"Starting bootstrap discovery with "
f"{len(self.bootstrap_addrs)} bootstrap addresses"
)
# Show all bootstrap addresses being processed
for i, addr in enumerate(self.bootstrap_addrs):
logger.debug(f"{i + 1}. {addr}")
# Validate and filter bootstrap addresses
self.bootstrap_addrs = validate_bootstrap_addresses(self.bootstrap_addrs)
logger.info(f"Valid addresses after validation: {len(self.bootstrap_addrs)}")
for addr_str in self.bootstrap_addrs:
try:
await self._process_bootstrap_addr(addr_str)
except Exception as e:
logger.debug(f"Failed to process bootstrap address {addr_str}: {e}")
# Use Trio nursery for PARALLEL address processing
try:
async with trio.open_nursery() as nursery:
logger.debug(
f"Starting {len(self.bootstrap_addrs)} parallel address "
f"processing tasks"
)
# Start all bootstrap address processing tasks in parallel
for addr_str in self.bootstrap_addrs:
logger.debug(f"Starting parallel task for: {addr_str}")
nursery.start_soon(self._process_bootstrap_addr, addr_str)
# The nursery will wait for all address processing tasks to complete
logger.debug(
"Nursery active - waiting for address processing tasks to complete"
)
except trio.Cancelled:
logger.debug("Bootstrap address processing cancelled - cleaning up tasks")
raise
except Exception as e:
logger.error(f"Bootstrap address processing failed: {e}")
raise
logger.info("Bootstrap discovery startup complete - all tasks finished")
def stop(self) -> None:
"""Clean up bootstrap discovery resources."""
logger.debug("Stopping bootstrap discovery")
logger.info("Stopping bootstrap discovery and cleaning up tasks")
# Clear discovered peers
self.discovered_peers.clear()
logger.debug("Bootstrap discovery cleanup completed")
     async def _process_bootstrap_addr(self, addr_str: str) -> None:
         """Convert string address to PeerInfo and add to peerstore."""
         try:
-            multiaddr = Multiaddr(addr_str)
+            try:
+                multiaddr = Multiaddr(addr_str)
+            except Exception as e:
+                logger.debug(f"Invalid multiaddr format '{addr_str}': {e}")
+                return
+            if self.is_dns_addr(multiaddr):
+                resolved_addrs = await resolver.resolve(multiaddr)
+                if resolved_addrs is None:
+                    logger.warning(f"DNS resolution returned None for: {addr_str}")
+                    return
+                peer_id_str = multiaddr.get_peer_id()
+                if peer_id_str is None:
+                    logger.warning(f"Missing peer ID in DNS address: {addr_str}")
+                    return
+                peer_id = ID.from_base58(peer_id_str)
+                addrs = [addr for addr in resolved_addrs]
+                if not addrs:
+                    logger.warning(f"No addresses resolved for DNS address: {addr_str}")
+                    return
+                peer_info = PeerInfo(peer_id, addrs)
+                await self.add_addr(peer_info)
+            else:
+                peer_info = info_from_p2p_addr(multiaddr)
+                await self.add_addr(peer_info)
         except Exception as e:
-            logger.debug(f"Invalid multiaddr format '{addr_str}': {e}")
-            return
-        if self.is_dns_addr(multiaddr):
-            resolved_addrs = await resolver.resolve(multiaddr)
-            peer_id_str = multiaddr.get_peer_id()
-            if peer_id_str is None:
-                logger.warning(f"Missing peer ID in DNS address: {addr_str}")
-                return
-            peer_id = ID.from_base58(peer_id_str)
-            addrs = [addr for addr in resolved_addrs]
-            if not addrs:
-                logger.warning(f"No addresses resolved for DNS address: {addr_str}")
-                return
-            peer_info = PeerInfo(peer_id, addrs)
-            self.add_addr(peer_info)
-        else:
-            self.add_addr(info_from_p2p_addr(multiaddr))
+            logger.warning(f"Failed to process bootstrap address {addr_str}: {e}")
def is_dns_addr(self, addr: Multiaddr) -> bool:
"""Check if the address is a DNS address."""
return any(protocol.name == "dnsaddr" for protocol in addr.protocols())
def add_addr(self, peer_info: PeerInfo) -> None:
"""Add a peer to the peerstore and emit discovery event."""
async def add_addr(self, peer_info: PeerInfo) -> None:
"""
Add a peer to the peerstore, emit discovery event,
and attempt connection in parallel.
"""
logger.debug(
f"Adding peer {peer_info.peer_id} with {len(peer_info.addrs)} addresses"
)
# Skip if it's our own peer
if peer_info.peer_id == self.swarm.get_peer_id():
logger.debug(f"Skipping own peer ID: {peer_info.peer_id}")
return
# Always add addresses to peerstore (allows multiple addresses for same peer)
self.peerstore.add_addrs(peer_info.peer_id, peer_info.addrs, 10)
# Filter addresses to only include IPv4+TCP (only supported protocol)
ipv4_tcp_addrs = []
filtered_out_addrs = []
for addr in peer_info.addrs:
if self._is_ipv4_tcp_addr(addr):
ipv4_tcp_addrs.append(addr)
else:
filtered_out_addrs.append(addr)
# Log filtering results
logger.debug(
f"Address filtering for {peer_info.peer_id}: "
f"{len(ipv4_tcp_addrs)} IPv4+TCP, {len(filtered_out_addrs)} filtered"
)
# Skip peer if no IPv4+TCP addresses available
if not ipv4_tcp_addrs:
logger.warning(
f"❌ No IPv4+TCP addresses for {peer_info.peer_id} - "
f"skipping connection attempts"
)
return
# Add only IPv4+TCP addresses to peerstore
self.peerstore.add_addrs(peer_info.peer_id, ipv4_tcp_addrs, PERMANENT_ADDR_TTL)
# Only emit discovery event if this is the first time we see this peer
peer_id_str = str(peer_info.peer_id)
@@ -89,6 +173,140 @@ class BootstrapDiscovery:
self.discovered_peers.add(peer_id_str)
# Emit peer discovery event
peerDiscovery.emit_peer_discovered(peer_info)
logger.debug(f"Peer discovered: {peer_info.peer_id}")
logger.info(f"Peer discovered: {peer_info.peer_id}")
# Connect to peer (parallel across different bootstrap addresses)
logger.debug("Connecting to discovered peer...")
await self._connect_to_peer(peer_info.peer_id)
else:
logger.debug(f"Additional addresses added for peer: {peer_info.peer_id}")
logger.debug(
f"Additional addresses added for existing peer: {peer_info.peer_id}"
)
# Even for existing peers, try to connect if not already connected
if peer_info.peer_id not in self.swarm.connections:
logger.debug("Connecting to existing peer...")
await self._connect_to_peer(peer_info.peer_id)
async def _connect_to_peer(self, peer_id: ID) -> None:
"""
Attempt to establish a connection to a peer with timeout.
Uses swarm.dial_peer to connect using addresses stored in peerstore.
Times out after self.connection_timeout seconds to prevent hanging.
"""
logger.debug(f"Connection attempt for peer: {peer_id}")
# Pre-connection validation: Check if already connected
if peer_id in self.swarm.connections:
logger.debug(
f"Already connected to {peer_id} - skipping connection attempt"
)
return
# Check available addresses before attempting connection
available_addrs = self.peerstore.addrs(peer_id)
logger.debug(f"Connecting to {peer_id} ({len(available_addrs)} addresses)")
if not available_addrs:
logger.error(f"❌ No addresses available for {peer_id} - cannot connect")
return
# Record start time for connection attempt monitoring
connection_start_time = trio.current_time()
try:
with trio.fail_after(self.connection_timeout):  # raises trio.TooSlowError on timeout
# Log connection attempt
logger.debug(
f"Attempting connection to {peer_id} using "
f"{len(available_addrs)} addresses"
)
# Use swarm.dial_peer to connect using stored addresses
await self.swarm.dial_peer(peer_id)
# Calculate connection time
connection_time = trio.current_time() - connection_start_time
# Post-connection validation: Verify connection was actually established
if peer_id in self.swarm.connections:
logger.info(
f"✅ Connected to {peer_id} (took {connection_time:.2f}s)"
)
else:
logger.warning(
f"Dial succeeded but connection not found for {peer_id}"
)
except trio.TooSlowError:
logger.warning(
f"❌ Connection to {peer_id} timed out after {self.connection_timeout}s"
)
except SwarmException as e:
# Calculate failed connection time
failed_connection_time = trio.current_time() - connection_start_time
# Enhanced error logging
error_msg = str(e)
if "no addresses established a successful connection" in error_msg:
logger.warning(
f"❌ Failed to connect to {peer_id} after trying all "
f"{len(available_addrs)} addresses "
f"(took {failed_connection_time:.2f}s)"
)
# Log individual address failures if this is a MultiError
if (
e.__cause__ is not None
and hasattr(e.__cause__, "exceptions")
and getattr(e.__cause__, "exceptions", None) is not None
):
exceptions_list = getattr(e.__cause__, "exceptions")
logger.debug("📋 Individual address failure details:")
for i, addr_exception in enumerate(exceptions_list, 1):
logger.debug(f"Address {i}: {addr_exception}")
# Also log the actual address that failed
if i <= len(available_addrs):
logger.debug(f"Failed address: {available_addrs[i - 1]}")
else:
logger.warning("No detailed exception information available")
else:
logger.warning(
f"❌ Failed to connect to {peer_id}: {e} "
f"(took {failed_connection_time:.2f}s)"
)
except Exception as e:
# Handle unexpected errors that aren't swarm-specific
failed_connection_time = trio.current_time() - connection_start_time
logger.error(
f"❌ Unexpected error connecting to {peer_id}: "
f"{e} (took {failed_connection_time:.2f}s)"
)
# Don't re-raise to prevent killing the nursery and other parallel tasks
def _is_ipv4_tcp_addr(self, addr: Multiaddr) -> bool:
"""
Check if address is IPv4 with TCP protocol only.
Filters out IPv6, UDP, QUIC, WebSocket, and other unsupported protocols.
Only IPv4+TCP addresses are supported by the current transport.
"""
try:
protocols = addr.protocols()
# Must have IPv4 protocol
has_ipv4 = any(p.name == "ip4" for p in protocols)
if not has_ipv4:
return False
# Must have TCP protocol
has_tcp = any(p.name == "tcp" for p in protocols)
if not has_tcp:
return False
return True
except Exception:
# If we can't parse the address, don't use it
return False

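The start() method above follows the standard Trio structured-concurrency pattern: one task per bootstrap address, with errors contained inside each task so a single bad address cannot cancel its siblings. A self-contained sketch of the same pattern (illustrative addresses only):

    import trio

    async def process(addr: str) -> None:
        # Stand-in for _process_bootstrap_addr: errors are caught inside
        # the task so one failure does not cancel the whole nursery.
        try:
            await trio.sleep(0.1)  # placeholder for DNS resolution / dialing
            print(f"processed {addr}")
        except Exception as e:
            print(f"failed {addr}: {e}")

    async def main() -> None:
        addrs = ["/ip4/127.0.0.1/tcp/4001", "/ip4/127.0.0.1/tcp/4002"]
        async with trio.open_nursery() as nursery:
            for addr in addrs:
                nursery.start_soon(process, addr)
        # open_nursery() only exits after every started task has finished.

    trio.run(main)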
View File

@@ -1,6 +1,3 @@
from ast import (
literal_eval,
)
from collections import (
defaultdict,
)
@@ -22,6 +19,7 @@ from libp2p.abc import (
IPubsubRouter,
)
from libp2p.custom_types import (
MessageID,
TProtocol,
)
from libp2p.peer.id import (
@@ -56,6 +54,10 @@ from .pb import (
from .pubsub import (
Pubsub,
)
from .utils import (
parse_message_id_safe,
safe_parse_message_id,
)
PROTOCOL_ID = TProtocol("/meshsub/1.0.0")
PROTOCOL_ID_V11 = TProtocol("/meshsub/1.1.0")
@@ -794,8 +796,8 @@ class GossipSub(IPubsubRouter, Service):
# Add all unknown message ids (ids that appear in ihave_msg but not in
# seen_seqnos) to list of messages we want to request
msg_ids_wanted: list[str] = [
msg_id
msg_ids_wanted: list[MessageID] = [
parse_message_id_safe(msg_id)
for msg_id in ihave_msg.messageIDs
if msg_id not in seen_seqnos_and_peers
]
@@ -811,9 +813,9 @@ class GossipSub(IPubsubRouter, Service):
Forwards all request messages that are present in mcache to the
requesting peer.
"""
# FIXME: Update type of message ID
# FIXME: Find a better way to parse the msg ids
msg_ids: list[Any] = [literal_eval(msg) for msg in iwant_msg.messageIDs]
msg_ids: list[tuple[bytes, bytes]] = [
safe_parse_message_id(msg) for msg in iwant_msg.messageIDs
]
msgs_to_forward: list[rpc_pb2.Message] = []
for msg_id_iwant in msg_ids:
# Check if the wanted message ID is present in mcache

View File

@@ -1,6 +1,10 @@
import ast
import logging
from libp2p.abc import IHost
from libp2p.custom_types import (
MessageID,
)
from libp2p.peer.envelope import consume_envelope
from libp2p.peer.id import ID
from libp2p.pubsub.pb.rpc_pb2 import RPC
@@ -48,3 +52,29 @@ def maybe_consume_signed_record(msg: RPC, host: IHost, peer_id: ID) -> bool:
logger.error("Failed to update the Certified-Addr-Book: %s", e)
return False
return True
def parse_message_id_safe(msg_id_str: str) -> MessageID:
"""Safely handle message ID as string."""
return MessageID(msg_id_str)
def safe_parse_message_id(msg_id_str: str) -> tuple[bytes, bytes]:
"""
Safely parse message ID using ast.literal_eval with validation.
:param msg_id_str: String representation of message ID
:return: Tuple of (seqno, from_id) as bytes
:raises ValueError: If parsing fails
"""
try:
parsed = ast.literal_eval(msg_id_str)
if not isinstance(parsed, tuple) or len(parsed) != 2:
raise ValueError("Invalid message ID format")
seqno, from_id = parsed
if not isinstance(seqno, bytes) or not isinstance(from_id, bytes):
raise ValueError("Message ID components must be bytes")
return (seqno, from_id)
except (ValueError, SyntaxError) as e:
raise ValueError(f"Invalid message ID format: {e}")

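A quick round-trip of the two helpers, assuming they live at libp2p.pubsub.utils as the relative import in gossipsub.py suggests:

    from libp2p.pubsub.utils import parse_message_id_safe, safe_parse_message_id

    seqno, sender = b"1234", b"peer"
    wire_id = str((seqno, sender))  # (seqno, from_id) serialized with str()

    assert parse_message_id_safe(wire_id) == wire_id  # MessageID is still a str
    assert safe_parse_message_id(wire_id) == (seqno, sender)

    try:
        safe_parse_message_id("not_a_valid_msg_id")
    except ValueError as e:
        print(f"rejected: {e}")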
View File

@@ -1,7 +1,4 @@
import atexit
from datetime import (
datetime,
)
import logging
import logging.handlers
import os
@@ -21,6 +18,9 @@ log_queue: "queue.Queue[Any]" = queue.Queue()
# Store the current listener to stop it on exit
_current_listener: logging.handlers.QueueListener | None = None
# Store the handlers for proper cleanup
_current_handlers: list[logging.Handler] = []
# Event to track when the listener is ready
_listener_ready = threading.Event()
@@ -95,7 +95,7 @@ def setup_logging() -> None:
- Child loggers inherit their parent's level unless explicitly set
- The root libp2p logger controls the default level
"""
global _current_listener, _listener_ready
global _current_listener, _listener_ready, _current_handlers
# Reset the event
_listener_ready.clear()
@@ -105,6 +105,12 @@ def setup_logging() -> None:
_current_listener.stop()
_current_listener = None
# Close and clear existing handlers
for handler in _current_handlers:
if isinstance(handler, logging.FileHandler):
handler.close()
_current_handlers.clear()
# Get the log level from environment variable
debug_str = os.environ.get("LIBP2P_DEBUG", "")
@@ -148,13 +154,10 @@ def setup_logging() -> None:
log_path = Path(log_file)
log_path.parent.mkdir(parents=True, exist_ok=True)
else:
# Default log file with timestamp and unique identifier
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S_%f")
unique_id = os.urandom(4).hex() # Add a unique identifier to prevent collisions
if os.name == "nt": # Windows
log_file = f"C:\\Windows\\Temp\\py-libp2p_{timestamp}_{unique_id}.log"
else: # Unix-like
log_file = f"/tmp/py-libp2p_{timestamp}_{unique_id}.log"
# Use cross-platform temp file creation
from libp2p.utils.paths import create_temp_file
log_file = str(create_temp_file(prefix="py-libp2p_", suffix=".log"))
# Print the log file path so users know where to find it
print(f"Logging to: {log_file}", file=sys.stderr)
@@ -195,6 +198,9 @@ def setup_logging() -> None:
logger.setLevel(level)
logger.propagate = False # Prevent message duplication
# Store handlers globally for cleanup
_current_handlers.extend(handlers)
# Start the listener AFTER configuring all loggers
_current_listener = logging.handlers.QueueListener(
log_queue, *handlers, respect_handler_level=True
@@ -209,7 +215,13 @@ def setup_logging() -> None:
@atexit.register
def cleanup_logging() -> None:
"""Clean up logging resources on exit."""
global _current_listener
global _current_listener, _current_handlers
if _current_listener is not None:
_current_listener.stop()
_current_listener = None
# Close all file handlers to ensure proper cleanup on Windows
for handler in _current_handlers:
if isinstance(handler, logging.FileHandler):
handler.close()
_current_handlers.clear()

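For context, a minimal stdlib-only sketch of the queue-based logging pattern this module manages, including the teardown order the Windows fix above depends on (stop the listener first, then close file handlers):

    import logging
    import logging.handlers
    import queue

    log_queue: "queue.Queue[logging.LogRecord]" = queue.Queue()

    # Loggers write records into the queue; a listener thread fans them
    # out to the real handlers.
    queue_handler = logging.handlers.QueueHandler(log_queue)
    file_handler = logging.FileHandler("example.log")
    listener = logging.handlers.QueueListener(
        log_queue, file_handler, respect_handler_level=True
    )

    logger = logging.getLogger("example")
    logger.addHandler(queue_handler)
    logger.setLevel(logging.INFO)

    listener.start()
    logger.info("hello")
    listener.stop()       # flushes queued records to the handlers
    file_handler.close()  # releases the log file (required on Windows)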
libp2p/utils/paths.py (new file, 267 lines added)
View File

@@ -0,0 +1,267 @@
"""
Cross-platform path utilities for py-libp2p.
This module provides standardized path operations to ensure consistent
behavior across Windows, macOS, and Linux platforms.
"""
import os
from pathlib import Path
import sys
import tempfile
from typing import Union
PathLike = Union[str, Path]
def get_temp_dir() -> Path:
"""
Get cross-platform temporary directory.
Returns:
Path: Platform-specific temporary directory path
"""
return Path(tempfile.gettempdir())
def get_project_root() -> Path:
"""
Get the project root directory.
Returns:
Path: Path to the py-libp2p project root
"""
# Navigate from libp2p/utils/paths.py to project root
return Path(__file__).parent.parent.parent
def join_paths(*parts: PathLike) -> Path:
"""
Cross-platform path joining.
Args:
*parts: Path components to join
Returns:
Path: Joined path using platform-appropriate separator
"""
return Path(*parts)
def ensure_dir_exists(path: PathLike) -> Path:
"""
Ensure directory exists, create if needed.
Args:
path: Directory path to ensure exists
Returns:
Path: Path object for the directory
"""
path_obj = Path(path)
path_obj.mkdir(parents=True, exist_ok=True)
return path_obj
def get_config_dir() -> Path:
"""
Get user config directory (cross-platform).
Returns:
Path: Platform-specific config directory
"""
if os.name == "nt": # Windows
appdata = os.environ.get("APPDATA", "")
if appdata:
return Path(appdata) / "py-libp2p"
else:
# Fallback to user home directory
return Path.home() / "AppData" / "Roaming" / "py-libp2p"
else: # Unix-like (Linux, macOS)
return Path.home() / ".config" / "py-libp2p"
def get_script_dir(script_path: PathLike | None = None) -> Path:
"""
Get the directory containing a script file.
Args:
script_path: Path to the script file. If None, uses __file__
Returns:
Path: Directory containing the script
Raises:
RuntimeError: If script path cannot be determined
"""
if script_path is None:
# This will be the directory of the calling script
import inspect
frame = inspect.currentframe()
if frame and frame.f_back:
script_path = frame.f_back.f_globals.get("__file__")
else:
raise RuntimeError("Could not determine script path")
if script_path is None:
raise RuntimeError("Script path is None")
return Path(script_path).parent.absolute()
def create_temp_file(prefix: str = "py-libp2p_", suffix: str = ".log") -> Path:
"""
Create a temporary file with a unique name.
Args:
prefix: File name prefix
suffix: File name suffix
Returns:
Path: Path to the created temporary file
"""
temp_dir = get_temp_dir()
# Create a unique filename using timestamp and random bytes
import secrets
import time
timestamp = time.strftime("%Y%m%d_%H%M%S")
microseconds = f"{time.time() % 1:.6f}"[2:] # Get microseconds as string
unique_id = secrets.token_hex(4)
filename = f"{prefix}{timestamp}_{microseconds}_{unique_id}{suffix}"
temp_file = temp_dir / filename
# Create the file by touching it
temp_file.touch()
return temp_file
def resolve_relative_path(base_path: PathLike, relative_path: PathLike) -> Path:
"""
Resolve a relative path from a base path.
Args:
base_path: Base directory path
relative_path: Relative path to resolve
Returns:
Path: Resolved absolute path
"""
base = Path(base_path).resolve()
relative = Path(relative_path)
if relative.is_absolute():
return relative
else:
return (base / relative).resolve()
def normalize_path(path: PathLike) -> Path:
"""
Normalize a path, resolving any symbolic links and relative components.
Args:
path: Path to normalize
Returns:
Path: Normalized absolute path
"""
return Path(path).resolve()
def get_venv_path() -> Path | None:
"""
Get virtual environment path if active.
Returns:
Path: Virtual environment path if active, None otherwise
"""
venv_path = os.environ.get("VIRTUAL_ENV")
if venv_path:
return Path(venv_path)
return None
def get_python_executable() -> Path:
"""
Get current Python executable path.
Returns:
Path: Path to the current Python executable
"""
return Path(sys.executable)
def find_executable(name: str) -> Path | None:
"""
Find executable in system PATH.
Args:
name: Name of the executable to find
Returns:
Path: Path to executable if found, None otherwise
"""
# Check if name already contains path
if os.path.dirname(name):
path = Path(name)
if path.exists() and os.access(path, os.X_OK):
return path
return None
# Search in PATH
for path_dir in os.environ.get("PATH", "").split(os.pathsep):
if not path_dir:
continue
path = Path(path_dir) / name
if path.exists() and os.access(path, os.X_OK):
return path
return None
def get_script_binary_path() -> Path:
"""
Get path to script's binary directory.
Returns:
Path: Directory containing the script's binary
"""
return get_python_executable().parent
def get_binary_path(binary_name: str) -> Path | None:
"""
Find binary in PATH or virtual environment.
Args:
binary_name: Name of the binary to find
Returns:
Path: Path to binary if found, None otherwise
"""
# First check in virtual environment if active
venv_path = get_venv_path()
if venv_path:
venv_bin = venv_path / "bin" if os.name != "nt" else venv_path / "Scripts"
binary_path = venv_bin / binary_name
if binary_path.exists() and os.access(binary_path, os.X_OK):
return binary_path
# Fall back to system PATH
return find_executable(binary_name)

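A short usage sketch of the helpers defined above:

    from libp2p.utils.paths import (
        create_temp_file,
        ensure_dir_exists,
        get_config_dir,
        get_script_dir,
        join_paths,
    )

    # Resolve files relative to this script instead of the working directory.
    script_dir = get_script_dir(__file__)
    addr_log = join_paths(script_dir, "server_node_addr.txt")

    # Per-user config directory, created on first use.
    config_dir = ensure_dir_exists(get_config_dir())

    # Unique log file under the platform temp directory.
    log_file = create_temp_file(prefix="py-libp2p_", suffix=".log")
    print(f"Logging to: {log_file}")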
View File

@@ -0,0 +1 @@
Fixed message ID type inconsistency in handle_ihave and improved message ID parsing in handle_iwant in the pubsub module.

View File

@@ -0,0 +1 @@
Added automatic peer dialing in the bootstrap module using trio.Nursery.

View File

@@ -0,0 +1,2 @@
Fixed cross-platform path handling by replacing hardcoded OS-specific
paths with standardized utilities in core modules and examples.

scripts/audit_paths.py (new file, 255 lines added)
View File

@@ -0,0 +1,255 @@
#!/usr/bin/env python3
"""
Audit script to identify path handling issues in the py-libp2p codebase.
This script scans for patterns that should be migrated to use the new
cross-platform path utilities.
"""
import argparse
from pathlib import Path
import re
from typing import Any
def scan_for_path_issues(directory: Path) -> dict[str, list[dict[str, Any]]]:
"""
Scan for path handling issues in the codebase.
Args:
directory: Root directory to scan
Returns:
Dictionary mapping issue types to lists of found issues
"""
issues = {
"hard_coded_slash": [],
"os_path_join": [],
"temp_hardcode": [],
"os_path_dirname": [],
"os_path_abspath": [],
"direct_path_concat": [],
}
# Patterns to search for
patterns = {
"hard_coded_slash": r'["\'][^"\']*\/[^"\']*["\']',
"os_path_join": r"os\.path\.join\(",
"temp_hardcode": r'["\']\/tmp\/|["\']C:\\\\',
"os_path_dirname": r"os\.path\.dirname\(",
"os_path_abspath": r"os\.path\.abspath\(",
"direct_path_concat": r'["\'][^"\']*["\']\s*\+\s*["\'][^"\']*["\']',
}
# Files to exclude
exclude_patterns = [
r"__pycache__",
r"\.git",
r"\.pytest_cache",
r"\.mypy_cache",
r"\.ruff_cache",
r"env/",
r"venv/",
r"\.venv/",
]
for py_file in directory.rglob("*.py"):
# Skip excluded files
if any(re.search(pattern, str(py_file)) for pattern in exclude_patterns):
continue
try:
content = py_file.read_text(encoding="utf-8")
except UnicodeDecodeError:
print(f"Warning: Could not read {py_file} (encoding issue)")
continue
for issue_type, pattern in patterns.items():
matches = re.finditer(pattern, content, re.MULTILINE)
for match in matches:
line_num = content[: match.start()].count("\n") + 1
line_content = content.split("\n")[line_num - 1].strip()
issues[issue_type].append(
{
"file": py_file,
"line": line_num,
"content": match.group(),
"full_line": line_content,
"relative_path": py_file.relative_to(directory),
}
)
return issues
def generate_migration_suggestions(issues: dict[str, list[dict[str, Any]]]) -> str:
"""
Generate migration suggestions for found issues.
Args:
issues: Dictionary of found issues
Returns:
Formatted string with migration suggestions
"""
suggestions = []
for issue_type, issue_list in issues.items():
if not issue_list:
continue
suggestions.append(f"\n## {issue_type.replace('_', ' ').title()}")
suggestions.append(f"Found {len(issue_list)} instances:")
for issue in issue_list[:10]: # Show first 10 examples
suggestions.append(f"\n### {issue['relative_path']}:{issue['line']}")
suggestions.append("```python")
suggestions.append("# Current code:")
suggestions.append(f"{issue['full_line']}")
suggestions.append("```")
# Add migration suggestion based on issue type
if issue_type == "os_path_join":
suggestions.append("```python")
suggestions.append("# Suggested fix:")
suggestions.append("from libp2p.utils.paths import join_paths")
suggestions.append(
"# Replace os.path.join(a, b, c) with join_paths(a, b, c)"
)
suggestions.append("```")
elif issue_type == "temp_hardcode":
suggestions.append("```python")
suggestions.append("# Suggested fix:")
suggestions.append(
"from libp2p.utils.paths import get_temp_dir, create_temp_file"
)
temp_fix_msg = (
"# Replace hard-coded temp paths with get_temp_dir() or "
"create_temp_file()"
)
suggestions.append(temp_fix_msg)
suggestions.append("```")
elif issue_type == "os_path_dirname":
suggestions.append("```python")
suggestions.append("# Suggested fix:")
suggestions.append("from libp2p.utils.paths import get_script_dir")
script_dir_fix_msg = (
"# Replace os.path.dirname(os.path.abspath(__file__)) with "
"get_script_dir(__file__)"
)
suggestions.append(script_dir_fix_msg)
suggestions.append("```")
if len(issue_list) > 10:
suggestions.append(f"\n... and {len(issue_list) - 10} more instances")
return "\n".join(suggestions)
def generate_summary_report(issues: dict[str, list[dict[str, Any]]]) -> str:
"""
Generate a summary report of all found issues.
Args:
issues: Dictionary of found issues
Returns:
Formatted summary report
"""
total_issues = sum(len(issue_list) for issue_list in issues.values())
report = [
"# Cross-Platform Path Handling Audit Report",
"",
"## Summary",
f"Total issues found: {total_issues}",
"",
"## Issue Breakdown:",
]
for issue_type, issue_list in issues.items():
if issue_list:
issue_title = issue_type.replace("_", " ").title()
instances_count = len(issue_list)
report.append(f"- **{issue_title}**: {instances_count} instances")
report.append("")
report.append("## Priority Matrix:")
report.append("")
report.append("| Priority | Issue Type | Risk Level | Impact |")
report.append("|----------|------------|------------|---------|")
priority_map = {
"temp_hardcode": (
"🔴 P0",
"HIGH",
"Core functionality fails on different platforms",
),
"os_path_join": ("🟡 P1", "MEDIUM", "Examples and utilities may break"),
"os_path_dirname": ("🟡 P1", "MEDIUM", "Script location detection issues"),
"hard_coded_slash": ("🟢 P2", "LOW", "Future-proofing and consistency"),
"os_path_abspath": ("🟢 P2", "LOW", "Path resolution consistency"),
"direct_path_concat": ("🟢 P2", "LOW", "String concatenation issues"),
}
for issue_type, issue_list in issues.items():
if issue_list:
priority, risk, impact = priority_map.get(
issue_type, ("🟢 P2", "LOW", "General improvement")
)
issue_title = issue_type.replace("_", " ").title()
report.append(f"| {priority} | {issue_title} | {risk} | {impact} |")
return "\n".join(report)
def main():
"""Main function to run the audit."""
parser = argparse.ArgumentParser(
description="Audit py-libp2p codebase for path handling issues"
)
parser.add_argument(
"--directory",
default=".",
help="Directory to scan (default: current directory)",
)
parser.add_argument("--output", help="Output file for detailed report")
parser.add_argument(
"--summary-only", action="store_true", help="Only show summary report"
)
args = parser.parse_args()
directory = Path(args.directory)
if not directory.exists():
print(f"Error: Directory {directory} does not exist")
return 1
print("🔍 Scanning for path handling issues...")
issues = scan_for_path_issues(directory)
# Generate and display summary
summary = generate_summary_report(issues)
print(summary)
if not args.summary_only:
# Generate detailed suggestions
suggestions = generate_migration_suggestions(issues)
if args.output:
with open(args.output, "w", encoding="utf-8") as f:
f.write(summary)
f.write(suggestions)
print(f"\n📄 Detailed report saved to {args.output}")
else:
print(suggestions)
return 0
if __name__ == "__main__":
    raise SystemExit(main())

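The script is meant to be run directly (e.g. python scripts/audit_paths.py --output report.md, with --summary-only to skip the per-issue suggestions). It can also be driven programmatically; a sketch, assuming it is executed from the repository root so scripts/ can be put on the import path:

    import sys
    from pathlib import Path

    sys.path.insert(0, str(Path("scripts")))  # make audit_paths importable

    from audit_paths import generate_summary_report, scan_for_path_issues

    issues = scan_for_path_issues(Path("."))
    print(generate_summary_report(issues))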
View File

@@ -1,4 +1,8 @@
import random
from unittest.mock import (
AsyncMock,
MagicMock,
)
import pytest
import trio
@@ -7,6 +11,9 @@ from libp2p.pubsub.gossipsub import (
PROTOCOL_ID,
GossipSub,
)
from libp2p.pubsub.pb import (
rpc_pb2,
)
from libp2p.tools.utils import (
connect,
)
@@ -754,3 +761,173 @@ async def test_single_host():
assert connected_peers == 0, (
f"Single host has {connected_peers} connections, expected 0"
)
@pytest.mark.trio
async def test_handle_ihave(monkeypatch):
async with PubsubFactory.create_batch_with_gossipsub(2) as pubsubs_gsub:
gossipsub_routers = []
for pubsub in pubsubs_gsub:
if isinstance(pubsub.router, GossipSub):
gossipsub_routers.append(pubsub.router)
gossipsubs = tuple(gossipsub_routers)
index_alice = 0
index_bob = 1
id_bob = pubsubs_gsub[index_bob].my_id
# Connect Alice and Bob
await connect(pubsubs_gsub[index_alice].host, pubsubs_gsub[index_bob].host)
await trio.sleep(0.1) # Allow connections to establish
# Mock emit_iwant to capture calls
mock_emit_iwant = AsyncMock()
monkeypatch.setattr(gossipsubs[index_alice], "emit_iwant", mock_emit_iwant)
# Create a test message ID as a string representation of a (seqno, from) tuple
test_seqno = b"1234"
test_from = id_bob.to_bytes()
test_msg_id = f"(b'{test_seqno.hex()}', b'{test_from.hex()}')"
ihave_msg = rpc_pb2.ControlIHave(messageIDs=[test_msg_id])
# Mock seen_messages.cache to avoid false positives
monkeypatch.setattr(pubsubs_gsub[index_alice].seen_messages, "cache", {})
# Simulate Bob sending IHAVE to Alice
await gossipsubs[index_alice].handle_ihave(ihave_msg, id_bob)
# Check if emit_iwant was called with the correct message ID
mock_emit_iwant.assert_called_once()
called_args = mock_emit_iwant.call_args[0]
assert called_args[0] == [test_msg_id] # Expected message IDs
assert called_args[1] == id_bob # Sender peer ID
@pytest.mark.trio
async def test_handle_iwant(monkeypatch):
async with PubsubFactory.create_batch_with_gossipsub(2) as pubsubs_gsub:
gossipsub_routers = []
for pubsub in pubsubs_gsub:
if isinstance(pubsub.router, GossipSub):
gossipsub_routers.append(pubsub.router)
gossipsubs = tuple(gossipsub_routers)
index_alice = 0
index_bob = 1
id_alice = pubsubs_gsub[index_alice].my_id
# Connect Alice and Bob
await connect(pubsubs_gsub[index_alice].host, pubsubs_gsub[index_bob].host)
await trio.sleep(0.1) # Allow connections to establish
# Mock mcache.get to return a message
test_message = rpc_pb2.Message(data=b"test_data")
test_seqno = b"1234"
test_from = id_alice.to_bytes()
# ✅ Correct: serialize with str() on the raw tuple (no hex()) so
# safe_parse_message_id can round-trip it back to (seqno, from_id)
test_msg_id = str((test_seqno, test_from))
mock_mcache_get = MagicMock(return_value=test_message)
monkeypatch.setattr(gossipsubs[index_bob].mcache, "get", mock_mcache_get)
# Mock write_msg to capture the sent packet
mock_write_msg = AsyncMock()
monkeypatch.setattr(gossipsubs[index_bob].pubsub, "write_msg", mock_write_msg)
# Simulate Alice sending IWANT to Bob
iwant_msg = rpc_pb2.ControlIWant(messageIDs=[test_msg_id])
await gossipsubs[index_bob].handle_iwant(iwant_msg, id_alice)
# Check if write_msg was called with the correct packet
mock_write_msg.assert_called_once()
packet = mock_write_msg.call_args[0][1]
assert isinstance(packet, rpc_pb2.RPC)
assert len(packet.publish) == 1
assert packet.publish[0] == test_message
# Verify that mcache.get was called with the correct parsed message ID
mock_mcache_get.assert_called_once()
called_msg_id = mock_mcache_get.call_args[0][0]
assert isinstance(called_msg_id, tuple)
assert called_msg_id == (test_seqno, test_from)
@pytest.mark.trio
async def test_handle_iwant_invalid_msg_id(monkeypatch):
"""
Test that handle_iwant raises ValueError for malformed message IDs.
"""
async with PubsubFactory.create_batch_with_gossipsub(2) as pubsubs_gsub:
gossipsub_routers = []
for pubsub in pubsubs_gsub:
if isinstance(pubsub.router, GossipSub):
gossipsub_routers.append(pubsub.router)
gossipsubs = tuple(gossipsub_routers)
index_alice = 0
index_bob = 1
id_alice = pubsubs_gsub[index_alice].my_id
await connect(pubsubs_gsub[index_alice].host, pubsubs_gsub[index_bob].host)
await trio.sleep(0.1)
# Malformed message ID (not a tuple string)
malformed_msg_id = "not_a_valid_msg_id"
iwant_msg = rpc_pb2.ControlIWant(messageIDs=[malformed_msg_id])
# Mock mcache.get and write_msg to ensure they are not called
mock_mcache_get = MagicMock()
monkeypatch.setattr(gossipsubs[index_bob].mcache, "get", mock_mcache_get)
mock_write_msg = AsyncMock()
monkeypatch.setattr(gossipsubs[index_bob].pubsub, "write_msg", mock_write_msg)
with pytest.raises(ValueError):
await gossipsubs[index_bob].handle_iwant(iwant_msg, id_alice)
mock_mcache_get.assert_not_called()
mock_write_msg.assert_not_called()
# Message ID that's a tuple string but not (bytes, bytes)
invalid_tuple_msg_id = "('abc', 123)"
iwant_msg = rpc_pb2.ControlIWant(messageIDs=[invalid_tuple_msg_id])
with pytest.raises(ValueError):
await gossipsubs[index_bob].handle_iwant(iwant_msg, id_alice)
mock_mcache_get.assert_not_called()
mock_write_msg.assert_not_called()
@pytest.mark.trio
async def test_handle_ihave_empty_message_ids(monkeypatch):
"""
Test that handle_ihave with an empty messageIDs list does not call emit_iwant.
"""
async with PubsubFactory.create_batch_with_gossipsub(2) as pubsubs_gsub:
gossipsub_routers = []
for pubsub in pubsubs_gsub:
if isinstance(pubsub.router, GossipSub):
gossipsub_routers.append(pubsub.router)
gossipsubs = tuple(gossipsub_routers)
index_alice = 0
index_bob = 1
id_bob = pubsubs_gsub[index_bob].my_id
# Connect Alice and Bob
await connect(pubsubs_gsub[index_alice].host, pubsubs_gsub[index_bob].host)
await trio.sleep(0.1) # Allow connections to establish
# Mock emit_iwant to capture calls
mock_emit_iwant = AsyncMock()
monkeypatch.setattr(gossipsubs[index_alice], "emit_iwant", mock_emit_iwant)
# Empty messageIDs list
ihave_msg = rpc_pb2.ControlIHave(messageIDs=[])
# Mock seen_messages.cache to avoid false positives
monkeypatch.setattr(pubsubs_gsub[index_alice].seen_messages, "cache", {})
# Simulate Bob sending IHAVE to Alice
await gossipsubs[index_alice].handle_ihave(ihave_msg, id_bob)
# emit_iwant should not be called since there are no message IDs
mock_emit_iwant.assert_not_called()

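The serialization caveat flagged in test_handle_iwant can be checked standalone: str() on the raw tuple round-trips through ast.literal_eval, while hex()-encoding the components does not:

    import ast

    seqno, sender = b"1234", b"peer"

    # str() on the raw tuple round-trips:
    good = str((seqno, sender))
    assert ast.literal_eval(good) == (seqno, sender)

    # hex()-encoding does not: the parsed bytes are the ASCII hex digits,
    # not the original payload.
    bad = f"(b'{seqno.hex()}', b'{sender.hex()}')"
    assert ast.literal_eval(bad) == (b"31323334", b"70656572")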
View File

@@ -15,6 +15,7 @@ import pytest
import trio
from libp2p.utils.logging import (
_current_handlers,
_current_listener,
_listener_ready,
log_queue,
@@ -24,13 +25,19 @@ from libp2p.utils.logging import (
def _reset_logging():
"""Reset all logging state."""
global _current_listener, _listener_ready
global _current_listener, _listener_ready, _current_handlers
# Stop existing listener if any
if _current_listener is not None:
_current_listener.stop()
_current_listener = None
# Close all file handlers to ensure proper cleanup on Windows
for handler in _current_handlers:
if isinstance(handler, logging.FileHandler):
handler.close()
_current_handlers.clear()
# Reset the event
_listener_ready = threading.Event()
@@ -174,6 +181,15 @@ async def test_custom_log_file(clean_env):
if _current_listener is not None:
_current_listener.stop()
# Give a moment for the listener to fully stop
await trio.sleep(0.05)
# Close all file handlers to release the file
for handler in _current_handlers:
if isinstance(handler, logging.FileHandler):
handler.flush() # Ensure all writes are flushed
handler.close()
# Check if the file exists and contains our message
assert log_file.exists()
content = log_file.read_text()
@@ -185,16 +201,15 @@ async def test_default_log_file(clean_env):
"""Test logging to the default file path."""
os.environ["LIBP2P_DEBUG"] = "INFO"
with patch("libp2p.utils.logging.datetime") as mock_datetime:
# Mock the timestamp to have a predictable filename
mock_datetime.now.return_value.strftime.return_value = "20240101_120000"
with patch("libp2p.utils.paths.create_temp_file") as mock_create_temp:
# Mock the temp file creation to return a predictable path
mock_temp_file = (
Path(tempfile.gettempdir()) / "test_py-libp2p_20240101_120000.log"
)
mock_create_temp.return_value = mock_temp_file
# Remove the log file if it exists
if os.name == "nt": # Windows
log_file = Path("C:/Windows/Temp/20240101_120000_py-libp2p.log")
else: # Unix-like
log_file = Path("/tmp/20240101_120000_py-libp2p.log")
log_file.unlink(missing_ok=True)
mock_temp_file.unlink(missing_ok=True)
setup_logging()
@@ -211,9 +226,18 @@ async def test_default_log_file(clean_env):
if _current_listener is not None:
_current_listener.stop()
# Check the default log file
if log_file.exists(): # Only check content if we have write permission
content = log_file.read_text()
# Give a moment for the listener to fully stop
await trio.sleep(0.05)
# Close all file handlers to release the file
for handler in _current_handlers:
if isinstance(handler, logging.FileHandler):
handler.flush() # Ensure all writes are flushed
handler.close()
# Check the mocked temp file
if mock_temp_file.exists():
content = mock_temp_file.read_text()
assert "Test message" in content

tests/utils/test_paths.py (new file, 290 lines added)
View File

@@ -0,0 +1,290 @@
"""
Tests for cross-platform path utilities.
"""
import os
from pathlib import Path
import tempfile
import pytest
from libp2p.utils.paths import (
create_temp_file,
ensure_dir_exists,
find_executable,
get_binary_path,
get_config_dir,
get_project_root,
get_python_executable,
get_script_binary_path,
get_script_dir,
get_temp_dir,
get_venv_path,
join_paths,
normalize_path,
resolve_relative_path,
)
class TestPathUtilities:
"""Test cross-platform path utilities."""
def test_get_temp_dir(self):
"""Test that temp directory is accessible and exists."""
temp_dir = get_temp_dir()
assert isinstance(temp_dir, Path)
assert temp_dir.exists()
assert temp_dir.is_dir()
# Should match system temp directory
assert temp_dir == Path(tempfile.gettempdir())
def test_get_project_root(self):
"""Test that project root is correctly determined."""
project_root = get_project_root()
assert isinstance(project_root, Path)
assert project_root.exists()
# Should contain pyproject.toml
assert (project_root / "pyproject.toml").exists()
# Should contain libp2p directory
assert (project_root / "libp2p").exists()
def test_join_paths(self):
"""Test cross-platform path joining."""
# Test with strings
result = join_paths("a", "b", "c")
expected = Path("a") / "b" / "c"
assert result == expected
# Test with mixed types
result = join_paths("a", Path("b"), "c")
expected = Path("a") / "b" / "c"
assert result == expected
# Test with absolute path
result = join_paths("/absolute", "path")
expected = Path("/absolute") / "path"
assert result == expected
def test_ensure_dir_exists(self, tmp_path):
"""Test directory creation and existence checking."""
# Test creating new directory
new_dir = tmp_path / "new_dir"
result = ensure_dir_exists(new_dir)
assert result == new_dir
assert new_dir.exists()
assert new_dir.is_dir()
# Test creating nested directory
nested_dir = tmp_path / "parent" / "child" / "grandchild"
result = ensure_dir_exists(nested_dir)
assert result == nested_dir
assert nested_dir.exists()
assert nested_dir.is_dir()
# Test with existing directory
result = ensure_dir_exists(new_dir)
assert result == new_dir
assert new_dir.exists()
def test_get_config_dir(self):
"""Test platform-specific config directory."""
config_dir = get_config_dir()
assert isinstance(config_dir, Path)
if os.name == "nt": # Windows
# Should be in AppData/Roaming or user home
assert "AppData" in str(config_dir) or "py-libp2p" in str(config_dir)
else: # Unix-like
# Should be in ~/.config
assert ".config" in str(config_dir)
assert "py-libp2p" in str(config_dir)
def test_get_script_dir(self):
"""Test script directory detection."""
# Test with current file
script_dir = get_script_dir(__file__)
assert isinstance(script_dir, Path)
assert script_dir.exists()
assert script_dir.is_dir()
# Should contain this test file
assert (script_dir / "test_paths.py").exists()
def test_create_temp_file(self):
"""Test temporary file creation."""
temp_file = create_temp_file()
assert isinstance(temp_file, Path)
assert temp_file.parent == get_temp_dir()
assert temp_file.name.startswith("py-libp2p_")
assert temp_file.name.endswith(".log")
# Test with custom prefix and suffix
temp_file = create_temp_file(prefix="test_", suffix=".txt")
assert temp_file.name.startswith("test_")
assert temp_file.name.endswith(".txt")
def test_resolve_relative_path(self, tmp_path):
"""Test relative path resolution."""
base_path = tmp_path / "base"
base_path.mkdir()
# Test relative path
relative_path = "subdir/file.txt"
result = resolve_relative_path(base_path, relative_path)
expected = (base_path / "subdir" / "file.txt").resolve()
assert result == expected
# Test absolute path (platform-agnostic)
if os.name == "nt": # Windows
absolute_path = "C:\\absolute\\path"
else: # Unix-like
absolute_path = "/absolute/path"
result = resolve_relative_path(base_path, absolute_path)
assert result == Path(absolute_path)
def test_normalize_path(self, tmp_path):
"""Test path normalization."""
# Test with relative path
relative_path = tmp_path / ".." / "normalize_test"
result = normalize_path(relative_path)
assert result.is_absolute()
assert "normalize_test" in str(result)
# Test with absolute path
absolute_path = tmp_path / "test_file"
result = normalize_path(absolute_path)
assert result.is_absolute()
assert result == absolute_path.resolve()
def test_get_venv_path(self, monkeypatch):
"""Test virtual environment path detection."""
# Test when no virtual environment is active
# Temporarily clear VIRTUAL_ENV to test the "no venv" case
monkeypatch.delenv("VIRTUAL_ENV", raising=False)
result = get_venv_path()
assert result is None
# Test when virtual environment is active
test_venv_path = "/path/to/venv"
monkeypatch.setenv("VIRTUAL_ENV", test_venv_path)
result = get_venv_path()
assert result == Path(test_venv_path)
def test_get_python_executable(self):
"""Test Python executable path detection."""
result = get_python_executable()
assert isinstance(result, Path)
assert result.exists()
assert result.name.startswith("python")
def test_find_executable(self):
"""Test executable finding in PATH."""
# Test with non-existent executable
result = find_executable("nonexistent_executable")
assert result is None
# Test with existing executable (python should be available)
result = find_executable("python")
if result:
assert isinstance(result, Path)
assert result.exists()
def test_get_script_binary_path(self):
"""Test script binary path detection."""
result = get_script_binary_path()
assert isinstance(result, Path)
assert result.exists()
assert result.is_dir()
def test_get_binary_path(self, monkeypatch):
"""Test binary path resolution with virtual environment."""
# Test when no virtual environment is active
result = get_binary_path("python")
if result:
assert isinstance(result, Path)
assert result.exists()
# Test when virtual environment is active
test_venv_path = "/path/to/venv"
monkeypatch.setenv("VIRTUAL_ENV", test_venv_path)
# This test is more complex as it depends on the actual venv structure
# We'll just verify the function doesn't crash
result = get_binary_path("python")
# Result can be None if binary not found in venv
if result:
assert isinstance(result, Path)
class TestCrossPlatformCompatibility:
"""Test cross-platform compatibility."""
def test_config_dir_platform_specific_windows(self, monkeypatch):
"""Test config directory respects Windows conventions."""
import platform
# Only run this test on Windows systems
if platform.system() != "Windows":
pytest.skip("This test only runs on Windows systems")
monkeypatch.setattr("os.name", "nt")
monkeypatch.setenv("APPDATA", "C:\\Users\\Test\\AppData\\Roaming")
config_dir = get_config_dir()
assert "AppData" in str(config_dir)
assert "py-libp2p" in str(config_dir)
def test_path_separators_consistent(self):
"""Test that path separators are handled consistently."""
# Test that join_paths uses platform-appropriate separators
result = join_paths("dir1", "dir2", "file.txt")
expected = Path("dir1") / "dir2" / "file.txt"
assert result == expected
# Test that the result uses correct separators for the platform
if os.name == "nt": # Windows
assert "\\" in str(result) or "/" in str(result)
else: # Unix-like
assert "/" in str(result)
def test_temp_file_uniqueness(self):
"""Test that temporary files have unique names."""
files = set()
for _ in range(10):
temp_file = create_temp_file()
assert temp_file not in files
files.add(temp_file)
class TestBackwardCompatibility:
"""Test backward compatibility with existing code patterns."""
def test_path_operations_equivalent(self):
"""Test that new path operations are equivalent to old os.path operations."""
# Test join_paths vs os.path.join
parts = ["a", "b", "c"]
new_result = join_paths(*parts)
old_result = Path(os.path.join(*parts))
assert new_result == old_result
# Test get_script_dir vs os.path.dirname(os.path.abspath(__file__))
new_script_dir = get_script_dir(__file__)
old_script_dir = Path(os.path.dirname(os.path.abspath(__file__)))
assert new_script_dir == old_script_dir
def test_existing_functionality_preserved(self):
"""Ensure no existing functionality is broken."""
# Test that all functions return Path objects
assert isinstance(get_temp_dir(), Path)
assert isinstance(get_project_root(), Path)
assert isinstance(join_paths("a", "b"), Path)
assert isinstance(ensure_dir_exists(tempfile.gettempdir()), Path)
assert isinstance(get_config_dir(), Path)
assert isinstance(get_script_dir(__file__), Path)
assert isinstance(create_temp_file(), Path)
assert isinstance(resolve_relative_path(".", "test"), Path)
assert isinstance(normalize_path("."), Path)
assert isinstance(get_python_executable(), Path)
assert isinstance(get_script_binary_path(), Path)
# Test optional return types
venv_path = get_venv_path()
if venv_path is not None:
assert isinstance(venv_path, Path)