fix(app): 882 Comprehensive cross-platform path handling utilities
@@ -27,7 +27,9 @@ except ModuleNotFoundError:
     import tomli as tomllib  # type: ignore (In case of >3.11 Pyrefly doesnt find tomli , which is right but a false flag)
 
 # Path to pyproject.toml (assuming conf.py is in a 'docs' subdirectory)
-pyproject_path = os.path.join(os.path.dirname(__file__), "..", "pyproject.toml")
+from libp2p.utils.paths import get_project_root, join_paths
 
+pyproject_path = join_paths(get_project_root(), "pyproject.toml")
+
 with open(pyproject_path, "rb") as f:
     pyproject_data = tomllib.load(f)
@@ -41,6 +41,7 @@ from libp2p.tools.async_service import (
 from libp2p.tools.utils import (
     info_from_p2p_addr,
 )
+from libp2p.utils.paths import get_script_dir, join_paths
 
 # Configure logging
 logging.basicConfig(
@@ -53,8 +54,8 @@ logger = logging.getLogger("kademlia-example")
 # Configure DHT module loggers to inherit from the parent logger
 # This ensures all kademlia-example.* loggers use the same configuration
 # Get the directory where this script is located
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-SERVER_ADDR_LOG = os.path.join(SCRIPT_DIR, "server_node_addr.txt")
+SCRIPT_DIR = get_script_dir(__file__)
+SERVER_ADDR_LOG = join_paths(SCRIPT_DIR, "server_node_addr.txt")
 
 # Set the level for all child loggers
 for module in [
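A minimal sketch of how the example's module-level constants behave after this change, assuming the utilities defined later in this commit (`get_script_dir` returns the calling script's directory as a `pathlib.Path`; `join_paths` wraps `Path(*parts)`). One point worth keeping in mind: both constants become `Path` objects rather than plain strings, which matters only for code that concatenates them as strings.

```python
# Sketch only (not part of the diff): behaviour of the updated constants.
from pathlib import Path

from libp2p.utils.paths import get_script_dir, join_paths

SCRIPT_DIR = get_script_dir(__file__)  # Path to the directory containing this file
SERVER_ADDR_LOG = join_paths(SCRIPT_DIR, "server_node_addr.txt")  # Path, not str

assert isinstance(SERVER_ADDR_LOG, Path)
# Path objects are accepted by open(), so the rest of the example keeps working:
with open(SERVER_ADDR_LOG, "w") as f:
    f.write("server address placeholder\n")
```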
@@ -1,7 +1,4 @@
 import atexit
-from datetime import (
-    datetime,
-)
 import logging
 import logging.handlers
 import os
@@ -148,13 +145,10 @@ def setup_logging() -> None:
         log_path = Path(log_file)
         log_path.parent.mkdir(parents=True, exist_ok=True)
     else:
-        # Default log file with timestamp and unique identifier
-        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S_%f")
-        unique_id = os.urandom(4).hex()  # Add a unique identifier to prevent collisions
-        if os.name == "nt":  # Windows
-            log_file = f"C:\\Windows\\Temp\\py-libp2p_{timestamp}_{unique_id}.log"
-        else:  # Unix-like
-            log_file = f"/tmp/py-libp2p_{timestamp}_{unique_id}.log"
+        # Use cross-platform temp file creation
+        from libp2p.utils.paths import create_temp_file
+
+        log_file = str(create_temp_file(prefix="py-libp2p_", suffix=".log"))
 
     # Print the log file path so users know where to find it
     print(f"Logging to: {log_file}", file=sys.stderr)
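The replacement above delegates platform detection to the standard library: `create_temp_file` (defined later in this diff) builds a unique name under `tempfile.gettempdir()` instead of hard-coding `/tmp` or `C:\Windows\Temp`. A rough sketch of the equivalent behaviour:

```python
# Sketch only (not part of the diff): what the new default log path resolves to.
import tempfile
from pathlib import Path

temp_dir = Path(tempfile.gettempdir())  # /tmp on most Unixes, %TEMP% on Windows
log_file = str(temp_dir / "py-libp2p_example.log")
# create_temp_file() additionally appends a timestamp and a random hex suffix
# and touches the file, so concurrent processes get distinct log files.
print(f"Logging to: {log_file}")
```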
@@ -6,9 +6,10 @@ behavior across Windows, macOS, and Linux platforms.
 """
 
 import os
-import tempfile
 from pathlib import Path
-from typing import Union, Optional
+import sys
+import tempfile
+from typing import Union
 
 PathLike = Union[str, Path]
 
@@ -16,9 +17,10 @@ PathLike = Union[str, Path]
 def get_temp_dir() -> Path:
     """
     Get cross-platform temporary directory.
 
     Returns:
         Path: Platform-specific temporary directory path
+
     """
     return Path(tempfile.gettempdir())
 
@@ -26,9 +28,10 @@ def get_temp_dir() -> Path:
 def get_project_root() -> Path:
     """
     Get the project root directory.
 
     Returns:
         Path: Path to the py-libp2p project root
+
     """
     # Navigate from libp2p/utils/paths.py to project root
     return Path(__file__).parent.parent.parent
@@ -37,12 +40,13 @@ def get_project_root() -> Path:
 def join_paths(*parts: PathLike) -> Path:
     """
     Cross-platform path joining.
 
     Args:
         *parts: Path components to join
 
     Returns:
         Path: Joined path using platform-appropriate separator
+
     """
     return Path(*parts)
 
@@ -50,12 +54,13 @@ def join_paths(*parts: PathLike) -> Path:
 def ensure_dir_exists(path: PathLike) -> Path:
     """
     Ensure directory exists, create if needed.
 
     Args:
         path: Directory path to ensure exists
 
     Returns:
         Path: Path object for the directory
+
     """
     path_obj = Path(path)
     path_obj.mkdir(parents=True, exist_ok=True)
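These helpers are thin wrappers over `pathlib`; a short usage sketch (the directory name below is arbitrary):

```python
# Sketch only (not part of the diff): composing the wrappers defined above.
from libp2p.utils.paths import ensure_dir_exists, get_temp_dir, join_paths

scratch = ensure_dir_exists(join_paths(get_temp_dir(), "py-libp2p-scratch"))
print(scratch.is_dir())  # True; mkdir(parents=True, exist_ok=True) is idempotent

# join_paths(*parts) is simply Path(*parts), so the platform-appropriate
# separator is chosen by pathlib rather than by string concatenation.
```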
@@ -65,64 +70,74 @@ def ensure_dir_exists(path: PathLike) -> Path:
 def get_config_dir() -> Path:
     """
     Get user config directory (cross-platform).
 
     Returns:
         Path: Platform-specific config directory
+
     """
-    if os.name == 'nt':  # Windows
-        appdata = os.environ.get('APPDATA', '')
+    if os.name == "nt":  # Windows
+        appdata = os.environ.get("APPDATA", "")
         if appdata:
-            return Path(appdata) / 'py-libp2p'
+            return Path(appdata) / "py-libp2p"
         else:
             # Fallback to user home directory
-            return Path.home() / 'AppData' / 'Roaming' / 'py-libp2p'
+            return Path.home() / "AppData" / "Roaming" / "py-libp2p"
     else:  # Unix-like (Linux, macOS)
-        return Path.home() / '.config' / 'py-libp2p'
+        return Path.home() / ".config" / "py-libp2p"
 
 
-def get_script_dir(script_path: Optional[PathLike] = None) -> Path:
+def get_script_dir(script_path: PathLike | None = None) -> Path:
     """
     Get the directory containing a script file.
 
     Args:
         script_path: Path to the script file. If None, uses __file__
 
     Returns:
         Path: Directory containing the script
+
+    Raises:
+        RuntimeError: If script path cannot be determined
+
     """
     if script_path is None:
         # This will be the directory of the calling script
         import inspect
+
         frame = inspect.currentframe()
         if frame and frame.f_back:
-            script_path = frame.f_back.f_globals.get('__file__')
+            script_path = frame.f_back.f_globals.get("__file__")
         else:
             raise RuntimeError("Could not determine script path")
 
+    if script_path is None:
+        raise RuntimeError("Script path is None")
+
     return Path(script_path).parent.absolute()
 
 
 def create_temp_file(prefix: str = "py-libp2p_", suffix: str = ".log") -> Path:
     """
     Create a temporary file with a unique name.
 
     Args:
         prefix: File name prefix
         suffix: File name suffix
 
     Returns:
         Path: Path to the created temporary file
+
     """
     temp_dir = get_temp_dir()
     # Create a unique filename using timestamp and random bytes
-    import time
     import secrets
+    import time
 
     timestamp = time.strftime("%Y%m%d_%H%M%S")
     microseconds = f"{time.time() % 1:.6f}"[2:]  # Get microseconds as string
     unique_id = secrets.token_hex(4)
     filename = f"{prefix}{timestamp}_{microseconds}_{unique_id}{suffix}"
 
     temp_file = temp_dir / filename
     # Create the file by touching it
     temp_file.touch()
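A hedged usage sketch for the three helpers above. One detail worth noting from the implementation: calling `get_script_dir()` with no argument inspects the caller's stack frame, so the explicit `get_script_dir(__file__)` form is the predictable one for library code.

```python
# Sketch only (not part of the diff): exercising get_config_dir, get_script_dir
# and create_temp_file as defined above.
from libp2p.utils.paths import create_temp_file, get_config_dir, get_script_dir

print(get_config_dir())          # ~/.config/py-libp2p or %APPDATA%\py-libp2p
print(get_script_dir(__file__))  # directory containing the current file

log_path = create_temp_file(prefix="py-libp2p_", suffix=".log")
print(log_path.exists())         # True: the file is touch()ed on creation
```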
@@ -132,17 +147,18 @@ def create_temp_file(prefix: str = "py-libp2p_", suffix: str = ".log") -> Path:
 def resolve_relative_path(base_path: PathLike, relative_path: PathLike) -> Path:
     """
     Resolve a relative path from a base path.
 
     Args:
         base_path: Base directory path
         relative_path: Relative path to resolve
 
     Returns:
         Path: Resolved absolute path
+
     """
     base = Path(base_path).resolve()
     relative = Path(relative_path)
 
     if relative.is_absolute():
         return relative
     else:
@@ -152,11 +168,100 @@ def resolve_relative_path(base_path: PathLike, relative_path: PathLike) -> Path:
 def normalize_path(path: PathLike) -> Path:
     """
     Normalize a path, resolving any symbolic links and relative components.
 
     Args:
         path: Path to normalize
 
     Returns:
         Path: Normalized absolute path
+
     """
     return Path(path).resolve()
+
+
+def get_venv_path() -> Path | None:
+    """
+    Get virtual environment path if active.
+
+    Returns:
+        Path: Virtual environment path if active, None otherwise
+
+    """
+    venv_path = os.environ.get("VIRTUAL_ENV")
+    if venv_path:
+        return Path(venv_path)
+    return None
+
+
+def get_python_executable() -> Path:
+    """
+    Get current Python executable path.
+
+    Returns:
+        Path: Path to the current Python executable
+
+    """
+    return Path(sys.executable)
+
+
+def find_executable(name: str) -> Path | None:
+    """
+    Find executable in system PATH.
+
+    Args:
+        name: Name of the executable to find
+
+    Returns:
+        Path: Path to executable if found, None otherwise
+
+    """
+    # Check if name already contains path
+    if os.path.dirname(name):
+        path = Path(name)
+        if path.exists() and os.access(path, os.X_OK):
+            return path
+        return None
+
+    # Search in PATH
+    for path_dir in os.environ.get("PATH", "").split(os.pathsep):
+        if not path_dir:
+            continue
+        path = Path(path_dir) / name
+        if path.exists() and os.access(path, os.X_OK):
+            return path
+
+    return None
+
+
+def get_script_binary_path() -> Path:
+    """
+    Get path to script's binary directory.
+
+    Returns:
+        Path: Directory containing the script's binary
+
+    """
+    return get_python_executable().parent
+
+
+def get_binary_path(binary_name: str) -> Path | None:
+    """
+    Find binary in PATH or virtual environment.
+
+    Args:
+        binary_name: Name of the binary to find
+
+    Returns:
+        Path: Path to binary if found, None otherwise
+
+    """
+    # First check in virtual environment if active
+    venv_path = get_venv_path()
+    if venv_path:
+        venv_bin = venv_path / "bin" if os.name != "nt" else venv_path / "Scripts"
+        binary_path = venv_bin / binary_name
+        if binary_path.exists() and os.access(binary_path, os.X_OK):
+            return binary_path
+
+    # Fall back to system PATH
+    return find_executable(binary_name)
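The new discovery helpers prefer an active virtual environment and then fall back to a manual `PATH` walk. A hedged sketch; note that `find_executable` matches the literal name, so on Windows callers may need to pass e.g. `"git.exe"`.

```python
# Sketch only (not part of the diff): locating interpreters and tools.
from libp2p.utils.paths import (
    find_executable,
    get_binary_path,
    get_python_executable,
    get_venv_path,
)

print(get_python_executable())  # Path to the running interpreter (sys.executable)
print(get_venv_path())          # Path from $VIRTUAL_ENV, or None when no venv is active

# get_binary_path() looks in <venv>/bin (or <venv>/Scripts on Windows) first,
# then falls back to find_executable(), which scans PATH entry by entry.
print(get_binary_path("pytest"))
print(find_executable("git"))
```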
@@ -6,215 +6,248 @@ This script scans for patterns that should be migrated to use the new
 cross-platform path utilities.
 """
 
-import re
-import os
-from pathlib import Path
-from typing import List, Dict, Any
 import argparse
+from pathlib import Path
+import re
+from typing import Any
 
 
-def scan_for_path_issues(directory: Path) -> Dict[str, List[Dict[str, Any]]]:
+def scan_for_path_issues(directory: Path) -> dict[str, list[dict[str, Any]]]:
     """
     Scan for path handling issues in the codebase.
 
     Args:
         directory: Root directory to scan
 
     Returns:
         Dictionary mapping issue types to lists of found issues
+
     """
     issues = {
-        'hard_coded_slash': [],
-        'os_path_join': [],
-        'temp_hardcode': [],
-        'os_path_dirname': [],
-        'os_path_abspath': [],
-        'direct_path_concat': [],
+        "hard_coded_slash": [],
+        "os_path_join": [],
+        "temp_hardcode": [],
+        "os_path_dirname": [],
+        "os_path_abspath": [],
+        "direct_path_concat": [],
     }
 
     # Patterns to search for
     patterns = {
-        'hard_coded_slash': r'["\'][^"\']*\/[^"\']*["\']',
-        'os_path_join': r'os\.path\.join\(',
-        'temp_hardcode': r'["\']\/tmp\/|["\']C:\\\\',
-        'os_path_dirname': r'os\.path\.dirname\(',
-        'os_path_abspath': r'os\.path\.abspath\(',
-        'direct_path_concat': r'["\'][^"\']*["\']\s*\+\s*["\'][^"\']*["\']',
+        "hard_coded_slash": r'["\'][^"\']*\/[^"\']*["\']',
+        "os_path_join": r"os\.path\.join\(",
+        "temp_hardcode": r'["\']\/tmp\/|["\']C:\\\\',
+        "os_path_dirname": r"os\.path\.dirname\(",
+        "os_path_abspath": r"os\.path\.abspath\(",
+        "direct_path_concat": r'["\'][^"\']*["\']\s*\+\s*["\'][^"\']*["\']',
     }
 
     # Files to exclude
     exclude_patterns = [
-        r'__pycache__',
-        r'\.git',
-        r'\.pytest_cache',
-        r'\.mypy_cache',
-        r'\.ruff_cache',
-        r'env/',
-        r'venv/',
-        r'\.venv/',
+        r"__pycache__",
+        r"\.git",
+        r"\.pytest_cache",
+        r"\.mypy_cache",
+        r"\.ruff_cache",
+        r"env/",
+        r"venv/",
+        r"\.venv/",
     ]
 
     for py_file in directory.rglob("*.py"):
         # Skip excluded files
         if any(re.search(pattern, str(py_file)) for pattern in exclude_patterns):
             continue
 
         try:
-            content = py_file.read_text(encoding='utf-8')
+            content = py_file.read_text(encoding="utf-8")
         except UnicodeDecodeError:
             print(f"Warning: Could not read {py_file} (encoding issue)")
             continue
 
         for issue_type, pattern in patterns.items():
             matches = re.finditer(pattern, content, re.MULTILINE)
             for match in matches:
-                line_num = content[:match.start()].count('\n') + 1
-                line_content = content.split('\n')[line_num - 1].strip()
+                line_num = content[: match.start()].count("\n") + 1
+                line_content = content.split("\n")[line_num - 1].strip()
 
-                issues[issue_type].append({
-                    'file': py_file,
-                    'line': line_num,
-                    'content': match.group(),
-                    'full_line': line_content,
-                    'relative_path': py_file.relative_to(directory)
-                })
+                issues[issue_type].append(
+                    {
+                        "file": py_file,
+                        "line": line_num,
+                        "content": match.group(),
+                        "full_line": line_content,
+                        "relative_path": py_file.relative_to(directory),
+                    }
+                )
 
     return issues
 
 
-def generate_migration_suggestions(issues: Dict[str, List[Dict[str, Any]]]) -> str:
+def generate_migration_suggestions(issues: dict[str, list[dict[str, Any]]]) -> str:
     """
     Generate migration suggestions for found issues.
 
     Args:
         issues: Dictionary of found issues
 
     Returns:
         Formatted string with migration suggestions
+
     """
     suggestions = []
 
     for issue_type, issue_list in issues.items():
         if not issue_list:
             continue
 
         suggestions.append(f"\n## {issue_type.replace('_', ' ').title()}")
         suggestions.append(f"Found {len(issue_list)} instances:")
 
         for issue in issue_list[:10]:  # Show first 10 examples
             suggestions.append(f"\n### {issue['relative_path']}:{issue['line']}")
-            suggestions.append(f"```python")
-            suggestions.append(f"# Current code:")
+            suggestions.append("```python")
+            suggestions.append("# Current code:")
             suggestions.append(f"{issue['full_line']}")
-            suggestions.append(f"```")
+            suggestions.append("```")
 
             # Add migration suggestion based on issue type
-            if issue_type == 'os_path_join':
-                suggestions.append(f"```python")
-                suggestions.append(f"# Suggested fix:")
-                suggestions.append(f"from libp2p.utils.paths import join_paths")
-                suggestions.append(f"# Replace os.path.join(a, b, c) with join_paths(a, b, c)")
-                suggestions.append(f"```")
-            elif issue_type == 'temp_hardcode':
-                suggestions.append(f"```python")
-                suggestions.append(f"# Suggested fix:")
-                suggestions.append(f"from libp2p.utils.paths import get_temp_dir, create_temp_file")
-                suggestions.append(f"# Replace hard-coded temp paths with get_temp_dir() or create_temp_file()")
-                suggestions.append(f"```")
-            elif issue_type == 'os_path_dirname':
-                suggestions.append(f"```python")
-                suggestions.append(f"# Suggested fix:")
-                suggestions.append(f"from libp2p.utils.paths import get_script_dir")
-                suggestions.append(f"# Replace os.path.dirname(os.path.abspath(__file__)) with get_script_dir(__file__)")
-                suggestions.append(f"```")
+            if issue_type == "os_path_join":
+                suggestions.append("```python")
+                suggestions.append("# Suggested fix:")
+                suggestions.append("from libp2p.utils.paths import join_paths")
+                suggestions.append(
+                    "# Replace os.path.join(a, b, c) with join_paths(a, b, c)"
+                )
+                suggestions.append("```")
+            elif issue_type == "temp_hardcode":
+                suggestions.append("```python")
+                suggestions.append("# Suggested fix:")
+                suggestions.append(
+                    "from libp2p.utils.paths import get_temp_dir, create_temp_file"
+                )
+                temp_fix_msg = (
+                    "# Replace hard-coded temp paths with get_temp_dir() or "
+                    "create_temp_file()"
+                )
+                suggestions.append(temp_fix_msg)
+                suggestions.append("```")
+            elif issue_type == "os_path_dirname":
+                suggestions.append("```python")
+                suggestions.append("# Suggested fix:")
+                suggestions.append("from libp2p.utils.paths import get_script_dir")
+                script_dir_fix_msg = (
+                    "# Replace os.path.dirname(os.path.abspath(__file__)) with "
+                    "get_script_dir(__file__)"
+                )
+                suggestions.append(script_dir_fix_msg)
+                suggestions.append("```")
 
         if len(issue_list) > 10:
             suggestions.append(f"\n... and {len(issue_list) - 10} more instances")
 
     return "\n".join(suggestions)
 
 
-def generate_summary_report(issues: Dict[str, List[Dict[str, Any]]]) -> str:
+def generate_summary_report(issues: dict[str, list[dict[str, Any]]]) -> str:
     """
     Generate a summary report of all found issues.
 
     Args:
         issues: Dictionary of found issues
 
     Returns:
         Formatted summary report
+
     """
     total_issues = sum(len(issue_list) for issue_list in issues.values())
 
     report = [
         "# Cross-Platform Path Handling Audit Report",
         "",
-        f"## Summary",
+        "## Summary",
         f"Total issues found: {total_issues}",
         "",
         "## Issue Breakdown:",
     ]
 
     for issue_type, issue_list in issues.items():
         if issue_list:
-            report.append(f"- **{issue_type.replace('_', ' ').title()}**: {len(issue_list)} instances")
+            issue_title = issue_type.replace("_", " ").title()
+            instances_count = len(issue_list)
+            report.append(f"- **{issue_title}**: {instances_count} instances")
 
     report.append("")
     report.append("## Priority Matrix:")
     report.append("")
     report.append("| Priority | Issue Type | Risk Level | Impact |")
     report.append("|----------|------------|------------|---------|")
 
     priority_map = {
-        'temp_hardcode': ('🔴 P0', 'HIGH', 'Core functionality fails on different platforms'),
-        'os_path_join': ('🟡 P1', 'MEDIUM', 'Examples and utilities may break'),
-        'os_path_dirname': ('🟡 P1', 'MEDIUM', 'Script location detection issues'),
-        'hard_coded_slash': ('🟢 P2', 'LOW', 'Future-proofing and consistency'),
-        'os_path_abspath': ('🟢 P2', 'LOW', 'Path resolution consistency'),
-        'direct_path_concat': ('🟢 P2', 'LOW', 'String concatenation issues'),
+        "temp_hardcode": (
+            "🔴 P0",
+            "HIGH",
+            "Core functionality fails on different platforms",
+        ),
+        "os_path_join": ("🟡 P1", "MEDIUM", "Examples and utilities may break"),
+        "os_path_dirname": ("🟡 P1", "MEDIUM", "Script location detection issues"),
+        "hard_coded_slash": ("🟢 P2", "LOW", "Future-proofing and consistency"),
+        "os_path_abspath": ("🟢 P2", "LOW", "Path resolution consistency"),
+        "direct_path_concat": ("🟢 P2", "LOW", "String concatenation issues"),
     }
 
     for issue_type, issue_list in issues.items():
         if issue_list:
-            priority, risk, impact = priority_map.get(issue_type, ('🟢 P2', 'LOW', 'General improvement'))
-            report.append(f"| {priority} | {issue_type.replace('_', ' ').title()} | {risk} | {impact} |")
+            priority, risk, impact = priority_map.get(
+                issue_type, ("🟢 P2", "LOW", "General improvement")
+            )
+            issue_title = issue_type.replace("_", " ").title()
+            report.append(f"| {priority} | {issue_title} | {risk} | {impact} |")
 
     return "\n".join(report)
 
 
 def main():
     """Main function to run the audit."""
-    parser = argparse.ArgumentParser(description="Audit py-libp2p codebase for path handling issues")
-    parser.add_argument("--directory", default=".", help="Directory to scan (default: current directory)")
+    parser = argparse.ArgumentParser(
+        description="Audit py-libp2p codebase for path handling issues"
+    )
+    parser.add_argument(
+        "--directory",
+        default=".",
+        help="Directory to scan (default: current directory)",
+    )
     parser.add_argument("--output", help="Output file for detailed report")
-    parser.add_argument("--summary-only", action="store_true", help="Only show summary report")
+    parser.add_argument(
+        "--summary-only", action="store_true", help="Only show summary report"
+    )
 
     args = parser.parse_args()
 
     directory = Path(args.directory)
     if not directory.exists():
         print(f"Error: Directory {directory} does not exist")
         return 1
 
     print("🔍 Scanning for path handling issues...")
     issues = scan_for_path_issues(directory)
 
     # Generate and display summary
     summary = generate_summary_report(issues)
     print(summary)
 
     if not args.summary_only:
         # Generate detailed suggestions
         suggestions = generate_migration_suggestions(issues)
 
         if args.output:
-            with open(args.output, 'w', encoding='utf-8') as f:
+            with open(args.output, "w", encoding="utf-8") as f:
                 f.write(summary)
                 f.write(suggestions)
             print(f"\n📄 Detailed report saved to {args.output}")
         else:
             print(suggestions)
 
     return 0
 
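The audit script drives everything from a handful of regular expressions. A self-contained sketch of what its `os_path_join` pattern flags and how the line number is derived (the sample string below is invented for illustration):

```python
# Sketch only (not part of the diff): the scanner's core matching logic in isolation.
import re

pattern = re.compile(r"os\.path\.join\(")
content = 'first line\npath = os.path.join(base, "subdir", "file.txt")\n'

match = pattern.search(content)
line_num = content[: match.start()].count("\n") + 1
line_content = content.split("\n")[line_num - 1].strip()

print(line_num)      # 2: the same arithmetic scan_for_path_issues() uses
print(line_content)  # the offending line, reported for migration to join_paths()
```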
@@ -3,20 +3,24 @@ Tests for cross-platform path utilities.
 """
 
 import os
-import tempfile
 from pathlib import Path
-import pytest
+import tempfile
 
 from libp2p.utils.paths import (
-    get_temp_dir,
-    get_project_root,
-    join_paths,
-    ensure_dir_exists,
-    get_config_dir,
-    get_script_dir,
     create_temp_file,
-    resolve_relative_path,
+    ensure_dir_exists,
+    find_executable,
+    get_binary_path,
+    get_config_dir,
+    get_project_root,
+    get_python_executable,
+    get_script_binary_path,
+    get_script_dir,
+    get_temp_dir,
+    get_venv_path,
+    join_paths,
     normalize_path,
+    resolve_relative_path,
 )
 
 
@@ -84,8 +88,8 @@ class TestPathUtilities:
         """Test platform-specific config directory."""
         config_dir = get_config_dir()
         assert isinstance(config_dir, Path)
 
-        if os.name == 'nt':  # Windows
+        if os.name == "nt":  # Windows
             # Should be in AppData/Roaming or user home
             assert "AppData" in str(config_dir) or "py-libp2p" in str(config_dir)
         else:  # Unix-like
@@ -120,7 +124,7 @@ class TestPathUtilities:
         """Test relative path resolution."""
         base_path = tmp_path / "base"
         base_path.mkdir()
 
         # Test relative path
         relative_path = "subdir/file.txt"
         result = resolve_relative_path(base_path, relative_path)
@@ -128,7 +132,7 @@ class TestPathUtilities:
         assert result == expected
 
         # Test absolute path (platform-agnostic)
-        if os.name == 'nt':  # Windows
+        if os.name == "nt":  # Windows
             absolute_path = "C:\\absolute\\path"
         else:  # Unix-like
             absolute_path = "/absolute/path"
@@ -149,14 +153,72 @@ class TestPathUtilities:
         assert result.is_absolute()
         assert result == absolute_path.resolve()
 
+    def test_get_venv_path(self, monkeypatch):
+        """Test virtual environment path detection."""
+        # Test when no virtual environment is active
+        # Temporarily clear VIRTUAL_ENV to test the "no venv" case
+        monkeypatch.delenv("VIRTUAL_ENV", raising=False)
+        result = get_venv_path()
+        assert result is None
+
+        # Test when virtual environment is active
+        test_venv_path = "/path/to/venv"
+        monkeypatch.setenv("VIRTUAL_ENV", test_venv_path)
+        result = get_venv_path()
+        assert result == Path(test_venv_path)
+
+    def test_get_python_executable(self):
+        """Test Python executable path detection."""
+        result = get_python_executable()
+        assert isinstance(result, Path)
+        assert result.exists()
+        assert result.name.startswith("python")
+
+    def test_find_executable(self):
+        """Test executable finding in PATH."""
+        # Test with non-existent executable
+        result = find_executable("nonexistent_executable")
+        assert result is None
+
+        # Test with existing executable (python should be available)
+        result = find_executable("python")
+        if result:
+            assert isinstance(result, Path)
+            assert result.exists()
+
+    def test_get_script_binary_path(self):
+        """Test script binary path detection."""
+        result = get_script_binary_path()
+        assert isinstance(result, Path)
+        assert result.exists()
+        assert result.is_dir()
+
+    def test_get_binary_path(self, monkeypatch):
+        """Test binary path resolution with virtual environment."""
+        # Test when no virtual environment is active
+        result = get_binary_path("python")
+        if result:
+            assert isinstance(result, Path)
+            assert result.exists()
+
+        # Test when virtual environment is active
+        test_venv_path = "/path/to/venv"
+        monkeypatch.setenv("VIRTUAL_ENV", test_venv_path)
+        # This test is more complex as it depends on the actual venv structure
+        # We'll just verify the function doesn't crash
+        result = get_binary_path("python")
+        # Result can be None if binary not found in venv
+        if result:
+            assert isinstance(result, Path)
+
+
 class TestCrossPlatformCompatibility:
     """Test cross-platform compatibility."""
 
     def test_config_dir_platform_specific_windows(self, monkeypatch):
         """Test config directory respects Windows conventions."""
-        monkeypatch.setattr('os.name', 'nt')
-        monkeypatch.setenv('APPDATA', 'C:\\Users\\Test\\AppData\\Roaming')
+        monkeypatch.setattr("os.name", "nt")
+        monkeypatch.setenv("APPDATA", "C:\\Users\\Test\\AppData\\Roaming")
         config_dir = get_config_dir()
         assert "AppData" in str(config_dir)
         assert "py-libp2p" in str(config_dir)
@@ -211,3 +273,10 @@ class TestBackwardCompatibility:
         assert isinstance(create_temp_file(), Path)
         assert isinstance(resolve_relative_path(".", "test"), Path)
         assert isinstance(normalize_path("."), Path)
+        assert isinstance(get_python_executable(), Path)
+        assert isinstance(get_script_binary_path(), Path)
+
+        # Test optional return types
+        venv_path = get_venv_path()
+        if venv_path is not None:
+            assert isinstance(venv_path, Path)