Mirror of https://github.com/varun-r-mallya/Python-BPF.git (synced 2025-12-31 21:06:25 +00:00)

Compare commits: v0.1.4...b09dc815fc (39 commits)
@@ -21,7 +21,7 @@ ci:
 repos:
   # Standard hooks
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.6.0
+    rev: v6.0.0
     hooks:
       - id: check-added-large-files
       - id: check-case-conflict
@@ -36,7 +36,7 @@ repos:
       - id: trailing-whitespace

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: "v0.4.2"
+    rev: "v0.13.2"
     hooks:
       - id: ruff
         args: ["--fix", "--show-fixes"]
@@ -45,7 +45,7 @@ repos:

   # Checking static types
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: "v1.10.0"
+    rev: "v1.18.2"
     hooks:
       - id: mypy
         exclude: ^(tests)|^(examples)
TODO.md
@@ -1,7 +1,6 @@
## Short term

- Implement enough functionality to port the BCC tutorial examples in PythonBPF
- Static Typing
- Add all maps
- XDP support in pylibbpf
- ringbuf support
@@ -12,7 +12,7 @@
     "from pythonbpf import bpf, map, section, bpfglobal, BPF\n",
     "from pythonbpf.helper import pid\n",
     "from pythonbpf.maps import HashMap\n",
-    "from pylibbpf import *\n",
+    "from pylibbpf import BpfMap\n",
     "from ctypes import c_void_p, c_int64, c_uint64, c_int32\n",
     "import matplotlib.pyplot as plt"
    ]
@@ -21,17 +21,17 @@ def last() -> HashMap:
 @section("tracepoint/syscalls/sys_enter_sync")
 def do_trace(ctx: c_void_p) -> c_int64:
     key = 0
-    tsp = last().lookup(key)
+    tsp = last.lookup(key)
     if tsp:
         kt = ktime()
         delta = kt - tsp
         if delta < 1000000000:
             time_ms = delta // 1000000
             print(f"sync called within last second, last {time_ms} ms ago")
-        last().delete(key)
+        last.delete(key)
     else:
         kt = ktime()
-        last().update(key, kt)
+        last.update(key, kt)
     return c_int64(0)
@@ -8,68 +8,65 @@ logger: Logger = logging.getLogger(__name__)

 def recursive_dereferencer(var, builder):
     """dereference until primitive type comes out"""
-    if var.type == ir.PointerType(ir.PointerType(ir.IntType(64))):
+    # TODO: Not worrying about stack overflow for now
+    logger.info(f"Dereferencing {var}, type is {var.type}")
+    if isinstance(var.type, ir.PointerType):
         a = builder.load(var)
         return recursive_dereferencer(a, builder)
-    elif var.type == ir.PointerType(ir.IntType(64)):
-        a = builder.load(var)
-        return recursive_dereferencer(a, builder)
-    elif var.type == ir.IntType(64):
+    elif isinstance(var.type, ir.IntType):
         return var
     else:
         raise TypeError(f"Unsupported type for dereferencing: {var.type}")


-def handle_binary_op(rval, module, builder, var_name, local_sym_tab, map_sym_tab, func):
-    logger.info(f"module {module}")
-    left = rval.left
-    right = rval.right
+def get_operand_value(operand, builder, local_sym_tab):
+    """Extract the value from an operand, handling variables and constants."""
+    if isinstance(operand, ast.Name):
+        if operand.id in local_sym_tab:
+            return recursive_dereferencer(local_sym_tab[operand.id].var, builder)
+        raise ValueError(f"Undefined variable: {operand.id}")
+    elif isinstance(operand, ast.Constant):
+        if isinstance(operand.value, int):
+            return ir.Constant(ir.IntType(64), operand.value)
+        raise TypeError(f"Unsupported constant type: {type(operand.value)}")
+    elif isinstance(operand, ast.BinOp):
+        return handle_binary_op_impl(operand, builder, local_sym_tab)
+    raise TypeError(f"Unsupported operand type: {type(operand)}")
+
+
+def handle_binary_op_impl(rval, builder, local_sym_tab):
     op = rval.op

-    # Handle left operand
-    if isinstance(left, ast.Name):
-        if left.id in local_sym_tab:
-            left = recursive_dereferencer(local_sym_tab[left.id].var, builder)
-        else:
-            raise SyntaxError(f"Undefined variable: {left.id}")
-    elif isinstance(left, ast.Constant):
-        left = ir.Constant(ir.IntType(64), left.value)
-    else:
-        raise SyntaxError("Unsupported left operand type")
-
-    if isinstance(right, ast.Name):
-        if right.id in local_sym_tab:
-            right = recursive_dereferencer(local_sym_tab[right.id].var, builder)
-        else:
-            raise SyntaxError(f"Undefined variable: {right.id}")
-    elif isinstance(right, ast.Constant):
-        right = ir.Constant(ir.IntType(64), right.value)
-    else:
-        raise SyntaxError("Unsupported right operand type")
-
+    left = get_operand_value(rval.left, builder, local_sym_tab)
+    right = get_operand_value(rval.right, builder, local_sym_tab)
     logger.info(f"left is {left}, right is {right}, op is {op}")

-    if isinstance(op, ast.Add):
-        builder.store(builder.add(left, right), local_sym_tab[var_name].var)
-    elif isinstance(op, ast.Sub):
-        builder.store(builder.sub(left, right), local_sym_tab[var_name].var)
-    elif isinstance(op, ast.Mult):
-        builder.store(builder.mul(left, right), local_sym_tab[var_name].var)
-    elif isinstance(op, ast.Div):
-        builder.store(builder.sdiv(left, right), local_sym_tab[var_name].var)
-    elif isinstance(op, ast.Mod):
-        builder.store(builder.srem(left, right), local_sym_tab[var_name].var)
-    elif isinstance(op, ast.LShift):
-        builder.store(builder.shl(left, right), local_sym_tab[var_name].var)
-    elif isinstance(op, ast.RShift):
-        builder.store(builder.lshr(left, right), local_sym_tab[var_name].var)
-    elif isinstance(op, ast.BitOr):
-        builder.store(builder.or_(left, right), local_sym_tab[var_name].var)
-    elif isinstance(op, ast.BitXor):
-        builder.store(builder.xor(left, right), local_sym_tab[var_name].var)
-    elif isinstance(op, ast.BitAnd):
-        builder.store(builder.and_(left, right), local_sym_tab[var_name].var)
-    elif isinstance(op, ast.FloorDiv):
-        builder.store(builder.udiv(left, right), local_sym_tab[var_name].var)
+    # Map AST operation nodes to LLVM IR builder methods
+    op_map = {
+        ast.Add: builder.add,
+        ast.Sub: builder.sub,
+        ast.Mult: builder.mul,
+        ast.Div: builder.sdiv,
+        ast.Mod: builder.srem,
+        ast.LShift: builder.shl,
+        ast.RShift: builder.lshr,
+        ast.BitOr: builder.or_,
+        ast.BitXor: builder.xor,
+        ast.BitAnd: builder.and_,
+        ast.FloorDiv: builder.udiv,
+    }
+
+    if type(op) in op_map:
+        result = op_map[type(op)](left, right)
+        return result
     else:
         raise SyntaxError("Unsupported binary operation")
+
+
+def handle_binary_op(rval, builder, var_name, local_sym_tab):
+    result = handle_binary_op_impl(rval, builder, local_sym_tab)
+    if var_name and var_name in local_sym_tab:
+        logger.info(
+            f"Storing result {result} into variable {local_sym_tab[var_name].var}"
+        )
+        builder.store(result, local_sym_tab[var_name].var)
+    return result, result.type
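The op_map refactor above replaces the if/elif chain with a dictionary that dispatches from AST operator node classes to llvmlite IRBuilder methods. A minimal, self-contained sketch of the same technique (illustrative only, not repository code; the module and function names are made up):

import ast
from llvmlite import ir

# Build a tiny function `i64 demo(i64 a, i64 b)` and emit one binary op into it.
module = ir.Module(name="opmap_demo")
fnty = ir.FunctionType(ir.IntType(64), [ir.IntType(64), ir.IntType(64)])
func = ir.Function(module, fnty, name="demo")
builder = ir.IRBuilder(func.append_basic_block("entry"))
a, b = func.args

# Same idea as handle_binary_op_impl: AST node class -> IRBuilder method.
op_map = {
    ast.Add: builder.add,
    ast.Sub: builder.sub,
    ast.Mult: builder.mul,
}

node = ast.parse("x + y", mode="eval").body  # an ast.BinOp whose .op is ast.Add
result = op_map[type(node.op)](a, b)         # dispatches to builder.add
builder.ret(result)
print(module)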
@@ -1,11 +1,11 @@
 import ast
 from llvmlite import ir
 from .license_pass import license_processing
-from .functions_pass import func_proc
+from .functions import func_proc
 from .maps import maps_proc
 from .structs import structs_proc
 from .globals_pass import globals_processing
-from .debuginfo import DW_LANG_C11, DwarfBehaviorEnum
+from .debuginfo import DW_LANG_C11, DwarfBehaviorEnum, DebugInfoGenerator
 import os
 import subprocess
 import inspect
@@ -48,7 +48,7 @@ def processor(source_code, filename, module):
     globals_processing(tree, module)


-def compile_to_ir(filename: str, output: str, loglevel=logging.WARNING):
+def compile_to_ir(filename: str, output: str, loglevel=logging.INFO):
     logging.basicConfig(
         level=loglevel, format="%(asctime)s [%(levelname)s] %(name)s: %(message)s"
     )
@@ -60,33 +60,17 @@ def compile_to_ir(filename: str, output: str, loglevel=logging.WARNING):
     module.triple = "bpf"

-    if not hasattr(module, "_debug_compile_unit"):
-        module._file_metadata = module.add_debug_info(
-            "DIFile",
-            {  # type: ignore
-                "filename": filename,
-                "directory": os.path.dirname(filename),
-            },
+    debug_generator = DebugInfoGenerator(module)
+    debug_generator.generate_file_metadata(filename, os.path.dirname(filename))
+    debug_generator.generate_debug_cu(
+        DW_LANG_C11,
+        f"PythonBPF {VERSION}",
+        True,  # TODO: This is probably not true
+        # TODO: add a global field here that keeps track of all the globals. Works without it, but I think it might
+        # be required for kprobes.
+        True,
     )

-        module._debug_compile_unit = module.add_debug_info(
-            "DICompileUnit",
-            {  # type: ignore
-                "language": DW_LANG_C11,
-                "file": module._file_metadata,  # type: ignore
-                "producer": f"PythonBPF {VERSION}",
-                "isOptimized": True,  # TODO: This is probably not true
-                # TODO: add a global field here that keeps track of all the globals. Works without it, but I think it might
-                # be required for kprobes.
-                "runtimeVersion": 0,
-                "emissionKind": 1,
-                "splitDebugInlining": False,
-                "nameTableKind": 0,
-            },
-            is_distinct=True,
-        )
-
-        module.add_named_metadata("llvm.dbg.cu", module._debug_compile_unit)  # type: ignore

     processor(source, filename, module)

     wchar_size = module.add_metadata(
@@ -137,7 +121,7 @@ def compile_to_ir(filename: str, output: str, loglevel=logging.WARNING):
     return output


-def compile(loglevel=logging.WARNING) -> bool:
+def compile(loglevel=logging.INFO) -> bool:
     # Look one level up the stack to the caller of this function
     caller_frame = inspect.stack()[1]
     caller_file = Path(caller_frame.filename).resolve()
@@ -170,7 +154,7 @@ def compile(loglevel=logging.WARNING) -> bool:
     return success


-def BPF(loglevel=logging.WARNING) -> BpfProgram:
+def BPF(loglevel=logging.INFO) -> BpfProgram:
     caller_frame = inspect.stack()[1]
     src = inspect.getsource(caller_frame.frame)
     with tempfile.NamedTemporaryFile(
@@ -12,6 +12,34 @@ class DebugInfoGenerator:
         self.module = module
         self._type_cache = {}  # Cache for common debug types

+    def generate_file_metadata(self, filename, dirname):
+        self.module._file_metadata = self.module.add_debug_info(
+            "DIFile",
+            {  # type: ignore
+                "filename": filename,
+                "directory": dirname,
+            },
+        )
+
+    def generate_debug_cu(
+        self, language, producer: str, is_optimized: bool, is_distinct: bool
+    ):
+        self.module._debug_compile_unit = self.module.add_debug_info(
+            "DICompileUnit",
+            {  # type: ignore
+                "language": language,
+                "file": self.module._file_metadata,  # type: ignore
+                "producer": producer,
+                "isOptimized": is_optimized,
+                "runtimeVersion": 0,
+                "emissionKind": 1,
+                "splitDebugInlining": False,
+                "nameTableKind": 0,
+            },
+            is_distinct=is_distinct,
+        )
+        self.module.add_named_metadata("llvm.dbg.cu", self.module._debug_compile_unit)  # type: ignore
+
     def get_basic_type(self, name: str, size: int, encoding: int) -> Any:
         """Get or create a basic type with caching"""
         key = (name, size, encoding)
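Read together with the compile_to_ir hunk above, these two methods are driven file-metadata-first, then compile-unit. A hedged sketch of that call sequence (illustrative only; the module name, file path, and producer string below are placeholders, and the import path assumes the package layout shown in the import hunk above):

from llvmlite import ir

from pythonbpf.debuginfo import DebugInfoGenerator, DW_LANG_C11

# Mirror the new compile_to_ir call order: DIFile first, then DICompileUnit.
module = ir.Module(name="debuginfo_demo")
gen = DebugInfoGenerator(module)
gen.generate_file_metadata("example.py", "/tmp")
gen.generate_debug_cu(
    DW_LANG_C11,        # language constant re-exported by pythonbpf.debuginfo
    "PythonBPF demo",   # producer string
    True,               # is_optimized
    True,               # is_distinct
)
# module now carries the llvm.dbg.cu named metadata.
print(module)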
@@ -2,10 +2,92 @@ import ast
 from llvmlite import ir
 from logging import Logger
 import logging
+from typing import Dict

 logger: Logger = logging.getLogger(__name__)


+def _handle_name_expr(expr: ast.Name, local_sym_tab: Dict, builder: ir.IRBuilder):
+    """Handle ast.Name expressions."""
+    if expr.id in local_sym_tab:
+        var = local_sym_tab[expr.id].var
+        val = builder.load(var)
+        return val, local_sym_tab[expr.id].ir_type
+    else:
+        logger.info(f"Undefined variable {expr.id}")
+        return None
+
+
+def _handle_constant_expr(expr: ast.Constant):
+    """Handle ast.Constant expressions."""
+    if isinstance(expr.value, int):
+        return ir.Constant(ir.IntType(64), expr.value), ir.IntType(64)
+    elif isinstance(expr.value, bool):
+        return ir.Constant(ir.IntType(1), int(expr.value)), ir.IntType(1)
+    else:
+        logger.info("Unsupported constant type")
+        return None
+
+
+def _handle_attribute_expr(
+    expr: ast.Attribute,
+    local_sym_tab: Dict,
+    structs_sym_tab: Dict,
+    builder: ir.IRBuilder,
+):
+    """Handle ast.Attribute expressions for struct field access."""
+    if isinstance(expr.value, ast.Name):
+        var_name = expr.value.id
+        attr_name = expr.attr
+        if var_name in local_sym_tab:
+            var_ptr, var_type, var_metadata = local_sym_tab[var_name]
+            logger.info(f"Loading attribute {attr_name} from variable {var_name}")
+            logger.info(f"Variable type: {var_type}, Variable ptr: {var_ptr}")
+
+            metadata = structs_sym_tab[var_metadata]
+            if attr_name in metadata.fields:
+                gep = metadata.gep(builder, var_ptr, attr_name)
+                val = builder.load(gep)
+                field_type = metadata.field_type(attr_name)
+                return val, field_type
+    return None
+
+
+def _handle_deref_call(expr: ast.Call, local_sym_tab: Dict, builder: ir.IRBuilder):
+    """Handle deref function calls."""
+    logger.info(f"Handling deref {ast.dump(expr)}")
+    if len(expr.args) != 1:
+        logger.info("deref takes exactly one argument")
+        return None
+
+    arg = expr.args[0]
+    if (
+        isinstance(arg, ast.Call)
+        and isinstance(arg.func, ast.Name)
+        and arg.func.id == "deref"
+    ):
+        logger.info("Multiple deref not supported")
+        return None
+
+    if isinstance(arg, ast.Name):
+        if arg.id in local_sym_tab:
+            arg_ptr = local_sym_tab[arg.id].var
+        else:
+            logger.info(f"Undefined variable {arg.id}")
+            return None
+    else:
+        logger.info("Unsupported argument type for deref")
+        return None
+
+    if arg_ptr is None:
+        logger.info("Failed to evaluate deref argument")
+        return None
+
+    # Load the value from pointer
+    val = builder.load(arg_ptr)
+    return val, local_sym_tab[arg.id].ir_type
+
+
 def eval_expr(
     func,
     module,
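For orientation, a hedged sketch of the source-level pattern _handle_deref_call is written to accept, modeled on the sync-tracing example earlier in this diff (fragment only; the surrounding decorators, imports, and the `last` map are assumed to be set up as in that example, and are not repeated here):

    key = 0
    tsp = last.lookup(key)      # pointer-like result from the map lookup
    if tsp:
        ts = deref(tsp)         # single deref of a bare name: supported
        # deref(deref(tsp)) would be rejected ("Multiple deref not supported"),
        # and deref(tsp + 1) would fail the ast.Name check in the handler above.
        print(f"{ts}")
    return c_int64(0)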
@@ -17,64 +99,28 @@ def eval_expr(
 ):
     logger.info(f"Evaluating expression: {ast.dump(expr)}")
     if isinstance(expr, ast.Name):
-        if expr.id in local_sym_tab:
-            var = local_sym_tab[expr.id].var
-            val = builder.load(var)
-            return val, local_sym_tab[expr.id].ir_type  # return value and type
-        else:
-            logger.info(f"Undefined variable {expr.id}")
-            return None
+        return _handle_name_expr(expr, local_sym_tab, builder)
     elif isinstance(expr, ast.Constant):
-        if isinstance(expr.value, int):
-            return ir.Constant(ir.IntType(64), expr.value), ir.IntType(64)
-        elif isinstance(expr.value, bool):
-            return ir.Constant(ir.IntType(1), int(expr.value)), ir.IntType(1)
-        else:
-            logger.info("Unsupported constant type")
-            return None
+        return _handle_constant_expr(expr)
     elif isinstance(expr, ast.Call):
+        if isinstance(expr.func, ast.Name) and expr.func.id == "deref":
+            return _handle_deref_call(expr, local_sym_tab, builder)
+
         # delayed import to avoid circular dependency
         from pythonbpf.helper import HelperHandlerRegistry, handle_helper_call

-        if isinstance(expr.func, ast.Name):
-            # check deref
-            if expr.func.id == "deref":
-                logger.info(f"Handling deref {ast.dump(expr)}")
-                if len(expr.args) != 1:
-                    logger.info("deref takes exactly one argument")
-                    return None
-                arg = expr.args[0]
-                if (
-                    isinstance(arg, ast.Call)
-                    and isinstance(arg.func, ast.Name)
-                    and arg.func.id == "deref"
-                ):
-                    logger.info("Multiple deref not supported")
-                    return None
-                if isinstance(arg, ast.Name):
-                    if arg.id in local_sym_tab:
-                        arg = local_sym_tab[arg.id].var
-                    else:
-                        logger.info(f"Undefined variable {arg.id}")
-                        return None
-                if arg is None:
-                    logger.info("Failed to evaluate deref argument")
-                    return None
-                # Since we are handling only name case, directly take type from sym tab
-                val = builder.load(arg)
-                return val, local_sym_tab[expr.args[0].id].ir_type
-
-            # check for helpers
-            if HelperHandlerRegistry.has_handler(expr.func.id):
-                return handle_helper_call(
-                    expr,
-                    module,
-                    builder,
-                    func,
-                    local_sym_tab,
-                    map_sym_tab,
-                    structs_sym_tab,
-                )
+        if isinstance(expr.func, ast.Name) and HelperHandlerRegistry.has_handler(
+            expr.func.id
+        ):
+            return handle_helper_call(
+                expr,
+                module,
+                builder,
+                func,
+                local_sym_tab,
+                map_sym_tab,
+                structs_sym_tab,
+            )
         elif isinstance(expr.func, ast.Attribute):
             logger.info(f"Handling method call: {ast.dump(expr.func)}")
             if isinstance(expr.func.value, ast.Call) and isinstance(
@@ -106,19 +152,7 @@ def eval_expr(
                 structs_sym_tab,
             )
     elif isinstance(expr, ast.Attribute):
-        if isinstance(expr.value, ast.Name):
-            var_name = expr.value.id
-            attr_name = expr.attr
-            if var_name in local_sym_tab:
-                var_ptr, var_type, var_metadata = local_sym_tab[var_name]
-                logger.info(f"Loading attribute {attr_name} from variable {var_name}")
-                logger.info(f"Variable type: {var_type}, Variable ptr: {var_ptr}")
-                metadata = structs_sym_tab[var_metadata]
-                if attr_name in metadata.fields:
-                    gep = metadata.gep(builder, var_ptr, attr_name)
-                    val = builder.load(gep)
-                    field_type = metadata.field_type(attr_name)
-                    return val, field_type
+        return _handle_attribute_expr(expr, local_sym_tab, structs_sym_tab, builder)
     logger.info("Unsupported expression evaluation")
     return None
pythonbpf/functions/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from .functions_pass import func_proc

__all__ = ["func_proc"]
pythonbpf/functions/func_registry_handlers.py (new file, 22 lines)
@@ -0,0 +1,22 @@
from typing import Dict


class StatementHandlerRegistry:
    """Registry for statement handlers."""

    _handlers: Dict = {}

    @classmethod
    def register(cls, stmt_type):
        """Register a handler for a specific statement type."""

        def decorator(handler):
            cls._handlers[stmt_type] = handler
            return handler

        return decorator

    @classmethod
    def get_handler(cls, stmt_type):
        """Get the handler for a specific statement type."""
        return cls._handlers.get(stmt_type, None)
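A hedged sketch of how this registry is presumably meant to be used (the handler name and the Assign example below are illustrative, not taken from the repository; only StatementHandlerRegistry itself comes from the new file above):

import ast

from pythonbpf.functions.func_registry_handlers import StatementHandlerRegistry


# Hypothetical handler registration; real handlers would live in functions_pass.
@StatementHandlerRegistry.register(ast.Assign)
def _handle_assign_stmt(stmt, builder, local_sym_tab):
    ...


# Dispatch by statement type instead of a long if/elif chain.
stmt = ast.parse("x = 1").body[0]
handler = StatementHandlerRegistry.get_handler(type(stmt))
if handler is not None:
    print(f"dispatching {type(stmt).__name__} to {handler.__name__}")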
@@ -4,10 +4,10 @@ import logging
 from typing import Any
 from dataclasses import dataclass

-from .helper import HelperHandlerRegistry, handle_helper_call
-from .type_deducer import ctypes_to_ir
-from .binary_ops import handle_binary_op
-from .expr_pass import eval_expr, handle_expr
+from pythonbpf.helper import HelperHandlerRegistry, handle_helper_call
+from pythonbpf.type_deducer import ctypes_to_ir
+from pythonbpf.binary_ops import handle_binary_op
+from pythonbpf.expr_pass import eval_expr, handle_expr

 logger = logging.getLogger(__name__)
@@ -146,8 +146,7 @@ def handle_assign(
                 local_sym_tab[var_name].var,
             )
             logger.info(
-                f"Assigned {call_type} constant "
-                f"{rval.args[0].value} to {var_name}"
+                f"Assigned {call_type} constant {rval.args[0].value} to {var_name}"
             )
         elif HelperHandlerRegistry.has_handler(call_type):
             # var = builder.alloca(ir.IntType(64), name=var_name)
@@ -192,8 +191,23 @@ def handle_assign(
         elif isinstance(rval.func, ast.Attribute):
             logger.info(f"Assignment call attribute: {ast.dump(rval.func)}")
             if isinstance(rval.func.value, ast.Name):
-                # TODO: probably a struct access
-                logger.info(f"TODO STRUCT ACCESS {ast.dump(rval)}")
+                if rval.func.value.id in map_sym_tab:
+                    map_name = rval.func.value.id
+                    method_name = rval.func.attr
+                    if HelperHandlerRegistry.has_handler(method_name):
+                        val = handle_helper_call(
+                            rval,
+                            module,
+                            builder,
+                            func,
+                            local_sym_tab,
+                            map_sym_tab,
+                            structs_sym_tab,
+                        )
+                        builder.store(val[0], local_sym_tab[var_name].var)
+                else:
+                    # TODO: probably a struct access
+                    logger.info(f"TODO STRUCT ACCESS {ast.dump(rval)}")
             elif isinstance(rval.func.value, ast.Call) and isinstance(
                 rval.func.value.func, ast.Name
             ):
|
||||
else:
|
||||
logger.info("Unsupported assignment call function type")
|
||||
elif isinstance(rval, ast.BinOp):
|
||||
handle_binary_op(
|
||||
rval, module, builder, var_name, local_sym_tab, map_sym_tab, func
|
||||
)
|
||||
handle_binary_op(rval, builder, var_name, local_sym_tab)
|
||||
else:
|
||||
logger.info("Unsupported assignment value type")
|
||||
|
||||
@@ -372,24 +384,48 @@ def process_stmt(
         )
     elif isinstance(stmt, ast.Return):
         if stmt.value is None:
-            builder.ret(ir.Constant(ir.IntType(32), 0))
+            builder.ret(ir.Constant(ir.IntType(64), 0))
             did_return = True
         elif (
             isinstance(stmt.value, ast.Call)
             and isinstance(stmt.value.func, ast.Name)
             and len(stmt.value.args) == 1
-            and isinstance(stmt.value.args[0], ast.Constant)
-            and isinstance(stmt.value.args[0].value, int)
         ):
-            call_type = stmt.value.func.id
-            if ctypes_to_ir(call_type) != ret_type:
-                raise ValueError(
-                    "Return type mismatch: expected"
-                    f"{ctypes_to_ir(call_type)}, got {call_type}"
-                )
-            else:
-                builder.ret(ir.Constant(ret_type, stmt.value.args[0].value))
+            if isinstance(stmt.value.args[0], ast.Constant) and isinstance(
+                stmt.value.args[0].value, int
+            ):
+                call_type = stmt.value.func.id
+                if ctypes_to_ir(call_type) != ret_type:
+                    raise ValueError(
+                        "Return type mismatch: expected"
+                        f"{ctypes_to_ir(call_type)}, got {call_type}"
+                    )
+                else:
+                    builder.ret(ir.Constant(ret_type, stmt.value.args[0].value))
+                did_return = True
+            elif isinstance(stmt.value.args[0], ast.BinOp):
+                # TODO: Should be routed through eval_expr
+                val = handle_binary_op(stmt.value.args[0], builder, None, local_sym_tab)
+                if val is None:
+                    raise ValueError("Failed to evaluate return expression")
+                if val[1] != ret_type:
+                    raise ValueError(
+                        f"Return type mismatch: expected {ret_type}, got {val[1]}"
+                    )
+                builder.ret(val[0])
+                did_return = True
+            elif isinstance(stmt.value.args[0], ast.Name):
+                if stmt.value.args[0].id in local_sym_tab:
+                    var = local_sym_tab[stmt.value.args[0].id].var
+                    val = builder.load(var)
+                    if val.type != ret_type:
+                        raise ValueError(
+                            f"Return type mismatch: expected {ret_type}, got {val.type}"
+                        )
+                    builder.ret(val)
+                    did_return = True
+                else:
+                    raise ValueError("Failed to evaluate return expression")
         elif isinstance(stmt.value, ast.Name):
             if stmt.value.id == "XDP_PASS":
                 builder.ret(ir.Constant(ret_type, 2))
@@ -442,6 +478,9 @@ def allocate_mem(
                 continue
             var_name = target.id
             rval = stmt.value
+            if var_name in local_sym_tab:
+                logger.info(f"Variable {var_name} already allocated")
+                continue
             if isinstance(rval, ast.Call):
                 if isinstance(rval.func, ast.Name):
                     call_type = rval.func.id
@@ -470,8 +509,7 @@ def allocate_mem(
                         var = builder.alloca(ir_type, name=var_name)
                         has_metadata = True
                         logger.info(
-                            f"Pre-allocated variable {var_name} "
-                            f"for struct {call_type}"
+                            f"Pre-allocated variable {var_name} for struct {call_type}"
                         )
                 elif isinstance(rval.func, ast.Attribute):
                     ir_type = ir.PointerType(ir.IntType(64))
@@ -555,7 +593,7 @@ def process_func_body(
     )

     if not did_return:
-        builder.ret(ir.Constant(ir.IntType(32), 0))
+        builder.ret(ir.Constant(ir.IntType(64), 0))


 def process_bpf_chunk(func_node, module, return_type, map_sym_tab, structs_sym_tab):
@@ -62,7 +62,7 @@ def bpf_map_lookup_elem_emitter(
     """
    if not call.args or len(call.args) != 1:
         raise ValueError(
-            "Map lookup expects exactly one argument (key), got " f"{len(call.args)}"
+            f"Map lookup expects exactly one argument (key), got {len(call.args)}"
         )
     key_ptr = get_or_create_ptr_from_arg(call.args[0], builder, local_sym_tab)
     map_void_ptr = builder.bitcast(map_ptr, ir.PointerType())
@@ -145,8 +145,7 @@ def bpf_map_update_elem_emitter(
     """
     if not call.args or len(call.args) < 2 or len(call.args) > 3:
         raise ValueError(
-            "Map update expects 2 or 3 args (key, value, flags), "
-            f"got {len(call.args)}"
+            f"Map update expects 2 or 3 args (key, value, flags), got {len(call.args)}"
         )

     key_arg = call.args[0]
@@ -196,7 +195,7 @@ def bpf_map_delete_elem_emitter(
     """
     if not call.args or len(call.args) != 1:
         raise ValueError(
-            "Map delete expects exactly one argument (key), got " f"{len(call.args)}"
+            f"Map delete expects exactly one argument (key), got {len(call.args)}"
         )
     key_ptr = get_or_create_ptr_from_arg(call.args[0], builder, local_sym_tab)
     map_void_ptr = builder.bitcast(map_ptr, ir.PointerType())
@@ -255,7 +254,7 @@ def bpf_perf_event_output_handler(
 ):
     if len(call.args) != 1:
         raise ValueError(
-            "Perf event output expects exactly one argument, " f"got {len(call.args)}"
+            f"Perf event output expects exactly one argument, got {len(call.args)}"
         )
     data_arg = call.args[0]
     ctx_ptr = func.args[0]  # First argument to the function is ctx
@@ -270,7 +270,7 @@ def _prepare_expr_args(expr, func, module, builder, local_sym_tab, struct_sym_ta
         val = builder.sext(val, ir.IntType(64))
     else:
         logger.warning(
-            "Only int and ptr supported in bpf_printk args. " "Others default to 0."
+            "Only int and ptr supported in bpf_printk args. Others default to 0."
         )
         val = ir.Constant(ir.IntType(64), 0)
     return val
@@ -278,9 +278,7 @@ def process_bpf_map(func_node, module):
         if handler:
             return handler(map_name, rval, module)
         else:
-            logger.warning(
-                f"Unknown map type " f"{rval.func.id}, defaulting to HashMap"
-            )
+            logger.warning(f"Unknown map type {rval.func.id}, defaulting to HashMap")
             return process_hash_map(map_name, rval, module)
     else:
         raise ValueError("Function under @map must return a map")
@@ -4,6 +4,18 @@ from pythonbpf.maps import HashMap

 from ctypes import c_void_p, c_int64

+# NOTE: I have decided to not fix this example for now.
+# The issue is in line 31, where we are passing an expression.
+# The update helper expects a pointer type. But the problem is
+# that we must allocate the space for said pointer in the first
+# basic block. As that usage is in a different basic block, we
+# are unable to cast the expression to a pointer type. (as we never
+# allocated space for it).
+# Shall we change our space allocation logic? That allows users to
+# spam the same helper with the same args, and still run out of
+# stack space. So we consider this usage invalid for now.
+# Might fix it later.
+

 @bpf
 @map
@@ -14,12 +26,12 @@ def count() -> HashMap:
 @bpf
 @section("xdp")
 def hello_world(ctx: c_void_p) -> c_int64:
-    prev = count().lookup(0)
+    prev = count.lookup(0)
     if prev:
-        count().update(0, prev + 1)
+        count.update(0, prev + 1)
         return XDP_PASS
     else:
-        count().update(0, 1)
+        count.update(0, 1)

     return XDP_PASS

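The NOTE in the example above hinges on stack slots being reserved in a function's first basic block. A minimal llvmlite sketch of that convention, not taken from the repository, just to make the constraint concrete:

from llvmlite import ir

# All allocas go in the entry block; later blocks only store/load through them.
mod = ir.Module(name="entry_alloca_demo")
fn = ir.Function(mod, ir.FunctionType(ir.IntType(64), []), name="demo")
entry = fn.append_basic_block("entry")
body = fn.append_basic_block("body")

b = ir.IRBuilder(entry)
slot = b.alloca(ir.IntType(64), name="tmp")    # stack slot reserved up front
b.branch(body)

b.position_at_end(body)
b.store(ir.Constant(ir.IntType(64), 1), slot)  # a value computed in a later block
b.ret(b.load(slot))                            # still has a pre-reserved slot
print(mod)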
tests/failing_tests/named_arg.py (new file, 40 lines)
@@ -0,0 +1,40 @@
from pythonbpf import bpf, map, section, bpfglobal, compile
from pythonbpf.helper import XDP_PASS
from pythonbpf.maps import HashMap

from ctypes import c_void_p, c_int64

# NOTE: This example exposes the problems with our typing system.
# We can't do steps on line 25 and 27.
# prev is of type i64**. For prev + 1, we deref it down to i64
# To assign it back to prev, we need to go back to i64**.
# We cannot allocate space for the intermediate type now.
# We probably need to track the ref/deref chain for each variable.


@bpf
@map
def count() -> HashMap:
    return HashMap(key=c_int64, value=c_int64, max_entries=1)


@bpf
@section("xdp")
def hello_world(ctx: c_void_p) -> c_int64:
    prev = count.lookup(0)
    if prev:
        prev = prev + 1
        count.update(0, prev)
        return XDP_PASS
    else:
        count.update(0, 1)

    return XDP_PASS


@bpf
@bpfglobal
def LICENSE() -> str:
    return "GPL"


compile()
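The NOTE above suggests tracking the ref/deref chain per variable. A hedged sketch of one way that bookkeeping could look (purely illustrative; SymEntry, ptr_depth, and load_primitive are invented names, not repository code):

from dataclasses import dataclass

from llvmlite import ir


@dataclass
class SymEntry:
    var: object        # the alloca backing the variable (e.g. an i64** slot)
    ir_type: ir.Type   # the primitive element type (e.g. i64)
    ptr_depth: int     # extra loads needed beyond the alloca to reach the primitive


def load_primitive(builder: ir.IRBuilder, entry: SymEntry):
    """Load through ptr_depth extra levels of indirection to reach the raw value,
    so prev + 1 can be computed on i64 even when prev is stored as i64**."""
    val = builder.load(entry.var)
    for _ in range(entry.ptr_depth):
        val = builder.load(val)
    return val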
@@ -3,9 +3,9 @@ from ctypes import c_void_p, c_int64


 @bpf
-@section("sometag1")
+@section("tracepoint/syscalls/sys_enter_sync")
 def sometag(ctx: c_void_p) -> c_int64:
-    a = 1 + 2 + 1
+    a = 1 + 2 + 1 + 12 + 13
     print(f"{a}")
     return c_int64(0)

tests/passing_tests/binops1.py (new file, 20 lines)
@@ -0,0 +1,20 @@
from pythonbpf import compile, bpf, section, bpfglobal
from ctypes import c_void_p, c_int64


@bpf
@section("tracepoint/syscalls/sys_enter_sync")
def sometag(ctx: c_void_p) -> c_int64:
    b = 1 + 2
    a = 1 + b
    print(f"{a}")
    return c_int64(0)


@bpf
@bpfglobal
def LICENSE() -> str:
    return "GPL"


compile()
@@ -1,3 +1,5 @@
+import logging
+
 from pythonbpf import compile, bpf, section, bpfglobal
 from ctypes import c_void_p, c_int64

@@ -5,8 +7,7 @@ from ctypes import c_void_p, c_int64
 @bpf
 @section("sometag1")
 def sometag(ctx: c_void_p) -> c_int64:
-    b = 1 + 2
-    a = 1 + b
+    a = 1 - 1
     return c_int64(a)

@@ -16,4 +17,4 @@ def LICENSE() -> str:
     return "GPL"


-compile()
+compile(loglevel=logging.INFO)