mirror of
https://github.com/varun-r-mallya/Python-BPF.git
synced 2026-02-09 22:50:56 +00:00
Compare commits
189 Commits
globals
...
80396c78a6
| Author | SHA1 | Date | |
|---|---|---|---|
| 80396c78a6 | |||
| 8774277000 | |||
| 8743ea17f3 | |||
| f8844104a6 | |||
| 3343bedd11 | |||
| 75d3ad4fe2 | |||
| abbf17748d | |||
| 7c559840f0 | |||
| 06773c895f | |||
| 1e3d775865 | |||
| 168e26268e | |||
| 2cf7b28793 | |||
| d24d59c2ba | |||
| f190a33e21 | |||
| eb636ef731 | |||
| 2ae3aade60 | |||
| f227fe9310 | |||
| 7940d02bc7 | |||
| 2483ef2840 | |||
| 68e9693f9a | |||
| e4575a6b1e | |||
| 3ec3ab30fe | |||
| 7fb3ecff48 | |||
| ec59dad025 | |||
| 28b7b1620c | |||
| 9f8e240a38 | |||
| e6c05ab494 | |||
| 8aa9cf7119 | |||
| 9683e3799f | |||
| 200d293750 | |||
| ed196caebf | |||
| a049796b81 | |||
| 384fc9dd40 | |||
| 5f2df57e64 | |||
| 130d8a9edc | |||
| 40ae3d825a | |||
| 484624104e | |||
| e7c4bdb150 | |||
| 7210366e7d | |||
| 435bf27176 | |||
| 1ba27ac7cf | |||
| e4ddec3a02 | |||
| bc7b5c97d1 | |||
| fa720f8e6b | |||
| eff0f66d95 | |||
| b43c252224 | |||
| aae7aa981d | |||
| 6f9a7301af | |||
| 48923d03d4 | |||
| 019a83cf11 | |||
| 140d9e6e35 | |||
| a351b0f1b5 | |||
| 3cb73ff0c3 | |||
| 3b08c2bede | |||
| 86378d6cc4 | |||
| 00d1c583af | |||
| cfc246c80d | |||
| f3c80f9e5f | |||
| 0d3a5748dd | |||
| 079431754c | |||
| 46f5eca33d | |||
| 7081e939fb | |||
| 1e29460d6f | |||
| e180a89644 | |||
| 34a267e982 | |||
| c81aad7c67 | |||
| 2e677c2c7b | |||
| 4ea7b22b44 | |||
| b8b937bfca | |||
| 6cc29c4fa1 | |||
| 5451ba646d | |||
| 7720437ca5 | |||
| eb0a7a917d | |||
| 6f65903552 | |||
| 97e74d09be | |||
| 9c7560ed2e | |||
| 2979ceedcf | |||
| 745f59278f | |||
| 49c59b32ca | |||
| ff78140a7d | |||
| 82ff71b753 | |||
| f46e7cd846 | |||
| 9d73eb67c4 | |||
| 21ce041353 | |||
| 6402cf7be5 | |||
| 9a96e1247b | |||
| 989134f4be | |||
| 120aec08da | |||
| e66ae7cc89 | |||
| b95fbd0ed0 | |||
| 32dc8e6636 | |||
| d84ce0c6fa | |||
| 8d07a4cd05 | |||
| 8485460374 | |||
| 9fdc6fa3ed | |||
| 17004d58df | |||
| 6362a5e665 | |||
| d38d73d5c6 | |||
| 0a6571726a | |||
| e62557bd1d | |||
| ee90ee9392 | |||
| 5f9eaff59c | |||
| b86341ce7a | |||
| 4857739eec | |||
| 3bb4b099c1 | |||
| e7912a088f | |||
| 95d63d969e | |||
| 1f96bab944 | |||
| f98491f3bd | |||
| 98f262ae22 | |||
| d2ff53052c | |||
| ecac24c1d2 | |||
| a764b095f8 | |||
| 95a196a91f | |||
| 6b59980874 | |||
| 0c977514af | |||
| 1207730ce3 | |||
| 0d9dcd122c | |||
| 8a69e05ee2 | |||
| 976af290af | |||
| a3443ab1d5 | |||
| a27360482b | |||
| 3f9604a370 | |||
| 480afd1341 | |||
| ab71275566 | |||
| 2d850f457f | |||
| c423cc647d | |||
| 9e1142bf05 | |||
| 1843ca6c53 | |||
| caa5d92c32 | |||
| f41693bc6d | |||
| b7092fa362 | |||
| 0e7dcafbab | |||
| a574527891 | |||
| 176673017c | |||
| 1d6226d829 | |||
| 12b712c217 | |||
| 2de280915a | |||
| 1cce49f5e0 | |||
| 682a7e6566 | |||
| fb63dbd698 | |||
| 4f433d00cc | |||
| 6cf5115ea9 | |||
| f11a43010d | |||
| d1055e4d41 | |||
| 8554688230 | |||
| 3e873f378e | |||
| 28ce14ce34 | |||
| 5066cd4cfe | |||
| 0bfb3855b6 | |||
| 2f0dd20f1e | |||
| abef68c274 | |||
| 9aff614ff5 | |||
| 7b0e8a2fca | |||
| 3e68d6df4f | |||
| b75dc82f90 | |||
| f53ca3bd5b | |||
| 02885af1ca | |||
| e6e2a69506 | |||
| e4e92710c0 | |||
| f08bc9976c | |||
| 23183da2e1 | |||
| c6fef1693e | |||
| 192e03aa98 | |||
| 6f02b61527 | |||
| a21ff5633c | |||
| f96a6b94dc | |||
| e9f3aa25d2 | |||
| d0a8e96b70 | |||
| b09dc815fc | |||
| ceaac78633 | |||
| dc7a127fa6 | |||
| 552cd352f2 | |||
| c7f2955ee9 | |||
| ef36ea1e03 | |||
| d341cb24c0 | |||
| 2fabb67942 | |||
| a0b0ad370e | |||
| 283b947fc5 | |||
| bf78ac21fe | |||
| ac49cd8b1c | |||
| af44bd063c | |||
| 1239d1c35f | |||
| f41a9ccf26 | |||
| be05b5d102 | |||
| 3f061750cf | |||
| 6d5d6345e2 | |||
| 6fea580693 | |||
| 86b9ec56d7 |
4
.github/workflows/format.yml
vendored
4
.github/workflows/format.yml
vendored
@ -12,8 +12,8 @@ jobs:
|
|||||||
name: Format
|
name: Format
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v5
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- uses: pre-commit/action@v3.0.1
|
- uses: pre-commit/action@v3.0.1
|
||||||
|
|||||||
2
.gitignore
vendored
2
.gitignore
vendored
@ -7,3 +7,5 @@ __pycache__/
|
|||||||
*.ll
|
*.ll
|
||||||
*.o
|
*.o
|
||||||
.ipynb_checkpoints/
|
.ipynb_checkpoints/
|
||||||
|
vmlinux.py
|
||||||
|
~*
|
||||||
|
|||||||
@ -12,7 +12,7 @@
|
|||||||
#
|
#
|
||||||
# See https://github.com/pre-commit/pre-commit
|
# See https://github.com/pre-commit/pre-commit
|
||||||
|
|
||||||
exclude: 'vmlinux.*\.py$'
|
exclude: 'vmlinux.py'
|
||||||
|
|
||||||
ci:
|
ci:
|
||||||
autoupdate_commit_msg: "chore: update pre-commit hooks"
|
autoupdate_commit_msg: "chore: update pre-commit hooks"
|
||||||
@ -21,7 +21,7 @@ ci:
|
|||||||
repos:
|
repos:
|
||||||
# Standard hooks
|
# Standard hooks
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
rev: v4.6.0
|
rev: v6.0.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: check-added-large-files
|
- id: check-added-large-files
|
||||||
- id: check-case-conflict
|
- id: check-case-conflict
|
||||||
@ -36,16 +36,16 @@ repos:
|
|||||||
- id: trailing-whitespace
|
- id: trailing-whitespace
|
||||||
|
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: "v0.4.2"
|
rev: "v0.13.2"
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff
|
- id: ruff
|
||||||
args: ["--fix", "--show-fixes"]
|
args: ["--fix", "--show-fixes"]
|
||||||
- id: ruff-format
|
- id: ruff-format
|
||||||
exclude: ^(docs)|^(tests)|^(examples)
|
# exclude: ^(docs)|^(tests)|^(examples)
|
||||||
|
|
||||||
# Checking static types
|
# Checking static types
|
||||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||||
rev: "v1.10.0"
|
rev: "v1.18.2"
|
||||||
hooks:
|
hooks:
|
||||||
- id: mypy
|
- id: mypy
|
||||||
exclude: ^(tests)|^(examples)
|
exclude: ^(tests)|^(examples)
|
||||||
|
|||||||
@ -83,14 +83,14 @@ def hist() -> HashMap:
|
|||||||
def hello(ctx: c_void_p) -> c_int64:
|
def hello(ctx: c_void_p) -> c_int64:
|
||||||
process_id = pid()
|
process_id = pid()
|
||||||
one = 1
|
one = 1
|
||||||
prev = hist().lookup(process_id)
|
prev = hist.lookup(process_id)
|
||||||
if prev:
|
if prev:
|
||||||
previous_value = prev + 1
|
previous_value = prev + 1
|
||||||
print(f"count: {previous_value} with {process_id}")
|
print(f"count: {previous_value} with {process_id}")
|
||||||
hist().update(process_id, previous_value)
|
hist.update(process_id, previous_value)
|
||||||
return c_int64(0)
|
return c_int64(0)
|
||||||
else:
|
else:
|
||||||
hist().update(process_id, one)
|
hist.update(process_id, one)
|
||||||
return c_int64(0)
|
return c_int64(0)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
13
TODO.md
13
TODO.md
@ -1,13 +0,0 @@
|
|||||||
## Short term
|
|
||||||
|
|
||||||
- Implement enough functionality to port the BCC tutorial examples in PythonBPF
|
|
||||||
- Static Typing
|
|
||||||
- Add all maps
|
|
||||||
- XDP support in pylibbpf
|
|
||||||
- ringbuf support
|
|
||||||
- recursive expression resolution
|
|
||||||
|
|
||||||
## Long term
|
|
||||||
|
|
||||||
- Refactor the codebase to be better than a hackathon project
|
|
||||||
- Port to C++ and use actual LLVM?
|
|
||||||
@ -12,7 +12,7 @@
|
|||||||
"from pythonbpf import bpf, map, section, bpfglobal, BPF\n",
|
"from pythonbpf import bpf, map, section, bpfglobal, BPF\n",
|
||||||
"from pythonbpf.helper import pid\n",
|
"from pythonbpf.helper import pid\n",
|
||||||
"from pythonbpf.maps import HashMap\n",
|
"from pythonbpf.maps import HashMap\n",
|
||||||
"from pylibbpf import *\n",
|
"from pylibbpf import BpfMap\n",
|
||||||
"from ctypes import c_void_p, c_int64, c_uint64, c_int32\n",
|
"from ctypes import c_void_p, c_int64, c_uint64, c_int32\n",
|
||||||
"import matplotlib.pyplot as plt"
|
"import matplotlib.pyplot as plt"
|
||||||
]
|
]
|
||||||
@ -308,6 +308,7 @@
|
|||||||
"def hist() -> HashMap:\n",
|
"def hist() -> HashMap:\n",
|
||||||
" return HashMap(key=c_int32, value=c_uint64, max_entries=4096)\n",
|
" return HashMap(key=c_int32, value=c_uint64, max_entries=4096)\n",
|
||||||
"\n",
|
"\n",
|
||||||
|
"\n",
|
||||||
"@bpf\n",
|
"@bpf\n",
|
||||||
"@section(\"tracepoint/syscalls/sys_enter_clone\")\n",
|
"@section(\"tracepoint/syscalls/sys_enter_clone\")\n",
|
||||||
"def hello(ctx: c_void_p) -> c_int64:\n",
|
"def hello(ctx: c_void_p) -> c_int64:\n",
|
||||||
@ -329,6 +330,7 @@
|
|||||||
"def LICENSE() -> str:\n",
|
"def LICENSE() -> str:\n",
|
||||||
" return \"GPL\"\n",
|
" return \"GPL\"\n",
|
||||||
"\n",
|
"\n",
|
||||||
|
"\n",
|
||||||
"b = BPF()"
|
"b = BPF()"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
@ -357,7 +359,6 @@
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"source": [
|
"source": [
|
||||||
"\n",
|
|
||||||
"b.load_and_attach()\n",
|
"b.load_and_attach()\n",
|
||||||
"hist = BpfMap(b, hist)\n",
|
"hist = BpfMap(b, hist)\n",
|
||||||
"print(\"Recording\")\n",
|
"print(\"Recording\")\n",
|
||||||
|
|||||||
29
examples/kprobes.py
Normal file
29
examples/kprobes.py
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, BPF
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("kretprobe/do_unlinkat")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
print("Hello, World!")
|
||||||
|
return c_int64(0)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("kprobe/do_unlinkat")
|
||||||
|
def hello_world2(ctx: c_void_p) -> c_int64:
|
||||||
|
print("Hello, World!")
|
||||||
|
return c_int64(0)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
b = BPF()
|
||||||
|
b.load_and_attach()
|
||||||
|
while True:
|
||||||
|
print("running")
|
||||||
|
# Now cat /sys/kernel/debug/tracing/trace_pipe to see results of unlink kprobe.
|
||||||
@ -27,7 +27,7 @@ def hello(ctx: c_void_p) -> c_int32:
|
|||||||
dataobj.pid = pid()
|
dataobj.pid = pid()
|
||||||
dataobj.ts = ktime()
|
dataobj.ts = ktime()
|
||||||
# dataobj.comm = strobj
|
# dataobj.comm = strobj
|
||||||
print(f"clone called at {dataobj.ts} by pid" f"{dataobj.pid}, comm {strobj}")
|
print(f"clone called at {dataobj.ts} by pid{dataobj.pid}, comm {strobj}")
|
||||||
events.output(dataobj)
|
events.output(dataobj)
|
||||||
return c_int32(0)
|
return c_int32(0)
|
||||||
|
|
||||||
|
|||||||
203381
examples/vmlinux.py
203381
examples/vmlinux.py
File diff suppressed because it is too large
Load Diff
@ -1,8 +1,8 @@
|
|||||||
from pythonbpf import bpf, map, section, bpfglobal, compile
|
from pythonbpf import bpf, map, section, bpfglobal, compile, compile_to_ir
|
||||||
from pythonbpf.helper import XDP_PASS
|
from pythonbpf.helper import XDP_PASS
|
||||||
from pythonbpf.maps import HashMap
|
from pythonbpf.maps import HashMap
|
||||||
|
from ctypes import c_int64, c_void_p
|
||||||
|
|
||||||
from ctypes import c_void_p, c_int64
|
|
||||||
|
|
||||||
# Instructions to how to run this program
|
# Instructions to how to run this program
|
||||||
# 1. Install PythonBPF: pip install pythonbpf
|
# 1. Install PythonBPF: pip install pythonbpf
|
||||||
@ -41,4 +41,5 @@ def LICENSE() -> str:
|
|||||||
return "GPL"
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile_to_ir("xdp_pass.py", "xdp_pass.ll")
|
||||||
compile()
|
compile()
|
||||||
|
|||||||
@ -9,6 +9,7 @@ logger: Logger = logging.getLogger(__name__)
|
|||||||
def recursive_dereferencer(var, builder):
|
def recursive_dereferencer(var, builder):
|
||||||
"""dereference until primitive type comes out"""
|
"""dereference until primitive type comes out"""
|
||||||
# TODO: Not worrying about stack overflow for now
|
# TODO: Not worrying about stack overflow for now
|
||||||
|
logger.info(f"Dereferencing {var}, type is {var.type}")
|
||||||
if isinstance(var.type, ir.PointerType):
|
if isinstance(var.type, ir.PointerType):
|
||||||
a = builder.load(var)
|
a = builder.load(var)
|
||||||
return recursive_dereferencer(a, builder)
|
return recursive_dereferencer(a, builder)
|
||||||
@ -18,7 +19,7 @@ def recursive_dereferencer(var, builder):
|
|||||||
raise TypeError(f"Unsupported type for dereferencing: {var.type}")
|
raise TypeError(f"Unsupported type for dereferencing: {var.type}")
|
||||||
|
|
||||||
|
|
||||||
def get_operand_value(operand, module, builder, local_sym_tab):
|
def get_operand_value(operand, builder, local_sym_tab):
|
||||||
"""Extract the value from an operand, handling variables and constants."""
|
"""Extract the value from an operand, handling variables and constants."""
|
||||||
if isinstance(operand, ast.Name):
|
if isinstance(operand, ast.Name):
|
||||||
if operand.id in local_sym_tab:
|
if operand.id in local_sym_tab:
|
||||||
@ -29,14 +30,14 @@ def get_operand_value(operand, module, builder, local_sym_tab):
|
|||||||
return ir.Constant(ir.IntType(64), operand.value)
|
return ir.Constant(ir.IntType(64), operand.value)
|
||||||
raise TypeError(f"Unsupported constant type: {type(operand.value)}")
|
raise TypeError(f"Unsupported constant type: {type(operand.value)}")
|
||||||
elif isinstance(operand, ast.BinOp):
|
elif isinstance(operand, ast.BinOp):
|
||||||
return handle_binary_op_impl(operand, module, builder, local_sym_tab)
|
return handle_binary_op_impl(operand, builder, local_sym_tab)
|
||||||
raise TypeError(f"Unsupported operand type: {type(operand)}")
|
raise TypeError(f"Unsupported operand type: {type(operand)}")
|
||||||
|
|
||||||
|
|
||||||
def handle_binary_op_impl(rval, module, builder, local_sym_tab):
|
def handle_binary_op_impl(rval, builder, local_sym_tab):
|
||||||
op = rval.op
|
op = rval.op
|
||||||
left = get_operand_value(rval.left, module, builder, local_sym_tab)
|
left = get_operand_value(rval.left, builder, local_sym_tab)
|
||||||
right = get_operand_value(rval.right, module, builder, local_sym_tab)
|
right = get_operand_value(rval.right, builder, local_sym_tab)
|
||||||
logger.info(f"left is {left}, right is {right}, op is {op}")
|
logger.info(f"left is {left}, right is {right}, op is {op}")
|
||||||
|
|
||||||
# Map AST operation nodes to LLVM IR builder methods
|
# Map AST operation nodes to LLVM IR builder methods
|
||||||
@ -61,6 +62,11 @@ def handle_binary_op_impl(rval, module, builder, local_sym_tab):
|
|||||||
raise SyntaxError("Unsupported binary operation")
|
raise SyntaxError("Unsupported binary operation")
|
||||||
|
|
||||||
|
|
||||||
def handle_binary_op(rval, module, builder, var_name, local_sym_tab):
|
def handle_binary_op(rval, builder, var_name, local_sym_tab):
|
||||||
result = handle_binary_op_impl(rval, module, builder, local_sym_tab)
|
result = handle_binary_op_impl(rval, builder, local_sym_tab)
|
||||||
builder.store(result, local_sym_tab[var_name].var)
|
if var_name and var_name in local_sym_tab:
|
||||||
|
logger.info(
|
||||||
|
f"Storing result {result} into variable {local_sym_tab[var_name].var}"
|
||||||
|
)
|
||||||
|
builder.store(result, local_sym_tab[var_name].var)
|
||||||
|
return result, result.type
|
||||||
|
|||||||
@ -1,9 +1,10 @@
|
|||||||
import ast
|
import ast
|
||||||
from llvmlite import ir
|
from llvmlite import ir
|
||||||
from .license_pass import license_processing
|
from .license_pass import license_processing
|
||||||
from .functions_pass import func_proc
|
from .functions import func_proc
|
||||||
from .maps import maps_proc
|
from .maps import maps_proc
|
||||||
from .structs import structs_proc
|
from .structs import structs_proc
|
||||||
|
from .vmlinux_parser import vmlinux_proc
|
||||||
from .globals_pass import (
|
from .globals_pass import (
|
||||||
globals_list_creation,
|
globals_list_creation,
|
||||||
globals_processing,
|
globals_processing,
|
||||||
@ -44,6 +45,7 @@ def processor(source_code, filename, module):
|
|||||||
for func_node in bpf_chunks:
|
for func_node in bpf_chunks:
|
||||||
logger.info(f"Found BPF function/struct: {func_node.name}")
|
logger.info(f"Found BPF function/struct: {func_node.name}")
|
||||||
|
|
||||||
|
vmlinux_proc(tree, module)
|
||||||
populate_global_symbol_table(tree, module)
|
populate_global_symbol_table(tree, module)
|
||||||
license_processing(tree, module)
|
license_processing(tree, module)
|
||||||
globals_processing(tree, module)
|
globals_processing(tree, module)
|
||||||
@ -55,7 +57,7 @@ def processor(source_code, filename, module):
|
|||||||
globals_list_creation(tree, module)
|
globals_list_creation(tree, module)
|
||||||
|
|
||||||
|
|
||||||
def compile_to_ir(filename: str, output: str, loglevel=logging.WARNING):
|
def compile_to_ir(filename: str, output: str, loglevel=logging.INFO):
|
||||||
logging.basicConfig(
|
logging.basicConfig(
|
||||||
level=loglevel, format="%(asctime)s [%(levelname)s] %(name)s: %(message)s"
|
level=loglevel, format="%(asctime)s [%(levelname)s] %(name)s: %(message)s"
|
||||||
)
|
)
|
||||||
@ -128,7 +130,7 @@ def compile_to_ir(filename: str, output: str, loglevel=logging.WARNING):
|
|||||||
return output
|
return output
|
||||||
|
|
||||||
|
|
||||||
def compile(loglevel=logging.WARNING) -> bool:
|
def compile(loglevel=logging.INFO) -> bool:
|
||||||
# Look one level up the stack to the caller of this function
|
# Look one level up the stack to the caller of this function
|
||||||
caller_frame = inspect.stack()[1]
|
caller_frame = inspect.stack()[1]
|
||||||
caller_file = Path(caller_frame.filename).resolve()
|
caller_file = Path(caller_frame.filename).resolve()
|
||||||
@ -161,7 +163,7 @@ def compile(loglevel=logging.WARNING) -> bool:
|
|||||||
return success
|
return success
|
||||||
|
|
||||||
|
|
||||||
def BPF(loglevel=logging.WARNING) -> BpfProgram:
|
def BPF(loglevel=logging.INFO) -> BpfProgram:
|
||||||
caller_frame = inspect.stack()[1]
|
caller_frame = inspect.stack()[1]
|
||||||
src = inspect.getsource(caller_frame.frame)
|
src = inspect.getsource(caller_frame.frame)
|
||||||
with tempfile.NamedTemporaryFile(
|
with tempfile.NamedTemporaryFile(
|
||||||
|
|||||||
4
pythonbpf/expr/__init__.py
Normal file
4
pythonbpf/expr/__init__.py
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
from .expr_pass import eval_expr, handle_expr
|
||||||
|
from .type_normalization import convert_to_bool
|
||||||
|
|
||||||
|
__all__ = ["eval_expr", "handle_expr", "convert_to_bool"]
|
||||||
445
pythonbpf/expr/expr_pass.py
Normal file
445
pythonbpf/expr/expr_pass.py
Normal file
@ -0,0 +1,445 @@
|
|||||||
|
import ast
|
||||||
|
from llvmlite import ir
|
||||||
|
from logging import Logger
|
||||||
|
import logging
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
from pythonbpf.type_deducer import ctypes_to_ir, is_ctypes
|
||||||
|
from .type_normalization import convert_to_bool, handle_comparator
|
||||||
|
|
||||||
|
logger: Logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_name_expr(expr: ast.Name, local_sym_tab: Dict, builder: ir.IRBuilder):
|
||||||
|
"""Handle ast.Name expressions."""
|
||||||
|
if expr.id in local_sym_tab:
|
||||||
|
var = local_sym_tab[expr.id].var
|
||||||
|
val = builder.load(var)
|
||||||
|
return val, local_sym_tab[expr.id].ir_type
|
||||||
|
else:
|
||||||
|
logger.info(f"Undefined variable {expr.id}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_constant_expr(expr: ast.Constant):
|
||||||
|
"""Handle ast.Constant expressions."""
|
||||||
|
if isinstance(expr.value, int) or isinstance(expr.value, bool):
|
||||||
|
return ir.Constant(ir.IntType(64), int(expr.value)), ir.IntType(64)
|
||||||
|
else:
|
||||||
|
logger.error("Unsupported constant type")
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_attribute_expr(
|
||||||
|
expr: ast.Attribute,
|
||||||
|
local_sym_tab: Dict,
|
||||||
|
structs_sym_tab: Dict,
|
||||||
|
builder: ir.IRBuilder,
|
||||||
|
):
|
||||||
|
"""Handle ast.Attribute expressions for struct field access."""
|
||||||
|
if isinstance(expr.value, ast.Name):
|
||||||
|
var_name = expr.value.id
|
||||||
|
attr_name = expr.attr
|
||||||
|
if var_name in local_sym_tab:
|
||||||
|
var_ptr, var_type, var_metadata = local_sym_tab[var_name]
|
||||||
|
logger.info(f"Loading attribute {attr_name} from variable {var_name}")
|
||||||
|
logger.info(f"Variable type: {var_type}, Variable ptr: {var_ptr}")
|
||||||
|
metadata = structs_sym_tab[var_metadata]
|
||||||
|
if attr_name in metadata.fields:
|
||||||
|
gep = metadata.gep(builder, var_ptr, attr_name)
|
||||||
|
val = builder.load(gep)
|
||||||
|
field_type = metadata.field_type(attr_name)
|
||||||
|
return val, field_type
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_deref_call(expr: ast.Call, local_sym_tab: Dict, builder: ir.IRBuilder):
|
||||||
|
"""Handle deref function calls."""
|
||||||
|
logger.info(f"Handling deref {ast.dump(expr)}")
|
||||||
|
if len(expr.args) != 1:
|
||||||
|
logger.info("deref takes exactly one argument")
|
||||||
|
return None
|
||||||
|
|
||||||
|
arg = expr.args[0]
|
||||||
|
if (
|
||||||
|
isinstance(arg, ast.Call)
|
||||||
|
and isinstance(arg.func, ast.Name)
|
||||||
|
and arg.func.id == "deref"
|
||||||
|
):
|
||||||
|
logger.info("Multiple deref not supported")
|
||||||
|
return None
|
||||||
|
|
||||||
|
if isinstance(arg, ast.Name):
|
||||||
|
if arg.id in local_sym_tab:
|
||||||
|
arg_ptr = local_sym_tab[arg.id].var
|
||||||
|
else:
|
||||||
|
logger.info(f"Undefined variable {arg.id}")
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
logger.info("Unsupported argument type for deref")
|
||||||
|
return None
|
||||||
|
|
||||||
|
if arg_ptr is None:
|
||||||
|
logger.info("Failed to evaluate deref argument")
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Load the value from pointer
|
||||||
|
val = builder.load(arg_ptr)
|
||||||
|
return val, local_sym_tab[arg.id].ir_type
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_ctypes_call(
|
||||||
|
func,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
expr,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab=None,
|
||||||
|
):
|
||||||
|
"""Handle ctypes type constructor calls."""
|
||||||
|
if len(expr.args) != 1:
|
||||||
|
logger.info("ctypes constructor takes exactly one argument")
|
||||||
|
return None
|
||||||
|
|
||||||
|
arg = expr.args[0]
|
||||||
|
val = eval_expr(
|
||||||
|
func,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
arg,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab,
|
||||||
|
)
|
||||||
|
if val is None:
|
||||||
|
logger.info("Failed to evaluate argument to ctypes constructor")
|
||||||
|
return None
|
||||||
|
call_type = expr.func.id
|
||||||
|
expected_type = ctypes_to_ir(call_type)
|
||||||
|
|
||||||
|
if val[1] != expected_type:
|
||||||
|
# NOTE: We are only considering casting to and from int types for now
|
||||||
|
if isinstance(val[1], ir.IntType) and isinstance(expected_type, ir.IntType):
|
||||||
|
if val[1].width < expected_type.width:
|
||||||
|
val = (builder.sext(val[0], expected_type), expected_type)
|
||||||
|
else:
|
||||||
|
val = (builder.trunc(val[0], expected_type), expected_type)
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Type mismatch: expected {expected_type}, got {val[1]}")
|
||||||
|
return val
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_compare(
|
||||||
|
func, module, builder, cond, local_sym_tab, map_sym_tab, structs_sym_tab=None
|
||||||
|
):
|
||||||
|
"""Handle ast.Compare expressions."""
|
||||||
|
|
||||||
|
if len(cond.ops) != 1 or len(cond.comparators) != 1:
|
||||||
|
logger.error("Only single comparisons are supported")
|
||||||
|
return None
|
||||||
|
lhs = eval_expr(
|
||||||
|
func,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
cond.left,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab,
|
||||||
|
)
|
||||||
|
rhs = eval_expr(
|
||||||
|
func,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
cond.comparators[0],
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab,
|
||||||
|
)
|
||||||
|
|
||||||
|
if lhs is None or rhs is None:
|
||||||
|
logger.error("Failed to evaluate comparison operands")
|
||||||
|
return None
|
||||||
|
|
||||||
|
lhs, _ = lhs
|
||||||
|
rhs, _ = rhs
|
||||||
|
return handle_comparator(func, builder, cond.ops[0], lhs, rhs)
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_unary_op(
|
||||||
|
func,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
expr: ast.UnaryOp,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab=None,
|
||||||
|
):
|
||||||
|
"""Handle ast.UnaryOp expressions."""
|
||||||
|
if not isinstance(expr.op, ast.Not):
|
||||||
|
logger.error("Only 'not' unary operator is supported")
|
||||||
|
return None
|
||||||
|
|
||||||
|
operand = eval_expr(
|
||||||
|
func, module, builder, expr.operand, local_sym_tab, map_sym_tab, structs_sym_tab
|
||||||
|
)
|
||||||
|
if operand is None:
|
||||||
|
logger.error("Failed to evaluate operand for unary operation")
|
||||||
|
return None
|
||||||
|
|
||||||
|
operand_val, operand_type = operand
|
||||||
|
true_const = ir.Constant(ir.IntType(1), 1)
|
||||||
|
result = builder.xor(convert_to_bool(builder, operand_val), true_const)
|
||||||
|
return result, ir.IntType(1)
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_and_op(func, builder, expr, local_sym_tab, map_sym_tab, structs_sym_tab):
|
||||||
|
"""Handle `and` boolean operations."""
|
||||||
|
|
||||||
|
logger.debug(f"Handling 'and' operator with {len(expr.values)} operands")
|
||||||
|
|
||||||
|
merge_block = func.append_basic_block(name="and.merge")
|
||||||
|
false_block = func.append_basic_block(name="and.false")
|
||||||
|
|
||||||
|
incoming_values = []
|
||||||
|
|
||||||
|
for i, value in enumerate(expr.values):
|
||||||
|
is_last = i == len(expr.values) - 1
|
||||||
|
|
||||||
|
# Evaluate current operand
|
||||||
|
operand_result = eval_expr(
|
||||||
|
func, None, builder, value, local_sym_tab, map_sym_tab, structs_sym_tab
|
||||||
|
)
|
||||||
|
if operand_result is None:
|
||||||
|
logger.error(f"Failed to evaluate operand {i} in 'and' expression")
|
||||||
|
return None
|
||||||
|
|
||||||
|
operand_val, operand_type = operand_result
|
||||||
|
|
||||||
|
# Convert to boolean if needed
|
||||||
|
operand_bool = convert_to_bool(builder, operand_val)
|
||||||
|
current_block = builder.block
|
||||||
|
|
||||||
|
if is_last:
|
||||||
|
# Last operand: result is this value
|
||||||
|
builder.branch(merge_block)
|
||||||
|
incoming_values.append((operand_bool, current_block))
|
||||||
|
else:
|
||||||
|
# Not last: check if true, continue or short-circuit
|
||||||
|
next_check = func.append_basic_block(name=f"and.check_{i + 1}")
|
||||||
|
builder.cbranch(operand_bool, next_check, false_block)
|
||||||
|
builder.position_at_end(next_check)
|
||||||
|
|
||||||
|
# False block: short-circuit with false
|
||||||
|
builder.position_at_end(false_block)
|
||||||
|
builder.branch(merge_block)
|
||||||
|
false_value = ir.Constant(ir.IntType(1), 0)
|
||||||
|
incoming_values.append((false_value, false_block))
|
||||||
|
|
||||||
|
# Merge block: phi node
|
||||||
|
builder.position_at_end(merge_block)
|
||||||
|
phi = builder.phi(ir.IntType(1), name="and.result")
|
||||||
|
for val, block in incoming_values:
|
||||||
|
phi.add_incoming(val, block)
|
||||||
|
|
||||||
|
logger.debug(f"Generated 'and' with {len(incoming_values)} incoming values")
|
||||||
|
return phi, ir.IntType(1)
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_or_op(func, builder, expr, local_sym_tab, map_sym_tab, structs_sym_tab):
|
||||||
|
"""Handle `or` boolean operations."""
|
||||||
|
|
||||||
|
logger.debug(f"Handling 'or' operator with {len(expr.values)} operands")
|
||||||
|
|
||||||
|
merge_block = func.append_basic_block(name="or.merge")
|
||||||
|
true_block = func.append_basic_block(name="or.true")
|
||||||
|
|
||||||
|
incoming_values = []
|
||||||
|
|
||||||
|
for i, value in enumerate(expr.values):
|
||||||
|
is_last = i == len(expr.values) - 1
|
||||||
|
|
||||||
|
# Evaluate current operand
|
||||||
|
operand_result = eval_expr(
|
||||||
|
func, None, builder, value, local_sym_tab, map_sym_tab, structs_sym_tab
|
||||||
|
)
|
||||||
|
if operand_result is None:
|
||||||
|
logger.error(f"Failed to evaluate operand {i} in 'or' expression")
|
||||||
|
return None
|
||||||
|
|
||||||
|
operand_val, operand_type = operand_result
|
||||||
|
|
||||||
|
# Convert to boolean if needed
|
||||||
|
operand_bool = convert_to_bool(builder, operand_val)
|
||||||
|
current_block = builder.block
|
||||||
|
|
||||||
|
if is_last:
|
||||||
|
# Last operand: result is this value
|
||||||
|
builder.branch(merge_block)
|
||||||
|
incoming_values.append((operand_bool, current_block))
|
||||||
|
else:
|
||||||
|
# Not last: check if false, continue or short-circuit
|
||||||
|
next_check = func.append_basic_block(name=f"or.check_{i + 1}")
|
||||||
|
builder.cbranch(operand_bool, true_block, next_check)
|
||||||
|
builder.position_at_end(next_check)
|
||||||
|
|
||||||
|
# True block: short-circuit with true
|
||||||
|
builder.position_at_end(true_block)
|
||||||
|
builder.branch(merge_block)
|
||||||
|
true_value = ir.Constant(ir.IntType(1), 1)
|
||||||
|
incoming_values.append((true_value, true_block))
|
||||||
|
|
||||||
|
# Merge block: phi node
|
||||||
|
builder.position_at_end(merge_block)
|
||||||
|
phi = builder.phi(ir.IntType(1), name="or.result")
|
||||||
|
for val, block in incoming_values:
|
||||||
|
phi.add_incoming(val, block)
|
||||||
|
|
||||||
|
logger.debug(f"Generated 'or' with {len(incoming_values)} incoming values")
|
||||||
|
return phi, ir.IntType(1)
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_boolean_op(
|
||||||
|
func,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
expr: ast.BoolOp,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab=None,
|
||||||
|
):
|
||||||
|
"""Handle `and` and `or` boolean operations."""
|
||||||
|
|
||||||
|
if isinstance(expr.op, ast.And):
|
||||||
|
return _handle_and_op(
|
||||||
|
func, builder, expr, local_sym_tab, map_sym_tab, structs_sym_tab
|
||||||
|
)
|
||||||
|
elif isinstance(expr.op, ast.Or):
|
||||||
|
return _handle_or_op(
|
||||||
|
func, builder, expr, local_sym_tab, map_sym_tab, structs_sym_tab
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.error(f"Unsupported boolean operator: {type(expr.op).__name__}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def eval_expr(
|
||||||
|
func,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
expr,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab=None,
|
||||||
|
):
|
||||||
|
logger.info(f"Evaluating expression: {ast.dump(expr)}")
|
||||||
|
if isinstance(expr, ast.Name):
|
||||||
|
return _handle_name_expr(expr, local_sym_tab, builder)
|
||||||
|
elif isinstance(expr, ast.Constant):
|
||||||
|
return _handle_constant_expr(expr)
|
||||||
|
elif isinstance(expr, ast.Call):
|
||||||
|
if isinstance(expr.func, ast.Name) and expr.func.id == "deref":
|
||||||
|
return _handle_deref_call(expr, local_sym_tab, builder)
|
||||||
|
|
||||||
|
if isinstance(expr.func, ast.Name) and is_ctypes(expr.func.id):
|
||||||
|
return _handle_ctypes_call(
|
||||||
|
func,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
expr,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab,
|
||||||
|
)
|
||||||
|
|
||||||
|
# delayed import to avoid circular dependency
|
||||||
|
from pythonbpf.helper import HelperHandlerRegistry, handle_helper_call
|
||||||
|
|
||||||
|
if isinstance(expr.func, ast.Name) and HelperHandlerRegistry.has_handler(
|
||||||
|
expr.func.id
|
||||||
|
):
|
||||||
|
return handle_helper_call(
|
||||||
|
expr,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
func,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab,
|
||||||
|
)
|
||||||
|
elif isinstance(expr.func, ast.Attribute):
|
||||||
|
logger.info(f"Handling method call: {ast.dump(expr.func)}")
|
||||||
|
if isinstance(expr.func.value, ast.Call) and isinstance(
|
||||||
|
expr.func.value.func, ast.Name
|
||||||
|
):
|
||||||
|
method_name = expr.func.attr
|
||||||
|
if HelperHandlerRegistry.has_handler(method_name):
|
||||||
|
return handle_helper_call(
|
||||||
|
expr,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
func,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab,
|
||||||
|
)
|
||||||
|
elif isinstance(expr.func.value, ast.Name):
|
||||||
|
obj_name = expr.func.value.id
|
||||||
|
method_name = expr.func.attr
|
||||||
|
if obj_name in map_sym_tab:
|
||||||
|
if HelperHandlerRegistry.has_handler(method_name):
|
||||||
|
return handle_helper_call(
|
||||||
|
expr,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
func,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab,
|
||||||
|
)
|
||||||
|
elif isinstance(expr, ast.Attribute):
|
||||||
|
return _handle_attribute_expr(expr, local_sym_tab, structs_sym_tab, builder)
|
||||||
|
elif isinstance(expr, ast.BinOp):
|
||||||
|
from pythonbpf.binary_ops import handle_binary_op
|
||||||
|
|
||||||
|
return handle_binary_op(expr, builder, None, local_sym_tab)
|
||||||
|
elif isinstance(expr, ast.Compare):
|
||||||
|
return _handle_compare(
|
||||||
|
func, module, builder, expr, local_sym_tab, map_sym_tab, structs_sym_tab
|
||||||
|
)
|
||||||
|
elif isinstance(expr, ast.UnaryOp):
|
||||||
|
return _handle_unary_op(
|
||||||
|
func, module, builder, expr, local_sym_tab, map_sym_tab, structs_sym_tab
|
||||||
|
)
|
||||||
|
elif isinstance(expr, ast.BoolOp):
|
||||||
|
return _handle_boolean_op(
|
||||||
|
func, module, builder, expr, local_sym_tab, map_sym_tab, structs_sym_tab
|
||||||
|
)
|
||||||
|
logger.info("Unsupported expression evaluation")
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def handle_expr(
|
||||||
|
func,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
expr,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab,
|
||||||
|
):
|
||||||
|
"""Handle expression statements in the function body."""
|
||||||
|
logger.info(f"Handling expression: {ast.dump(expr)}")
|
||||||
|
call = expr.value
|
||||||
|
if isinstance(call, ast.Call):
|
||||||
|
eval_expr(
|
||||||
|
func,
|
||||||
|
module,
|
||||||
|
builder,
|
||||||
|
call,
|
||||||
|
local_sym_tab,
|
||||||
|
map_sym_tab,
|
||||||
|
structs_sym_tab,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.info("Unsupported expression type")
|
||||||
128
pythonbpf/expr/type_normalization.py
Normal file
128
pythonbpf/expr/type_normalization.py
Normal file
@ -0,0 +1,128 @@
|
|||||||
|
from llvmlite import ir
|
||||||
|
import logging
|
||||||
|
import ast
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
COMPARISON_OPS = {
|
||||||
|
ast.Eq: "==",
|
||||||
|
ast.NotEq: "!=",
|
||||||
|
ast.Lt: "<",
|
||||||
|
ast.LtE: "<=",
|
||||||
|
ast.Gt: ">",
|
||||||
|
ast.GtE: ">=",
|
||||||
|
ast.Is: "==",
|
||||||
|
ast.IsNot: "!=",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _get_base_type_and_depth(ir_type):
|
||||||
|
"""Get the base type for pointer types."""
|
||||||
|
cur_type = ir_type
|
||||||
|
depth = 0
|
||||||
|
while isinstance(cur_type, ir.PointerType):
|
||||||
|
depth += 1
|
||||||
|
cur_type = cur_type.pointee
|
||||||
|
return cur_type, depth
|
||||||
|
|
||||||
|
|
||||||
|
def _deref_to_depth(func, builder, val, target_depth):
|
||||||
|
"""Dereference a pointer to a certain depth."""
|
||||||
|
|
||||||
|
cur_val = val
|
||||||
|
cur_type = val.type
|
||||||
|
|
||||||
|
for depth in range(target_depth):
|
||||||
|
if not isinstance(val.type, ir.PointerType):
|
||||||
|
logger.error("Cannot dereference further, non-pointer type")
|
||||||
|
return None
|
||||||
|
|
||||||
|
# dereference with null check
|
||||||
|
pointee_type = cur_type.pointee
|
||||||
|
null_check_block = builder.block
|
||||||
|
not_null_block = func.append_basic_block(name=f"deref_not_null_{depth}")
|
||||||
|
merge_block = func.append_basic_block(name=f"deref_merge_{depth}")
|
||||||
|
|
||||||
|
null_ptr = ir.Constant(cur_type, None)
|
||||||
|
is_not_null = builder.icmp_signed("!=", cur_val, null_ptr)
|
||||||
|
logger.debug(f"Inserted null check for pointer at depth {depth}")
|
||||||
|
|
||||||
|
builder.cbranch(is_not_null, not_null_block, merge_block)
|
||||||
|
|
||||||
|
builder.position_at_end(not_null_block)
|
||||||
|
dereferenced_val = builder.load(cur_val)
|
||||||
|
logger.debug(f"Dereferenced to depth {depth - 1}, type: {pointee_type}")
|
||||||
|
builder.branch(merge_block)
|
||||||
|
|
||||||
|
builder.position_at_end(merge_block)
|
||||||
|
phi = builder.phi(pointee_type, name=f"deref_result_{depth}")
|
||||||
|
|
||||||
|
zero_value = (
|
||||||
|
ir.Constant(pointee_type, 0)
|
||||||
|
if isinstance(pointee_type, ir.IntType)
|
||||||
|
else ir.Constant(pointee_type, None)
|
||||||
|
)
|
||||||
|
phi.add_incoming(zero_value, null_check_block)
|
||||||
|
|
||||||
|
phi.add_incoming(dereferenced_val, not_null_block)
|
||||||
|
|
||||||
|
# Continue with phi result
|
||||||
|
cur_val = phi
|
||||||
|
cur_type = pointee_type
|
||||||
|
return cur_val
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_types(func, builder, lhs, rhs):
|
||||||
|
"""Normalize types for comparison."""
|
||||||
|
|
||||||
|
logger.info(f"Normalizing types: {lhs.type} vs {rhs.type}")
|
||||||
|
if isinstance(lhs.type, ir.IntType) and isinstance(rhs.type, ir.IntType):
|
||||||
|
if lhs.type.width < rhs.type.width:
|
||||||
|
lhs = builder.sext(lhs, rhs.type)
|
||||||
|
else:
|
||||||
|
rhs = builder.sext(rhs, lhs.type)
|
||||||
|
return lhs, rhs
|
||||||
|
elif not isinstance(lhs.type, ir.PointerType) and not isinstance(
|
||||||
|
rhs.type, ir.PointerType
|
||||||
|
):
|
||||||
|
logger.error(f"Type mismatch: {lhs.type} vs {rhs.type}")
|
||||||
|
return None, None
|
||||||
|
else:
|
||||||
|
lhs_base, lhs_depth = _get_base_type_and_depth(lhs.type)
|
||||||
|
rhs_base, rhs_depth = _get_base_type_and_depth(rhs.type)
|
||||||
|
if lhs_base == rhs_base:
|
||||||
|
if lhs_depth < rhs_depth:
|
||||||
|
rhs = _deref_to_depth(func, builder, rhs, rhs_depth - lhs_depth)
|
||||||
|
elif rhs_depth < lhs_depth:
|
||||||
|
lhs = _deref_to_depth(func, builder, lhs, lhs_depth - rhs_depth)
|
||||||
|
return _normalize_types(func, builder, lhs, rhs)
|
||||||
|
|
||||||
|
|
||||||
|
def convert_to_bool(builder, val):
|
||||||
|
"""Convert a value to boolean."""
|
||||||
|
if val.type == ir.IntType(1):
|
||||||
|
return val
|
||||||
|
if isinstance(val.type, ir.PointerType):
|
||||||
|
zero = ir.Constant(val.type, None)
|
||||||
|
else:
|
||||||
|
zero = ir.Constant(val.type, 0)
|
||||||
|
return builder.icmp_signed("!=", val, zero)
|
||||||
|
|
||||||
|
|
||||||
|
def handle_comparator(func, builder, op, lhs, rhs):
|
||||||
|
"""Handle comparison operations."""
|
||||||
|
|
||||||
|
if lhs.type != rhs.type:
|
||||||
|
lhs, rhs = _normalize_types(func, builder, lhs, rhs)
|
||||||
|
|
||||||
|
if lhs is None or rhs is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if type(op) not in COMPARISON_OPS:
|
||||||
|
logger.error(f"Unsupported comparison operator: {type(op)}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
predicate = COMPARISON_OPS[type(op)]
|
||||||
|
result = builder.icmp_signed(predicate, lhs, rhs)
|
||||||
|
logger.debug(f"Comparison result: {result}")
|
||||||
|
return result, ir.IntType(1)
|
||||||
@ -1,183 +0,0 @@
|
|||||||
import ast
|
|
||||||
from llvmlite import ir
|
|
||||||
from logging import Logger
|
|
||||||
import logging
|
|
||||||
from typing import Dict
|
|
||||||
|
|
||||||
logger: Logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def _handle_name_expr(expr: ast.Name, local_sym_tab: Dict, builder: ir.IRBuilder):
|
|
||||||
"""Handle ast.Name expressions."""
|
|
||||||
if expr.id in local_sym_tab:
|
|
||||||
var = local_sym_tab[expr.id].var
|
|
||||||
val = builder.load(var)
|
|
||||||
return val, local_sym_tab[expr.id].ir_type
|
|
||||||
else:
|
|
||||||
logger.info(f"Undefined variable {expr.id}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _handle_constant_expr(expr: ast.Constant):
|
|
||||||
"""Handle ast.Constant expressions."""
|
|
||||||
if isinstance(expr.value, int):
|
|
||||||
return ir.Constant(ir.IntType(64), expr.value), ir.IntType(64)
|
|
||||||
elif isinstance(expr.value, bool):
|
|
||||||
return ir.Constant(ir.IntType(1), int(expr.value)), ir.IntType(1)
|
|
||||||
else:
|
|
||||||
logger.info("Unsupported constant type")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _handle_attribute_expr(
|
|
||||||
expr: ast.Attribute,
|
|
||||||
local_sym_tab: Dict,
|
|
||||||
structs_sym_tab: Dict,
|
|
||||||
builder: ir.IRBuilder,
|
|
||||||
):
|
|
||||||
"""Handle ast.Attribute expressions for struct field access."""
|
|
||||||
if isinstance(expr.value, ast.Name):
|
|
||||||
var_name = expr.value.id
|
|
||||||
attr_name = expr.attr
|
|
||||||
if var_name in local_sym_tab:
|
|
||||||
var_ptr, var_type, var_metadata = local_sym_tab[var_name]
|
|
||||||
logger.info(f"Loading attribute {attr_name} from variable {var_name}")
|
|
||||||
logger.info(f"Variable type: {var_type}, Variable ptr: {var_ptr}")
|
|
||||||
|
|
||||||
metadata = structs_sym_tab[var_metadata]
|
|
||||||
if attr_name in metadata.fields:
|
|
||||||
gep = metadata.gep(builder, var_ptr, attr_name)
|
|
||||||
val = builder.load(gep)
|
|
||||||
field_type = metadata.field_type(attr_name)
|
|
||||||
return val, field_type
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _handle_deref_call(expr: ast.Call, local_sym_tab: Dict, builder: ir.IRBuilder):
|
|
||||||
"""Handle deref function calls."""
|
|
||||||
logger.info(f"Handling deref {ast.dump(expr)}")
|
|
||||||
if len(expr.args) != 1:
|
|
||||||
logger.info("deref takes exactly one argument")
|
|
||||||
return None
|
|
||||||
|
|
||||||
arg = expr.args[0]
|
|
||||||
if (
|
|
||||||
isinstance(arg, ast.Call)
|
|
||||||
and isinstance(arg.func, ast.Name)
|
|
||||||
and arg.func.id == "deref"
|
|
||||||
):
|
|
||||||
logger.info("Multiple deref not supported")
|
|
||||||
return None
|
|
||||||
|
|
||||||
if isinstance(arg, ast.Name):
|
|
||||||
if arg.id in local_sym_tab:
|
|
||||||
arg_ptr = local_sym_tab[arg.id].var
|
|
||||||
else:
|
|
||||||
logger.info(f"Undefined variable {arg.id}")
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
logger.info("Unsupported argument type for deref")
|
|
||||||
return None
|
|
||||||
|
|
||||||
if arg_ptr is None:
|
|
||||||
logger.info("Failed to evaluate deref argument")
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Load the value from pointer
|
|
||||||
val = builder.load(arg_ptr)
|
|
||||||
return val, local_sym_tab[arg.id].ir_type
|
|
||||||
|
|
||||||
|
|
||||||
def eval_expr(
|
|
||||||
func,
|
|
||||||
module,
|
|
||||||
builder,
|
|
||||||
expr,
|
|
||||||
local_sym_tab,
|
|
||||||
map_sym_tab,
|
|
||||||
structs_sym_tab=None,
|
|
||||||
):
|
|
||||||
logger.info(f"Evaluating expression: {ast.dump(expr)}")
|
|
||||||
if isinstance(expr, ast.Name):
|
|
||||||
return _handle_name_expr(expr, local_sym_tab, builder)
|
|
||||||
elif isinstance(expr, ast.Constant):
|
|
||||||
return _handle_constant_expr(expr)
|
|
||||||
elif isinstance(expr, ast.Call):
|
|
||||||
if isinstance(expr.func, ast.Name) and expr.func.id == "deref":
|
|
||||||
return _handle_deref_call(expr, local_sym_tab, builder)
|
|
||||||
|
|
||||||
# delayed import to avoid circular dependency
|
|
||||||
from pythonbpf.helper import HelperHandlerRegistry, handle_helper_call
|
|
||||||
|
|
||||||
if isinstance(expr.func, ast.Name) and HelperHandlerRegistry.has_handler(
|
|
||||||
expr.func.id
|
|
||||||
):
|
|
||||||
return handle_helper_call(
|
|
||||||
expr,
|
|
||||||
module,
|
|
||||||
builder,
|
|
||||||
func,
|
|
||||||
local_sym_tab,
|
|
||||||
map_sym_tab,
|
|
||||||
structs_sym_tab,
|
|
||||||
)
|
|
||||||
elif isinstance(expr.func, ast.Attribute):
|
|
||||||
logger.info(f"Handling method call: {ast.dump(expr.func)}")
|
|
||||||
if isinstance(expr.func.value, ast.Call) and isinstance(
|
|
||||||
expr.func.value.func, ast.Name
|
|
||||||
):
|
|
||||||
method_name = expr.func.attr
|
|
||||||
if HelperHandlerRegistry.has_handler(method_name):
|
|
||||||
return handle_helper_call(
|
|
||||||
expr,
|
|
||||||
module,
|
|
||||||
builder,
|
|
||||||
func,
|
|
||||||
local_sym_tab,
|
|
||||||
map_sym_tab,
|
|
||||||
structs_sym_tab,
|
|
||||||
)
|
|
||||||
elif isinstance(expr.func.value, ast.Name):
|
|
||||||
obj_name = expr.func.value.id
|
|
||||||
method_name = expr.func.attr
|
|
||||||
if obj_name in map_sym_tab:
|
|
||||||
if HelperHandlerRegistry.has_handler(method_name):
|
|
||||||
return handle_helper_call(
|
|
||||||
expr,
|
|
||||||
module,
|
|
||||||
builder,
|
|
||||||
func,
|
|
||||||
local_sym_tab,
|
|
||||||
map_sym_tab,
|
|
||||||
structs_sym_tab,
|
|
||||||
)
|
|
||||||
elif isinstance(expr, ast.Attribute):
|
|
||||||
return _handle_attribute_expr(expr, local_sym_tab, structs_sym_tab, builder)
|
|
||||||
logger.info("Unsupported expression evaluation")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def handle_expr(
|
|
||||||
func,
|
|
||||||
module,
|
|
||||||
builder,
|
|
||||||
expr,
|
|
||||||
local_sym_tab,
|
|
||||||
map_sym_tab,
|
|
||||||
structs_sym_tab,
|
|
||||||
):
|
|
||||||
"""Handle expression statements in the function body."""
|
|
||||||
logger.info(f"Handling expression: {ast.dump(expr)}")
|
|
||||||
call = expr.value
|
|
||||||
if isinstance(call, ast.Call):
|
|
||||||
eval_expr(
|
|
||||||
func,
|
|
||||||
module,
|
|
||||||
builder,
|
|
||||||
call,
|
|
||||||
local_sym_tab,
|
|
||||||
map_sym_tab,
|
|
||||||
structs_sym_tab,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.info("Unsupported expression type")
|
|
||||||
3
pythonbpf/functions/__init__.py
Normal file
3
pythonbpf/functions/__init__.py
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
from .functions_pass import func_proc
|
||||||
|
|
||||||
|
__all__ = ["func_proc"]
|
||||||
22
pythonbpf/functions/func_registry_handlers.py
Normal file
22
pythonbpf/functions/func_registry_handlers.py
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
|
||||||
|
class StatementHandlerRegistry:
|
||||||
|
"""Registry for statement handlers."""
|
||||||
|
|
||||||
|
_handlers: Dict = {}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def register(cls, stmt_type):
|
||||||
|
"""Register a handler for a specific statement type."""
|
||||||
|
|
||||||
|
def decorator(handler):
|
||||||
|
cls._handlers[stmt_type] = handler
|
||||||
|
return handler
|
||||||
|
|
||||||
|
return decorator
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __getitem__(cls, stmt_type):
|
||||||
|
"""Get the handler for a specific statement type."""
|
||||||
|
return cls._handlers.get(stmt_type, None)
|
||||||
@ -4,10 +4,13 @@ import logging
|
|||||||
from typing import Any
|
from typing import Any
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
|
|
||||||
from .helper import HelperHandlerRegistry, handle_helper_call
|
from pythonbpf.helper import HelperHandlerRegistry, handle_helper_call
|
||||||
from .type_deducer import ctypes_to_ir
|
from pythonbpf.type_deducer import ctypes_to_ir
|
||||||
from .binary_ops import handle_binary_op
|
from pythonbpf.binary_ops import handle_binary_op
|
||||||
from .expr_pass import eval_expr, handle_expr
|
from pythonbpf.expr import eval_expr, handle_expr, convert_to_bool
|
||||||
|
|
||||||
|
from .return_utils import _handle_none_return, _handle_xdp_return, _is_xdp_name
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -146,8 +149,7 @@ def handle_assign(
|
|||||||
local_sym_tab[var_name].var,
|
local_sym_tab[var_name].var,
|
||||||
)
|
)
|
||||||
logger.info(
|
logger.info(
|
||||||
f"Assigned {call_type} constant "
|
f"Assigned {call_type} constant {rval.args[0].value} to {var_name}"
|
||||||
f"{rval.args[0].value} to {var_name}"
|
|
||||||
)
|
)
|
||||||
elif HelperHandlerRegistry.has_handler(call_type):
|
elif HelperHandlerRegistry.has_handler(call_type):
|
||||||
# var = builder.alloca(ir.IntType(64), name=var_name)
|
# var = builder.alloca(ir.IntType(64), name=var_name)
|
||||||
@ -233,76 +235,18 @@ def handle_assign(
|
|||||||
else:
|
else:
|
||||||
logger.info("Unsupported assignment call function type")
|
logger.info("Unsupported assignment call function type")
|
||||||
elif isinstance(rval, ast.BinOp):
|
elif isinstance(rval, ast.BinOp):
|
||||||
handle_binary_op(rval, module, builder, var_name, local_sym_tab)
|
handle_binary_op(rval, builder, var_name, local_sym_tab)
|
||||||
else:
|
else:
|
||||||
logger.info("Unsupported assignment value type")
|
logger.info("Unsupported assignment value type")
|
||||||
|
|
||||||
|
|
||||||
def handle_cond(func, module, builder, cond, local_sym_tab, map_sym_tab):
|
def handle_cond(
|
||||||
if isinstance(cond, ast.Constant):
|
func, module, builder, cond, local_sym_tab, map_sym_tab, structs_sym_tab=None
|
||||||
if isinstance(cond.value, bool):
|
):
|
||||||
return ir.Constant(ir.IntType(1), int(cond.value))
|
val = eval_expr(
|
||||||
elif isinstance(cond.value, int):
|
func, module, builder, cond, local_sym_tab, map_sym_tab, structs_sym_tab
|
||||||
return ir.Constant(ir.IntType(1), int(bool(cond.value)))
|
)[0]
|
||||||
else:
|
return convert_to_bool(builder, val)
|
||||||
logger.info("Unsupported constant type in condition")
|
|
||||||
return None
|
|
||||||
elif isinstance(cond, ast.Name):
|
|
||||||
if cond.id in local_sym_tab:
|
|
||||||
var = local_sym_tab[cond.id].var
|
|
||||||
val = builder.load(var)
|
|
||||||
if val.type != ir.IntType(1):
|
|
||||||
# Convert nonzero values to true, zero to false
|
|
||||||
if isinstance(val.type, ir.PointerType):
|
|
||||||
# For pointer types, compare with null pointer
|
|
||||||
zero = ir.Constant(val.type, None)
|
|
||||||
else:
|
|
||||||
# For integer types, compare with zero
|
|
||||||
zero = ir.Constant(val.type, 0)
|
|
||||||
val = builder.icmp_signed("!=", val, zero)
|
|
||||||
return val
|
|
||||||
else:
|
|
||||||
logger.info(f"Undefined variable {cond.id} in condition")
|
|
||||||
return None
|
|
||||||
elif isinstance(cond, ast.Compare):
|
|
||||||
lhs = eval_expr(func, module, builder, cond.left, local_sym_tab, map_sym_tab)[0]
|
|
||||||
if len(cond.ops) != 1 or len(cond.comparators) != 1:
|
|
||||||
logger.info("Unsupported complex comparison")
|
|
||||||
return None
|
|
||||||
rhs = eval_expr(
|
|
||||||
func, module, builder, cond.comparators[0], local_sym_tab, map_sym_tab
|
|
||||||
)[0]
|
|
||||||
op = cond.ops[0]
|
|
||||||
|
|
||||||
if lhs.type != rhs.type:
|
|
||||||
if isinstance(lhs.type, ir.IntType) and isinstance(rhs.type, ir.IntType):
|
|
||||||
# Extend the smaller type to the larger type
|
|
||||||
if lhs.type.width < rhs.type.width:
|
|
||||||
lhs = builder.sext(lhs, rhs.type)
|
|
||||||
elif lhs.type.width > rhs.type.width:
|
|
||||||
rhs = builder.sext(rhs, lhs.type)
|
|
||||||
else:
|
|
||||||
logger.info("Type mismatch in comparison")
|
|
||||||
return None
|
|
||||||
|
|
||||||
if isinstance(op, ast.Eq):
|
|
||||||
return builder.icmp_signed("==", lhs, rhs)
|
|
||||||
elif isinstance(op, ast.NotEq):
|
|
||||||
return builder.icmp_signed("!=", lhs, rhs)
|
|
||||||
elif isinstance(op, ast.Lt):
|
|
||||||
return builder.icmp_signed("<", lhs, rhs)
|
|
||||||
elif isinstance(op, ast.LtE):
|
|
||||||
return builder.icmp_signed("<=", lhs, rhs)
|
|
||||||
elif isinstance(op, ast.Gt):
|
|
||||||
return builder.icmp_signed(">", lhs, rhs)
|
|
||||||
elif isinstance(op, ast.GtE):
|
|
||||||
return builder.icmp_signed(">=", lhs, rhs)
|
|
||||||
else:
|
|
||||||
logger.info("Unsupported comparison operator")
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
logger.info("Unsupported condition expression")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def handle_if(
|
def handle_if(
|
||||||
@ -318,7 +262,9 @@ def handle_if(
|
|||||||
else:
|
else:
|
||||||
else_block = None
|
else_block = None
|
||||||
|
|
||||||
cond = handle_cond(func, module, builder, stmt.test, local_sym_tab, map_sym_tab)
|
cond = handle_cond(
|
||||||
|
func, module, builder, stmt.test, local_sym_tab, map_sym_tab, structs_sym_tab
|
||||||
|
)
|
||||||
if else_block:
|
if else_block:
|
||||||
builder.cbranch(cond, then_block, else_block)
|
builder.cbranch(cond, then_block, else_block)
|
||||||
else:
|
else:
|
||||||
@ -351,6 +297,27 @@ def handle_if(
|
|||||||
builder.position_at_end(merge_block)
|
builder.position_at_end(merge_block)
|
||||||
|
|
||||||
|
|
||||||
|
def handle_return(builder, stmt, local_sym_tab, ret_type):
|
||||||
|
logger.info(f"Handling return statement: {ast.dump(stmt)}")
|
||||||
|
if stmt.value is None:
|
||||||
|
return _handle_none_return(builder)
|
||||||
|
elif isinstance(stmt.value, ast.Name) and _is_xdp_name(stmt.value.id):
|
||||||
|
return _handle_xdp_return(stmt, builder, ret_type)
|
||||||
|
else:
|
||||||
|
val = eval_expr(
|
||||||
|
func=None,
|
||||||
|
module=None,
|
||||||
|
builder=builder,
|
||||||
|
expr=stmt.value,
|
||||||
|
local_sym_tab=local_sym_tab,
|
||||||
|
map_sym_tab={},
|
||||||
|
structs_sym_tab={},
|
||||||
|
)
|
||||||
|
logger.info(f"Evaluated return expression to {val}")
|
||||||
|
builder.ret(val[0])
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
def process_stmt(
|
def process_stmt(
|
||||||
func,
|
func,
|
||||||
module,
|
module,
|
||||||
@ -384,36 +351,12 @@ def process_stmt(
|
|||||||
func, module, builder, stmt, map_sym_tab, local_sym_tab, structs_sym_tab
|
func, module, builder, stmt, map_sym_tab, local_sym_tab, structs_sym_tab
|
||||||
)
|
)
|
||||||
elif isinstance(stmt, ast.Return):
|
elif isinstance(stmt, ast.Return):
|
||||||
if stmt.value is None:
|
did_return = handle_return(
|
||||||
builder.ret(ir.Constant(ir.IntType(32), 0))
|
builder,
|
||||||
did_return = True
|
stmt,
|
||||||
elif (
|
local_sym_tab,
|
||||||
isinstance(stmt.value, ast.Call)
|
ret_type,
|
||||||
and isinstance(stmt.value.func, ast.Name)
|
)
|
||||||
and len(stmt.value.args) == 1
|
|
||||||
and isinstance(stmt.value.args[0], ast.Constant)
|
|
||||||
and isinstance(stmt.value.args[0].value, int)
|
|
||||||
):
|
|
||||||
call_type = stmt.value.func.id
|
|
||||||
if ctypes_to_ir(call_type) != ret_type:
|
|
||||||
raise ValueError(
|
|
||||||
"Return type mismatch: expected"
|
|
||||||
f"{ctypes_to_ir(call_type)}, got {call_type}"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
builder.ret(ir.Constant(ret_type, stmt.value.args[0].value))
|
|
||||||
did_return = True
|
|
||||||
elif isinstance(stmt.value, ast.Name):
|
|
||||||
if stmt.value.id == "XDP_PASS":
|
|
||||||
builder.ret(ir.Constant(ret_type, 2))
|
|
||||||
did_return = True
|
|
||||||
elif stmt.value.id == "XDP_DROP":
|
|
||||||
builder.ret(ir.Constant(ret_type, 1))
|
|
||||||
did_return = True
|
|
||||||
else:
|
|
||||||
raise ValueError("Failed to evaluate return expression")
|
|
||||||
else:
|
|
||||||
raise ValueError("Unsupported return value")
|
|
||||||
return did_return
|
return did_return
|
||||||
|
|
||||||
|
|
||||||
@ -455,6 +398,9 @@ def allocate_mem(
|
|||||||
continue
|
continue
|
||||||
var_name = target.id
|
var_name = target.id
|
||||||
rval = stmt.value
|
rval = stmt.value
|
||||||
|
if var_name in local_sym_tab:
|
||||||
|
logger.info(f"Variable {var_name} already allocated")
|
||||||
|
continue
|
||||||
if isinstance(rval, ast.Call):
|
if isinstance(rval, ast.Call):
|
||||||
if isinstance(rval.func, ast.Name):
|
if isinstance(rval.func, ast.Name):
|
||||||
call_type = rval.func.id
|
call_type = rval.func.id
|
||||||
@ -483,8 +429,7 @@ def allocate_mem(
|
|||||||
var = builder.alloca(ir_type, name=var_name)
|
var = builder.alloca(ir_type, name=var_name)
|
||||||
has_metadata = True
|
has_metadata = True
|
||||||
logger.info(
|
logger.info(
|
||||||
f"Pre-allocated variable {var_name} "
|
f"Pre-allocated variable {var_name} for struct {call_type}"
|
||||||
f"for struct {call_type}"
|
|
||||||
)
|
)
|
||||||
elif isinstance(rval.func, ast.Attribute):
|
elif isinstance(rval.func, ast.Attribute):
|
||||||
ir_type = ir.PointerType(ir.IntType(64))
|
ir_type = ir.PointerType(ir.IntType(64))
|
||||||
@ -568,7 +513,7 @@ def process_func_body(
|
|||||||
)
|
)
|
||||||
|
|
||||||
if not did_return:
|
if not did_return:
|
||||||
builder.ret(ir.Constant(ir.IntType(32), 0))
|
builder.ret(ir.Constant(ir.IntType(64), 0))
|
||||||
|
|
||||||
|
|
||||||
def process_bpf_chunk(func_node, module, return_type, map_sym_tab, structs_sym_tab):
|
def process_bpf_chunk(func_node, module, return_type, map_sym_tab, structs_sym_tab):
|
||||||
45
pythonbpf/functions/return_utils.py
Normal file
45
pythonbpf/functions/return_utils.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
import logging
|
||||||
|
import ast
|
||||||
|
|
||||||
|
from llvmlite import ir
|
||||||
|
|
||||||
|
logger: logging.Logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
XDP_ACTIONS = {
|
||||||
|
"XDP_ABORTED": 0,
|
||||||
|
"XDP_DROP": 1,
|
||||||
|
"XDP_PASS": 2,
|
||||||
|
"XDP_TX": 3,
|
||||||
|
"XDP_REDIRECT": 4,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_none_return(builder) -> bool:
|
||||||
|
"""Handle return or return None -> returns 0."""
|
||||||
|
builder.ret(ir.Constant(ir.IntType(64), 0))
|
||||||
|
logger.debug("Generated default return: 0")
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _is_xdp_name(name: str) -> bool:
|
||||||
|
"""Check if a name is an XDP action"""
|
||||||
|
return name in XDP_ACTIONS
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_xdp_return(stmt: ast.Return, builder, ret_type) -> bool:
|
||||||
|
"""Handle XDP returns"""
|
||||||
|
if not isinstance(stmt.value, ast.Name):
|
||||||
|
return False
|
||||||
|
|
||||||
|
action_name = stmt.value.id
|
||||||
|
|
||||||
|
if action_name not in XDP_ACTIONS:
|
||||||
|
raise ValueError(
|
||||||
|
f"Unknown XDP action: {action_name}. Available: {XDP_ACTIONS.keys()}"
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
value = XDP_ACTIONS[action_name]
|
||||||
|
builder.ret(ir.Constant(ret_type, value))
|
||||||
|
logger.debug(f"Generated XDP action return: {action_name} = {value}")
|
||||||
|
return True
|
||||||
@ -62,7 +62,7 @@ def bpf_map_lookup_elem_emitter(
|
|||||||
"""
|
"""
|
||||||
if not call.args or len(call.args) != 1:
|
if not call.args or len(call.args) != 1:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"Map lookup expects exactly one argument (key), got " f"{len(call.args)}"
|
f"Map lookup expects exactly one argument (key), got {len(call.args)}"
|
||||||
)
|
)
|
||||||
key_ptr = get_or_create_ptr_from_arg(call.args[0], builder, local_sym_tab)
|
key_ptr = get_or_create_ptr_from_arg(call.args[0], builder, local_sym_tab)
|
||||||
map_void_ptr = builder.bitcast(map_ptr, ir.PointerType())
|
map_void_ptr = builder.bitcast(map_ptr, ir.PointerType())
|
||||||
@ -145,8 +145,7 @@ def bpf_map_update_elem_emitter(
|
|||||||
"""
|
"""
|
||||||
if not call.args or len(call.args) < 2 or len(call.args) > 3:
|
if not call.args or len(call.args) < 2 or len(call.args) > 3:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"Map update expects 2 or 3 args (key, value, flags), "
|
f"Map update expects 2 or 3 args (key, value, flags), got {len(call.args)}"
|
||||||
f"got {len(call.args)}"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
key_arg = call.args[0]
|
key_arg = call.args[0]
|
||||||
@ -196,7 +195,7 @@ def bpf_map_delete_elem_emitter(
|
|||||||
"""
|
"""
|
||||||
if not call.args or len(call.args) != 1:
|
if not call.args or len(call.args) != 1:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"Map delete expects exactly one argument (key), got " f"{len(call.args)}"
|
f"Map delete expects exactly one argument (key), got {len(call.args)}"
|
||||||
)
|
)
|
||||||
key_ptr = get_or_create_ptr_from_arg(call.args[0], builder, local_sym_tab)
|
key_ptr = get_or_create_ptr_from_arg(call.args[0], builder, local_sym_tab)
|
||||||
map_void_ptr = builder.bitcast(map_ptr, ir.PointerType())
|
map_void_ptr = builder.bitcast(map_ptr, ir.PointerType())
|
||||||
@ -255,7 +254,7 @@ def bpf_perf_event_output_handler(
|
|||||||
):
|
):
|
||||||
if len(call.args) != 1:
|
if len(call.args) != 1:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"Perf event output expects exactly one argument, " f"got {len(call.args)}"
|
f"Perf event output expects exactly one argument, got {len(call.args)}"
|
||||||
)
|
)
|
||||||
data_arg = call.args[0]
|
data_arg = call.args[0]
|
||||||
ctx_ptr = func.args[0] # First argument to the function is ctx
|
ctx_ptr = func.args[0] # First argument to the function is ctx
|
||||||
|
|||||||
@ -3,7 +3,7 @@ import logging
|
|||||||
from collections.abc import Callable
|
from collections.abc import Callable
|
||||||
|
|
||||||
from llvmlite import ir
|
from llvmlite import ir
|
||||||
from pythonbpf.expr_pass import eval_expr
|
from pythonbpf.expr import eval_expr
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -270,7 +270,7 @@ def _prepare_expr_args(expr, func, module, builder, local_sym_tab, struct_sym_ta
|
|||||||
val = builder.sext(val, ir.IntType(64))
|
val = builder.sext(val, ir.IntType(64))
|
||||||
else:
|
else:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Only int and ptr supported in bpf_printk args. " "Others default to 0."
|
"Only int and ptr supported in bpf_printk args. Others default to 0."
|
||||||
)
|
)
|
||||||
val = ir.Constant(ir.IntType(64), 0)
|
val = ir.Constant(ir.IntType(64), 0)
|
||||||
return val
|
return val
|
||||||
|
|||||||
@ -15,5 +15,8 @@ def deref(ptr):
|
|||||||
return result if result is not None else 0
|
return result if result is not None else 0
|
||||||
|
|
||||||
|
|
||||||
|
XDP_ABORTED = ctypes.c_int64(0)
|
||||||
XDP_DROP = ctypes.c_int64(1)
|
XDP_DROP = ctypes.c_int64(1)
|
||||||
XDP_PASS = ctypes.c_int64(2)
|
XDP_PASS = ctypes.c_int64(2)
|
||||||
|
XDP_TX = ctypes.c_int64(3)
|
||||||
|
XDP_REDIRECT = ctypes.c_int64(4)
|
||||||
|
|||||||
@ -3,7 +3,7 @@ from logging import Logger
|
|||||||
from llvmlite import ir
|
from llvmlite import ir
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from .maps_utils import MapProcessorRegistry
|
from .maps_utils import MapProcessorRegistry
|
||||||
from ..debuginfo import DebugInfoGenerator
|
from pythonbpf.debuginfo import DebugInfoGenerator
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
logger: Logger = logging.getLogger(__name__)
|
logger: Logger = logging.getLogger(__name__)
|
||||||
@ -278,9 +278,7 @@ def process_bpf_map(func_node, module):
|
|||||||
if handler:
|
if handler:
|
||||||
return handler(map_name, rval, module)
|
return handler(map_name, rval, module)
|
||||||
else:
|
else:
|
||||||
logger.warning(
|
logger.warning(f"Unknown map type {rval.func.id}, defaulting to HashMap")
|
||||||
f"Unknown map type " f"{rval.func.id}, defaulting to HashMap"
|
|
||||||
)
|
|
||||||
return process_hash_map(map_name, rval, module)
|
return process_hash_map(map_name, rval, module)
|
||||||
else:
|
else:
|
||||||
raise ValueError("Function under @map must return a map")
|
raise ValueError("Function under @map must return a map")
|
||||||
|
|||||||
0
pythonbpf/tbaa_gen/__init__.py
Normal file
0
pythonbpf/tbaa_gen/__init__.py
Normal file
@ -1,24 +1,28 @@
|
|||||||
from llvmlite import ir
|
from llvmlite import ir
|
||||||
|
|
||||||
# TODO: THIS IS NOT SUPPOSED TO MATCH STRINGS :skull:
|
# TODO: THIS IS NOT SUPPOSED TO MATCH STRINGS :skull:
|
||||||
|
mapping = {
|
||||||
|
"c_int8": ir.IntType(8),
|
||||||
|
"c_uint8": ir.IntType(8),
|
||||||
|
"c_int16": ir.IntType(16),
|
||||||
|
"c_uint16": ir.IntType(16),
|
||||||
|
"c_int32": ir.IntType(32),
|
||||||
|
"c_uint32": ir.IntType(32),
|
||||||
|
"c_int64": ir.IntType(64),
|
||||||
|
"c_uint64": ir.IntType(64),
|
||||||
|
"c_float": ir.FloatType(),
|
||||||
|
"c_double": ir.DoubleType(),
|
||||||
|
"c_void_p": ir.IntType(64),
|
||||||
|
# Not so sure about this one
|
||||||
|
"str": ir.PointerType(ir.IntType(8)),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def ctypes_to_ir(ctype: str):
|
def ctypes_to_ir(ctype: str):
|
||||||
mapping = {
|
|
||||||
"c_int8": ir.IntType(8),
|
|
||||||
"c_uint8": ir.IntType(8),
|
|
||||||
"c_int16": ir.IntType(16),
|
|
||||||
"c_uint16": ir.IntType(16),
|
|
||||||
"c_int32": ir.IntType(32),
|
|
||||||
"c_uint32": ir.IntType(32),
|
|
||||||
"c_int64": ir.IntType(64),
|
|
||||||
"c_uint64": ir.IntType(64),
|
|
||||||
"c_float": ir.FloatType(),
|
|
||||||
"c_double": ir.DoubleType(),
|
|
||||||
"c_void_p": ir.IntType(64),
|
|
||||||
# Not so sure about this one
|
|
||||||
"str": ir.PointerType(ir.IntType(8)),
|
|
||||||
}
|
|
||||||
if ctype in mapping:
|
if ctype in mapping:
|
||||||
return mapping[ctype]
|
return mapping[ctype]
|
||||||
raise NotImplementedError(f"No mapping for {ctype}")
|
raise NotImplementedError(f"No mapping for {ctype}")
|
||||||
|
|
||||||
|
|
||||||
|
def is_ctypes(ctype: str) -> bool:
|
||||||
|
return ctype in mapping
|
||||||
|
|||||||
3
pythonbpf/vmlinux_parser/__init__.py
Normal file
3
pythonbpf/vmlinux_parser/__init__.py
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
from .import_detector import vmlinux_proc
|
||||||
|
|
||||||
|
__all__ = ["vmlinux_proc"]
|
||||||
120
pythonbpf/vmlinux_parser/class_handler.py
Normal file
120
pythonbpf/vmlinux_parser/class_handler.py
Normal file
@ -0,0 +1,120 @@
|
|||||||
|
import logging
|
||||||
|
from functools import lru_cache
|
||||||
|
import importlib
|
||||||
|
from .dependency_handler import DependencyHandler
|
||||||
|
from .dependency_node import DependencyNode
|
||||||
|
import ctypes
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache(maxsize=1)
|
||||||
|
def get_module_symbols(module_name: str):
|
||||||
|
imported_module = importlib.import_module(module_name)
|
||||||
|
return [name for name in dir(imported_module)], imported_module
|
||||||
|
|
||||||
|
|
||||||
|
def process_vmlinux_class(node, llvm_module, handler: DependencyHandler):
|
||||||
|
symbols_in_module, imported_module = get_module_symbols("vmlinux")
|
||||||
|
if node.name in symbols_in_module:
|
||||||
|
vmlinux_type = getattr(imported_module, node.name)
|
||||||
|
process_vmlinux_post_ast(vmlinux_type, llvm_module, handler)
|
||||||
|
else:
|
||||||
|
raise ImportError(f"{node.name} not in vmlinux")
|
||||||
|
|
||||||
|
|
||||||
|
def process_vmlinux_post_ast(
|
||||||
|
elem_type_class, llvm_handler, handler: DependencyHandler, processing_stack=None
|
||||||
|
):
|
||||||
|
# Initialize processing stack on first call
|
||||||
|
if processing_stack is None:
|
||||||
|
processing_stack = set()
|
||||||
|
symbols_in_module, imported_module = get_module_symbols("vmlinux")
|
||||||
|
|
||||||
|
current_symbol_name = elem_type_class.__name__
|
||||||
|
field_table = {}
|
||||||
|
is_complex_type = False
|
||||||
|
containing_type: Optional[Any] = None
|
||||||
|
ctype_complex_type: Optional[Any] = None
|
||||||
|
type_length: Optional[int] = None
|
||||||
|
module_name = getattr(elem_type_class, "__module__", None)
|
||||||
|
|
||||||
|
if hasattr(elem_type_class, "_length_") and is_complex_type:
|
||||||
|
type_length = elem_type_class._length_
|
||||||
|
|
||||||
|
if current_symbol_name in processing_stack:
|
||||||
|
logger.debug(
|
||||||
|
f"Circular dependency detected for {current_symbol_name}, skipping"
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check if already processed
|
||||||
|
if handler.has_node(current_symbol_name):
|
||||||
|
existing_node = handler.get_node(current_symbol_name)
|
||||||
|
# If the node exists and is ready, we're done
|
||||||
|
if existing_node and existing_node.is_ready:
|
||||||
|
logger.info(f"Node {current_symbol_name} already processed and ready")
|
||||||
|
return True
|
||||||
|
|
||||||
|
processing_stack.add(current_symbol_name)
|
||||||
|
|
||||||
|
if module_name == "vmlinux":
|
||||||
|
if hasattr(elem_type_class, "_type_"):
|
||||||
|
is_complex_type = True
|
||||||
|
containing_type = elem_type_class._type_
|
||||||
|
if containing_type.__module__ == "vmlinux":
|
||||||
|
print("Very weird type ig for containing type", containing_type)
|
||||||
|
elif containing_type.__module__ == ctypes.__name__:
|
||||||
|
if isinstance(elem_type_class, type):
|
||||||
|
if issubclass(elem_type_class, ctypes.Array):
|
||||||
|
ctype_complex_type = ctypes.Array
|
||||||
|
elif issubclass(elem_type_class, ctypes._Pointer):
|
||||||
|
ctype_complex_type = ctypes._Pointer
|
||||||
|
else:
|
||||||
|
raise TypeError("Unsupported ctypes subclass")
|
||||||
|
# handle ctype complex type
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise ImportError(f"Unsupported module of {containing_type}")
|
||||||
|
else:
|
||||||
|
new_dep_node = DependencyNode(name=current_symbol_name)
|
||||||
|
handler.add_node(new_dep_node)
|
||||||
|
class_obj = getattr(imported_module, current_symbol_name)
|
||||||
|
# Inspect the class fields
|
||||||
|
if hasattr(class_obj, "_fields_"):
|
||||||
|
for field_name, field_type in class_obj._fields_:
|
||||||
|
field_table[field_name] = field_type
|
||||||
|
elif hasattr(class_obj, "__annotations__"):
|
||||||
|
for field_name, field_type in class_obj.__annotations__.items():
|
||||||
|
field_table[field_name] = field_type
|
||||||
|
else:
|
||||||
|
raise TypeError("Could not get required class and definition")
|
||||||
|
|
||||||
|
logger.info(f"Extracted fields for {current_symbol_name}: {field_table}")
|
||||||
|
|
||||||
|
for elem_name, elem_type in field_table.items():
|
||||||
|
local_module_name = getattr(elem_type, "__module__", None)
|
||||||
|
if local_module_name == ctypes.__name__:
|
||||||
|
new_dep_node.add_field(elem_name, elem_type, ready=True)
|
||||||
|
logger.info(f"Field {elem_name} is direct ctypes type: {elem_type}")
|
||||||
|
elif local_module_name == "vmlinux":
|
||||||
|
new_dep_node.add_field(elem_name, elem_type, ready=False)
|
||||||
|
logger.debug(
|
||||||
|
f"Processing vmlinux field: {elem_name}, type: {elem_type}"
|
||||||
|
)
|
||||||
|
if process_vmlinux_post_ast(
|
||||||
|
elem_type, llvm_handler, handler, processing_stack
|
||||||
|
):
|
||||||
|
new_dep_node.set_field_ready(elem_name, True)
|
||||||
|
else:
|
||||||
|
raise ValueError(
|
||||||
|
f"{elem_name} with type {elem_type} from module {module_name} not supported in recursive resolver"
|
||||||
|
)
|
||||||
|
print("")
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise ImportError("UNSUPPORTED Module")
|
||||||
|
|
||||||
|
print(current_symbol_name, "DONE")
|
||||||
|
print(f"handler readiness {handler.is_ready}")
|
||||||
149
pythonbpf/vmlinux_parser/dependency_handler.py
Normal file
149
pythonbpf/vmlinux_parser/dependency_handler.py
Normal file
@ -0,0 +1,149 @@
|
|||||||
|
from typing import Optional, Dict, List, Iterator
|
||||||
|
from .dependency_node import DependencyNode
|
||||||
|
|
||||||
|
|
||||||
|
class DependencyHandler:
|
||||||
|
"""
|
||||||
|
Manages a collection of DependencyNode objects with no duplicates.
|
||||||
|
|
||||||
|
Ensures that no two nodes with the same name can be added and provides
|
||||||
|
methods to check readiness and retrieve specific nodes.
|
||||||
|
|
||||||
|
Example usage:
|
||||||
|
# Create a handler
|
||||||
|
handler = DependencyHandler()
|
||||||
|
|
||||||
|
# Create some dependency nodes
|
||||||
|
node1 = DependencyNode(name="node1")
|
||||||
|
node1.add_field("field1", str)
|
||||||
|
node1.set_field_value("field1", "value1")
|
||||||
|
|
||||||
|
node2 = DependencyNode(name="node2")
|
||||||
|
node2.add_field("field1", int)
|
||||||
|
|
||||||
|
# Add nodes to the handler
|
||||||
|
handler.add_node(node1)
|
||||||
|
handler.add_node(node2)
|
||||||
|
|
||||||
|
# Check if a specific node exists
|
||||||
|
print(handler.has_node("node1")) # True
|
||||||
|
|
||||||
|
# Get a reference to a node and modify it
|
||||||
|
node = handler.get_node("node2")
|
||||||
|
node.set_field_value("field1", 42)
|
||||||
|
|
||||||
|
# Check if all nodes are ready
|
||||||
|
print(handler.is_ready) # False (node2 is ready, but node1 isn't)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
# Using a dictionary with node names as keys ensures name uniqueness
|
||||||
|
# and provides efficient lookups
|
||||||
|
self._nodes: Dict[str, DependencyNode] = {}
|
||||||
|
|
||||||
|
def add_node(self, node: DependencyNode) -> bool:
|
||||||
|
"""
|
||||||
|
Add a dependency node to the handler.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
node: The DependencyNode to add
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if the node was added, False if a node with the same name already exists
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
TypeError: If the provided object is not a DependencyNode
|
||||||
|
"""
|
||||||
|
if not isinstance(node, DependencyNode):
|
||||||
|
raise TypeError(f"Expected DependencyNode, got {type(node).__name__}")
|
||||||
|
|
||||||
|
# Check if a node with this name already exists
|
||||||
|
if node.name in self._nodes:
|
||||||
|
return False
|
||||||
|
|
||||||
|
self._nodes[node.name] = node
|
||||||
|
return True
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_ready(self) -> bool:
|
||||||
|
"""
|
||||||
|
Check if all nodes are ready.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if all nodes are ready (or if there are no nodes), False otherwise
|
||||||
|
"""
|
||||||
|
if not self._nodes:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return all(node.is_ready for node in self._nodes.values())
|
||||||
|
|
||||||
|
def has_node(self, name: str) -> bool:
|
||||||
|
"""
|
||||||
|
Check if a node with the given name exists.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name: The name to check
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if a node with the given name exists, False otherwise
|
||||||
|
"""
|
||||||
|
return name in self._nodes
|
||||||
|
|
||||||
|
def get_node(self, name: str) -> Optional[DependencyNode]:
|
||||||
|
"""
|
||||||
|
Get a node by name for manipulation.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name: The name of the node to retrieve
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Optional[DependencyNode]: The node with the given name, or None if not found
|
||||||
|
"""
|
||||||
|
return self._nodes.get(name)
|
||||||
|
|
||||||
|
def remove_node(self, node_or_name) -> bool:
|
||||||
|
"""
|
||||||
|
Remove a node by name or reference.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
node_or_name: The node to remove or its name
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if the node was removed, False if not found
|
||||||
|
"""
|
||||||
|
if isinstance(node_or_name, DependencyNode):
|
||||||
|
name = node_or_name.name
|
||||||
|
else:
|
||||||
|
name = node_or_name
|
||||||
|
|
||||||
|
if name in self._nodes:
|
||||||
|
del self._nodes[name]
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_all_nodes(self) -> List[DependencyNode]:
|
||||||
|
"""
|
||||||
|
Get all nodes stored in the handler.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List[DependencyNode]: List of all nodes
|
||||||
|
"""
|
||||||
|
return list(self._nodes.values())
|
||||||
|
|
||||||
|
def __iter__(self) -> Iterator[DependencyNode]:
|
||||||
|
"""
|
||||||
|
Iterate over all nodes.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Iterator[DependencyNode]: Iterator over all nodes
|
||||||
|
"""
|
||||||
|
return iter(self._nodes.values())
|
||||||
|
|
||||||
|
def __len__(self) -> int:
|
||||||
|
"""
|
||||||
|
Get the number of nodes in the handler.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int: The number of nodes
|
||||||
|
"""
|
||||||
|
return len(self._nodes)
|
||||||
191
pythonbpf/vmlinux_parser/dependency_node.py
Normal file
191
pythonbpf/vmlinux_parser/dependency_node.py
Normal file
@ -0,0 +1,191 @@
|
|||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Dict, Any, Optional
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: FIX THE FUCKING TYPE NAME CONVENTION.
|
||||||
|
@dataclass
|
||||||
|
class Field:
|
||||||
|
"""Represents a field in a dependency node with its type and readiness state."""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
type: type
|
||||||
|
containing_type: Optional[Any]
|
||||||
|
type_size: Optional[int]
|
||||||
|
value: Any = None
|
||||||
|
ready: bool = False
|
||||||
|
|
||||||
|
def set_ready(self, is_ready: bool = True) -> None:
|
||||||
|
"""Set the readiness state of this field."""
|
||||||
|
self.ready = is_ready
|
||||||
|
|
||||||
|
def set_value(self, value: Any, mark_ready: bool = True) -> None:
|
||||||
|
"""Set the value of this field and optionally mark it as ready."""
|
||||||
|
self.value = value
|
||||||
|
if mark_ready:
|
||||||
|
self.ready = True
|
||||||
|
|
||||||
|
def set_type(self, given_type, mark_ready: bool = True) -> None:
|
||||||
|
"""Set value of the type field and mark as ready"""
|
||||||
|
self.type = given_type
|
||||||
|
if mark_ready:
|
||||||
|
self.ready = True
|
||||||
|
|
||||||
|
def set_containing_type(
|
||||||
|
self, containing_type: Optional[Any], mark_ready: bool = True
|
||||||
|
) -> None:
|
||||||
|
"""Set the containing_type of this field and optionally mark it as ready."""
|
||||||
|
self.containing_type = containing_type
|
||||||
|
if mark_ready:
|
||||||
|
self.ready = True
|
||||||
|
|
||||||
|
def set_type_size(self, type_size: Any, mark_ready: bool = True) -> None:
|
||||||
|
"""Set the type_size of this field and optionally mark it as ready."""
|
||||||
|
self.type_size = type_size
|
||||||
|
if mark_ready:
|
||||||
|
self.ready = True
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class DependencyNode:
|
||||||
|
"""
|
||||||
|
A node with typed fields and readiness tracking.
|
||||||
|
|
||||||
|
Example usage:
|
||||||
|
# Create a dependency node for a Person
|
||||||
|
somestruct = DependencyNode(name="struct_1")
|
||||||
|
|
||||||
|
# Add fields with their types
|
||||||
|
somestruct.add_field("field_1", str)
|
||||||
|
somestruct.add_field("field_2", int)
|
||||||
|
somestruct.add_field("field_3", str)
|
||||||
|
|
||||||
|
# Check if the node is ready (should be False initially)
|
||||||
|
print(f"Is node ready? {somestruct.is_ready}") # False
|
||||||
|
|
||||||
|
# Set some field values
|
||||||
|
somestruct.set_field_value("field_1", "someproperty")
|
||||||
|
somestruct.set_field_value("field_2", 30)
|
||||||
|
|
||||||
|
# Check if the node is ready (still False because email is not ready)
|
||||||
|
print(f"Is node ready? {somestruct.is_ready}") # False
|
||||||
|
|
||||||
|
# Set the last field and make the node ready
|
||||||
|
somestruct.set_field_value("field_3", "anotherproperty")
|
||||||
|
|
||||||
|
# Now the node should be ready
|
||||||
|
print(f"Is node ready? {somestruct.is_ready}") # True
|
||||||
|
|
||||||
|
# You can also mark a field as not ready
|
||||||
|
somestruct.set_field_ready("field_3", False)
|
||||||
|
|
||||||
|
# Now the node is not ready again
|
||||||
|
print(f"Is node ready? {somestruct.is_ready}") # False
|
||||||
|
|
||||||
|
# Get all field values
|
||||||
|
print(somestruct.get_field_values()) # {'field_1': 'someproperty', 'field_2': 30, 'field_3': 'anotherproperty'}
|
||||||
|
|
||||||
|
# Get only ready fields
|
||||||
|
ready_fields = somestruct.get_ready_fields()
|
||||||
|
print(f"Ready fields: {[field.name for field in ready_fields.values()]}") # ['field_1', 'field_2']
|
||||||
|
"""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
fields: Dict[str, Field] = field(default_factory=dict)
|
||||||
|
_ready_cache: Optional[bool] = field(default=None, repr=False)
|
||||||
|
|
||||||
|
def add_field(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
field_type: type,
|
||||||
|
initial_value: Any = None,
|
||||||
|
containing_type: Optional[Any] = None,
|
||||||
|
type_size: Optional[int] = None,
|
||||||
|
ready: bool = False,
|
||||||
|
) -> None:
|
||||||
|
"""Add a field to the node with an optional initial value and readiness state."""
|
||||||
|
self.fields[name] = Field(
|
||||||
|
name=name,
|
||||||
|
type=field_type,
|
||||||
|
value=initial_value,
|
||||||
|
ready=ready,
|
||||||
|
containing_type=containing_type,
|
||||||
|
type_size=type_size,
|
||||||
|
)
|
||||||
|
# Invalidate readiness cache
|
||||||
|
self._ready_cache = None
|
||||||
|
|
||||||
|
def get_field(self, name: str) -> Field:
|
||||||
|
"""Get a field by name."""
|
||||||
|
return self.fields[name]
|
||||||
|
|
||||||
|
def set_field_value(self, name: str, value: Any, mark_ready: bool = True) -> None:
|
||||||
|
"""Set a field's value and optionally mark it as ready."""
|
||||||
|
if name not in self.fields:
|
||||||
|
raise KeyError(f"Field '{name}' does not exist in node '{self.name}'")
|
||||||
|
|
||||||
|
self.fields[name].set_value(value, mark_ready)
|
||||||
|
# Invalidate readiness cache
|
||||||
|
self._ready_cache = None
|
||||||
|
|
||||||
|
def set_field_type(self, name: str, type: Any, mark_ready: bool = True) -> None:
|
||||||
|
"""Set a field's type and optionally mark it as ready."""
|
||||||
|
if name not in self.fields:
|
||||||
|
raise KeyError(f"Field '{name}' does not exist in node '{self.name}'")
|
||||||
|
|
||||||
|
self.fields[name].set_type(type, mark_ready)
|
||||||
|
# Invalidate readiness cache
|
||||||
|
self._ready_cache = None
|
||||||
|
|
||||||
|
def set_field_containing_type(
|
||||||
|
self, name: str, containing_type: Any, mark_ready: bool = True
|
||||||
|
) -> None:
|
||||||
|
"""Set a field's containing_type and optionally mark it as ready."""
|
||||||
|
if name not in self.fields:
|
||||||
|
raise KeyError(f"Field '{name}' does not exist in node '{self.name}'")
|
||||||
|
|
||||||
|
self.fields[name].set_containing_type(containing_type, mark_ready)
|
||||||
|
# Invalidate readiness cache
|
||||||
|
self._ready_cache = None
|
||||||
|
|
||||||
|
def set_field_type_size(
|
||||||
|
self, name: str, type_size: Any, mark_ready: bool = True
|
||||||
|
) -> None:
|
||||||
|
"""Set a field's type_size and optionally mark it as ready."""
|
||||||
|
if name not in self.fields:
|
||||||
|
raise KeyError(f"Field '{name}' does not exist in node '{self.name}'")
|
||||||
|
|
||||||
|
self.fields[name].set_type_size(type_size, mark_ready)
|
||||||
|
# Invalidate readiness cache
|
||||||
|
self._ready_cache = None
|
||||||
|
|
||||||
|
def set_field_ready(self, name: str, is_ready: bool = True) -> None:
|
||||||
|
"""Mark a field as ready or not ready."""
|
||||||
|
if name not in self.fields:
|
||||||
|
raise KeyError(f"Field '{name}' does not exist in node '{self.name}'")
|
||||||
|
|
||||||
|
self.fields[name].set_ready(is_ready)
|
||||||
|
# Invalidate readiness cache
|
||||||
|
self._ready_cache = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_ready(self) -> bool:
|
||||||
|
"""Check if the node is ready (all fields are ready)."""
|
||||||
|
# Use cached value if available
|
||||||
|
if self._ready_cache is not None:
|
||||||
|
return self._ready_cache
|
||||||
|
|
||||||
|
# Calculate readiness only when needed
|
||||||
|
if not self.fields:
|
||||||
|
self._ready_cache = False
|
||||||
|
return False
|
||||||
|
|
||||||
|
self._ready_cache = all(elem.ready for elem in self.fields.values())
|
||||||
|
return self._ready_cache
|
||||||
|
|
||||||
|
def get_field_values(self) -> Dict[str, Any]:
|
||||||
|
"""Get a dictionary of field names to their values."""
|
||||||
|
return {name: elem.value for name, elem in self.fields.items()}
|
||||||
|
|
||||||
|
def get_ready_fields(self) -> Dict[str, Field]:
|
||||||
|
"""Get all fields that are marked as ready."""
|
||||||
|
return {name: elem for name, elem in self.fields.items() if elem.ready}
|
||||||
135
pythonbpf/vmlinux_parser/import_detector.py
Normal file
135
pythonbpf/vmlinux_parser/import_detector.py
Normal file
@ -0,0 +1,135 @@
|
|||||||
|
import ast
|
||||||
|
import logging
|
||||||
|
from typing import List, Tuple, Dict
|
||||||
|
import importlib
|
||||||
|
import inspect
|
||||||
|
|
||||||
|
from .dependency_handler import DependencyHandler
|
||||||
|
from .ir_generation import IRGenerator
|
||||||
|
from .class_handler import process_vmlinux_class
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def detect_import_statement(tree: ast.AST) -> List[Tuple[str, ast.ImportFrom]]:
|
||||||
|
"""
|
||||||
|
Parse AST and detect import statements from vmlinux.
|
||||||
|
|
||||||
|
Returns a list of tuples (module_name, imported_item) for vmlinux imports.
|
||||||
|
Raises SyntaxError for invalid import patterns.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tree: The AST to parse
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of tuples containing (module_name, imported_item) for each vmlinux import
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
SyntaxError: If multiple imports from vmlinux are attempted or import * is used
|
||||||
|
"""
|
||||||
|
vmlinux_imports = []
|
||||||
|
|
||||||
|
for node in ast.walk(tree):
|
||||||
|
# Handle "from vmlinux import ..." statements
|
||||||
|
if isinstance(node, ast.ImportFrom):
|
||||||
|
if node.module == "vmlinux":
|
||||||
|
# Check for wildcard import: from vmlinux import *
|
||||||
|
if any(alias.name == "*" for alias in node.names):
|
||||||
|
raise SyntaxError(
|
||||||
|
"Wildcard imports from vmlinux are not supported. "
|
||||||
|
"Please import specific types explicitly."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check for multiple imports: from vmlinux import A, B, C
|
||||||
|
if len(node.names) > 1:
|
||||||
|
imported_names = [alias.name for alias in node.names]
|
||||||
|
raise SyntaxError(
|
||||||
|
f"Multiple imports from vmlinux are not supported. "
|
||||||
|
f"Found: {', '.join(imported_names)}. "
|
||||||
|
f"Please use separate import statements for each type."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check if no specific import is specified (should not happen with valid Python)
|
||||||
|
if len(node.names) == 0:
|
||||||
|
raise SyntaxError(
|
||||||
|
"Import from vmlinux must specify at least one type."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Valid single import
|
||||||
|
for alias in node.names:
|
||||||
|
import_name = alias.name
|
||||||
|
# Use alias if provided, otherwise use the original name (commented)
|
||||||
|
# as_name = alias.asname if alias.asname else alias.name
|
||||||
|
vmlinux_imports.append(("vmlinux", node))
|
||||||
|
logger.info(f"Found vmlinux import: {import_name}")
|
||||||
|
|
||||||
|
# Handle "import vmlinux" statements (not typical but should be rejected)
|
||||||
|
elif isinstance(node, ast.Import):
|
||||||
|
for alias in node.names:
|
||||||
|
if alias.name == "vmlinux" or alias.name.startswith("vmlinux."):
|
||||||
|
raise SyntaxError(
|
||||||
|
"Direct import of vmlinux module is not supported. "
|
||||||
|
"Use 'from vmlinux import <type>' instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(f"Total vmlinux imports detected: {len(vmlinux_imports)}")
|
||||||
|
return vmlinux_imports
|
||||||
|
|
||||||
|
|
||||||
|
def vmlinux_proc(tree: ast.AST, module):
|
||||||
|
import_statements = detect_import_statement(tree)
|
||||||
|
|
||||||
|
# initialise dependency handler
|
||||||
|
handler = DependencyHandler()
|
||||||
|
# initialise assignment dictionary of name to type
|
||||||
|
assignments: Dict[str, type] = {}
|
||||||
|
|
||||||
|
if not import_statements:
|
||||||
|
logger.info("No vmlinux imports found")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Import vmlinux module directly
|
||||||
|
try:
|
||||||
|
vmlinux_mod = importlib.import_module("vmlinux")
|
||||||
|
except ImportError:
|
||||||
|
logger.warning("Could not import vmlinux module")
|
||||||
|
return
|
||||||
|
|
||||||
|
source_file = inspect.getsourcefile(vmlinux_mod)
|
||||||
|
if source_file is None:
|
||||||
|
logger.warning("Cannot find source for vmlinux module")
|
||||||
|
return
|
||||||
|
|
||||||
|
with open(source_file, "r") as f:
|
||||||
|
mod_ast = ast.parse(f.read(), filename=source_file)
|
||||||
|
|
||||||
|
for import_mod, import_node in import_statements:
|
||||||
|
for alias in import_node.names:
|
||||||
|
imported_name = alias.name
|
||||||
|
found = False
|
||||||
|
for mod_node in mod_ast.body:
|
||||||
|
if (
|
||||||
|
isinstance(mod_node, ast.ClassDef)
|
||||||
|
and mod_node.name == imported_name
|
||||||
|
):
|
||||||
|
process_vmlinux_class(mod_node, module, handler)
|
||||||
|
found = True
|
||||||
|
break
|
||||||
|
if isinstance(mod_node, ast.Assign):
|
||||||
|
for target in mod_node.targets:
|
||||||
|
if isinstance(target, ast.Name) and target.id == imported_name:
|
||||||
|
process_vmlinux_assign(mod_node, module, assignments)
|
||||||
|
found = True
|
||||||
|
break
|
||||||
|
if found:
|
||||||
|
break
|
||||||
|
if not found:
|
||||||
|
logger.info(
|
||||||
|
f"{imported_name} not found as ClassDef or Assign in vmlinux"
|
||||||
|
)
|
||||||
|
|
||||||
|
IRGenerator(module, handler)
|
||||||
|
|
||||||
|
|
||||||
|
def process_vmlinux_assign(node, module, assignments: Dict[str, type]):
|
||||||
|
raise NotImplementedError("Assignment handling has not been implemented yet")
|
||||||
8
pythonbpf/vmlinux_parser/ir_generation.py
Normal file
8
pythonbpf/vmlinux_parser/ir_generation.py
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
# here, we will iterate through the dependencies and generate IR once dependencies are resolved fully
|
||||||
|
from .dependency_handler import DependencyHandler
|
||||||
|
|
||||||
|
|
||||||
|
class IRGenerator:
|
||||||
|
def __init__(self, module, handler):
|
||||||
|
self.module = module
|
||||||
|
self.handler: DependencyHandler = handler
|
||||||
@ -1,11 +1,10 @@
|
|||||||
#include <linux/bpf.h>
|
#include "vmlinux.h"
|
||||||
#include <bpf/bpf_helpers.h>
|
#include <bpf/bpf_helpers.h>
|
||||||
#define u64 unsigned long long
|
#include <bpf/bpf_endian.h>
|
||||||
#define u32 unsigned int
|
|
||||||
|
|
||||||
SEC("xdp")
|
SEC("xdp")
|
||||||
int hello(struct xdp_md *ctx) {
|
int hello(struct xdp_md *ctx) {
|
||||||
bpf_printk("Hello, World!\n");
|
bpf_printk("Hello, World! %ud \n", ctx->data);
|
||||||
return XDP_PASS;
|
return XDP_PASS;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
19
tests/c-form/kprobe.bpf.c
Normal file
19
tests/c-form/kprobe.bpf.c
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
#include "vmlinux.h"
|
||||||
|
#include <bpf/bpf_helpers.h>
|
||||||
|
#include <bpf/bpf_tracing.h>
|
||||||
|
|
||||||
|
char LICENSE[] SEC("license") = "Dual BSD/GPL";
|
||||||
|
|
||||||
|
SEC("kprobe/do_unlinkat")
|
||||||
|
int kprobe_execve(struct pt_regs *ctx)
|
||||||
|
{
|
||||||
|
bpf_printk("unlinkat created");
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
SEC("kretprobe/do_unlinkat")
|
||||||
|
int kretprobe_execve(struct pt_regs *ctx)
|
||||||
|
{
|
||||||
|
bpf_printk("unlinkat returned\n");
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
34
tests/failing_tests/conditionals/helper_cond.py
Normal file
34
tests/failing_tests/conditionals/helper_cond.py
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
from pythonbpf import bpf, map, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64, c_uint64
|
||||||
|
from pythonbpf.maps import HashMap
|
||||||
|
|
||||||
|
# NOTE: Decided against fixing this
|
||||||
|
# as a workaround is assigning the result of lookup to a variable
|
||||||
|
# and then using that variable in the if statement.
|
||||||
|
# Might fix in future.
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@map
|
||||||
|
def last() -> HashMap:
|
||||||
|
return HashMap(key=c_uint64, value=c_uint64, max_entries=3)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
last.update(0, 1)
|
||||||
|
if last.lookup(0) > 0:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
18
tests/failing_tests/conditionals/oneline.py
Normal file
18
tests/failing_tests/conditionals/oneline.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
print("Hello, World!") if True else print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
34
tests/failing_tests/conditionals/struct_ptr.py
Normal file
34
tests/failing_tests/conditionals/struct_ptr.py
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
from pythonbpf import bpf, struct, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64, c_uint64
|
||||||
|
|
||||||
|
# NOTE: Decided against fixing this
|
||||||
|
# as one workaround is to just check any field of the struct
|
||||||
|
# in the if statement. Ugly but works.
|
||||||
|
# Might fix in future.
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@struct
|
||||||
|
class data_t:
|
||||||
|
pid: c_uint64
|
||||||
|
ts: c_uint64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
dat = data_t()
|
||||||
|
if dat:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
@ -4,6 +4,18 @@ from pythonbpf.maps import HashMap
|
|||||||
|
|
||||||
from ctypes import c_void_p, c_int64
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
# NOTE: I have decided to not fix this example for now.
|
||||||
|
# The issue is in line 31, where we are passing an expression.
|
||||||
|
# The update helper expects a pointer type. But the problem is
|
||||||
|
# that we must allocate the space for said pointer in the first
|
||||||
|
# basic block. As that usage is in a different basic block, we
|
||||||
|
# are unable to cast the expression to a pointer type. (as we never
|
||||||
|
# allocated space for it).
|
||||||
|
# Shall we change our space allocation logic? That allows users to
|
||||||
|
# spam the same helper with the same args, and still run out of
|
||||||
|
# stack space. So we consider this usage invalid for now.
|
||||||
|
# Might fix it later.
|
||||||
|
|
||||||
|
|
||||||
@bpf
|
@bpf
|
||||||
@map
|
@map
|
||||||
@ -14,12 +26,12 @@ def count() -> HashMap:
|
|||||||
@bpf
|
@bpf
|
||||||
@section("xdp")
|
@section("xdp")
|
||||||
def hello_world(ctx: c_void_p) -> c_int64:
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
prev = count().lookup(0)
|
prev = count.lookup(0)
|
||||||
if prev:
|
if prev:
|
||||||
count().update(0, prev + 1)
|
count.update(0, prev + 1)
|
||||||
return XDP_PASS
|
return XDP_PASS
|
||||||
else:
|
else:
|
||||||
count().update(0, 1)
|
count.update(0, 1)
|
||||||
|
|
||||||
return XDP_PASS
|
return XDP_PASS
|
||||||
|
|
||||||
|
|||||||
@ -3,16 +3,19 @@ import logging
|
|||||||
from pythonbpf import compile, bpf, section, bpfglobal, compile_to_ir
|
from pythonbpf import compile, bpf, section, bpfglobal, compile_to_ir
|
||||||
from ctypes import c_void_p, c_int64, c_int32
|
from ctypes import c_void_p, c_int64, c_int32
|
||||||
|
|
||||||
|
|
||||||
@bpf
|
@bpf
|
||||||
@bpfglobal
|
@bpfglobal
|
||||||
def somevalue() -> c_int32:
|
def somevalue() -> c_int32:
|
||||||
return c_int32(42)
|
return c_int32(42)
|
||||||
|
|
||||||
|
|
||||||
@bpf
|
@bpf
|
||||||
@bpfglobal
|
@bpfglobal
|
||||||
def somevalue2() -> c_int64:
|
def somevalue2() -> c_int64:
|
||||||
return c_int64(69)
|
return c_int64(69)
|
||||||
|
|
||||||
|
|
||||||
@bpf
|
@bpf
|
||||||
@bpfglobal
|
@bpfglobal
|
||||||
def somevalue1() -> c_int32:
|
def somevalue1() -> c_int32:
|
||||||
@ -21,12 +24,14 @@ def somevalue1() -> c_int32:
|
|||||||
|
|
||||||
# --- Passing examples ---
|
# --- Passing examples ---
|
||||||
|
|
||||||
|
|
||||||
# Simple constant return
|
# Simple constant return
|
||||||
@bpf
|
@bpf
|
||||||
@bpfglobal
|
@bpfglobal
|
||||||
def g1() -> c_int64:
|
def g1() -> c_int64:
|
||||||
return c_int64(42)
|
return c_int64(42)
|
||||||
|
|
||||||
|
|
||||||
# Constructor with one constant argument
|
# Constructor with one constant argument
|
||||||
@bpf
|
@bpf
|
||||||
@bpfglobal
|
@bpfglobal
|
||||||
@ -62,15 +67,17 @@ def g2() -> c_int64:
|
|||||||
# def g6() -> c_int64:
|
# def g6() -> c_int64:
|
||||||
# return c_int64(CONST)
|
# return c_int64(CONST)
|
||||||
|
|
||||||
|
|
||||||
# Constructor with multiple args
|
# Constructor with multiple args
|
||||||
#TODO: this is not working. should it work ?
|
# TODO: this is not working. should it work ?
|
||||||
@bpf
|
@bpf
|
||||||
@bpfglobal
|
@bpfglobal
|
||||||
def g7() -> c_int64:
|
def g7() -> c_int64:
|
||||||
return c_int64(1)
|
return c_int64(1)
|
||||||
|
|
||||||
|
|
||||||
# Dataclass call
|
# Dataclass call
|
||||||
#TODO: fails with dataclass
|
# TODO: fails with dataclass
|
||||||
# @dataclass
|
# @dataclass
|
||||||
# class Point:
|
# class Point:
|
||||||
# x: c_int64
|
# x: c_int64
|
||||||
@ -91,6 +98,7 @@ def sometag(ctx: c_void_p) -> c_int64:
|
|||||||
print(f"{somevalue}")
|
print(f"{somevalue}")
|
||||||
return c_int64(1)
|
return c_int64(1)
|
||||||
|
|
||||||
|
|
||||||
@bpf
|
@bpf
|
||||||
@bpfglobal
|
@bpfglobal
|
||||||
def LICENSE() -> str:
|
def LICENSE() -> str:
|
||||||
|
|||||||
41
tests/failing_tests/named_arg.py
Normal file
41
tests/failing_tests/named_arg.py
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
from pythonbpf import bpf, map, section, bpfglobal, compile
|
||||||
|
from pythonbpf.helper import XDP_PASS
|
||||||
|
from pythonbpf.maps import HashMap
|
||||||
|
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
# NOTE: This example exposes the problems with our typing system.
|
||||||
|
# We can't do steps on line 25 and 27.
|
||||||
|
# prev is of type i64**. For prev + 1, we deref it down to i64
|
||||||
|
# To assign it back to prev, we need to go back to i64**.
|
||||||
|
# We cannot allocate space for the intermediate type now.
|
||||||
|
# We probably need to track the ref/deref chain for each variable.
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@map
|
||||||
|
def count() -> HashMap:
|
||||||
|
return HashMap(key=c_int64, value=c_int64, max_entries=1)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("xdp")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
prev = count.lookup(0)
|
||||||
|
if prev:
|
||||||
|
prev = prev + 1
|
||||||
|
count.update(0, prev)
|
||||||
|
return XDP_PASS
|
||||||
|
else:
|
||||||
|
count.update(0, 1)
|
||||||
|
|
||||||
|
return XDP_PASS
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
@ -3,6 +3,7 @@ import logging
|
|||||||
from pythonbpf import compile, bpf, section, bpfglobal, compile_to_ir
|
from pythonbpf import compile, bpf, section, bpfglobal, compile_to_ir
|
||||||
from ctypes import c_void_p, c_int64
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
# This should not pass as somevalue is not declared at all.
|
# This should not pass as somevalue is not declared at all.
|
||||||
@bpf
|
@bpf
|
||||||
@section("tracepoint/syscalls/sys_enter_execve")
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
@ -11,6 +12,7 @@ def sometag(ctx: c_void_p) -> c_int64:
|
|||||||
print(f"{somevalue}") # noqa: F821
|
print(f"{somevalue}") # noqa: F821
|
||||||
return c_int64(1)
|
return c_int64(1)
|
||||||
|
|
||||||
|
|
||||||
@bpf
|
@bpf
|
||||||
@bpfglobal
|
@bpfglobal
|
||||||
def LICENSE() -> str:
|
def LICENSE() -> str:
|
||||||
|
|||||||
48
tests/failing_tests/xdp_pass.py
Normal file
48
tests/failing_tests/xdp_pass.py
Normal file
@ -0,0 +1,48 @@
|
|||||||
|
from pythonbpf import bpf, map, section, bpfglobal, compile, compile_to_ir
|
||||||
|
from pythonbpf.maps import HashMap
|
||||||
|
from pythonbpf.helper import XDP_PASS
|
||||||
|
from vmlinux import struct_xdp_md
|
||||||
|
from vmlinux import struct_ring_buffer_per_cpu # noqa: F401
|
||||||
|
|
||||||
|
# from vmlinux import struct_xdp_buff # noqa: F401
|
||||||
|
# from vmlinux import struct_xdp_md
|
||||||
|
from ctypes import c_int64
|
||||||
|
|
||||||
|
# Instructions to how to run this program
|
||||||
|
# 1. Install PythonBPF: pip install pythonbpf
|
||||||
|
# 2. Run the program: python examples/xdp_pass.py
|
||||||
|
# 3. Run the program with sudo: sudo tools/check.sh run examples/xdp_pass.o
|
||||||
|
# 4. Attach object file to any network device with something like ./check.sh xdp examples/xdp_pass.o tailscale0
|
||||||
|
# 5. send traffic through the device and observe effects
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@map
|
||||||
|
def count() -> HashMap:
|
||||||
|
return HashMap(key=c_int64, value=c_int64, max_entries=1)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("xdp")
|
||||||
|
def hello_world(ctx: struct_xdp_md) -> c_int64:
|
||||||
|
key = 0
|
||||||
|
one = 1
|
||||||
|
prev = count().lookup(key)
|
||||||
|
if prev:
|
||||||
|
prevval = prev + 1
|
||||||
|
print(f"count: {prevval}")
|
||||||
|
count().update(key, prevval)
|
||||||
|
return XDP_PASS
|
||||||
|
else:
|
||||||
|
count().update(key, one)
|
||||||
|
|
||||||
|
return XDP_PASS
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile_to_ir("xdp_pass.py", "xdp_pass.ll")
|
||||||
32
tests/passing_tests/conditionals/and.py
Normal file
32
tests/passing_tests/conditionals/and.py
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
from pythonbpf import bpf, map, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64, c_uint64
|
||||||
|
from pythonbpf.maps import HashMap
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@map
|
||||||
|
def last() -> HashMap:
|
||||||
|
return HashMap(key=c_uint64, value=c_uint64, max_entries=3)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
last.update(0, 1)
|
||||||
|
last.update(1, 2)
|
||||||
|
x = last.lookup(0)
|
||||||
|
y = last.lookup(1)
|
||||||
|
if x and y:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
21
tests/passing_tests/conditionals/bool.py
Normal file
21
tests/passing_tests/conditionals/bool.py
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
if True:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
21
tests/passing_tests/conditionals/const_binop.py
Normal file
21
tests/passing_tests/conditionals/const_binop.py
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
if (0 + 1) * 0:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
21
tests/passing_tests/conditionals/const_int.py
Normal file
21
tests/passing_tests/conditionals/const_int.py
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
if 0:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
30
tests/passing_tests/conditionals/map.py
Normal file
30
tests/passing_tests/conditionals/map.py
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
from pythonbpf import bpf, map, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64, c_uint64
|
||||||
|
from pythonbpf.maps import HashMap
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@map
|
||||||
|
def last() -> HashMap:
|
||||||
|
return HashMap(key=c_uint64, value=c_uint64, max_entries=3)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
# last.update(0, 1)
|
||||||
|
tsp = last.lookup(0)
|
||||||
|
if tsp:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
30
tests/passing_tests/conditionals/map_comp.py
Normal file
30
tests/passing_tests/conditionals/map_comp.py
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
from pythonbpf import bpf, map, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64, c_uint64
|
||||||
|
from pythonbpf.maps import HashMap
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@map
|
||||||
|
def last() -> HashMap:
|
||||||
|
return HashMap(key=c_uint64, value=c_uint64, max_entries=3)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
last.update(0, 1)
|
||||||
|
tsp = last.lookup(0)
|
||||||
|
if tsp > 0:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
30
tests/passing_tests/conditionals/not.py
Normal file
30
tests/passing_tests/conditionals/not.py
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
from pythonbpf import bpf, map, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64, c_uint64
|
||||||
|
from pythonbpf.maps import HashMap
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@map
|
||||||
|
def last() -> HashMap:
|
||||||
|
return HashMap(key=c_uint64, value=c_uint64, max_entries=3)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
# last.update(0, 1)
|
||||||
|
tsp = last.lookup(0)
|
||||||
|
if not tsp:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
32
tests/passing_tests/conditionals/or.py
Normal file
32
tests/passing_tests/conditionals/or.py
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
from pythonbpf import bpf, map, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64, c_uint64
|
||||||
|
from pythonbpf.maps import HashMap
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@map
|
||||||
|
def last() -> HashMap:
|
||||||
|
return HashMap(key=c_uint64, value=c_uint64, max_entries=3)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
last.update(0, 1)
|
||||||
|
# last.update(1, 2)
|
||||||
|
x = last.lookup(0)
|
||||||
|
y = last.lookup(1)
|
||||||
|
if x or y:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
29
tests/passing_tests/conditionals/struct_access.py
Normal file
29
tests/passing_tests/conditionals/struct_access.py
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
from pythonbpf import bpf, struct, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64, c_uint64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@struct
|
||||||
|
class data_t:
|
||||||
|
pid: c_uint64
|
||||||
|
ts: c_uint64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
dat = data_t()
|
||||||
|
if dat.ts:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
23
tests/passing_tests/conditionals/type_mismatch.py
Normal file
23
tests/passing_tests/conditionals/type_mismatch.py
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64, c_int32
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
x = 0
|
||||||
|
y = c_int32(0)
|
||||||
|
if x == y:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
22
tests/passing_tests/conditionals/var.py
Normal file
22
tests/passing_tests/conditionals/var.py
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
x = 0
|
||||||
|
if x:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
22
tests/passing_tests/conditionals/var_binop.py
Normal file
22
tests/passing_tests/conditionals/var_binop.py
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
x = 0
|
||||||
|
if x * 1:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
22
tests/passing_tests/conditionals/var_comp.py
Normal file
22
tests/passing_tests/conditionals/var_comp.py
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
x = 2
|
||||||
|
if x > 3:
|
||||||
|
print("Hello, World!")
|
||||||
|
else:
|
||||||
|
print("Goodbye, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
18
tests/passing_tests/return/binop_const.py
Normal file
18
tests/passing_tests/return/binop_const.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
print("Hello, World!")
|
||||||
|
return 1 + 1 - 2
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
19
tests/passing_tests/return/binop_var.py
Normal file
19
tests/passing_tests/return/binop_var.py
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
print("Hello, World!")
|
||||||
|
a = 2
|
||||||
|
return a - 2
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
18
tests/passing_tests/return/bool.py
Normal file
18
tests/passing_tests/return/bool.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
print("Hello, World!")
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
18
tests/passing_tests/return/int.py
Normal file
18
tests/passing_tests/return/int.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
print("Hello, World!")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
18
tests/passing_tests/return/null.py
Normal file
18
tests/passing_tests/return/null.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
print("Hello, World!")
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
20
tests/passing_tests/return/typecast_binops.py
Normal file
20
tests/passing_tests/return/typecast_binops.py
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int32
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int32:
|
||||||
|
print("Hello, World!")
|
||||||
|
a = 1 # int64
|
||||||
|
x = 1 # int64
|
||||||
|
return c_int32(a - x) # typecast to int32
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
18
tests/passing_tests/return/typecast_const.py
Normal file
18
tests/passing_tests/return/typecast_const.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int32
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int32:
|
||||||
|
print("Hello, World!")
|
||||||
|
return c_int32(1)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
19
tests/passing_tests/return/typecast_var.py
Normal file
19
tests/passing_tests/return/typecast_var.py
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int32
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int32:
|
||||||
|
print("Hello, World!")
|
||||||
|
a = 1 # int64
|
||||||
|
return c_int32(a) # typecast to int32
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
19
tests/passing_tests/return/var.py
Normal file
19
tests/passing_tests/return/var.py
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
print("Hello, World!")
|
||||||
|
a = 1
|
||||||
|
return a
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
19
tests/passing_tests/return/xdp.py
Normal file
19
tests/passing_tests/return/xdp.py
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
from pythonbpf import bpf, section, bpfglobal, compile
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
from pythonbpf.helper import XDP_PASS
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("tracepoint/syscalls/sys_enter_execve")
|
||||||
|
def hello_world(ctx: c_void_p) -> c_int64:
|
||||||
|
print("Hello, World!")
|
||||||
|
return XDP_PASS
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile()
|
||||||
20
tests/passing_tests/var_rval.py
Normal file
20
tests/passing_tests/var_rval.py
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
from pythonbpf import compile, bpf, section, bpfglobal
|
||||||
|
from ctypes import c_void_p, c_int64
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@section("sometag1")
|
||||||
|
def sometag(ctx: c_void_p) -> c_int64:
|
||||||
|
a = 1 - 1
|
||||||
|
return c_int64(a)
|
||||||
|
|
||||||
|
|
||||||
|
@bpf
|
||||||
|
@bpfglobal
|
||||||
|
def LICENSE() -> str:
|
||||||
|
return "GPL"
|
||||||
|
|
||||||
|
|
||||||
|
compile(loglevel=logging.INFO)
|
||||||
379
tools/vmlinux-gen.py
Executable file
379
tools/vmlinux-gen.py
Executable file
@ -0,0 +1,379 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
BTF to Python ctypes Converter
|
||||||
|
Converts Linux kernel BTF (BPF Type Format) to Python ctypes definitions.
|
||||||
|
|
||||||
|
This tool automates the process of:
|
||||||
|
1. Dumping BTF from vmlinux
|
||||||
|
2. Preprocessing enum definitions
|
||||||
|
3. Processing struct kioctx to extract anonymous nested structs
|
||||||
|
4. Running C preprocessor
|
||||||
|
5. Converting to Python ctypes using clang2py
|
||||||
|
6. Post-processing the output
|
||||||
|
|
||||||
|
Requirements:
|
||||||
|
- bpftool
|
||||||
|
- clang
|
||||||
|
- ctypeslib2 (pip install ctypeslib2)
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
|
||||||
|
class BTFConverter:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
btf_source="/sys/kernel/btf/vmlinux",
|
||||||
|
output_file="vmlinux.py",
|
||||||
|
keep_intermediate=False,
|
||||||
|
verbose=False,
|
||||||
|
):
|
||||||
|
self.btf_source = btf_source
|
||||||
|
self.output_file = output_file
|
||||||
|
self.keep_intermediate = keep_intermediate
|
||||||
|
self.verbose = verbose
|
||||||
|
self.temp_dir = tempfile.mkdtemp() if not keep_intermediate else "."
|
||||||
|
|
||||||
|
def log(self, message):
|
||||||
|
"""Print message if verbose mode is enabled."""
|
||||||
|
if self.verbose:
|
||||||
|
print(f"[*] {message}")
|
||||||
|
|
||||||
|
def run_command(self, cmd, description):
|
||||||
|
"""Run a shell command and handle errors."""
|
||||||
|
self.log(f"{description}...")
|
||||||
|
try:
|
||||||
|
result = subprocess.run(
|
||||||
|
cmd, shell=True, check=True, capture_output=True, text=True
|
||||||
|
)
|
||||||
|
if self.verbose and result.stdout:
|
||||||
|
print(result.stdout)
|
||||||
|
return result
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
print(f"Error during {description}:", file=sys.stderr)
|
||||||
|
print(e.stderr, file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
def step1_dump_btf(self):
|
||||||
|
"""Step 1: Dump BTF from vmlinux."""
|
||||||
|
vmlinux_h = os.path.join(self.temp_dir, "vmlinux.h")
|
||||||
|
cmd = f"bpftool btf dump file {self.btf_source} format c > {vmlinux_h}"
|
||||||
|
self.run_command(cmd, "Dumping BTF from vmlinux")
|
||||||
|
return vmlinux_h
|
||||||
|
|
||||||
|
def step2_preprocess_enums(self, input_file):
|
||||||
|
"""Step 1.5: Preprocess enum definitions."""
|
||||||
|
self.log("Preprocessing enum definitions...")
|
||||||
|
|
||||||
|
with open(input_file, "r") as f:
|
||||||
|
original_code = f.read()
|
||||||
|
|
||||||
|
# Extract anonymous enums
|
||||||
|
enums = re.findall(
|
||||||
|
r"(?<!typedef\s)(enum\s*\{[^}]*\})\s*(\w+)\s*(?::\s*\d+)?\s*;",
|
||||||
|
original_code,
|
||||||
|
)
|
||||||
|
enum_defs = [enum_block + ";" for enum_block, _ in enums]
|
||||||
|
|
||||||
|
# Replace anonymous enums with int declarations
|
||||||
|
processed_code = re.sub(
|
||||||
|
r"(?<!typedef\s)enum\s*\{[^}]*\}\s*(\w+)\s*(?::\s*\d+)?\s*;",
|
||||||
|
r"int \1;",
|
||||||
|
original_code,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Prepend enum definitions
|
||||||
|
if enum_defs:
|
||||||
|
enum_text = "\n".join(enum_defs) + "\n\n"
|
||||||
|
processed_code = enum_text + processed_code
|
||||||
|
|
||||||
|
output_file = os.path.join(self.temp_dir, "vmlinux_processed.h")
|
||||||
|
with open(output_file, "w") as f:
|
||||||
|
f.write(processed_code)
|
||||||
|
|
||||||
|
return output_file
|
||||||
|
|
||||||
|
def step2_5_process_kioctx(self, input_file):
|
||||||
|
# TODO: this is a very bad bug and design decision. A single struct has an issue mostly.
|
||||||
|
"""Step 2.5: Process struct kioctx to extract nested anonymous structs."""
|
||||||
|
self.log("Processing struct kioctx nested structs...")
|
||||||
|
|
||||||
|
with open(input_file, "r") as f:
|
||||||
|
content = f.read()
|
||||||
|
|
||||||
|
# Pattern to match struct kioctx with its full body (handles multiple nesting levels)
|
||||||
|
kioctx_pattern = (
|
||||||
|
r"struct\s+kioctx\s*\{(?:[^{}]|\{(?:[^{}]|\{[^{}]*\})*\})*\}\s*;"
|
||||||
|
)
|
||||||
|
|
||||||
|
def process_kioctx_replacement(match):
|
||||||
|
full_struct = match.group(0)
|
||||||
|
self.log(f"Found struct kioctx, length: {len(full_struct)} chars")
|
||||||
|
|
||||||
|
# Extract the struct body (everything between outermost { and })
|
||||||
|
body_match = re.search(
|
||||||
|
r"struct\s+kioctx\s*\{(.*)\}\s*;", full_struct, re.DOTALL
|
||||||
|
)
|
||||||
|
if not body_match:
|
||||||
|
return full_struct
|
||||||
|
|
||||||
|
body = body_match.group(1)
|
||||||
|
|
||||||
|
# Find all anonymous structs within the body
|
||||||
|
# Pattern: struct { ... } followed by ; (not a member name)
|
||||||
|
# anon_struct_pattern = r"struct\s*\{[^}]*\}"
|
||||||
|
|
||||||
|
anon_structs = []
|
||||||
|
anon_counter = 4 # Start from 4, counting down to 1
|
||||||
|
|
||||||
|
def replace_anonymous_struct(m):
|
||||||
|
nonlocal anon_counter
|
||||||
|
anon_struct_content = m.group(0)
|
||||||
|
|
||||||
|
# Extract the body of the anonymous struct
|
||||||
|
anon_body_match = re.search(
|
||||||
|
r"struct\s*\{(.*)\}", anon_struct_content, re.DOTALL
|
||||||
|
)
|
||||||
|
if not anon_body_match:
|
||||||
|
return anon_struct_content
|
||||||
|
|
||||||
|
anon_body = anon_body_match.group(1)
|
||||||
|
|
||||||
|
# Create the named struct definition
|
||||||
|
anon_name = f"__anon{anon_counter}"
|
||||||
|
member_name = f"a{anon_counter}"
|
||||||
|
|
||||||
|
# Store the struct definition
|
||||||
|
anon_structs.append(f"struct {anon_name} {{{anon_body}}};")
|
||||||
|
|
||||||
|
anon_counter -= 1
|
||||||
|
|
||||||
|
# Return the member declaration
|
||||||
|
return f"struct {anon_name} {member_name}"
|
||||||
|
|
||||||
|
# Process the body, finding and replacing anonymous structs
|
||||||
|
# We need to be careful to only match anonymous structs followed by ;
|
||||||
|
processed_body = body
|
||||||
|
|
||||||
|
# Find all occurrences and process them
|
||||||
|
pattern_with_semicolon = r"struct\s*\{([^}]*)\}\s*;"
|
||||||
|
matches = list(re.finditer(pattern_with_semicolon, body, re.DOTALL))
|
||||||
|
|
||||||
|
if not matches:
|
||||||
|
self.log("No anonymous structs found in kioctx")
|
||||||
|
return full_struct
|
||||||
|
|
||||||
|
self.log(f"Found {len(matches)} anonymous struct(s)")
|
||||||
|
|
||||||
|
# Process in reverse order to maintain string positions
|
||||||
|
for match in reversed(matches):
|
||||||
|
anon_struct_content = match.group(1)
|
||||||
|
start_pos = match.start()
|
||||||
|
end_pos = match.end()
|
||||||
|
|
||||||
|
# Create the named struct definition
|
||||||
|
anon_name = f"__anon{anon_counter}"
|
||||||
|
member_name = f"a{anon_counter}"
|
||||||
|
|
||||||
|
# Store the struct definition
|
||||||
|
anon_structs.insert(0, f"struct {anon_name} {{{anon_struct_content}}};")
|
||||||
|
|
||||||
|
# Replace in the body
|
||||||
|
replacement = f"struct {anon_name} {member_name};"
|
||||||
|
processed_body = (
|
||||||
|
processed_body[:start_pos] + replacement + processed_body[end_pos:]
|
||||||
|
)
|
||||||
|
|
||||||
|
anon_counter -= 1
|
||||||
|
|
||||||
|
# Rebuild the complete definition
|
||||||
|
if anon_structs:
|
||||||
|
# Prepend the anonymous struct definitions
|
||||||
|
anon_definitions = "\n".join(anon_structs) + "\n\n"
|
||||||
|
new_struct = f"struct kioctx {{{processed_body}}};"
|
||||||
|
return anon_definitions + new_struct
|
||||||
|
else:
|
||||||
|
return full_struct
|
||||||
|
|
||||||
|
# Apply the transformation
|
||||||
|
processed_content = re.sub(
|
||||||
|
kioctx_pattern, process_kioctx_replacement, content, flags=re.DOTALL
|
||||||
|
)
|
||||||
|
|
||||||
|
output_file = os.path.join(self.temp_dir, "vmlinux_kioctx_processed.h")
|
||||||
|
with open(output_file, "w") as f:
|
||||||
|
f.write(processed_content)
|
||||||
|
|
||||||
|
self.log(f"Saved kioctx-processed output to {output_file}")
|
||||||
|
return output_file
|
||||||
|
|
||||||
|
def step3_run_preprocessor(self, input_file):
    """Step 3: Run the C preprocessor over the processed header.

    Args:
        input_file: Path to the kioctx-processed header from the previous step.

    Returns:
        Path to the preprocessed output file (vmlinux.i) inside temp_dir.
    """
    output_file = os.path.join(self.temp_dir, "vmlinux.i")
    # NOTE(review): command is a shell string with redirection; both paths come
    # from our own pipeline, so injection risk is low, but this presumably
    # requires run_command to execute with shell=True — confirm upstream.
    cmd = f"clang -E {input_file} > {output_file}"
    self.run_command(cmd, "Running C preprocessor")
    return output_file
|
||||||
|
|
||||||
|
def step4_convert_to_ctypes(self, input_file):
    """Step 4: Convert the preprocessed header to Python ctypes using clang2py.

    Args:
        input_file: Path to the preprocessed translation unit (vmlinux.i).

    Returns:
        Path to the raw clang2py-generated Python file inside temp_dir.
    """
    output_file = os.path.join(self.temp_dir, "vmlinux_raw.py")
    cmd = (
        f"clang2py {input_file} -o {output_file} "
        f'--clang-args="-fno-ms-extensions -I/usr/include -I/usr/include/linux"'
    )
    self.run_command(cmd, "Converting to Python ctypes")
    return output_file
|
||||||
|
|
||||||
|
def step5_postprocess(self, input_file):
    """Step 5: Post-process the generated Python file into the final output.

    Cleans up entries clang2py emits that ctypes cannot load, then writes
    the result to self.output_file.

    Args:
        input_file: Path to the raw clang2py-generated Python file.
    """
    self.log("Post-processing Python ctypes definitions...")

    with open(input_file, "r") as f:
        data = f.read()

    # Remove zero-width anonymous bitfield entries entirely,
    # e.g. ('_45', ctypes.c_int64, 0)
    data = re.sub(r"\('_[0-9]+',\s*ctypes\.[a-zA-Z0-9_]+,\s*0\),?\s*\n?", "", data)

    # Drop the bit-width from anonymous fields:
    # ('_20', ctypes.c_uint64, 64) -> ('_20', ctypes.c_uint64)
    data = re.sub(
        r"\('(_[0-9]+)',\s*(ctypes\.[a-zA-Z0-9_]+),\s*[0-9]+\)", r"('\1', \2)", data
    )

    # c_char cannot carry a bitfield width; substitute c_uint8:
    # ('_20', ctypes.c_char, 8) -> ('_20', ctypes.c_uint8, 8)
    data = re.sub(r"(ctypes\.c_char)(\s*,\s*\d+\))", r"ctypes.c_uint8\2", data)

    def repl(m):
        # Widen c_bool bitfields larger than 8 bits to c_uint32
        # (a 1-byte bool cannot hold a wider bitfield); leave others as-is.
        name, bits = m.groups()
        return (
            f"('{name}', ctypes.c_uint32, {bits})" if int(bits) > 8 else m.group(0)
        )

    data = re.sub(r"\('([^']+)',\s*ctypes\.c_bool,\s*(\d+)\)", repl, data)

    # Strip the spurious ctypes. prefix from names that are not ctypes types
    # (presumably kernel enums/typedefs mis-qualified by clang2py).
    invalid_ctypes = ["bpf_iter_state", "_cache_type", "fs_context_purpose"]
    for name in invalid_ctypes:
        data = re.sub(rf"\bctypes\.{name}\b", name, data)

    with open(self.output_file, "w") as f:
        f.write(data)

    self.log(f"Saved final output to {self.output_file}")
|
||||||
|
|
||||||
|
def cleanup(self):
    """Remove temporary files if not keeping them."""
    # Keep everything when intermediates were requested, and never delete
    # the current directory.
    if self.keep_intermediate or self.temp_dir == ".":
        return
    self.log(f"Cleaning up temporary directory: {self.temp_dir}")
    import shutil

    shutil.rmtree(self.temp_dir, ignore_errors=True)
|
||||||
|
|
||||||
|
def convert(self):
    """Run the complete conversion pipeline."""
    try:
        self.log("Starting BTF to Python ctypes conversion...")

        # Fail fast if any external tool is missing.
        self.check_dependencies()

        # Each stage consumes the file produced by the previous one.
        header = self.step1_dump_btf()
        header = self.step2_preprocess_enums(header)
        header = self.step2_5_process_kioctx(header)
        preprocessed = self.step3_run_preprocessor(header)
        raw_py = self.step4_convert_to_ctypes(preprocessed)
        self.step5_postprocess(raw_py)

        print(f"\n✓ Conversion complete! Output saved to: {self.output_file}")
    except Exception as e:
        import traceback

        print(f"\n✗ Error during conversion: {e}", file=sys.stderr)
        traceback.print_exc()
        sys.exit(1)
    finally:
        self.cleanup()
|
||||||
|
|
||||||
|
def check_dependencies(self):
    """Check if required tools are available."""
    self.log("Checking dependencies...")

    # Probe each required tool with a cheap --version invocation.
    probes = {
        "bpftool": "bpftool --version",
        "clang": "clang --version",
        "clang2py": "clang2py --version",
    }

    missing = []
    for tool, probe in probes.items():
        try:
            subprocess.run(probe, shell=True, check=True, capture_output=True)
        except subprocess.CalledProcessError:
            missing.append(tool)

    if not missing:
        return

    print("Error: Missing required dependencies:", file=sys.stderr)
    for tool in missing:
        print(f" - {tool}", file=sys.stderr)
    if "clang2py" in missing:
        print("\nInstall ctypeslib2: pip install ctypeslib2", file=sys.stderr)
    sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Command-line entry point: parse arguments and run the converter."""
    arg_parser = argparse.ArgumentParser(
        description="Convert Linux kernel BTF to Python ctypes definitions",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
%(prog)s
%(prog)s -o kernel_types.py
%(prog)s --btf-source /sys/kernel/btf/custom_module -k -v
""",
    )
    arg_parser.add_argument(
        "--btf-source",
        default="/sys/kernel/btf/vmlinux",
        help="Path to BTF source (default: /sys/kernel/btf/vmlinux)",
    )
    arg_parser.add_argument(
        "-o",
        "--output",
        default="vmlinux.py",
        help="Output Python file (default: vmlinux.py)",
    )
    arg_parser.add_argument(
        "-k",
        "--keep-intermediate",
        action="store_true",
        help="Keep intermediate files (vmlinux.h, vmlinux_processed.h, etc.)",
    )
    arg_parser.add_argument(
        "-v", "--verbose", action="store_true", help="Enable verbose output"
    )

    opts = arg_parser.parse_args()

    # Build the converter from the parsed options and run the pipeline.
    BTFConverter(
        btf_source=opts.btf_source,
        output_file=opts.output,
        keep_intermediate=opts.keep_intermediate,
        verbose=opts.verbose,
    ).convert()
|
||||||
|
|
||||||
|
|
||||||
|
# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
|
||||||
Reference in New Issue
Block a user