Initialized MiniProfiler project
- Contains the host code with a protocol implementation, data analyser and web-based visualiser
This commit is contained in:
6
host/miniprofiler/__init__.py
Normal file
6
host/miniprofiler/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""MiniProfiler Host Application
|
||||
|
||||
A Python-based host application for profiling embedded STM32 applications.
|
||||
"""
|
||||
|
||||
__version__ = "0.1.0"
|
||||
314
host/miniprofiler/analyzer.py
Normal file
314
host/miniprofiler/analyzer.py
Normal file
@@ -0,0 +1,314 @@
|
||||
"""Profiling data analysis and visualization data generation.
|
||||
|
||||
This module processes raw profiling records to build call trees,
|
||||
compute statistics, and generate data structures for visualization.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import List, Dict, Optional, Any
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from .protocol import ProfileRecord
|
||||
from .symbolizer import Symbolizer
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class FunctionStats:
    """Statistics for a single function.

    Attributes:
        name: Resolved function name.
        address: Function address in the target binary.
        call_count: Number of completed invocations recorded.
        total_time_us: Sum of all recorded durations, in microseconds.
        min_time_us: Shortest recorded duration; stays float('inf') until
            the first update() call.
        max_time_us: Longest recorded duration.
        self_time_us: Time excluding children. NOTE(review): nothing in
            this module ever updates it -- confirm it is still needed.
    """
    name: str
    address: int
    call_count: int = 0
    total_time_us: int = 0
    # Annotated as float (was int): the sentinel default is float('inf'),
    # so the attribute is float-typed until the first measurement arrives.
    min_time_us: float = float('inf')
    max_time_us: int = 0
    self_time_us: int = 0  # Time excluding children

    def update(self, duration_us: int):
        """Update statistics with a new duration measurement.

        Args:
            duration_us: Measured execution time in microseconds.
        """
        self.call_count += 1
        self.total_time_us += duration_us
        self.min_time_us = min(self.min_time_us, duration_us)
        self.max_time_us = max(self.max_time_us, duration_us)

    @property
    def avg_time_us(self) -> float:
        """Average execution time in microseconds (0 if never called)."""
        return self.total_time_us / self.call_count if self.call_count > 0 else 0
|
||||
|
||||
@dataclass
class CallTreeNode:
    """One node of the reconstructed call tree.

    Attributes:
        name: Function name for this node.
        address: Function address.
        entry_time: Entry timestamp (microseconds).
        duration_us: Time spent in this call, in microseconds.
        depth: Call-stack depth of this node.
        children: Nested calls made from this node.
    """
    name: str
    address: int
    entry_time: int
    duration_us: int
    depth: int
    children: List['CallTreeNode'] = field(default_factory=list)

    def add_child(self, node: 'CallTreeNode'):
        """Append *node* to this node's children."""
        self.children.append(node)

    def to_flamegraph_dict(self) -> Dict[str, Any]:
        """Render this subtree in d3-flame-graph format.

        Returns:
            Dictionary in the format:
            {
                "name": "function_name",
                "value": duration_in_microseconds,
                "children": [child_dicts...]   # key omitted for leaves
            }
        """
        entry: Dict[str, Any] = {"name": self.name, "value": self.duration_us}

        # Leaves carry no "children" key at all, per d3-flame-graph convention.
        if self.children:
            entry["children"] = [kid.to_flamegraph_dict() for kid in self.children]

        return entry

    def to_timeline_dict(self) -> Dict[str, Any]:
        """Render this subtree for a timeline / flame-chart view.

        Returns:
            Dictionary with timing information for Plotly timeline.
        """
        nested = [kid.to_timeline_dict() for kid in self.children]
        return {
            "name": self.name,
            "start": self.entry_time,
            "duration": self.duration_us,
            "depth": self.depth,
            "children": nested,
        }
||||
|
||||
|
||||
class ProfileAnalyzer:
    """Analyzes profiling data and generates visualization data.

    Raw ProfileRecords are accumulated via add_records(); the analyzer then
    derives per-function statistics, a call tree, and JSON-ready structures
    for flame-graph, timeline, and statistics-table views.
    """

    def __init__(self, symbolizer: Optional[Symbolizer] = None):
        """Initialize the analyzer.

        Args:
            symbolizer: Symbolizer for resolving addresses to names
        """
        self.symbolizer = symbolizer
        self.records: List[ProfileRecord] = []
        self.stats: Dict[int, FunctionStats] = {}  # addr -> stats
        self.call_tree: Optional[CallTreeNode] = None
        self.timeline_events: List[Dict[str, Any]] = []

    def add_records(self, records: List[ProfileRecord]):
        """Add profiling records for analysis.

        Note: derived data (stats, call tree) is NOT invalidated here;
        callers recompute via compute_statistics()/build_call_tree().

        Args:
            records: List of ProfileRecord objects
        """
        self.records.extend(records)
        logger.debug(f"Added {len(records)} records, total: {len(self.records)}")

    def clear(self):
        """Clear all recorded data and every derived structure."""
        self.records.clear()
        self.stats.clear()
        self.call_tree = None
        self.timeline_events.clear()
        logger.info("Cleared all profiling data")

    def _resolve_name(self, addr: int) -> str:
        """Resolve address to function name.

        Falls back to a hex placeholder when no symbolizer is configured.
        """
        if self.symbolizer:
            return self.symbolizer.resolve_name(addr)
        return f"func_0x{addr:08x}"

    def compute_statistics(self) -> Dict[int, FunctionStats]:
        """Compute statistics for all functions.

        Rebuilds self.stats from scratch on every call.

        Returns:
            Dictionary mapping addresses to FunctionStats
        """
        self.stats.clear()

        for record in self.records:
            addr = record.func_addr
            name = self._resolve_name(addr)

            # Lazily create one FunctionStats per unique address.
            if addr not in self.stats:
                self.stats[addr] = FunctionStats(name=name, address=addr)

            self.stats[addr].update(record.duration_us)

        logger.info(f"Computed statistics for {len(self.stats)} functions")
        return self.stats

    def build_call_tree(self) -> Optional[CallTreeNode]:
        """Build call tree from profiling records.

        The call tree is built using the depth field to determine
        parent-child relationships.

        NOTE(review): assumes that, after sorting by entry_time, each
        record's depth is consistent with proper nesting (a depth-d record
        follows its depth-(d-1) parent). Confirm against the firmware's
        capture ordering.

        Returns:
            Root node of the call tree, or None if no records
        """
        if not self.records:
            return None

        # Sort records by entry time to process in chronological order
        sorted_records = sorted(self.records, key=lambda r: r.entry_time)

        # Stack to track current path in the tree
        # stack[depth] = node at that depth
        stack: List[CallTreeNode] = []
        root = None

        for record in sorted_records:
            name = self._resolve_name(record.func_addr)

            node = CallTreeNode(
                name=name,
                address=record.func_addr,
                entry_time=record.entry_time,
                duration_us=record.duration_us,
                depth=record.depth
            )

            # Adjust stack to current depth (pop frames we have returned from)
            while len(stack) > record.depth:
                stack.pop()

            # Add node to tree
            if record.depth == 0:
                # Root level function
                if root is None:
                    root = node
                    stack = [root]
                else:
                    # Multiple root-level functions - create synthetic root
                    # (only once; afterwards root.name is "__root__")
                    if not isinstance(root.name, str) or not root.name.startswith("__root__"):
                        synthetic_root = CallTreeNode(
                            name="__root__",
                            address=0,
                            entry_time=0,
                            duration_us=0,
                            depth=-1
                        )
                        synthetic_root.add_child(root)
                        root = synthetic_root
                        stack = [root]

                    root.add_child(node)
                    # Update root duration to encompass all children
                    # NOTE(review): this folds a timestamp (entry_time +
                    # duration) into duration_us of the synthetic root --
                    # verify this is the intended flame-graph width.
                    root.duration_us = max(root.duration_us,
                                           node.entry_time + node.duration_us)

            else:
                # Child function: attach under the node one level shallower
                if len(stack) >= record.depth:
                    parent = stack[record.depth - 1]
                    parent.add_child(node)
                else:
                    # No parent on the stack: depth jumped by more than one
                    logger.warning(f"Orphan node at depth {record.depth}: {name}")
                    continue

            # Push to stack if we're going deeper
            if len(stack) == record.depth:
                stack.append(node)
            elif len(stack) == record.depth + 1:
                # Sibling at the same depth replaces the previous frame
                stack[record.depth] = node

        self.call_tree = root
        logger.info(f"Built call tree with {len(sorted_records)} nodes")
        return root

    def to_flamegraph_json(self) -> Dict[str, Any]:
        """Generate flame graph data in d3-flame-graph format.

        Builds the call tree lazily if it has not been built yet.

        Returns:
            Dictionary suitable for d3-flame-graph
        """
        if self.call_tree is None:
            self.build_call_tree()

        # Still None means there were no records at all.
        if self.call_tree is None:
            return {"name": "root", "value": 0, "children": []}

        return self.call_tree.to_flamegraph_dict()

    def to_timeline_json(self) -> List[Dict[str, Any]]:
        """Generate timeline data for flame chart visualization.

        Returns:
            List of events for timeline/flame chart, in chronological order
        """
        events = []

        for record in sorted(self.records, key=lambda r: r.entry_time):
            name = self._resolve_name(record.func_addr)

            events.append({
                "name": name,
                "start": record.entry_time,
                "end": record.entry_time + record.duration_us,
                "duration": record.duration_us,
                "depth": record.depth
            })

        return events

    def to_statistics_json(self) -> List[Dict[str, Any]]:
        """Generate statistics table data.

        Computes statistics lazily if they have not been computed yet.

        Returns:
            List of function statistics dictionaries, hottest first
        """
        if not self.stats:
            self.compute_statistics()

        stats_list = []
        for func_stats in self.stats.values():
            stats_list.append({
                "name": func_stats.name,
                "address": f"0x{func_stats.address:08x}",
                "calls": func_stats.call_count,
                "total_us": func_stats.total_time_us,
                "avg_us": func_stats.avg_time_us,
                "min_us": func_stats.min_time_us,
                "max_us": func_stats.max_time_us,
            })

        # Sort by total time (descending)
        stats_list.sort(key=lambda x: x["total_us"], reverse=True)

        return stats_list

    def get_summary(self) -> Dict[str, Any]:
        """Get summary statistics.

        Returns:
            Dictionary with record/function counts, cumulative time, and
            the single hottest function by total time
        """
        if not self.stats:
            self.compute_statistics()

        total_records = len(self.records)
        total_functions = len(self.stats)
        total_time = sum(s.total_time_us for s in self.stats.values())

        hottest = None
        if self.stats:
            hottest = max(self.stats.values(), key=lambda s: s.total_time_us)

        return {
            "total_records": total_records,
            "total_functions": total_functions,
            "total_time_us": total_time,
            "hottest_function": hottest.name if hottest else None,
            "hottest_time_us": hottest.total_time_us if hottest else 0,
        }
|
||||
90
host/miniprofiler/cli.py
Normal file
90
host/miniprofiler/cli.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""Command-line interface for MiniProfiler."""
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from .web_server import create_app
|
||||
|
||||
|
||||
def setup_logging(verbose: bool = False):
    """Initialize the root logging configuration.

    Args:
        verbose: When True, log at DEBUG level; otherwise INFO.
    """
    chosen_level = logging.DEBUG if verbose else logging.INFO
    logging.basicConfig(
        level=chosen_level,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
    )
|
||||
|
||||
|
||||
def main():
    """Main entry point for the CLI.

    Parses command-line options, configures logging, prints a startup
    banner, and runs the web server until interrupted or an error occurs.
    """
    parser = argparse.ArgumentParser(
        description='MiniProfiler - Real-time Embedded Profiling Visualization'
    )

    parser.add_argument(
        '--host',
        type=str,
        default='0.0.0.0',
        help='Host address to bind to (default: 0.0.0.0)'
    )

    parser.add_argument(
        '--port',
        type=int,
        default=5000,
        help='Port number to listen on (default: 5000)'
    )

    parser.add_argument(
        '--debug',
        action='store_true',
        help='Enable debug mode'
    )

    parser.add_argument(
        '--verbose', '-v',
        action='store_true',
        help='Enable verbose logging'
    )

    args = parser.parse_args()

    # Setup logging
    setup_logging(args.verbose)

    # Create and run the web server
    print(f"""
╔═══════════════════════════════════════════════════════════╗
║                     MiniProfiler                          ║
║         Real-time Embedded Profiling Tool                 ║
╚═══════════════════════════════════════════════════════════╝

Starting web server...
  Host: {args.host}
  Port: {args.port}

Open your browser and navigate to:
  http://localhost:{args.port}

Press Ctrl+C to stop the server.
""")

    try:
        server = create_app(args.host, args.port)
        # Blocks until the server stops or an exception is raised.
        server.run(debug=args.debug)
    except KeyboardInterrupt:
        # Ctrl+C is the normal way to stop the server; exit cleanly.
        print("\n\nShutting down gracefully...")
        sys.exit(0)
    except Exception as e:
        print(f"\nError: {e}", file=sys.stderr)
        sys.exit(1)
|
||||
|
||||
|
||||
# Allow running the CLI directly (e.g. `python cli.py`).
if __name__ == '__main__':
    main()
|
||||
280
host/miniprofiler/protocol.py
Normal file
280
host/miniprofiler/protocol.py
Normal file
@@ -0,0 +1,280 @@
|
||||
"""Protocol definitions and packet structures for MiniProfiler.
|
||||
|
||||
This module defines the binary protocol used for communication between
|
||||
the embedded device and the host application.
|
||||
"""
|
||||
|
||||
import struct
|
||||
from enum import IntEnum
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional
|
||||
import crc
|
||||
|
||||
|
||||
class Command(IntEnum):
    """Commands sent from host to embedded device.

    Each value is the single command byte placed into the packet by
    CommandPacket.to_bytes().
    """
    START_PROFILING = 0x01
    STOP_PROFILING = 0x02
    GET_STATUS = 0x03
    RESET_BUFFERS = 0x04
    GET_METADATA = 0x05
    SET_CONFIG = 0x06
|
||||
|
||||
|
||||
class ResponseType(IntEnum):
    """Response types from embedded device to host.

    Determines how ResponsePacket.parse_payload() interprets the payload:
    METADATA -> Metadata, STATUS -> StatusInfo, PROFILE_DATA ->
    List[ProfileRecord], ACK -> True, NACK -> False.
    """
    ACK = 0x01
    NACK = 0x02
    METADATA = 0x03
    STATUS = 0x04
    PROFILE_DATA = 0x05
|
||||
|
||||
|
||||
# Protocol constants
COMMAND_HEADER = 0x55      # First byte of every host->device command packet
RESPONSE_HEADER = 0xAA55   # 16-bit start marker of device->host responses (packed little-endian)
RESPONSE_END = 0x0A        # Trailing end-of-packet marker byte on responses
PROTOCOL_VERSION = 0x01    # Version byte expected at the start of PROFILE_DATA payloads
|
||||
|
||||
|
||||
@dataclass
class ProfileRecord:
    """One captured function execution reported by the target.

    Attributes:
        func_addr: Function address (from instrumentation)
        entry_time: Entry timestamp in microseconds
        duration_us: Function duration in microseconds
        depth: Call stack depth
    """
    func_addr: int
    entry_time: int
    duration_us: int
    depth: int

    @classmethod
    def from_bytes(cls, data: bytes) -> 'ProfileRecord':
        """Deserialize a record from its 14-byte wire form.

        Layout (little-endian): uint32 func_addr, uint32 entry_time,
        uint32 duration_us, uint16 depth.

        Raises:
            ValueError: If fewer than 14 bytes are supplied.
        """
        if len(data) < 14:
            raise ValueError(f"Invalid ProfileRecord size: {len(data)} bytes")

        unpacked = struct.unpack('<IIIH', data[:14])
        return cls(*unpacked)

    def to_bytes(self) -> bytes:
        """Serialize this record into its 14-byte wire form."""
        values = (self.func_addr, self.entry_time, self.duration_us, self.depth)
        return struct.pack('<IIIH', *values)
|
||||
|
||||
|
||||
@dataclass
class Metadata:
    """Metadata packet sent by embedded device at startup.

    Attributes:
        mcu_clock_hz: MCU clock frequency in Hz
        timer_freq: Timer frequency in Hz
        elf_build_id: CRC32 of .text section for version matching
        fw_version: Firmware version string
    """
    mcu_clock_hz: int
    timer_freq: int
    elf_build_id: int
    fw_version: str

    @classmethod
    def from_bytes(cls, data: bytes) -> 'Metadata':
        """Deserialize metadata from its 28-byte wire form.

        Layout (little-endian): uint32 mcu_clock, uint32 timer_freq,
        uint32 build_id, 16-byte NUL-padded fw_version string.

        Raises:
            ValueError: If fewer than 28 bytes are supplied.
        """
        if len(data) < 28:
            raise ValueError(f"Invalid Metadata size: {len(data)} bytes")

        clock, freq, build_id, raw_version = struct.unpack('<III16s', data[:28])
        # Strip the NUL padding added by to_bytes().
        version = raw_version.decode('utf-8').rstrip('\x00')
        return cls(clock, freq, build_id, version)

    def to_bytes(self) -> bytes:
        """Serialize metadata into its 28-byte wire form.

        The version string is UTF-8 encoded, truncated to 16 bytes, and
        NUL-padded on the right.
        """
        raw_version = self.fw_version.encode('utf-8')[:16].ljust(16, b'\x00')
        return struct.pack('<III16s', self.mcu_clock_hz, self.timer_freq,
                           self.elf_build_id, raw_version)
|
||||
|
||||
|
||||
@dataclass
class StatusInfo:
    """Status information from embedded device.

    Attributes:
        is_profiling: Whether profiling is currently active
        buffer_overflows: Number of buffer overflow events
        records_captured: Total number of records captured
        buffer_usage_percent: Current buffer usage percentage
    """
    is_profiling: bool
    buffer_overflows: int
    records_captured: int
    buffer_usage_percent: int

    @classmethod
    def from_bytes(cls, data: bytes) -> 'StatusInfo':
        """Deserialize status from its 10-byte wire form.

        Layout (little-endian): uint8 is_profiling flag, uint32 overflow
        count, uint32 record count, uint8 buffer usage percent.

        Raises:
            ValueError: If fewer than 10 bytes are supplied.
        """
        if len(data) < 10:
            raise ValueError(f"Invalid StatusInfo size: {len(data)} bytes")

        raw_flag, overflows, captured, usage = struct.unpack('<BIIB', data[:10])
        # Normalize the wire flag byte to a real bool.
        return cls(bool(raw_flag), overflows, captured, usage)
|
||||
|
||||
|
||||
class CommandPacket:
    """Command packet sent from host to embedded device."""

    def __init__(self, cmd: Command, payload: bytes = b''):
        """Create a command packet.

        Args:
            cmd: Command type
            payload: Optional payload (max 8 bytes)

        Raises:
            ValueError: If the payload exceeds 8 bytes.
        """
        if len(payload) > 8:
            raise ValueError("Command payload cannot exceed 8 bytes")
        self.cmd = cmd
        # Always keep a fixed 8-byte, NUL-padded payload internally.
        self.payload = payload.ljust(8, b'\x00')

    def to_bytes(self) -> bytes:
        """Serialize command packet to binary format.

        Layout: uint8 header, uint8 command, uint8 payload length,
        8-byte payload, uint8 additive checksum. Total: 12 bytes.

        NOTE(review): the length field is derived by stripping trailing
        NULs, so a payload whose meaningful bytes end in 0x00 is
        under-reported -- confirm against firmware expectations.
        """
        effective_len = len(self.payload.rstrip(b'\x00'))
        body = struct.pack('<BBB8s', COMMAND_HEADER, self.cmd,
                           effective_len, self.payload)
        # Simple 8-bit additive checksum over everything before it.
        checksum = sum(body) & 0xFF
        return body + struct.pack('<B', checksum)
|
||||
|
||||
|
||||
class ResponsePacket:
    """Response packet from embedded device to host.

    Wire format (little-endian):
        uint16 RESPONSE_HEADER | uint8 type | uint16 payload_len |
        payload | uint16 CRC16-CCITT over header+payload | uint8 RESPONSE_END
    """

    def __init__(self, response_type: ResponseType, payload: bytes):
        """Create a response packet.

        Args:
            response_type: Type of response
            payload: Response payload
        """
        self.response_type = response_type
        self.payload = payload

    @staticmethod
    def calculate_crc16(data: bytes) -> int:
        """Calculate CRC16-CCITT for data validation."""
        calculator = crc.Calculator(crc.Crc16.CCITT)
        return calculator.checksum(data)

    def to_bytes(self) -> bytes:
        """Serialize response packet to binary format.

        Format: <H (header) <B (type) <H (payload_len) payload <H (crc16) <B (end)
        """
        payload_len = len(self.payload)
        header_data = struct.pack('<HBH', RESPONSE_HEADER, self.response_type,
                                  payload_len)
        data = header_data + self.payload
        # CRC covers the 5-byte header plus the payload, matching from_bytes().
        crc16 = self.calculate_crc16(data)
        return data + struct.pack('<HB', crc16, RESPONSE_END)

    @classmethod
    def from_bytes(cls, data: bytes) -> Optional['ResponsePacket']:
        """Parse response packet from binary data.

        NOTE(review): '<H' is little-endian, so the 0xAA55 marker must
        appear in *data* as bytes 0x55, 0xAA -- any upstream byte-scanner
        that frames packets must present the header in that order.

        Returns:
            ResponsePacket if valid, None if the data is too short

        Raises:
            ValueError: On bad header, CRC mismatch, or bad end marker.
        """
        if len(data) < 8:  # Minimum packet size: 5-byte header + CRC + end
            return None

        # Parse header
        header, response_type, payload_len = struct.unpack('<HBH', data[:5])

        if header != RESPONSE_HEADER:
            raise ValueError(f"Invalid response header: 0x{header:04X}")

        # Check if we have enough data
        total_len = 5 + payload_len + 3  # header + payload + crc + end
        if len(data) < total_len:
            return None

        # Extract payload
        payload = data[5:5+payload_len]

        # Verify CRC (computed over header + payload, same as to_bytes)
        crc16_expected = struct.unpack('<H', data[5+payload_len:5+payload_len+2])[0]
        crc16_actual = cls.calculate_crc16(data[:5+payload_len])

        if crc16_expected != crc16_actual:
            raise ValueError(f"CRC mismatch: expected 0x{crc16_expected:04X}, "
                             f"got 0x{crc16_actual:04X}")

        # Verify end marker
        end_marker = data[5+payload_len+2]
        if end_marker != RESPONSE_END:
            raise ValueError(f"Invalid end marker: 0x{end_marker:02X}")

        return cls(ResponseType(response_type), payload)

    def parse_payload(self) -> object:
        """Parse the payload based on response type.

        Returns:
            Parsed payload object (Metadata, StatusInfo, List[ProfileRecord],
            bool for ACK/NACK), or the raw bytes for unknown types.

        Raises:
            ValueError: If a PROFILE_DATA payload carries an unsupported
                protocol version.
        """
        if self.response_type == ResponseType.METADATA:
            return Metadata.from_bytes(self.payload)

        elif self.response_type == ResponseType.STATUS:
            return StatusInfo.from_bytes(self.payload)

        elif self.response_type == ResponseType.PROFILE_DATA:
            # Payload layout: uint8 version, uint16 record count, then
            # consecutive 14-byte ProfileRecord entries.
            if len(self.payload) < 3:
                return []

            version = self.payload[0]
            if version != PROTOCOL_VERSION:
                raise ValueError(f"Unsupported protocol version: {version}")

            record_count = struct.unpack('<H', self.payload[1:3])[0]
            records = []

            offset = 3
            for _ in range(record_count):
                # Stop early (rather than raise) on a truncated payload.
                if offset + 14 > len(self.payload):
                    break
                record = ProfileRecord.from_bytes(self.payload[offset:offset+14])
                records.append(record)
                offset += 14

            return records

        elif self.response_type == ResponseType.ACK:
            return True

        elif self.response_type == ResponseType.NACK:
            return False

        # Unknown response types: hand the raw bytes back to the caller.
        return self.payload
|
||||
267
host/miniprofiler/serial_reader.py
Normal file
267
host/miniprofiler/serial_reader.py
Normal file
@@ -0,0 +1,267 @@
|
||||
"""Serial communication module for MiniProfiler.
|
||||
|
||||
Handles UART communication with the embedded device, including
|
||||
sending commands and receiving profiling data.
|
||||
"""
|
||||
|
||||
import serial
|
||||
import threading
|
||||
import time
|
||||
import logging
|
||||
from typing import Callable, Optional, List
|
||||
from queue import Queue
|
||||
|
||||
from .protocol import (
|
||||
Command, CommandPacket, ResponsePacket, ResponseType,
|
||||
ProfileRecord, Metadata, StatusInfo
|
||||
)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SerialReader:
    """Manages serial communication with the embedded profiling device.

    Commands are written synchronously via send_command(); responses are
    consumed by a background thread (start_reading()) that frames packets
    out of the byte stream and dispatches parsed payloads to the optional
    ``on_*`` callbacks.
    """

    def __init__(self, port: str, baudrate: int = 115200, timeout: float = 1.0):
        """Initialize serial reader.

        Args:
            port: Serial port name (e.g., '/dev/ttyUSB0', 'COM3')
            baudrate: Baud rate (default: 115200)
            timeout: Read timeout in seconds
        """
        self.port = port
        self.baudrate = baudrate
        self.timeout = timeout
        self.serial: Optional[serial.Serial] = None
        self.running = False
        self.read_thread: Optional[threading.Thread] = None
        # NOTE(review): never read or written in this class -- presumably
        # reserved for queued command dispatch; confirm before removing.
        self.command_queue = Queue()

        # Callbacks (all optional; set by the client before start_reading)
        self.on_profile_data: Optional[Callable[[List[ProfileRecord]], None]] = None
        self.on_metadata: Optional[Callable[[Metadata], None]] = None
        self.on_status: Optional[Callable[[StatusInfo], None]] = None
        self.on_error: Optional[Callable[[Exception], None]] = None

    def connect(self) -> bool:
        """Open the serial port connection.

        Returns:
            True if connection successful, False otherwise
        """
        try:
            self.serial = serial.Serial(
                port=self.port,
                baudrate=self.baudrate,
                timeout=self.timeout,
                bytesize=serial.EIGHTBITS,
                parity=serial.PARITY_NONE,
                stopbits=serial.STOPBITS_ONE
            )
            logger.info(f"Connected to {self.port} at {self.baudrate} baud")
            return True
        except serial.SerialException as e:
            logger.error(f"Failed to connect to {self.port}: {e}")
            if self.on_error:
                self.on_error(e)
            return False

    def disconnect(self):
        """Close the serial port connection (no-op if not open)."""
        if self.serial and self.serial.is_open:
            self.serial.close()
            logger.info(f"Disconnected from {self.port}")

    def send_command(self, cmd: Command, payload: bytes = b'') -> bool:
        """Send a command to the embedded device.

        Args:
            cmd: Command to send
            payload: Optional command payload

        Returns:
            True if command sent successfully, False otherwise
        """
        if not self.serial or not self.serial.is_open:
            logger.error("Serial port not open")
            return False

        try:
            packet = CommandPacket(cmd, payload)
            data = packet.to_bytes()
            self.serial.write(data)
            logger.debug(f"Sent command: {cmd.name}")
            return True
        except Exception as e:
            logger.error(f"Failed to send command {cmd.name}: {e}")
            if self.on_error:
                self.on_error(e)
            return False

    def start_profiling(self) -> bool:
        """Send START_PROFILING command."""
        return self.send_command(Command.START_PROFILING)

    def stop_profiling(self) -> bool:
        """Send STOP_PROFILING command."""
        return self.send_command(Command.STOP_PROFILING)

    def get_metadata(self) -> bool:
        """Request metadata from the device."""
        return self.send_command(Command.GET_METADATA)

    def get_status(self) -> bool:
        """Request status from the device."""
        return self.send_command(Command.GET_STATUS)

    def reset_buffers(self) -> bool:
        """Send RESET_BUFFERS command."""
        return self.send_command(Command.RESET_BUFFERS)

    def _read_packet(self) -> Optional[ResponsePacket]:
        """Read a response packet from the serial port.

        Scans the byte stream for the start marker, then reads the
        type/length header, payload, CRC, and end marker, and hands the
        assembled bytes to ResponsePacket.from_bytes() for validation.

        Returns:
            ResponsePacket if valid packet received, None otherwise
        """
        if not self.serial or not self.serial.is_open:
            return None

        buffer = bytearray()

        try:
            # Search for the start marker. RESPONSE_HEADER (0xAA55) is
            # serialized little-endian by ResponsePacket.to_bytes(), so it
            # appears on the wire as 0x55 followed by 0xAA.
            # BUGFIX: the previous scanner matched the bytes in big-endian
            # order (0xAA then 0x55) and rebuilt the buffer as [0xAA, 0x55],
            # which ResponsePacket.from_bytes ('<H') always rejected.
            while self.running:
                byte = self.serial.read(1)
                if not byte:
                    continue

                buffer.append(byte[0])

                # Check for header (wire order: 0x55, 0xAA)
                if len(buffer) >= 2 and buffer[-2] == 0x55 and buffer[-1] == 0xAA:
                    # Found header; restart the buffer with just the marker
                    # so the assembled packet parses with from_bytes().
                    buffer = bytearray([0x55, 0xAA])
                    break
                # Keep only last byte for next iteration
                if len(buffer) > 1:
                    buffer = bytearray([buffer[-1]])

            if not self.running:
                return None

            # Read type (1 byte) and payload length (2 bytes)
            header_rest = self.serial.read(3)
            if len(header_rest) < 3:
                return None
            buffer.extend(header_rest)

            # Extract payload length (little-endian uint16)
            payload_len = (header_rest[2] << 8) | header_rest[1]

            # Read payload + CRC (2 bytes) + end marker (1 byte)
            remaining = payload_len + 3
            remaining_data = self.serial.read(remaining)
            if len(remaining_data) < remaining:
                logger.warning(f"Incomplete packet: expected {remaining}, got {len(remaining_data)}")
                return None

            buffer.extend(remaining_data)

            # Parse packet (validates header, CRC, and end marker)
            packet = ResponsePacket.from_bytes(bytes(buffer))
            return packet

        except Exception as e:
            logger.error(f"Error reading packet: {e}")
            if self.on_error:
                self.on_error(e)
            return None

    def _reader_thread(self):
        """Background thread for reading serial data.

        Loops until stop_reading() clears self.running, dispatching each
        parsed packet to the matching callback.
        """
        logger.info("Serial reader thread started")

        while self.running:
            packet = self._read_packet()
            if not packet:
                continue

            try:
                # Parse and dispatch based on response type
                if packet.response_type == ResponseType.PROFILE_DATA:
                    records = packet.parse_payload()
                    if self.on_profile_data and isinstance(records, list):
                        self.on_profile_data(records)

                elif packet.response_type == ResponseType.METADATA:
                    metadata = packet.parse_payload()
                    if self.on_metadata and isinstance(metadata, Metadata):
                        self.on_metadata(metadata)

                elif packet.response_type == ResponseType.STATUS:
                    status = packet.parse_payload()
                    if self.on_status and isinstance(status, StatusInfo):
                        self.on_status(status)

                elif packet.response_type == ResponseType.ACK:
                    logger.debug("Received ACK")

                elif packet.response_type == ResponseType.NACK:
                    logger.warning("Received NACK")

            except Exception as e:
                # Callbacks and payload parsing must not kill the thread.
                logger.error(f"Error processing packet: {e}")
                if self.on_error:
                    self.on_error(e)

        logger.info("Serial reader thread stopped")

    def start_reading(self) -> bool:
        """Start background thread to read serial data.

        Returns:
            True if thread started successfully, False otherwise
        """
        if self.running:
            logger.warning("Reader thread already running")
            return False

        if not self.serial or not self.serial.is_open:
            logger.error("Serial port not open")
            return False

        self.running = True
        # Daemon thread so a forgotten stop_reading() cannot hang exit.
        self.read_thread = threading.Thread(target=self._reader_thread, daemon=True)
        self.read_thread.start()
        logger.info("Started serial reading thread")
        return True

    def stop_reading(self):
        """Stop the background reading thread (waits up to 2 seconds)."""
        if not self.running:
            return

        logger.info("Stopping serial reader thread...")
        self.running = False

        if self.read_thread:
            self.read_thread.join(timeout=2.0)
            if self.read_thread.is_alive():
                logger.warning("Reader thread did not stop cleanly")
            else:
                logger.info("Reader thread stopped")

    def __enter__(self):
        """Context manager entry: open the port."""
        self.connect()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit: stop the reader and close the port."""
        self.stop_reading()
        self.disconnect()
||||
205
host/miniprofiler/symbolizer.py
Normal file
205
host/miniprofiler/symbolizer.py
Normal file
@@ -0,0 +1,205 @@
|
||||
"""Symbol resolution using ELF/DWARF debug information.
|
||||
|
||||
This module resolves function addresses to human-readable names,
|
||||
file locations, and line numbers using the ELF symbol table and
|
||||
DWARF debugging information.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, Optional, Tuple
|
||||
from pathlib import Path
|
||||
from elftools.elf.elffile import ELFFile
|
||||
from elftools.dwarf.descriptions import describe_form_class
|
||||
from elftools.dwarf.die import DIE
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SymbolInfo:
    """Information about a symbol."""

    def __init__(self, name: str, file: str = "", line: int = 0, size: int = 0):
        """Initialize symbol information.

        Args:
            name: Function or symbol name
            file: Source file path
            line: Line number in source file
            size: Symbol size in bytes
        """
        self.name = name
        self.file = file
        self.line = line
        self.size = size

    def __repr__(self) -> str:
        # Only show the location suffix when both file and a non-zero
        # line number are known.
        has_location = bool(self.file) and bool(self.line)
        return f"{self.name} ({self.file}:{self.line})" if has_location else self.name
|
||||
|
||||
|
||||
class Symbolizer:
    """Resolves addresses to symbol names using ELF/DWARF information.

    Symbols are read once: function names and sizes come from the ELF
    symbol table, file/line mappings from the DWARF line program when
    present.  Lookups are served from an in-memory dict plus a sorted
    start-address index, so range queries are O(log n) instead of a
    linear scan over every symbol.
    """

    def __init__(self, elf_path: str):
        """Initialize symbolizer with an ELF file.

        Args:
            elf_path: Path to the ELF file with debug symbols
        """
        self.elf_path = Path(elf_path)
        self.symbols: Dict[int, SymbolInfo] = {}
        self.loaded = False
        # Sorted list of symbol start addresses; rebuilt after loading so
        # resolve() can binary-search for a containing function.
        self._addr_index = []

        if self.elf_path.exists():
            self._load_symbols()
        else:
            logger.warning(f"ELF file not found: {elf_path}")

    def _load_symbols(self):
        """Load symbols from the ELF file, then (re)build the address index."""
        try:
            with open(self.elf_path, 'rb') as f:
                elffile = ELFFile(f)

                # Function names and sizes from the symbol table.
                self._load_symbol_table(elffile)

                # File/line mappings from DWARF, if present.
                if elffile.has_dwarf_info():
                    self._load_dwarf_info(elffile)

                self.loaded = True
                logger.info(f"Loaded {len(self.symbols)} symbols from {self.elf_path}")

        except Exception as e:
            logger.error(f"Failed to load symbols from {self.elf_path}: {e}")
            self.loaded = False
        finally:
            # Rebuild even on partial failure so whatever was loaded
            # before the error is still resolvable.
            self._addr_index = sorted(self.symbols)

    def _load_symbol_table(self, elffile: ELFFile):
        """Load function symbols from the symbol table.

        Args:
            elffile: Parsed ELF file object
        """
        symtab = elffile.get_section_by_name('.symtab')
        if not symtab:
            logger.warning("No symbol table found in ELF file")
            return

        for symbol in symtab.iter_symbols():
            # Only function symbols are relevant for profiling.
            if symbol['st_info']['type'] != 'STT_FUNC':
                continue

            addr = symbol['st_value']
            name = symbol.name
            size = symbol['st_size']

            # NOTE(review): on ARM Thumb targets (e.g. STM32 Cortex-M),
            # st_value of a Thumb function has bit 0 set, while sampled
            # PC/LR-derived addresses do not.  Clearing it keeps both
            # exact-match and range lookups working — confirm against
            # the target toolchain's symbol output.
            addr &= ~1

            if addr and name:
                self.symbols[addr] = SymbolInfo(name, size=size)

        logger.debug(f"Loaded {len(self.symbols)} function symbols from symbol table")

    def _load_dwarf_info(self, elffile: ELFFile):
        """Load DWARF debug information for file/line mappings.

        Args:
            elffile: Parsed ELF file object
        """
        dwarfinfo = elffile.get_dwarf_info()

        # Process line number information, one compilation unit at a time.
        for CU in dwarfinfo.iter_CUs():
            lineprog = dwarfinfo.line_program_for_CU(CU)
            if not lineprog:
                continue

            # File table for this CU's line program.
            file_entries = lineprog.header['file_entry']

            # Walk the line program: when the address advances, the
            # *previous* state described the source line for the
            # previous address.
            prevstate = None
            for entry in lineprog.get_entries():
                if entry.state is None:
                    continue

                state = entry.state
                if prevstate and state.address != prevstate.address:
                    addr = prevstate.address
                    # NOTE(review): DWARF <= v4 file indices are 1-based,
                    # DWARF v5 is 0-based.  This assumes v4-style output;
                    # confirm against the toolchain's -gdwarf level.
                    file_index = prevstate.file - 1

                    if 0 <= file_index < len(file_entries):
                        file_entry = file_entries[file_index]
                        raw_name = file_entry.name
                        filename = raw_name.decode('utf-8') if isinstance(
                            raw_name, bytes) else raw_name

                        # Enrich an existing symbol, or synthesize a
                        # placeholder for addresses with no symtab entry.
                        if addr in self.symbols:
                            self.symbols[addr].file = filename
                            self.symbols[addr].line = prevstate.line
                        else:
                            self.symbols[addr] = SymbolInfo(
                                f"func_0x{addr:08x}",
                                file=filename,
                                line=prevstate.line
                            )

                prevstate = state

        logger.debug("Loaded DWARF debug information")

    def resolve(self, addr: int) -> SymbolInfo:
        """Resolve an address to symbol information.

        Args:
            addr: Function address to resolve

        Returns:
            SymbolInfo object (may contain placeholder name if not found)
        """
        # Exact match first — the common case for call-site addresses.
        info = self.symbols.get(addr)
        if info is not None:
            return info

        # Containing function: the only candidate is the closest symbol
        # starting at or below addr; binary-search the sorted index
        # instead of scanning every symbol (functions don't overlap).
        i = bisect.bisect_right(self._addr_index, addr) - 1
        if i >= 0:
            sym_addr = self._addr_index[i]
            sym_info = self.symbols[sym_addr]
            if addr < sym_addr + sym_info.size:
                return sym_info

        # Not found — return a placeholder so callers never get None.
        return SymbolInfo(f"unknown_0x{addr:08x}")

    def resolve_name(self, addr: int) -> str:
        """Resolve an address to a function name.

        Args:
            addr: Function address

        Returns:
            Function name string
        """
        return self.resolve(addr).name

    def resolve_location(self, addr: int) -> str:
        """Resolve an address to a file:line location string.

        Args:
            addr: Function address

        Returns:
            Location string in format "file:line" or empty string
        """
        info = self.resolve(addr)
        if info.file and info.line:
            return f"{info.file}:{info.line}"
        return ""

    def get_all_symbols(self) -> Dict[int, SymbolInfo]:
        """Get all loaded symbols.

        Returns:
            Dictionary mapping addresses to SymbolInfo objects (a copy,
            so callers cannot mutate the internal table)
        """
        return self.symbols.copy()
|
||||
315
host/miniprofiler/web_server.py
Normal file
315
host/miniprofiler/web_server.py
Normal file
@@ -0,0 +1,315 @@
|
||||
"""Flask web server for MiniProfiler visualization.
|
||||
|
||||
Provides a web interface for real-time profiling visualization including
|
||||
flame graphs, timelines, and statistics tables.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from flask import Flask, render_template, jsonify, request
|
||||
from flask_socketio import SocketIO, emit
|
||||
from typing import Optional
|
||||
import threading
|
||||
|
||||
from .serial_reader import SerialReader
|
||||
from .analyzer import ProfileAnalyzer
|
||||
from .symbolizer import Symbolizer
|
||||
from .protocol import ProfileRecord, Metadata, StatusInfo
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProfilerWebServer:
    """Web server for profiler visualization and control.

    Wraps a Flask app plus a Flask-SocketIO layer: plain HTTP routes
    serve snapshot JSON (summary, flame graph, timeline, statistics),
    while SocketIO events drive the serial connection, start/stop of
    profiling on the device, and push live data updates to clients.
    """

    def __init__(self, host: str = '0.0.0.0', port: int = 5000):
        """Initialize the web server.

        Args:
            host: Host address to bind to
            port: Port number to listen on
        """
        self.host = host
        self.port = port

        # Initialize Flask app.
        # NOTE(review): template/static folders are relative to this
        # module's location — breaks if the package is installed or run
        # from elsewhere; consider package-relative resolution.
        self.app = Flask(__name__,
                         template_folder='../web/templates',
                         static_folder='../web/static')
        # NOTE(review): hardcoded secret key — acceptable for a local
        # dev tool, but should come from config/env if ever exposed
        # beyond localhost.
        self.app.config['SECRET_KEY'] = 'miniprofiler-secret-key'

        # Initialize SocketIO.
        # NOTE(review): cors_allowed_origins="*" accepts any origin;
        # fine locally, revisit before exposing on a network.
        self.socketio = SocketIO(self.app, cors_allowed_origins="*")

        # Profiler components (serial reader and symbolizer are created
        # lazily, on the 'connect_serial' SocketIO event).
        self.serial_reader: Optional[SerialReader] = None
        self.analyzer = ProfileAnalyzer()
        self.symbolizer: Optional[Symbolizer] = None
        self.metadata: Optional[Metadata] = None

        # Connection / profiling state mirrored out to clients.
        self.is_connected = False
        self.is_profiling = False

        # Register HTTP routes and SocketIO handlers on construction.
        self._setup_routes()
        self._setup_socketio_handlers()

    def _setup_routes(self):
        """Setup Flask HTTP routes (read-only JSON snapshots of analyzer state)."""

        @self.app.route('/')
        def index():
            """Main page."""
            return render_template('index.html')

        @self.app.route('/api/status')
        def status():
            """Get server status: connection, profiling flag, record counts."""
            return jsonify({
                'connected': self.is_connected,
                'profiling': self.is_profiling,
                'has_data': len(self.analyzer.records) > 0,
                'record_count': len(self.analyzer.records)
            })

        @self.app.route('/api/summary')
        def summary():
            """Get profiling summary statistics."""
            return jsonify(self.analyzer.get_summary())

        @self.app.route('/api/flamegraph')
        def flamegraph():
            """Get flame graph data."""
            return jsonify(self.analyzer.to_flamegraph_json())

        @self.app.route('/api/timeline')
        def timeline():
            """Get timeline data."""
            return jsonify(self.analyzer.to_timeline_json())

        @self.app.route('/api/statistics')
        def statistics():
            """Get statistics table data."""
            return jsonify(self.analyzer.to_statistics_json())

    def _setup_socketio_handlers(self):
        """Setup SocketIO event handlers (device control and live updates)."""

        @self.socketio.on('connect')
        def handle_connect():
            """Handle client connection: push current state to the new client."""
            logger.info("Client connected")
            emit('status', {
                'connected': self.is_connected,
                'profiling': self.is_profiling
            })

        @self.socketio.on('disconnect')
        def handle_disconnect():
            """Handle client disconnection."""
            logger.info("Client disconnected")

        @self.socketio.on('connect_serial')
        def handle_connect_serial(data):
            """Connect to serial port.

            Creates the SerialReader, wires its callbacks to this server,
            starts the background read loop, and requests device metadata.

            Args:
                data: Dict with 'port' and optional 'baudrate'
            """
            port = data.get('port')
            baudrate = data.get('baudrate', 115200)
            elf_path = data.get('elf_path', None)

            if not port:
                emit('error', {'message': 'No port specified'})
                return

            try:
                # Load symbolizer if ELF path provided; the analyzer uses
                # it to turn raw addresses into function names.
                if elf_path:
                    self.symbolizer = Symbolizer(elf_path)
                    self.analyzer.symbolizer = self.symbolizer

                # Create serial reader
                self.serial_reader = SerialReader(port, baudrate)

                # Set up callbacks before connecting so no early frames
                # are dropped.
                self.serial_reader.on_profile_data = self._on_profile_data
                self.serial_reader.on_metadata = self._on_metadata
                self.serial_reader.on_status = self._on_status
                self.serial_reader.on_error = self._on_error

                # Connect
                if self.serial_reader.connect():
                    self.serial_reader.start_reading()
                    self.is_connected = True

                    # Request metadata (answered asynchronously via
                    # _on_metadata).
                    self.serial_reader.get_metadata()

                    emit('connected', {'port': port, 'baudrate': baudrate})
                    logger.info(f"Connected to {port} at {baudrate} baud")
                else:
                    emit('error', {'message': f'Failed to connect to {port}'})

            except Exception as e:
                logger.error(f"Error connecting to serial: {e}")
                emit('error', {'message': str(e)})

        @self.socketio.on('disconnect_serial')
        def handle_disconnect_serial():
            """Disconnect from serial port and reset connection state."""
            if self.serial_reader:
                self.serial_reader.stop_reading()
                self.serial_reader.disconnect()
                self.serial_reader = None
            self.is_connected = False
            self.is_profiling = False
            emit('disconnected', {})
            logger.info("Disconnected from serial port")

        @self.socketio.on('start_profiling')
        def handle_start_profiling():
            """Start profiling on the device."""
            if not self.serial_reader or not self.is_connected:
                emit('error', {'message': 'Not connected to device'})
                return

            if self.serial_reader.start_profiling():
                self.is_profiling = True
                emit('profiling_started', {})
                logger.info("Started profiling")
            else:
                emit('error', {'message': 'Failed to start profiling'})

        @self.socketio.on('stop_profiling')
        def handle_stop_profiling():
            """Stop profiling on the device."""
            if not self.serial_reader or not self.is_connected:
                emit('error', {'message': 'Not connected to device'})
                return

            if self.serial_reader.stop_profiling():
                self.is_profiling = False
                emit('profiling_stopped', {})
                logger.info("Stopped profiling")
            else:
                emit('error', {'message': 'Failed to stop profiling'})

        @self.socketio.on('clear_data')
        def handle_clear_data():
            """Clear all host-side profiling data and refresh clients."""
            self.analyzer.clear()
            emit('data_cleared', {})
            self._emit_data_update()
            logger.info("Cleared profiling data")

        @self.socketio.on('reset_buffers')
        def handle_reset_buffers():
            """Reset device-side capture buffers."""
            if not self.serial_reader or not self.is_connected:
                emit('error', {'message': 'Not connected to device'})
                return

            if self.serial_reader.reset_buffers():
                emit('buffers_reset', {})
                logger.info("Reset device buffers")
            else:
                emit('error', {'message': 'Failed to reset buffers'})

    def _on_profile_data(self, records):
        """Callback for receiving profile data from the serial reader.

        Runs on the reader's background thread; feeds the analyzer and
        pushes fresh data to all clients.

        Args:
            records: List of ProfileRecord objects
        """
        logger.debug(f"Received {len(records)} profile records")
        self.analyzer.add_records(records)
        self._emit_data_update()

    def _on_metadata(self, metadata: Metadata):
        """Callback for receiving device metadata.

        Args:
            metadata: Metadata object
        """
        logger.info(f"Received metadata: {metadata.fw_version}, "
                    f"MCU: {metadata.mcu_clock_hz / 1e6:.1f} MHz")
        self.metadata = metadata
        self.socketio.emit('metadata', {
            'fw_version': metadata.fw_version,
            'mcu_clock_hz': metadata.mcu_clock_hz,
            'timer_freq': metadata.timer_freq,
            'build_id': f"0x{metadata.elf_build_id:08X}"
        })

    def _on_status(self, status: StatusInfo):
        """Callback for receiving device status updates.

        Args:
            status: StatusInfo object
        """
        logger.debug(f"Device status: profiling={status.is_profiling}, "
                     f"records={status.records_captured}, "
                     f"overflows={status.buffer_overflows}")
        self.socketio.emit('device_status', {
            'is_profiling': status.is_profiling,
            'records_captured': status.records_captured,
            'buffer_overflows': status.buffer_overflows,
            'buffer_usage': status.buffer_usage_percent
        })

    def _on_error(self, error: Exception):
        """Callback for serial errors; forwards them to web clients.

        Args:
            error: Exception that occurred
        """
        logger.error(f"Serial error: {error}")
        self.socketio.emit('error', {'message': str(error)})

    def _emit_data_update(self):
        """Emit updated profiling data to all clients."""
        try:
            # Send summary
            summary = self.analyzer.get_summary()
            self.socketio.emit('summary_update', summary)

            # Send flamegraph data
            flamegraph_data = self.analyzer.to_flamegraph_json()
            self.socketio.emit('flamegraph_update', flamegraph_data)

            # Send statistics
            stats_data = self.analyzer.to_statistics_json()
            self.socketio.emit('statistics_update', stats_data)

            # Send timeline data (can be large, so only send periodically)
            # NOTE(review): records arrive in batches, so len() can jump
            # past multiples of 50 and starve timeline updates entirely —
            # consider tracking records-since-last-timeline-emit instead.
            if len(self.analyzer.records) % 50 == 0:  # Every 50 records
                timeline_data = self.analyzer.to_timeline_json()
                self.socketio.emit('timeline_update', timeline_data)

        except Exception as e:
            logger.error(f"Error emitting data update: {e}")

    def run(self, debug: bool = False):
        """Run the web server (blocking call).

        Args:
            debug: Enable debug mode
        """
        logger.info(f"Starting web server on {self.host}:{self.port}")
        self.socketio.run(self.app, host=self.host, port=self.port, debug=debug)
|
||||
|
||||
|
||||
def create_app(host: str = '0.0.0.0', port: int = 5000) -> ProfilerWebServer:
    """Factory for a configured profiler web server.

    Args:
        host: Host address
        port: Port number

    Returns:
        Configured ProfilerWebServer instance
    """
    server = ProfilerWebServer(host, port)
    return server
|
||||
Reference in New Issue
Block a user