
Commit e2b4c1c

Bootloader GUI (#28)
* add python scripts + gui
* update requirements.txt for bootloader gui
* fix linting issue for python scripts
1 parent 19e7c4b commit e2b4c1c

15 files changed: +980 -0 lines

projects/bootloader/config.json

Lines changed: 1 addition & 0 deletions
@@ -3,6 +3,7 @@
         "CMSIS",
         "HAL"
     ],
+    "no_lint": true,
     "can": false,
     "arm_only": true
 }
Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
SEQUENCING = 30
FLASH = 31
JUMP_APP = 32
ACK = 33
START = 34
JUMP_BOOTLOADER = 35

STANDARD_CRC32_POLY = 0x04C11DB7
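The client script below instantiates a helper as CRC32(STANDARD_CRC32_POLY) and calls its calculate() method; the crc32.py module itself is among the changed files but is not shown in this excerpt. As a rough illustration only, a bit-by-bit CRC-32 class compatible with those call sites could look like the sketch below; the actual module may differ in initial value, reflection, and final XOR.

# Illustrative sketch only -- the real crc32.py is not shown in this excerpt.
class CRC32:
    '''Bit-by-bit, MSB-first CRC-32 (assumed init 0xFFFFFFFF, no reflection, no final XOR).'''

    def __init__(self, poly):
        self._poly = poly

    def calculate(self, data):
        '''Compute the CRC over a bytes-like object.'''
        crc = 0xFFFFFFFF
        for byte in data:
            crc ^= byte << 24
            for _ in range(8):
                if crc & 0x80000000:
                    crc = ((crc << 1) ^ self._poly) & 0xFFFFFFFF
                else:
                    crc = (crc << 1) & 0xFFFFFFFF
        return crc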
Lines changed: 356 additions & 0 deletions
@@ -0,0 +1,356 @@
'''This client script handles datagram protocol communication between devices on the CAN'''

import can
import time
from crc32 import CRC32
from bootloader_id import *
from log_manager import update_log

DEFAULT_CHANNEL = 'can0'
CAN_BITRATE = 500000

DATA_SIZE_SIZE = 2
MIN_BYTEARRAY_SIZE = 4

NODE_IDS_OFFSET = 0
DATA_SIZE_OFFSET = 2

crc32 = CRC32(STANDARD_CRC32_POLY)

class DatagramTypeError(Exception):
    '''Raised when a bytearray does not unpack into a valid datagram'''

class Datagram:
    '''Custom CAN-datagram class'''

    def __init__(self, **kwargs):
        '''Initialize datagram class'''
        self._check_kwargs(**kwargs)
        self._datagram_type_id = kwargs["datagram_type_id"] & 0xff

        self._node_ids = []
        for val in kwargs["node_ids"]:
            self._node_ids.append(val & 0xff)

        self._data = kwargs["data"]

    @classmethod
    def _unpack(cls, datagram_bytearray, arbitration_id):
        '''This function returns an instance of the class by unpacking a bytearray'''
        assert isinstance(datagram_bytearray, bytearray)
        if len(datagram_bytearray) < MIN_BYTEARRAY_SIZE:
            raise DatagramTypeError(
                "Invalid Datagram format from bytearray: Does not meet minimum size requirement")

        if arbitration_id == START:
            node_ids_raw = datagram_bytearray[NODE_IDS_OFFSET:DATA_SIZE_OFFSET]
            node_ids = cls._unpack_nodeids(cls, node_ids_raw)
            return cls(datagram_type_id=arbitration_id, node_ids=node_ids, data=bytearray())
        elif arbitration_id == FLASH:
            return cls(datagram_type_id=arbitration_id, node_ids=[], data=datagram_bytearray)
        return None

    def pack(self):
        '''This function packs a new bytearray based on set data'''
        node_ids = self._pack_nodeids(self._node_ids)

        return bytearray([
            *node_ids
        ])

    @property
    def datagram_type_id(self):
        '''This function returns the datagram type id'''
        return self._datagram_type_id

    @property
    def node_ids(self):
        '''This function returns the datagram node ids'''
        return self._node_ids

    @property
    def data(self):
        '''This function returns the datagram data'''
        return self._data

    @datagram_type_id.setter
    def datagram_type_id(self, value):
        '''This function sets the datagram type'''
        assert value & 0xff == value
        self._datagram_type_id = value & 0xff

    @node_ids.setter
    def node_ids(self, nodes):
        '''This function sets the datagram node ids'''
        assert isinstance(nodes, list)
        assert all(0 <= node <= 0xff for node in nodes)
        self._node_ids = nodes

    @data.setter
    def data(self, value):
        '''This function sets the datagram data'''
        assert isinstance(value, bytearray)
        self._data = value

    @staticmethod
    def _check_kwargs(**kwargs):
        '''This function checks that all args passed in are correct'''

        args = [
            "datagram_type_id",
            "node_ids",
            "data"
        ]

        for arg in args:
            assert arg in kwargs

        assert not isinstance(kwargs["datagram_type_id"], list)
        assert isinstance(kwargs["node_ids"], list)
        assert isinstance(kwargs["data"], bytearray)

        assert kwargs["datagram_type_id"] & 0xff == kwargs["datagram_type_id"]

    @staticmethod
    def _convert_from_bytearray(in_bytearray, size):
        '''Helper function to get little endian value from byte array'''
        out_value = 0
        for i in range(size):
            out_value = out_value | ((in_bytearray[i] & 0xff) << (i * 8))
        return out_value

    @staticmethod
    def _convert_to_bytearray(in_value, size):
        '''Helper function to get a little endian byte array from a value'''
        out_array = bytearray()
        for i in range(size):
            out_array.append((in_value >> (8 * i)) & 0xff)
        return out_array

    def _unpack_nodeids(self, raw_nodes):
        '''This function takes in the raw bytearray of node ids and returns an ordered list'''
        node_vals = self._convert_from_bytearray(raw_nodes, 2)
        # Gets a list of node IDs from the message
        out_nodeids = []
        while node_vals:
            count = 0
            # Isolate the lowest set bit, then clear it from the mask
            byte = node_vals & (~node_vals + 1)
            node_vals ^= byte
            # The 1-indexed bit position is the node ID
            while byte:
                byte = byte >> 1
                count += 1
            out_nodeids.append(count)

        return out_nodeids

    def _pack_nodeids(self, raw_nodes):
        '''This function takes in an ordered list and outputs a bytearray of node ids'''
        out_value = 0
        for node in raw_nodes:
            out_value = (out_value) | (1 << (node - 1))
        out_value = self._convert_to_bytearray(out_value, DATA_SIZE_OFFSET - NODE_IDS_OFFSET)
        return out_value

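# Illustrative aside (not part of this commit): worked example of the node-ID
# bitmask encoding used by _pack_nodeids/_unpack_nodeids above. Each node ID n
# sets bit (n - 1) of a 16-bit mask, which is serialized little-endian:
#
#   node_ids = [1, 3, 12]
#   mask     = (1 << 0) | (1 << 2) | (1 << 11) = 0x0805
#   packed   = bytearray([0x05, 0x08])
#
# _unpack_nodeids reverses this by repeatedly isolating the lowest set bit
# (node_vals & (~node_vals + 1), i.e. x & -x) and counting its bit position,
# which recovers [1, 3, 12] in ascending order.
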
class DatagramSender:
    # pylint: disable=too-few-public-methods
    '''Class that acts as a distributor for the Datagram class on a CAN bus'''

    def __init__(self, bustype="socketcan", channel=DEFAULT_CHANNEL,
                 bitrate=CAN_BITRATE, receive_own_messages=False):
        # pylint: disable=abstract-class-instantiated
        self.bus = can.interface.Bus(
            bustype=bustype,
            channel=channel,
            bitrate=bitrate,
            receive_own_messages=receive_own_messages)

    def send(self, message, sender_id=0):
        '''Send a Datagram over CAN'''
        assert isinstance(message, Datagram)
        datagram = message.pack()
        datagram.extend(message.data)

        message_extended_arbitration = False
        can_message = can.Message(arbitration_id=message.datagram_type_id,
                                  data=datagram,
                                  is_extended_id=message_extended_arbitration)
        self.bus.send(can_message)
        update_log("Message was sent on {}".format(self.bus.channel_info))

        ack_received = False
        retry_count = 0
        max_retries = 3

        while not ack_received and retry_count < max_retries:
            try:
                ack_msg = self.bus.recv(timeout=5.0)

                if ack_msg and ack_msg.arbitration_id == ACK:
                    if ack_msg.data[0] == 0x01:
                        ack_received = True
                        update_log("Received ACK for start message")
                    elif ack_msg.data[0] == 0x00:
                        update_log("Received NACK for start message, aborting")
                        break
                    else:
                        update_log("Received unknown response for start message, retrying...")
                        retry_count += 1
                else:
                    update_log("No ACK/NACK received for start message, retrying...")
                    retry_count += 1

            except can.CanError:
                update_log("Error waiting for ACK/NACK for start message, retrying...", error=True)
                retry_count += 1

        if not ack_received:
            raise Exception(f"Failed to receive ACK for start message after {max_retries} attempts")

        update_log("Start message received successfully!")

    def send_data(self, message, sender_id=0):
        '''Send a Datagram's data over CAN in CRC-checked, acknowledged sequences'''
        assert isinstance(message, Datagram)
        start_time = time.time()
        message_extended_arbitration = False
        chunk_messages = list(self._chunkify(message.data, 8))
        sequence_number = 0

        while chunk_messages:
            seq_num_bytes = sequence_number.to_bytes(2, byteorder='little')

            # Prepare up to 1024 bytes (128 chunks of 8 bytes each)
            current_chunk = chunk_messages[:128]
            chunk_messages = chunk_messages[128:]

            crc_chunk = b''.join(current_chunk)
            crc32_value = crc32.calculate(crc_chunk)
            crc_data = crc32_value.to_bytes(4, byteorder='little')

            sequencing_data = seq_num_bytes + crc_data

            # Send sequence message
            sequence_msg = can.Message(arbitration_id=SEQUENCING,
                                       data=sequencing_data,
                                       is_extended_id=message_extended_arbitration)

            self.bus.send(sequence_msg)

            # Send data chunks (up to 1024 bytes)
            for chunk in current_chunk:
                data_msg = can.Message(arbitration_id=FLASH,
                                       data=chunk,
                                       is_extended_id=message_extended_arbitration)
                try:
                    self.bus.send(data_msg)
                except can.CanError:
                    # Back off briefly and retry once if the transmit buffer is full
                    time.sleep(0.01)
                    self.bus.send(data_msg)

            update_log(f"Sent {len(current_chunk) * 8} bytes for sequence {sequence_number}")

            if sequence_number > 0 or chunk_messages:
                ack_received = False
                retry_count = 0
                max_retries = 3

                while not ack_received and retry_count < max_retries:
                    try:
                        ack_msg = self.bus.recv(timeout=5.0)

                        if ack_msg and ack_msg.arbitration_id == ACK:
                            if ack_msg.data[0] == 0x01:
                                ack_received = True
                                update_log(f"Received ACK for sequence {sequence_number}\n")
                            elif ack_msg.data[0] == 0x00:
                                update_log(f"Received NACK for sequence {sequence_number}, aborting")
                                break
                            else:
                                update_log(
                                    f"Received unknown response for sequence {sequence_number}, retrying...")
                                retry_count += 1
                        else:
                            update_log(f"No ACK/NACK received for sequence {sequence_number}, retrying...")
                            retry_count += 1

                    except can.CanError:
                        update_log(
                            f"Error waiting for ACK/NACK for sequence {sequence_number}, retrying...",
                            error=True)
                        retry_count += 1

                if not ack_received:
                    raise Exception(
                        f"Failed to receive ACK for sequence {sequence_number} after {max_retries} attempts")

            sequence_number += 1

        end_time = time.time()

        update_log("--------------------------------- COMPLETED ---------------------------------")
        update_log(f"Time Elapsed: {end_time - start_time}")
        update_log(f"All data sent successfully. Total sequences: {sequence_number}\n")

    @staticmethod
    def _chunkify(data, size):
        '''This chunks up the datagram bytearray for easy iteration'''
        return (data[pos:pos + size] for pos in range(0, len(data), size))

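# Illustrative aside (not part of this commit): on-the-wire layout produced by
# DatagramSender.send_data() above. Each sequence carries at most 1024 bytes of
# payload, split into 8-byte FLASH frames and announced by a SEQUENCING frame:
#
#   SEQUENCING frame data (6 bytes):
#       [0:2]  sequence number, little-endian
#       [2:6]  CRC-32 of this sequence's payload, little-endian
#   FLASH frames:  up to 128 frames of 8 payload bytes each
#   ACK frame:     data[0] == 0x01 for ACK, 0x00 for NACK
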
class DatagramListener(can.BufferedReader):
    # pylint: disable=too-few-public-methods
    '''Class that acts as a listener for the Datagram class on a CAN bus'''

    def __init__(self, callback):
        '''Registers the callback'''
        assert callable(callback)
        self.callback = callback
        self.board_ids = []
        # Messages are stored in a dictionary where key = board ID, value = message
        self.datagram_messages = {}
        super().__init__()

    def on_message_received(self, msg: can.Message):
        '''Handles message sent from boards on the CAN'''
        super().on_message_received(msg)
        arbitration_id = (msg.arbitration_id & 0xff)

        if arbitration_id == START:
            self.board_ids = self.extract_board_id(msg.data)

        for board_id in self.board_ids:
            if arbitration_id == START:
                # Reset the datagram message when receiving a start message
                self.datagram_messages[board_id] = msg.data

            if arbitration_id != START:
                if board_id in self.datagram_messages:
                    self.datagram_messages[board_id] += msg.data

            try:
                # Unpacking validates the accumulated message; it raises
                # DatagramTypeError while the datagram is still incomplete
                Datagram._unpack(self.datagram_messages[board_id], arbitration_id)
                self.callback(msg, board_id)
            except DatagramTypeError:
                # Datagram is incomplete, continue until complete
                pass

    def extract_board_id(self, data):
        '''Extracts the board IDs from the bitmask in the message data'''
        out_value = 0
        for i in range(2):
            out_value = out_value | ((data[i] & 0xff) << (i * 8))

        out_nodeids = []
        while out_value:
            count = 0
            byte = out_value & (~out_value + 1)
            out_value ^= byte
            while byte:
                byte = byte >> 1
                count += 1
            out_nodeids.append(count)

        return out_nodeids
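
For context, the sketch below shows one way a caller (such as the GUI scripts added by this commit, which are not shown in this excerpt) might wire these classes together with python-can's Notifier. The module name can_datagram, the node IDs, and app.bin are hypothetical placeholders.

# Hypothetical usage sketch -- module name "can_datagram", node IDs, and
# "app.bin" are placeholders, not part of this commit.
# Assumes can0 is up, e.g.: sudo ip link set can0 up type can bitrate 500000
import can

from bootloader_id import START, FLASH
from can_datagram import Datagram, DatagramSender, DatagramListener  # hypothetical module name


def on_datagram(msg, board_id):
    '''Called whenever a board's accumulated message unpacks successfully.'''
    print(f"Board {board_id} responded with arbitration ID {msg.arbitration_id}")


sender = DatagramSender()                        # defaults: socketcan, can0, 500 kbit/s
listener = DatagramListener(on_datagram)
notifier = can.Notifier(sender.bus, [listener])  # dispatches received frames to the listener

with open("app.bin", "rb") as f:                 # placeholder firmware image
    firmware = bytearray(f.read())

# Announce the transfer to boards 1 and 3, then stream the payload in sequences
sender.send(Datagram(datagram_type_id=START, node_ids=[1, 3], data=bytearray()))
sender.send_data(Datagram(datagram_type_id=FLASH, node_ids=[], data=firmware))

notifier.stop()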
