mirror of https://github.com/bluekitchen/btstack.git synced 2025-03-25 16:43:28 +00:00

remove semicolons from python files

Random semicolons in Python are distracting when reading the code.
David Lechner 2025-01-20 14:19:08 -06:00 committed by Matthias Ringwald
parent 73677349c9
commit 0ca3fe1c31
33 changed files with 108 additions and 108 deletions
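
The change itself is purely cosmetic: a trailing semicolon is legal in Python but redundant, because the newline already ends the statement. As a quick illustration (not part of this commit; the directory handling and the simple comment-stripping heuristic below are assumptions for the example), stray trailing semicolons like the ones removed here can be located with a few lines of Python:

import pathlib
import sys

# Walk a source tree (default: current directory) and report Python lines
# whose code portion ends in a semicolon, ignoring '#' comments.
# Note: this naive check can misfire on string literals that contain ';'.
root = pathlib.Path(sys.argv[1]) if len(sys.argv) > 1 else pathlib.Path('.')
for path in root.rglob('*.py'):
    for lineno, line in enumerate(path.read_text(errors='replace').splitlines(), 1):
        code = line.split('#', 1)[0].rstrip()
        if code.endswith(';'):
            print('%s:%d: %s' % (path, lineno, line.strip()))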

@ -53,10 +53,10 @@ def convert_bin(basename):
print ('Size %u', size)
with open(basename + '.h', 'w') as fout:
fout.write(header.replace('BASENAME',basename));
fout.write(header.replace('BASENAME',basename))
with open(basename + '.c', 'w') as fout:
fout.write(code_start.replace('BASENAME',basename));
fout.write(code_start.replace('BASENAME',basename))
fout.write(' ')
for i in range(0,size):
if i % 10000 == 0:
@ -65,7 +65,7 @@ def convert_bin(basename):
fout.write("0x{0:02x}, ".format(byte))
if (i & 0x0f) == 0x0f:
fout.write('\n ')
fout.write(code_end);
fout.write(code_end)
print ('Done\n')
# check usage: 1 param

@ -53,48 +53,48 @@ def read_little_endian_16(f):
def append_power_vector_gfsk(additions, str_list, data_indent):
additions.append("- added HCI_VS_SET_POWER_VECTOR(GFSK) template")
str_list.append(data_indent)
str_list.append('// BTstack: added HCI_VS_SET_POWER_VECTOR(GFSK) 0xFD82 template\n');
str_list.append('// BTstack: added HCI_VS_SET_POWER_VECTOR(GFSK) 0xFD82 template\n')
str_list.append(data_indent)
str_list.append("0x01, 0x82, 0xfd, 0x14, 0x00, 0x9c, 0x18, 0xd2, 0xd2, 0xd2, 0xd2, 0xd2, 0xd2, 0xd2, 0xdc,\n");
str_list.append("0x01, 0x82, 0xfd, 0x14, 0x00, 0x9c, 0x18, 0xd2, 0xd2, 0xd2, 0xd2, 0xd2, 0xd2, 0xd2, 0xdc,\n")
str_list.append(data_indent)
str_list.append("0xe6, 0xf0, 0xfa, 0x04, 0x0e, 0x18, 0xff, 0x00, 0x00,\n\n");
str_list.append("0xe6, 0xf0, 0xfa, 0x04, 0x0e, 0x18, 0xff, 0x00, 0x00,\n\n")
return 24
def append_power_vector_edr2(additions, str_list, data_indent):
additions.append("- added HCI_VS_SET_POWER_VECTOR(EDR2) template")
str_list.append(data_indent)
str_list.append('// BTstack: added HCI_VS_SET_POWER_VECTOR(EDR2) 0xFD82 template\n');
str_list.append('// BTstack: added HCI_VS_SET_POWER_VECTOR(EDR2) 0xFD82 template\n')
str_list.append(data_indent)
str_list.append("0x01, 0x82, 0xfd, 0x14, 0x01, 0x9c, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xd8, \n");
str_list.append("0x01, 0x82, 0xfd, 0x14, 0x01, 0x9c, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xd8, \n")
str_list.append(data_indent)
str_list.append("0xe2, 0xec, 0xf6, 0x00, 0x0a, 0x14, 0xff, 0x00, 0x00,\n\n");
str_list.append("0xe2, 0xec, 0xf6, 0x00, 0x0a, 0x14, 0xff, 0x00, 0x00,\n\n")
return 24
def append_power_vector_edr3(additions, str_list, data_indent):
additions.append("- added HCI_VS_SET_POWER_VECTOR(EDR3) template")
str_list.append(data_indent)
str_list.append('// BTstack: added HCI_VS_SET_POWER_VECTOR(EDR3) 0xFD82 for EDR3 template\n');
str_list.append('// BTstack: added HCI_VS_SET_POWER_VECTOR(EDR3) 0xFD82 for EDR3 template\n')
str_list.append(data_indent)
str_list.append("0x01, 0x82, 0xfd, 0x14, 0x02, 0x9c, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xd8,\n");
str_list.append("0x01, 0x82, 0xfd, 0x14, 0x02, 0x9c, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xce, 0xd8,\n")
str_list.append(data_indent)
str_list.append("0xe2, 0xec, 0xf6, 0x00, 0x0a, 0x14, 0xff, 0x00, 0x00,\n\n");
str_list.append("0xe2, 0xec, 0xf6, 0x00, 0x0a, 0x14, 0xff, 0x00, 0x00,\n\n")
return 24
def append_class2_single_power(additions, str_list, data_indent):
additions.append("- added HCI_VS_SET_CLASS2_SINGLE_POWER template")
str_list.append(data_indent)
str_list.append('// BTstack: added HCI_VS_SET_CLASS2_SINGLE_POWER 0xFD87 template\n');
str_list.append('// BTstack: added HCI_VS_SET_CLASS2_SINGLE_POWER 0xFD87 template\n')
str_list.append(data_indent)
str_list.append("0x01, 0x87, 0xfd, 0x03, 0x0d, 0x0d, 0x0d,\n\n");
str_list.append("0x01, 0x87, 0xfd, 0x03, 0x0d, 0x0d, 0x0d,\n\n")
return 7
def append_ehcill(additions, str_list, data_indent):
additions.append("- added eHCILL template")
str_list.append('\n')
str_list.append(data_indent)
str_list.append('// BTstack: added HCI_VS_Sleep_Mode_Configurations 0xFD0C template for eHCILL\n');
str_list.append('// BTstack: added HCI_VS_Sleep_Mode_Configurations 0xFD0C template for eHCILL\n')
str_list.append(data_indent)
str_list.append('0x01, 0x0c, 0xfd, 9 , 1, 0, 0, 0xff, 0xff, 0xff, 0xff, 100, 0,\n\n');
str_list.append('0x01, 0x0c, 0xfd, 9 , 1, 0, 0, 0xff, 0xff, 0xff, 0xff, 100, 0,\n\n')
return 13
def append_calibration_sequence(additions, str_list, data_indent):
@ -119,10 +119,10 @@ def convert_bts(output_file, main_bts_file, bts_add_on, aka, lmp_subversion):
# assert script contains templates for configuration by BTstack
have_eHCILL = False
have_power_vector_gfsk = False;
have_power_vector_edr2 = False;
have_power_vector_edr3 = False;
have_class2_single_power = False;
have_power_vector_gfsk = False
have_power_vector_edr2 = False
have_power_vector_edr3 = False
have_class2_single_power = False
print("Creating {0}".format(output_file))
@ -170,16 +170,16 @@ def convert_bts(output_file, main_bts_file, bts_add_on, aka, lmp_subversion):
# add missing power command templates
if not have_power_vector_gfsk:
part_size += append_power_vector_gfsk(additions, str_list, data_indent)
have_power_vector_gfsk = True;
have_power_vector_gfsk = True
if not have_power_vector_edr2:
part_size += append_power_vector_edr2(additions, str_list, data_indent)
have_power_vector_edr2 = True;
have_power_vector_edr2 = True
if not have_power_vector_edr3:
part_size += append_power_vector_edr3(additions, str_list, data_indent)
have_power_vector_edr3 = True;
have_power_vector_edr3 = True
if not have_class2_single_power:
part_size += append_class2_single_power(additions, str_list, data_indent)
have_class2_single_power = True;
have_class2_single_power = True
counter = 0
str_list.append(data_indent)
@ -212,7 +212,7 @@ def convert_bts(output_file, main_bts_file, bts_add_on, aka, lmp_subversion):
str_list.append("// " + action_data + "\n")
if (action_type < 0): # EOF
break;
break
if not have_eHCILL:
@ -281,7 +281,7 @@ def convert_bts(output_file, main_bts_file, bts_add_on, aka, lmp_subversion):
fout.write('};\n\n')
fout.write('const uint32_t {0}_init_script_size = sizeof({0}_init_script); // size = {1} bytes\n\n'.format(array_name,size));
fout.write('const uint32_t {0}_init_script_size = sizeof({0}_init_script); // size = {1} bytes\n\n'.format(array_name,size))
# check usage: 2-3 param
if len(sys.argv) < 3 or len(sys.argv) > 4:

@ -62,10 +62,10 @@ def convert_hex(basename):
print('- End: %x' % ih.maxaddr())
with open(basename + '.h', 'w') as fout:
fout.write(header.replace('BASENAME',basename));
fout.write(header.replace('BASENAME',basename))
with open(basename + '.c', 'w') as fout:
fout.write(code_start.replace('BASENAME',basename));
fout.write(code_start.replace('BASENAME',basename))
fout.write(' ')
for i in range(0,size):
if i % 1000 == 0:
@ -74,7 +74,7 @@ def convert_hex(basename):
fout.write("0x{0:02x}, ".format(byte))
if (i & 0x0f) == 0x0f:
fout.write('\n ')
fout.write(code_end);
fout.write(code_end)
print ('Done\n')

@ -62,10 +62,10 @@ def convert_emp(basename):
# don't write .h file as we would need to store its name in btstack_chipset_em9301.c, too
# with open(basename + '.h', 'w') as fout:
# fout.write(header.replace('BASENAME',basename));
# fout.write(header.replace('BASENAME',basename))
with open(basename + '.c', 'w') as fout:
fout.write(code_start.replace('BASENAME',basename).replace('SIZE',str(size)));
fout.write(code_start.replace('BASENAME',basename).replace('SIZE',str(size)))
fout.write(' ')
for i in range(0,size):
if i % 1000 == 0:
@ -74,7 +74,7 @@ def convert_emp(basename):
fout.write("0x{0:02x}, ".format(byte))
if (i & 0x0f) == 0x0f:
fout.write('\n ')
fout.write(code_end);
fout.write(code_end)
print ('Done\n')
# check usage: 1 param

@ -172,10 +172,10 @@ def createIndex(fin, filename, api_filepath, api_title, api_label, githuburl):
one_line_function_definition = re.match(r'(.*?)\s*\(.*\(*.*;\n', line)
if one_line_function_definition:
parts = one_line_function_definition.group(1).split(" ");
parts = one_line_function_definition.group(1).split(" ")
name = parts[len(parts)-1]
if len(name) == 0:
print(parts);
print(parts)
sys.exit(10)
# ignore typedef for callbacks
if parts[0] == 'typedef':
@ -185,11 +185,11 @@ def createIndex(fin, filename, api_filepath, api_title, api_label, githuburl):
multi_line_function_definition = re.match(r'.(.*?)\s*\(.*\(*.*', line)
if multi_line_function_definition:
parts = multi_line_function_definition.group(1).split(" ");
parts = multi_line_function_definition.group(1).split(" ")
name = parts[len(parts)-1]
if len(name) == 0:
print(parts);
print(parts)
sys.exit(10)
multiline_function_def = 1
functions[name] = codeReference(name, githuburl, filename, api_filepath, linenr)

@ -259,7 +259,7 @@ def writeListings(aout, infile_name, ref_prefix, git_branch_name):
if state == State.SearchListingEnd:
parts_end = re.match(r'.*(LISTING_END).*',line)
parts_pause = re.match(r'.*(LISTING_PAUSE).*',line)
end_comment_parts = re.match(r'.*(\*/)\s*\n', line);
end_comment_parts = re.match(r'.*(\*/)\s*\n', line)
if parts_end:
aout.write(code_in_listing)
@ -309,7 +309,7 @@ def processExamples(intro_file, examples_folder, examples_ofile, git_branch_name
example.append(example_title)
aout.write(examples_header)
aout.write("\n\n");
aout.write("\n\n")
for group_title in list_of_groups:
if not group_title in list_of_examples: continue
@ -320,7 +320,7 @@ def processExamples(intro_file, examples_folder, examples_ofile, git_branch_name
group_title = group_title + "s"
group_title = group_title + ":"
aout.write("- " + group_title + "\n");
aout.write("- " + group_title + "\n")
for example in examples:
ref_prefix = example[0].replace("_", "")
title = latexText(example[0], ref_prefix)

@ -2,7 +2,7 @@
import struct
BLUETOOTH_BASE_UUID = bytes ([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0x80, 0x5F, 0x9B, 0x34, 0xFB ]);
BLUETOOTH_BASE_UUID = bytes ([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0x80, 0x5F, 0x9B, 0x34, 0xFB ])
def hex_string(bytes):
return " ".join([('%02x' % a) for a in bytes])
@ -67,7 +67,7 @@ class BT_UUID(object):
def __repr__(self):
return "%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x" % (
self.uuid[0], self.uuid[1], self.uuid[2], self.uuid[3], self.uuid[4], self.uuid[5], self.uuid[6], self.uuid[7],
self.uuid[8], self.uuid[9], self.uuid[10], self.uuid[11], self.uuid[12], self.uuid[13], self.uuid[14], self.uuid[15]);
self.uuid[8], self.uuid[9], self.uuid[10], self.uuid[11], self.uuid[12], self.uuid[13], self.uuid[14], self.uuid[15])
class GATTCharacteristic(object):
@ -162,10 +162,10 @@ class GATTService(object):
class Packet(object):
HCI_COMMAND_PACKET = 1;
HCI_EVENT_PACKET = 4;
L2CAP_DATA_PACKET = 6;
RFCOMM_DATA_PACKET = 7;
HCI_COMMAND_PACKET = 1
HCI_EVENT_PACKET = 4
L2CAP_DATA_PACKET = 6
RFCOMM_DATA_PACKET = 7
def __init__(self, packet_type, channel, payload):
self.packet_type = packet_type

@ -26,7 +26,7 @@ try:
except:
pass
if len(am_version_txt) == 0:
print("Cannot find AmbiqSuite root. Make sure BTstack is checked out as AmbiqSuite/third/btstack");
print("Cannot find AmbiqSuite root. Make sure BTstack is checked out as AmbiqSuite/third/btstack")
sys.exit(1)
# show WICED version
@ -54,7 +54,7 @@ for example in LE_EXAMPLES:
os.makedirs(apps_folder)
# copy project makefile
shutil.copyfile(example_template + "Makefile", apps_folder + "Makefile");
shutil.copyfile(example_template + "Makefile", apps_folder + "Makefile")
# create GCC folder
gcc_folder = apps_folder + "/gcc/"
@ -82,7 +82,7 @@ for example in LE_EXAMPLES:
# copy other files
for file in ['startup_gcc.c', 'btstack_template.ld']:
shutil.copyfile(example_template + "gcc/" + file, apps_folder + "/gcc/" + file);
shutil.copyfile(example_template + "gcc/" + file, apps_folder + "/gcc/" + file)
print("- %s" % example)

@ -229,7 +229,7 @@ def writeListings(aout, infile_name, ref_prefix):
if state == State.SearchListingEnd:
parts_end = re.match(r'.*(LISTING_END).*',line)
parts_pause = re.match(r'.*(LISTING_PAUSE).*',line)
end_comment_parts = re.match(r'.*(\*/)\s*\n', line);
end_comment_parts = re.match(r'.*(\*/)\s*\n', line)
if parts_end:
aout.write(code_in_listing)
@ -280,7 +280,7 @@ def processExamples(intro_file, examples_folder, examples_ofile):
example.append(example_title)
aout.write(examples_header)
aout.write("\n\n");
aout.write("\n\n")
for group_title in list_of_groups:
if not group_title in list_of_examples.keys(): continue
@ -291,7 +291,7 @@ def processExamples(intro_file, examples_folder, examples_ofile):
group_title = group_title + "s"
group_title = group_title + ":"
aout.write("- " + group_title + "\n");
aout.write("- " + group_title + "\n")
for example in examples:
ref_prefix = example[0].replace("_", "")
title = latexText(example[0], ref_prefix)

@ -10,7 +10,7 @@ import shutil
import create_examples
if not 'IDF_PATH' in os.environ:
print('Error: IDF_PATH not defined. Please set IDF_PATH as described here:\nhttp://esp-idf.readthedocs.io/en/latest/get-started/index.html#get-started-get-esp-idf');
print('Error: IDF_PATH not defined. Please set IDF_PATH as described here:\nhttp://esp-idf.readthedocs.io/en/latest/get-started/index.html#get-started-get-esp-idf')
sys.exit(10)
IDF_PATH=os.environ['IDF_PATH']

@ -195,8 +195,8 @@ def plot(exp_name, sensor_name, sensor_title, prefix):
plot_colors0 = ['r-','k-','b-','g-']
hist_colors0 = ['red','black','blue','green']
group_index1 = 2;
group_index2 = 3;
group_index1 = 2
group_index2 = 3
plot_data = [plot_data0[group_index1], plot_data0[group_index2]]
hist_data = [hist_data0[group_index1], hist_data0[group_index2]]

@ -89,7 +89,7 @@ def process_pklg(exp_name, sensor_name, scanning_type, pklg_file_name):
reset_timestamp(packet_type, packet, time_sec)
read_scan(packet_type, packet, time_sec)
f.close();
f.close()
prefix = '../data/processed/'
for k in devices.keys():

@ -24,7 +24,7 @@ def list_category_table():
print( row % ('', 'Hit', 'Total', 'Coverage'))
print("------------|------------|------------|------------")
categories = [ 'Line', 'Function', 'Branch'];
categories = [ 'Line', 'Function', 'Branch']
for category in categories:
hit = summary[category + "_hit"]
total = summary[category + "_total"]

@ -11,9 +11,9 @@ def aes_cmac(key, n):
cobj.update(n)
return cobj.digest()
db_message = bytes.fromhex('010000280018020003280a0300002a04000328020500012a06000028011807000328200800052a090002290a0003280a0b00292b0c000328020d002a2b0e00002808180f000228140016000f1810000328a21100182a12000229130000290000140001280f1815000328021600192a');
db_message = bytes.fromhex('010000280018020003280a0300002a04000328020500012a06000028011807000328200800052a090002290a0003280a0b00292b0c000328020d002a2b0e00002808180f000228140016000f1810000328a21100182a12000229130000290000140001280f1815000328021600192a')
db_hash_expected = bytes.fromhex('F1CA2D48ECF58BAC8A8830BBB9FBA990')
db_hash_actual = aes_cmac(bytes(16), db_message);
db_hash_actual = aes_cmac(bytes(16), db_message)
if db_hash_actual != db_hash_expected:
print("Expected: " + db_hash_actual.hex())
print("Actual: " + db_hash_actual.hex())

@ -616,7 +616,7 @@ with open (infile, 'rb') as fin:
ts_sec = read_net_32_from_file(fin)
ts_usec = read_net_32_from_file(fin)
type = ord(fin.read(1))
packet_len = payload_length - 9;
packet_len = payload_length - 9
if (packet_len > 66000):
print ("Error parsing pklg at offset %u (%x)." % (pos, pos))
break

@ -290,7 +290,7 @@ def add_timer(timeout_ms, callback, context):
global timers_timeouts
global timers_callbacks
timeout = get_time_millis() + timeout_ms;
timeout = get_time_millis() + timeout_ms
pos = bisect.bisect(timers_timeouts, timeout)
timers_timeouts.insert(pos, timeout)
timers_callbacks.insert(pos, (callback, context))

@ -430,7 +430,7 @@ def sbc_bit_allocation_mono_dual(frame):
for sb in range(frame.nr_subbands):
if bitneed[ch][sb] < bitslice+2 :
bits[ch][sb]=0;
bits[ch][sb]=0
else:
bits[ch][sb] = min(bitneed[ch][sb]-bitslice,16)

@ -278,7 +278,7 @@ def run(test_descriptor, nodes):
return
elif line.startswith('COUNTER'):
print('%s notification received' % node.get_name())
return;
return
def write_config(fout, test_descriptor):
attributes = [

@ -68,7 +68,7 @@ def scrape_page(fout, url):
# test: fetch from local file 'index.html'
# f = codecs.open("index.html", "r", "utf-8")
# content = f.read();
# content = f.read()
tree = html.fromstring(content)
rows = tree.xpath('//table/tbody/tr')

@ -105,7 +105,7 @@ def scrape_page(fout, url):
# test: fetch from local file 'service-discovery.html'
# f = codecs.open("service-discovery.html", "r", "utf-8")
# content = f.read();
# content = f.read()
tree = html.fromstring(content)
@ -166,7 +166,7 @@ def scrape_page(fout, url):
process_rows(fout, rows, '#define BLUETOOTH_ATTRIBUTE_%-54s %s // %s\n')
# scrape_attributes(fout, tree, table_name)
# see above
fout.write('#define BLUETOOTH_ATTRIBUTE_GNSS_SUPPORTED_FEATURES 0x0200\n');
fout.write('#define BLUETOOTH_ATTRIBUTE_GNSS_SUPPORTED_FEATURES 0x0200\n')

@ -317,7 +317,7 @@ def size_for_type(type):
def format_function_name(event_name):
event_name = event_name.lower()
if 'event' in event_name:
return event_name;
return event_name
return event_name+'_event'
def template_for_type(field_type):
@ -332,7 +332,7 @@ def template_for_type(field_type):
if field_type in types_with_struct_return:
return c_prototoype_struct_return
if listScope:
return c_prototype_iterator_return;
return c_prototype_iterator_return
return c_prototoype_simple_return
def all_fields_supported(format):
@ -361,7 +361,7 @@ def create_iterator( event_name, field_name, field_type, offset, offset_is_numbe
generated_has_next = c_prototype_iterator_has_next.format( list_field=offset, length_field=last_length_field_offset,
format=field_type, scope=list_name_scope, event=event_name,
field=field_name )
generated = generated_init + generated_has_next;
generated = generated_init + generated_has_next
else:
# the item length is either determined statically, format "12"
# or dynamically by a list element, format "J"
@ -371,7 +371,7 @@ def create_iterator( event_name, field_name, field_type, offset, offset_is_numbe
code = '*iter[{0}] + 1;'.format( last_length_field_offset-list_base )
else:
code = '{0};'.format( list_static_size )
generated = c_prototype_iterator_next.format( event=event_name, scope=list_name_scope, format=field_type, code=code );
generated = c_prototype_iterator_next.format( event=event_name, scope=list_name_scope, format=field_type, code=code )
if field_type == closing_bracket:
listScope.pop()
@ -389,7 +389,7 @@ def create_getter(event_name, field_name, field_type, offset, offset_is_number,
list_length_field_offset = listScope[-1][2]
list_static_size = listScope[-1][3]
if offset_is_number:
offset = offset - list_base;
offset = offset - list_base
listScope[-1] = (list_name_scope, list_base, list_length_field_offset, list_static_size+size_for_type(field_type))
description = description_template.format(field_name, event_name.upper())
result_name = field_name

@ -341,7 +341,7 @@ writeln(f, copyright)
writeln(f, hfile_header_begin)
add_structs(f, header_template)
writeln(f, hfile_header_end)
f.close();
f.close()
f = open(file_name+".c", "w")
@ -352,7 +352,7 @@ add_structs(f, code_template)
f.write(init_header)
add_structs(f, init_template)
writeln(f, "}")
f.close();
f.close()
# also generate test code
test_header = """

@ -348,7 +348,7 @@ def write_wrappers_for_file(fout, file, header_name, need_lock):
type_and_name = function.group(1)
arg_string = function.group(2)
create_wrapper(fout, type_and_name, arg_string, need_lock)
inline_function = 'inline' in line;
inline_function = 'inline' in line
continue
# multi-line function declaration

@ -439,7 +439,7 @@ def parseService(fout, parts, service_type):
serviceDefinitionComplete(fout)
read_only_anybody_flags = property_flags['READ'];
read_only_anybody_flags = property_flags['READ']
write_indent(fout)
fout.write('// 0x%04x %s\n' % (handle, '-'.join(parts)))
@ -479,7 +479,7 @@ def parseIncludeService(fout, parts):
global handle
global total_size
read_only_anybody_flags = property_flags['READ'];
read_only_anybody_flags = property_flags['READ']
uuid = parseUUID(parts[1])
uuid_size = len(uuid)
@ -529,10 +529,10 @@ def parseCharacteristic(fout, parts):
global current_characteristic_uuid_string
global characteristic_indices
read_only_anybody_flags = property_flags['READ'];
read_only_anybody_flags = property_flags['READ']
# enumerate characteristics with same UUID, using optional name tag if available
current_characteristic_uuid_string = c_string_for_uuid(parts[1]);
current_characteristic_uuid_string = c_string_for_uuid(parts[1])
index = 1
if current_characteristic_uuid_string in characteristic_indices:
index = characteristic_indices[current_characteristic_uuid_string] + 1
@ -591,7 +591,7 @@ def parseCharacteristic(fout, parts):
# add UUID128 flag for value handle
if uuid_size == 16:
value_flags = value_flags | property_flags['LONG_UUID'];
value_flags = value_flags | property_flags['LONG_UUID']
write_indent(fout)
properties_string = prettyPrintProperties(parts[2])
@ -737,7 +737,7 @@ def parseCharacteristicFormat(fout, parts):
global handle
global total_size
read_only_anybody_flags = property_flags['READ'];
read_only_anybody_flags = property_flags['READ']
identifier = parts[1]
presentation_formats[identifier] = handle
@ -775,7 +775,7 @@ def parseCharacteristicAggregateFormat(fout, parts):
global handle
global total_size
read_only_anybody_flags = property_flags['READ'];
read_only_anybody_flags = property_flags['READ']
size = 2 + 2 + 2 + 2 + (len(parts)-1) * 2
write_indent(fout)
@ -803,7 +803,7 @@ def parseExternalReportReference(fout, parts):
global handle
global total_size
read_only_anybody_flags = property_flags['READ'];
read_only_anybody_flags = property_flags['READ']
size = 2 + 2 + 2 + 2 + 2
report_uuid = int(parts[2], 16)
@ -823,7 +823,7 @@ def parseReportReference(fout, parts):
global handle
global total_size
read_only_anybody_flags = property_flags['READ'];
read_only_anybody_flags = property_flags['READ']
size = 2 + 2 + 2 + 2 + 1 + 1
report_id = parts[2]
@ -845,7 +845,7 @@ def parseNumberOfDigitals(fout, parts):
global handle
global total_size
read_only_anybody_flags = property_flags['READ'];
read_only_anybody_flags = property_flags['READ']
size = 2 + 2 + 2 + 2 + 1
no_of_digitals = parts[1]
@ -865,7 +865,7 @@ def parseLines(fname_in, fin, fout):
global handle
global total_size
line_count = 0;
line_count = 0
for line in fin:
line = line.strip("\n\r ")
line_count += 1
@ -1017,13 +1017,13 @@ def parse(fname_in, fin, fname_out, tool_path, fout):
serviceDefinitionComplete(fout)
write_indent(fout)
fout.write("// END\n");
fout.write("// END\n")
write_indent(fout)
write_16(fout,0)
fout.write("\n")
total_size = total_size + 2
fout.write("}; // total size %u bytes \n" % total_size);
fout.write("}; // total size %u bytes \n" % total_size)
def listHandles(fout):
fout.write('\n\n')

@ -51,7 +51,7 @@ def generateTimestamp(t):
if len(parts) == 1:
# only time, prepend fixed date
have_date = False
t = "2000-01-01 " + t;
t = "2000-01-01 " + t
# handle ms
try:

@ -34,7 +34,7 @@ def uuid128_at_offset(data, offset):
return uuid128[0:4].hex() + "-" + uuid128[4:6].hex() + "-" + uuid128[6:8].hex() + "-" + uuid128[8:10].hex() + "-" + uuid128[10:].hex()
def handle_at_offset(data, offset):
return struct.unpack_from("<H", data, offset)[0];
return struct.unpack_from("<H", data, offset)[0]
def bd_addr_at_offset(data, offset):
peer_addr = reversed(data[8:8 + 6])
@ -143,7 +143,7 @@ class l2cap_reassembler:
payload_data = bytes()
payload_len = 0
channel = 0;
channel = 0
def handle_acl(self, pb, data):
if pb in [0, 2]:
@ -165,7 +165,7 @@ class hci_connection:
def __init__(self, bd_addr, con_handle):
self.bd_addr = bd_addr
self.con_handle = con_handle;
self.con_handle = con_handle
self.remote_gatt_server = gatt_server(bd_addr)
def handle_att_pdu(self, direction_in, pdu):
@ -210,7 +210,7 @@ def handle_evt(event):
if event[0] == 0x3e:
if event[2] == 0x01:
# LE Connection Complete
con_handle = handle_at_offset(event, 4);
con_handle = handle_at_offset(event, 4)
peer_addr = bd_addr_at_offset(event, 8)
connection = hci_connection(peer_addr, con_handle)
connections[con_handle] = connection
@ -242,7 +242,7 @@ with open (infile, 'rb') as fin:
(entry_len, ts_sec, ts_usec, type) = read_header(fin)
if entry_len < 0:
break
packet_len = entry_len - 9;
packet_len = entry_len - 9
if (packet_len > 66000):
print ("Error parsing pklg at offset %u (%x)." % (pos, pos))
break

@ -46,7 +46,7 @@ with open (infile, 'rb') as fin:
(len, ts_sec, ts_usec, type) = read_header(fin)
if len < 0:
break
packet_len = len - 9;
packet_len = len - 9
if (packet_len > 66000):
print ("Error parsing pklg at offset %u (%x)." % (pos, pos))
break

@ -38,7 +38,7 @@ def read_header(f):
def handle_at_offset(data, offset):
return struct.unpack_from("<H", data, offset)[0];
return struct.unpack_from("<H", data, offset)[0]
def bd_addr_at_offset(data, offset):
@ -50,7 +50,7 @@ class hci_connection:
def __init__(self, bd_addr, con_handle):
self.bd_addr = bd_addr
self.con_handle = con_handle;
self.con_handle = con_handle
def connection_for_handle(con_handle):
@ -61,7 +61,7 @@ def connection_for_handle(con_handle):
def handle_cmd(packet):
opcode = struct.unpack_from("<H", packet, 0)[0];
opcode = struct.unpack_from("<H", packet, 0)[0]
if opcode == 0x201a:
# LE Long Term Key Request Reply
con_handle = handle_at_offset(packet, 3)
@ -90,7 +90,7 @@ def handle_evt(event):
elif event[0] == 0x3e:
if event[2] == 0x01:
# LE Connection Complete
con_handle = handle_at_offset(event, 4);
con_handle = handle_at_offset(event, 4)
peer_addr = bd_addr_at_offset(event, 8)
connection = hci_connection(peer_addr, con_handle)
connections[con_handle] = connection

@ -46,7 +46,7 @@ with open (infile, 'rb') as fin:
(len, ts_sec, ts_usec, type) = read_header(fin)
if len < 0:
break
packet_len = len - 9;
packet_len = len - 9
if (packet_len > 66000):
print ("Error parsing pklg at offset %u (%x)." % (pos, pos))
break

@ -213,14 +213,14 @@ def create_command_java(fout, name, ogf, ocf, format, params):
if param_type in ['L', 'J']:
length_name = arg_name
if param_type == 'V':
store_params += ind + 'Util.storeBytes(command, offset, %s, %s);' % (arg_name, length_name) + '\n';
store_params += ind + 'offset += %s;\n' % length_name;
store_params += ind + 'Util.storeBytes(command, offset, %s, %s);' % (arg_name, length_name) + '\n'
store_params += ind + 'offset += %s;\n' % length_name
length_name = ''
else:
store_params += ind + (param_store[param_type] % arg_name) + '\n';
store_params += ind + (param_store[param_type] % arg_name) + '\n'
size = arg_size
if size > 0:
store_params += ind + 'offset += %u;\n' % arg_size;
store_params += ind + 'offset += %u;\n' % arg_size
else:
store_params += ind + 'offset += %s.length;\n' % arg_name
@ -253,7 +253,7 @@ def create_btstack_java(commands):
for command in commands:
(command_name, ogf, ocf, format, params) = command
create_command_java(fout, command_name, ogf, ocf, format, params);
create_command_java(fout, command_name, ogf, ocf, format, params)
mark_define_as_used(ogf)
mark_define_as_used(ocf)

@ -164,7 +164,7 @@ def list_metrics_table():
print( row % ('Name', 'Target', 'Deviations', 'Max value'))
print("------------|------------|------------|------------")
ordered_metrics = [ 'PATH', 'GOTO', 'CCN', 'CALLS', 'PARAM', 'STMT', 'LEVEL', 'RETURN', 'FUNC'];
ordered_metrics = [ 'PATH', 'GOTO', 'CCN', 'CALLS', 'PARAM', 'STMT', 'LEVEL', 'RETURN', 'FUNC']
for metric_name in ordered_metrics:
if metric_name in targets:
target = targets[metric_name]

@ -196,10 +196,10 @@ def create_command_python(fout, name, ogf, ocf, format, params):
if param_type in ['L', 'J']:
length_name = arg_name
if param_type == 'V':
store_params += ind + 'Util.storeBytes(command, offset, %s, %s);' % (arg_name, length_name) + '\n';
store_params += ind + 'Util.storeBytes(command, offset, %s, %s);' % (arg_name, length_name) + '\n'
length_name = ''
else:
store_params += ind + (param_store[param_type] % arg_name) + '\n';
store_params += ind + (param_store[param_type] % arg_name) + '\n'
size = arg_size
fout.write( command_builder_command.format(name=name, args=args_string, ogf=ogf, ocf=ocf, args_builder=store_params))
@ -231,7 +231,7 @@ def create_command_builder(commands):
for command in commands:
(command_name, ogf, ocf, format, params) = command
create_command_python(fout, command_name, ogf, ocf, format, params);
create_command_python(fout, command_name, ogf, ocf, format, params)
mark_define_as_used(ogf)
mark_define_as_used(ocf)

@ -44,7 +44,7 @@ def check_file(infile):
ts_sec = read_net_32(fin)
ts_usec = read_net_32(fin)
type = ord(fin.read(1))
packet_len = len - 9;
packet_len = len - 9
if (packet_len > 66000):
print ("Error parsing pklg at offset %u (%x)." % (pos, pos))
break