import sys
import os
import re
import lxml.etree as ET
from generate_debug_vars import map_type_to_pt, get_iq_define, type_map
from enum import IntEnum
import scan_vars
import myXML
import pickle

# Helper functions that are now used by the widget
def split_path(path):
    """
    Splits a path into components:
    - 'foo[2].bar[1]->baz' → ['foo', '[2]', 'bar', '[1]', 'baz']
    A trailing '-' (with no '>' after it) is dropped.
    """
    tokens = []
    token = ''
    i = 0
    length = len(path)
    while i < length:
        c = path[i]
        # Separators: '->' and '.'
        if c == '-' and i + 1 < length and path[i:i+2] == '->':
            if token:
                tokens.append(token)
                token = ''
            i += 2
            continue
        elif c == '-' and i == length - 1:
            # '-' at the very end of the string with no '>' after it: just skip it
            i += 1
            continue
        elif c == '.':
            if token:
                tokens.append(token)
                token = ''
            i += 1
            continue
        elif c == '[':
            if token:
                tokens.append(token)
                token = ''
            idx = ''
            while i < length and path[i] != ']':
                idx += path[i]
                i += 1
            if i < length and path[i] == ']':
                idx += ']'
                i += 1
            tokens.append(idx)
            continue
        else:
            token += c
            i += 1
    if token:
        tokens.append(token)
    return tokens

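
# Illustrative check of split_path (not part of the original file; the sample
# strings are made up):
#   split_path('arr[0][1].ptr->val')  ->  ['arr', '[0]', '[1]', 'ptr', 'val']
#   split_path('foo.bar-')            ->  ['foo', 'bar']   (trailing '-' is dropped)
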
def make_absolute_path(path, base_path):
    if not os.path.isabs(path) and os.path.isdir(base_path):
        try:
            return os.path.abspath(os.path.join(base_path, path))
        except Exception:
            pass  # in case os.path.join or abspath fails
    elif os.path.isabs(path):
        return os.path.abspath(path)
    # Fallback: return the path unchanged (also covers the exception case above)
    return path

def make_relative_path(abs_path, base_path):
    abs_path = os.path.abspath(abs_path)
    base_path = os.path.abspath(base_path)

    # Split into lists of directories
    abs_parts = abs_path.split(os.sep)
    base_parts = base_path.split(os.sep)

    # Check whether base_path is a true directory-wise prefix of the path
    if abs_parts[:len(base_parts)] == base_parts:
        rel_parts = abs_parts[len(base_parts):]
        return "/".join(rel_parts)

    # Otherwise fall back to relpath
    try:
        return os.path.relpath(abs_path, base_path).replace("\\", "/")
    except Exception:
        return abs_path.replace("\\", "/")

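
# Illustrative behaviour of make_relative_path (the paths below are made up):
# when base_path is a directory prefix of abs_path the remainder is returned with
# forward slashes; otherwise the function falls back to os.path.relpath, or to the
# absolute path itself on failure.
#   make_relative_path('/home/user/proj/src/main.c', '/home/user/proj')  ->  'src/main.c'
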
def parse_vars(filename, typedef_map=None):
    root, tree = myXML.safe_parse_xml(filename)
    if root is None:
        return []

    if typedef_map is None:
        typedef_map = {}

    vars_list = []
    variables_elem = root.find('variables')
    if variables_elem is not None:
        for var in variables_elem.findall('var'):
            name = var.attrib.get('name', '')
            var_type = var.findtext('type', 'unknown').strip()

            # Compute pt_type and iq_type
            pt_type = var.findtext('pt_type')
            if not pt_type:
                pt_type = map_type_to_pt(var_type, name, typedef_map)

            iq_type = var.findtext('iq_type')
            if not iq_type:
                iq_type = get_iq_define(var_type)
            # Write iq_type back to the XML
            iq_type_elem = var.find('iq_type')
            if iq_type_elem is None:
                iq_type_elem = ET.SubElement(var, 'iq_type')
            iq_type_elem.text = iq_type

            # Write pt_type back to the XML
            pt_type_elem = var.find('pt_type')
            if pt_type_elem is None:
                pt_type_elem = ET.SubElement(var, 'pt_type')
            pt_type_elem.text = pt_type

            vars_list.append({
                'name': name,
                'show_var': var.findtext('show_var', 'false'),
                'enable': var.findtext('enable', 'false'),
                'shortname': var.findtext('shortname', name),
                'pt_type': pt_type,
                'iq_type': iq_type,
                'return_type': var.findtext('return_type', 't_iq_none'),
                'type': var_type,
                'file': var.findtext('file', ''),
                'extern': var.findtext('extern', 'false') == 'true',
                'static': var.findtext('static', 'false') == 'true',
            })

    myXML.fwrite(root, filename)

    return vars_list

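
# Sketch of the <var> layout parse_vars expects (element names are taken from the
# reads above; the concrete values are hypothetical):
#
#   <variables>
#     <var name="adc_value">
#       <type>_iq24</type>
#       <show_var>true</show_var>
#       <enable>true</enable>
#       <shortname>adc</shortname>
#       <file>adc.c</file>
#       <extern>false</extern>
#       <static>true</static>
#     </var>
#   </variables>
#
# Missing <pt_type>/<iq_type> elements are computed via map_type_to_pt() /
# get_iq_define() and written back into the XML before myXML.fwrite().
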
# 2. Parse structSup.xml
def parse_structs(filename):
    root, tree = myXML.safe_parse_xml(filename)
    if root is None:
        return {}, {}

    structs = {}
    typedef_map = {}

    def parse_struct_element(elem):
        fields = {}

        for field in elem.findall("field"):
            fname = field.attrib.get("name")
            ftype = field.attrib.get("type", "")

            # Check for a nested struct
            nested_struct_elem = field.find("struct")

            if nested_struct_elem is not None:
                # Recursively parse the nested struct and insert it as a sub-dict
                nested_fields = parse_struct_element(nested_struct_elem)

                # Wrap it in a dict with a 'type' key to keep the type from the XML
                fields[fname] = {
                    'type': ftype,    # the field's type, e.g. "BENDER_ERROR"
                    **nested_fields   # the expanded fields of the nested struct
                }
            else:
                # Plain field
                fields[fname] = ftype

        return fields

    structs_elem = root.find("structs")
    if structs_elem is not None:
        for struct in structs_elem.findall("struct"):
            name = struct.attrib.get("name")
            if name and name not in structs:
                fields = parse_struct_element(struct)
                structs[name] = fields

    # typedefs are taken as-is
    typedefs_elem = root.find("typedefs")
    if typedefs_elem is not None:
        for typedef in typedefs_elem.findall("typedef"):
            name = typedef.attrib.get('name')
            target_type = typedef.attrib.get('type')
            if name and target_type:
                typedef_map[name.strip()] = target_type.strip()

    return structs, typedef_map

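
# Sketch of the structSup.xml layout parse_structs expects (element and attribute
# names follow the code above; the struct itself is a made-up example sitting under
# the document root):
#
#   <structs>
#     <struct name="ADC_DATA">
#       <field name="raw" type="int[4]"/>
#       <field name="error" type="BENDER_ERROR">
#         <struct>
#           <field name="code" type="int"/>
#         </struct>
#       </field>
#     </struct>
#   </structs>
#   <typedefs>
#     <typedef name="adc_data_t" type="ADC_DATA"/>
#   </typedefs>
#
# which yields structs == {'ADC_DATA': {'raw': 'int[4]',
#                                       'error': {'type': 'BENDER_ERROR', 'code': 'int'}}}
# and typedef_map == {'adc_data_t': 'ADC_DATA'}.
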
def parse_array_dims(type_str):
    """Returns the base type and the list of array dimensions"""
    dims = list(map(int, re.findall(r'\[(\d+)\]', type_str)))
    base_type = re.sub(r'\[\d+\]', '', type_str).strip()
    return base_type, dims

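
# Illustrative examples for parse_array_dims (made up):
#   parse_array_dims('int[4][2]')  ->  ('int', [4, 2])
#   parse_array_dims('float')      ->  ('float', [])
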
def generate_array_names(prefix, dims, depth=0):
    """Recursively generates names for every element of a multi-dimensional array"""
    if not dims:
        return [prefix]

    result = []
    for i in range(dims[0]):
        new_prefix = f"{prefix}[{i}]"
        children = generate_array_names(new_prefix, dims[1:], depth + 1)
        result.append({
            'name': new_prefix,
            'children': children if len(dims) > 1 else None
        })
    return result

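
# Illustrative example for generate_array_names (made up): a 2x2 array expands to
#   generate_array_names('m', [2, 2]) ->
#     [{'name': 'm[0]', 'children': [{'name': 'm[0][0]', 'children': None},
#                                    {'name': 'm[0][1]', 'children': None}]},
#      {'name': 'm[1]', 'children': [{'name': 'm[1][0]', 'children': None},
#                                    {'name': 'm[1][1]', 'children': None}]}]
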
def flatten_array_tree(array_tree):
    """Flattens the array tree into a linear list with nested children"""
    result = []
    for node in array_tree:
        entry = {'name': node['name']}
        if node['children']:
            entry['children'] = flatten_array_tree(node['children'])
        result.append(entry)
    return result

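
# flatten_array_tree keeps only 'name' (and 'children' where present); e.g. the
# generate_array_names('m', [2, 2]) tree from the example above flattens to
#   [{'name': 'm[0]', 'children': [{'name': 'm[0][0]'}, {'name': 'm[0][1]'}]},
#    {'name': 'm[1]', 'children': [{'name': 'm[1][0]'}, {'name': 'm[1][1]'}]}]
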
def expand_struct_recursively(prefix, type_str, structs, typedefs, var_attrs, depth=0):
    if depth > 10:
        return []

    # Helper for expanding arrays
    def process_array(prefix, type_str, structs, typedefs, var_attrs, depth=0):
        base_type, array_dims = parse_array_dims(type_str)
        if not array_dims:
            return []

        # Take the first array dimension at the current level
        current_dim = array_dims[0]
        # The remaining dimensions are everything but the first
        remaining_dims = array_dims[1:]

        # Build the type string for the remaining dimensions:
        if remaining_dims:
            # e.g. 'int[16]'
            remaining_type_str = f"{base_type}{''.join(f'[{d}]' for d in remaining_dims)}"
        else:
            remaining_type_str = base_type

        array_tree = []
        for i in range(current_dim):
            name = f"{prefix}[{i}]"
            # Each element is expanded with the remaining array type
            children = expand_struct_recursively(name, remaining_type_str, structs, typedefs, var_attrs, depth + 1)
            node = {
                'name': name,
                'type': remaining_type_str if remaining_dims else base_type,
                'pt_type': '',
                'iq_type': '',
                'return_type': '',
                'file': var_attrs.get('file'),
                'extern': var_attrs.get('extern'),
                'static': var_attrs.get('static'),
            }
            if children:
                node['children'] = children
            array_tree.append(node)

        return array_tree

    # If type_str is an already-parsed struct (a dict)
    if isinstance(type_str, dict):
        fields = type_str
    else:
        # Check whether it is an array
        base_type, array_dims = parse_array_dims(type_str)
        if array_dims:
            return process_array(prefix, type_str, structs, typedefs, var_attrs, depth)

        # Look the struct up by its type name
        base_type = scan_vars.strip_ptr_and_array(type_str)
        fields = structs.get(base_type)
        if not isinstance(fields, dict):
            # Neither a struct nor an array: just return an empty list
            return []

    children = []
    for field_name, field_value in fields.items():
        if field_name == 'type':
            continue

        # Build the full field name
        if prefix.endswith('*'):
            separator = '->'
            full_name = f"{prefix[:-1]}{separator}{field_name}"
        else:
            separator = '.'
            full_name = f"{prefix}{separator}{field_name}"

        # Determine the field type
        if isinstance(field_value, dict) and isinstance(field_value.get('type'), str):
            field_type_str = field_value['type']
        elif isinstance(field_value, str):
            field_type_str = field_value
        else:
            field_type_str = None

        if field_type_str and '*' in field_type_str:
            full_name_prefix = full_name + '*'
        else:
            full_name_prefix = full_name

        # Handle a field given as a string (a plain type or an array)
        if field_type_str:
            base_subtype, sub_dims = parse_array_dims(field_type_str)
            if sub_dims:
                # An array: expand its elements
                array_parent = {
                    'name': full_name,
                    'type': field_type_str,
                    'pt_type': '',
                    'iq_type': '',
                    'return_type': '',
                    'file': var_attrs.get('file'),
                    'extern': var_attrs.get('extern'),
                    'static': var_attrs.get('static'),
                }

                array_children = []
                flat_names = generate_array_names(full_name, sub_dims)
                for node in flat_names:
                    # node is a dict with a 'name' key and, possibly, 'children'
                    sub_items = expand_struct_recursively(node['name'], base_subtype, structs, typedefs, var_attrs, depth + 1)
                    child_node = {
                        'name': node['name'],
                        'type': base_subtype,
                        'pt_type': '',
                        'iq_type': '',
                        'return_type': '',
                        'file': var_attrs.get('file'),
                        'extern': var_attrs.get('extern'),
                        'static': var_attrs.get('static'),
                    }
                    if sub_items:
                        child_node['children'] = sub_items
                    array_children.append(child_node)

                array_parent['children'] = array_children
                children.append(array_parent)
                continue

            # Ignore function pointers
            if "(" in field_type_str and "*" in field_type_str and ")" in field_type_str:
                continue

            if isinstance(field_value, dict):
                # A single struct: expand it recursively
                sub_items = expand_struct_recursively(full_name_prefix, field_value, structs, typedefs, var_attrs, depth + 1)
                child = {
                    'name': full_name,
                    'type': field_type_str,
                    'pt_type': '',
                    'iq_type': '',
                    'return_type': '',
                    'file': var_attrs.get('file'),
                    'extern': var_attrs.get('extern'),
                    'static': var_attrs.get('static'),
                }
                if sub_items:
                    child['children'] = sub_items
                children.append(child)
            else:
                # Plain field (int, float, etc.)
                child = {
                    'name': full_name,
                    'type': field_type_str,
                    'pt_type': '',
                    'iq_type': '',
                    'return_type': '',
                    'file': var_attrs.get('file'),
                    'extern': var_attrs.get('extern'),
                    'static': var_attrs.get('static'),
                }
                children.append(child)
            continue

        # A dict without a 'type' string (or with a complex layout): treat it as a nested struct
        if isinstance(field_value, dict):
            type_name = field_value.get('type', '')
            child = {
                'name': full_name,
                'type': type_name,
                'pt_type': '',
                'iq_type': '',
                'return_type': '',
                'file': var_attrs.get('file'),
                'extern': var_attrs.get('extern'),
                'static': var_attrs.get('static'),
            }
            subchildren = expand_struct_recursively(full_name_prefix, field_value, structs, typedefs, var_attrs, depth + 1)
            if subchildren:
                child['children'] = subchildren
            children.append(child)

    return children

def expand_vars(vars_list, structs, typedefs):
    """
    Expands structs and arrays of structs into trees.
    """
    expanded = []

    for var in vars_list:
        pt_type = var.get('pt_type', '')
        raw_type = var.get('type', '')

        if pt_type.startswith('pt_ptr_'):
            new_var = var.copy()
            new_var['children'] = expand_struct_recursively(var['name'] + '*', raw_type, structs, typedefs, var)
            expanded.append(new_var)

        elif pt_type.startswith('pt_arr_'):
            new_var = var.copy()
            new_var['children'] = expand_struct_recursively(var['name'], raw_type, structs, typedefs, var)
            expanded.append(new_var)

        elif pt_type == 'pt_struct':
            new_var = var.copy()
            new_var['children'] = expand_struct_recursively(var['name'], raw_type, structs, typedefs, var)
            expanded.append(new_var)

        elif pt_type == 'pt_union':
            new_var = var.copy()
            new_var['children'] = expand_struct_recursively(var['name'], raw_type, structs, typedefs, var)
            expanded.append(new_var)

        else:
            expanded.append(var)

    return expanded

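
# Minimal sketch of the expansion (hedged: 'ADC' and its fields are invented, and it
# assumes scan_vars.strip_ptr_and_array('ADC') returns 'ADC'):
#   structs = {'ADC': {'status': 'int', 'value': 'float'}}
#   var = {'name': 'adc', 'type': 'ADC', 'pt_type': 'pt_struct', 'file': 'adc.c',
#          'extern': False, 'static': False}
#   expand_vars([var], structs, {}) produces a copy of var whose 'children' are
#   nodes named 'adc.status' (type 'int') and 'adc.value' (type 'float').
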
def build_full_names(parts, full_name):
    """
    Rebuilds the nested full names from a list of parts, guided by the original
    full name (with '.', '->' and indices).

    Example:
        parts = ['arr', '[0]', '[1]', 'ptr', 'val']
        full_name = 'arr[0][1].ptr->val'

        → [
            'arr',
            'arr[0]',
            'arr[0][1]',
            'arr[0][1].ptr',
            'arr[0][1].ptr->val'
        ]
    """
    names = []
    acc = ''
    idx = 0
    for part in parts:
        pos = full_name.find(part, idx)
        if pos == -1:
            acc += part
        else:
            acc = full_name[:pos + len(part)]
            idx = pos + len(part)
        names.append(acc)
    return names

def find_var_by_name(tree, name):
    for var in tree:
        if var.get('name') == name:
            return var
        if 'children' in var:
            found = find_var_by_name(var['children'], name)
            if found:
                return found
    return None

def add_to_nested_tree(tree, var, path_parts, full_names=None, depth=0, source_tree=None):
    if not path_parts:
        return

    if full_names is None:
        full_names = build_full_names(path_parts, var['name'])

    current_name = full_names[depth]

    for child in tree:
        if child.get('name') == current_name:
            if depth == len(path_parts) - 1:
                child.update(var)
                return
            if 'children' not in child:
                child['children'] = []
            add_to_nested_tree(child['children'], var, path_parts, full_names, depth + 1, source_tree)
            return

    # Look up the parent node by current_name in source_tree (expanded_vars)
    parent_data = {}
    if source_tree:
        parent_var = find_var_by_name(source_tree, current_name)
        if parent_var:
            # Copy every field except the children
            parent_data = {k: v for k, v in parent_var.items() if k != 'children'}

    new_node = {
        'name': current_name,
        'children': []
    }

    # Fill new_node with the parent's data
    new_node.update(parent_data)

    if depth == len(path_parts) - 1:
        new_node.update(var)
    else:
        add_to_nested_tree(new_node['children'], var, path_parts, full_names, depth + 1, source_tree)

    tree.append(new_node)

def split_vars_by_show_flag(expanded_vars):
    # Deep copy via pickle so that removals below do not touch the original tree
    unselected_vars = pickle.loads(pickle.dumps(expanded_vars, protocol=pickle.HIGHEST_PROTOCOL))
    selected_vars = []

    def find_and_remove(var_list, target_name):
        """Removes an element by its full name and returns it"""
        for i, var in enumerate(var_list):
            if var.get("name") == target_name:
                return var_list.pop(i)
            if 'children' in var:
                found = find_and_remove(var['children'], target_name)
                if found:
                    return found
        return None

    def collect_selected_nodes(var):
        """Recursively collects all nodes with show_var=true (including the subtree)"""
        nodes = []
        if var.get('show_var', 'false').lower() == 'true':
            nodes.append(var)
        for child in var.get('children', []):
            nodes.extend(collect_selected_nodes(child))
        return nodes

    def exists_by_path(tree, full_name):
        """
        Checks whether a variable exists in the tree by following the path parts
        (e.g. project → adc → status).
        Each partial name ('project', 'project.adc', ...) must exactly match a node's 'name'.
        """
        path_parts = split_path(full_name)
        full_names = build_full_names(path_parts, full_name)

        current_level = tree
        for name in full_names:
            found = False
            for var in current_level:
                if var.get('name') == name:
                    current_level = var.get('children', [])
                    found = True
                    break
            if not found:
                return False
        return True

    selected_nodes = []
    for var in expanded_vars:
        full_name = var['name']
        # If the name is nested but the full path already exists in the tree,
        # only drop the duplicate copy from the root of unselected_vars
        if ('.' in full_name or '[' in full_name or '->' in full_name):
            path_parts = split_path(full_name)
            if exists_by_path(expanded_vars, full_name):
                # Remove the redundant copy from the root of unselected_vars
                find_and_remove(unselected_vars, full_name)
            else:
                add_to_nested_tree(unselected_vars, var, path_parts, source_tree=expanded_vars)
                find_and_remove(unselected_vars, full_name)
        selected_nodes.extend(collect_selected_nodes(var))

    for node in selected_nodes:
        full_name = node['name']

        path_parts = split_path(full_name)

        # Cut the node out of unselected_vars
        removed = find_and_remove(unselected_vars, full_name)
        if removed:
            add_to_nested_tree(selected_vars, removed, path_parts, source_tree=expanded_vars)
        else:
            # The parent may already have been removed: create the node manually
            add_to_nested_tree(selected_vars, node, path_parts, source_tree=expanded_vars)

    return selected_vars, unselected_vars
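

# Minimal end-to-end sketch (not part of the original module). The XML file names
# below are placeholders: structSup.xml is mentioned above, "debug_vars.xml" is a guess.
if __name__ == "__main__":
    structs, typedefs = parse_structs("structSup.xml")
    variables = parse_vars("debug_vars.xml", typedefs)
    tree = expand_vars(variables, structs, typedefs)
    selected, unselected = split_vars_by_show_flag(tree)
    print(f"selected roots: {len(selected)}, unselected roots: {len(unselected)}")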