Sources moved to Src; the resulting executable is DebugVarEdit.exe.

Improved the variable table (stretched and formatted) and added a build script for the .exe. New variables can now be added to the XML directly through the .c file: write the variable into the .c file, the tool detects it and records it in the XML. Variables can be removed from the XML with the Del key in the all-variables selection window.

Still to do: work out how to implement selection of arrays, and fix bugs:
- the variable's file path is written to the XML incorrectly
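For context, a variable entry in vars.xml, as written and read by the scripts in this commit, looks roughly like the sketch below (tag names taken from generate_xml_output / read_vars_from_xml; the variable name and file path are purely illustrative, and entries added by hand through debug_vars.c additionally carry a <manual>true</manual> tag):

<var name="motor_speed">
    <show_var>true</show_var>
    <enable>true</enable>
    <shortname>motor_speed</shortname>
    <pt_type>pt_int16</pt_type>
    <iq_type>t_iq_none</iq_type>
    <return_type>int</return_type>
    <type>int</type>
    <file>Src/main.c</file>
    <extern>false</extern>
    <static>false</static>
</var>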
Src/VariableSelector.py — new file, 279 lines
@@ -0,0 +1,279 @@
|
||||
import re
|
||||
from PySide6.QtWidgets import (
|
||||
QDialog, QTreeWidget, QTreeWidgetItem, QVBoxLayout, QPushButton,
|
||||
QLineEdit, QLabel, QHeaderView
|
||||
)
|
||||
from PySide6.QtCore import Qt
|
||||
from setupVars import *
|
||||
from scanVars import *
|
||||
|
||||
|
||||
array_re = re.compile(r'^(\w+)\[(\d+)\]$')
|
||||
|
||||
class VariableSelectorDialog(QDialog):
|
||||
def __init__(self, all_vars, structs, typedefs, xml_path=None, parent=None):
|
||||
super().__init__(parent)
|
||||
self.setWindowTitle("Выбор переменных")
|
||||
self.resize(600, 500)
|
||||
self.selected_names = []
|
||||
|
||||
self.all_vars = all_vars
|
||||
self.structs = structs
|
||||
self.typedefs = typedefs
|
||||
self.expanded_vars = []
|
||||
self.var_map = {v['name']: v for v in all_vars}
|
||||
|
||||
self.xml_path = xml_path # сохраняем путь к xml
|
||||
|
||||
self.search_input = QLineEdit()
|
||||
self.search_input.setPlaceholderText("Поиск по имени переменной...")
|
||||
self.search_input.textChanged.connect(self.filter_tree)
|
||||
|
||||
self.tree = QTreeWidget()
|
||||
self.tree.setHeaderLabels(["Имя переменной", "Тип"])
|
||||
self.tree.setSelectionMode(QTreeWidget.ExtendedSelection)
|
||||
self.tree.setRootIsDecorated(True)
|
||||
self.tree.setUniformRowHeights(True)
|
||||
|
||||
self.tree.setStyleSheet("""
|
||||
QTreeWidget::item:selected {
|
||||
background-color: #87CEFA;
|
||||
color: black;
|
||||
}
|
||||
QTreeWidget::item:hover {
|
||||
background-color: #D3D3D3;
|
||||
}
|
||||
""")
|
||||
|
||||
self.btn_add = QPushButton("Добавить выбранные")
|
||||
self.btn_add.clicked.connect(self.on_add_clicked)
|
||||
|
||||
self.btn_delete = QPushButton("Удалить выбранные")
|
||||
self.btn_delete.clicked.connect(self.on_delete_clicked)
|
||||
|
||||
layout = QVBoxLayout()
|
||||
layout.addWidget(QLabel("Поиск:"))
|
||||
layout.addWidget(self.search_input)
|
||||
layout.addWidget(self.tree)
|
||||
layout.addWidget(self.btn_add)
|
||||
layout.addWidget(self.btn_delete) # Кнопка удаления
|
||||
self.setLayout(layout)
|
||||
|
||||
self.populate_tree()
|
||||
|
||||
|
||||
def add_tree_item_recursively(self, parent, var):
|
||||
"""
|
||||
Рекурсивно добавляет переменную и её дочерние поля в дерево.
|
||||
Если parent == None, добавляет на верхний уровень.
|
||||
"""
|
||||
name = var['name']
|
||||
type_str = var.get('type', '')
|
||||
show_var = var.get('show_var', 'false') == 'true'
|
||||
|
||||
item = QTreeWidgetItem([name, type_str])
|
||||
item.setData(0, Qt.UserRole, name)
|
||||
|
||||
# Делаем bitfield-поля неактивными
|
||||
if "(bitfield:" in type_str:
|
||||
item.setDisabled(True)
|
||||
self.set_tool(item, "Битовые поля недоступны для выбора")
|
||||
|
||||
for i, attr in enumerate(['file', 'extern', 'static']):
|
||||
item.setData(0, Qt.UserRole + 1 + i, var.get(attr))
|
||||
|
||||
if show_var:
|
||||
item.setForeground(0, Qt.gray)
|
||||
item.setForeground(1, Qt.gray)
|
||||
self.set_tool(item, "Уже добавлена")
|
||||
|
||||
if parent is None:
|
||||
self.tree.addTopLevelItem(item)
|
||||
else:
|
||||
parent.addChild(item)
|
||||
|
||||
for child in var.get('children', []):
|
||||
self.add_tree_item_recursively(item, child)
|
||||
|
||||
|
||||
def populate_tree(self):
|
||||
self.tree.clear()
|
||||
|
||||
expanded_vars = expand_vars(self.all_vars, self.structs, self.typedefs)
|
||||
|
||||
for var in expanded_vars:
|
||||
self.add_tree_item_recursively(None, var)
|
||||
|
||||
header = self.tree.header()
|
||||
header.setSectionResizeMode(QHeaderView.Interactive) # вручную можно менять
|
||||
self.tree.setColumnWidth(0, 400)
|
||||
self.tree.resizeColumnToContents(1)
|
||||
""" header.setSectionResizeMode(0, QHeaderView.Stretch)
|
||||
header.setSectionResizeMode(1, QHeaderView.ResizeToContents) """
|
||||
|
||||
def filter_tree(self):
|
||||
text = self.search_input.text().strip().lower()
|
||||
path_parts = text.split('.') if text else []
|
||||
|
||||
def hide_all(item):
|
||||
item.setHidden(True)
|
||||
for i in range(item.childCount()):
|
||||
hide_all(item.child(i))
|
||||
|
||||
def path_matches_search(name, search_parts):
|
||||
name_parts = name.lower().split('.')
|
||||
if len(name_parts) < len(search_parts):
|
||||
return False
|
||||
for sp, np in zip(search_parts, name_parts):
|
||||
if not np.startswith(sp):
|
||||
return False
|
||||
return True
|
||||
|
||||
def show_matching_path(item, level=0):
|
||||
name = item.text(0).lower()
|
||||
# Проверяем соответствие до длины path_parts
|
||||
if not path_parts:
|
||||
matched = True
|
||||
else:
|
||||
matched = False
|
||||
# Проверяем совпадение по пути
|
||||
if path_matches_search(name, path_parts[:level+1]):
|
||||
matched = True
|
||||
|
||||
item.setHidden(not matched)
|
||||
|
||||
# Раскрываем, если это не последний уровень поиска
|
||||
if matched and level < len(path_parts) - 1:
|
||||
item.setExpanded(True)
|
||||
else:
|
||||
item.setExpanded(False)
|
||||
|
||||
matched_any_child = False
|
||||
for i in range(item.childCount()):
|
||||
child = item.child(i)
|
||||
if show_matching_path(child, level + 1):
|
||||
matched_any_child = True
|
||||
|
||||
return matched or matched_any_child
|
||||
|
||||
for i in range(self.tree.topLevelItemCount()):
|
||||
item = self.tree.topLevelItem(i)
|
||||
hide_all(item)
|
||||
show_matching_path(item, 0)
|
||||
|
||||
|
||||
def on_add_clicked(self):
|
||||
self.selected_names = []
|
||||
|
||||
for item in self.tree.selectedItems():
|
||||
name = item.text(0)      # variable name (column 0)
type_str = item.text(1)  # variable type (column 1)
|
||||
|
||||
if not name:
|
||||
continue
|
||||
|
||||
self.selected_names.append((name, type_str))
|
||||
|
||||
if name in self.var_map:
|
||||
# Если переменная уже есть, просто включаем её и показываем
|
||||
var = self.var_map[name]
|
||||
var['show_var'] = 'true'
|
||||
var['enable'] = 'true'
|
||||
else:
|
||||
# Создаём новый элемент переменной
|
||||
# Получаем родительские параметры
|
||||
file_val = item.data(0, Qt.UserRole + 1)
|
||||
extern_val = item.data(0, Qt.UserRole + 2)
|
||||
static_val = item.data(0, Qt.UserRole + 3)
|
||||
|
||||
new_var = {
|
||||
'name': name,
|
||||
'type': type_str,
|
||||
'show_var': 'true',
|
||||
'enable': 'true',
|
||||
'shortname': name,
|
||||
'pt_type': '',
|
||||
'iq_type': '',
|
||||
'return_type': 'iq_none',
|
||||
'file': file_val,
|
||||
'extern': str(extern_val).lower() if extern_val else 'false',
|
||||
'static': str(static_val).lower() if static_val else 'false',
|
||||
}
|
||||
|
||||
# Добавляем в список переменных
|
||||
self.all_vars.append(new_var)
|
||||
self.var_map[name] = new_var # Чтобы в будущем не добавлялось повторно
|
||||
|
||||
self.accept()
|
||||
|
||||
def on_delete_clicked(self):
|
||||
# Деактивируем (удаляем из видимых) выбранные переменные
|
||||
for item in self.tree.selectedItems():
|
||||
name = item.text(0)
|
||||
if not name:
|
||||
continue
|
||||
if name in self.var_map:
|
||||
var = self.var_map[name]
|
||||
var['show_var'] = 'false'
|
||||
var['enable'] = 'false'
|
||||
self.accept()
|
||||
|
||||
|
||||
def set_tool(self, item, text):
|
||||
item.setToolTip(0, text)
|
||||
item.setToolTip(1, text)
|
||||
|
||||
|
||||
def keyPressEvent(self, event):
|
||||
if event.key() == Qt.Key_Delete:
|
||||
self.delete_selected_vars()
|
||||
else:
|
||||
super().keyPressEvent(event)
|
||||
|
||||
def delete_selected_vars(self):
|
||||
# Деактивируем (удаляем из видимых) выбранные переменные
|
||||
for item in self.tree.selectedItems():
|
||||
name = item.text(0)
|
||||
if not name:
|
||||
continue
|
||||
if name in self.var_map:
|
||||
var = self.var_map[name]
|
||||
var['show_var'] = 'false'
|
||||
var['enable'] = 'false'
|
||||
|
||||
if not hasattr(self, 'xml_path') or not self.xml_path:
|
||||
from PySide6.QtWidgets import QMessageBox
|
||||
QMessageBox.warning(self, "Ошибка", "Путь к XML не задан, невозможно удалить переменные.")
|
||||
return
|
||||
|
||||
import xml.etree.ElementTree as ET
|
||||
tree = ET.parse(self.xml_path)
|
||||
root = tree.getroot()
|
||||
|
||||
if root is None:
|
||||
return
|
||||
|
||||
vars_section = root.find('variables')
|
||||
if vars_section is None:
|
||||
return # Нет секции variables — ничего удалять
|
||||
|
||||
selected_names = [item.text(0) for item in self.tree.selectedItems() if item.text(0)]
|
||||
|
||||
removed_any = False
|
||||
for var_elem in vars_section.findall('var'):
|
||||
name = var_elem.attrib.get('name')
|
||||
if name in selected_names:
|
||||
vars_section.remove(var_elem)
|
||||
removed_any = True
|
||||
if name in self.var_map:
|
||||
del self.var_map[name]
|
||||
# Удаляем элементы из списка на месте
|
||||
self.all_vars[:] = [v for v in self.all_vars if v['name'] != name]
|
||||
|
||||
|
||||
if removed_any:
|
||||
ET.indent(tree, space=" ", level=0)
|
||||
tree.write(self.xml_path, encoding='utf-8', xml_declaration=True)
|
||||
|
||||
|
||||
self.populate_tree()
|
||||
Src/generateVars.py — new file, 492 lines
@@ -0,0 +1,492 @@
|
||||
# build command
|
||||
# pyinstaller --onefile --distpath . --workpath ./build --specpath ./build generateVars.py
|
||||
# start script
|
||||
# generateVars.exe F:\Work\Projects\TMS\TMS_new_bus\ Src/DebugTools/vars.xml Src/DebugTools
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
from pathlib import Path
|
||||
import argparse
|
||||
|
||||
|
||||
# === Словарь соответствия типов XML → DebugVarType_t ===
|
||||
type_map = dict([
|
||||
*[(k, 'pt_int8') for k in ('signed char', 'char')],
|
||||
*[(k, 'pt_int16') for k in ('int', 'int16', 'short')],
|
||||
*[(k, 'pt_int32') for k in ('long', 'int32', '_iqx')],
|
||||
*[(k, 'pt_int64') for k in ('long long', 'int64')],
|
||||
|
||||
*[(k, 'pt_uint8') for k in ('unsigned char',)],
|
||||
*[(k, 'pt_uint16') for k in ('unsigned int', 'unsigned short', 'Uint16')],
|
||||
*[(k, 'pt_uint32') for k in ('unsigned long', 'Uint32')],
|
||||
*[(k, 'pt_uint64') for k in ('unsigned long long', 'Uint64')],
|
||||
|
||||
('struct', 'pt_struct'),
|
||||
('union', 'pt_union'),
|
||||
|
||||
*[(k, 'pt_ptr_int8') for k in ('signed char*', 'char*')],
|
||||
*[(k, 'pt_ptr_int16') for k in ('int*', 'short*')],
|
||||
*[(k, 'pt_ptr_int32') for k in ('long*',)],
|
||||
*[(k, 'pt_ptr_uint8') for k in ('unsigned char*',)],
|
||||
*[(k, 'pt_ptr_uint16') for k in ('unsigned int*', 'unsigned short*')],
|
||||
*[(k, 'pt_ptr_uint32') for k in ('unsigned long*',)],
|
||||
('unsigned long long*', 'pt_int64'),
|
||||
|
||||
('struct*', 'pt_ptr_struct'),
|
||||
('union*', 'pt_ptr_union'),
|
||||
|
||||
|
||||
*[(k, 'pt_arr_int8') for k in ('signed char[]', 'char[]')],
|
||||
*[(k, 'pt_arr_int16') for k in ('int[]', 'short[]')],
|
||||
*[(k, 'pt_arr_int32') for k in ('long[]',)],
|
||||
*[(k, 'pt_arr_uint8') for k in ('unsigned char[]',)],
|
||||
*[(k, 'pt_arr_uint16') for k in ('unsigned int[]', 'unsigned short[]')],
|
||||
*[(k, 'pt_arr_uint32') for k in ('unsigned long[]',)],
|
||||
|
||||
*[(k, 'pt_float') for k in ('float', 'float32')],
|
||||
|
||||
('struct[]', 'pt_arr_struct'),
|
||||
('union[]', 'pt_arr_union'),
|
||||
])
|
||||
|
||||
def map_type_to_pt(typename, varname=None, typedef_map=None):
|
||||
typename_orig = typename.strip()
|
||||
|
||||
# Убираем const и volatile (чтобы не мешали проверке)
|
||||
for qualifier in ('const', 'volatile'):
|
||||
typename_orig = typename_orig.replace(qualifier, '')
|
||||
typename_orig = typename_orig.strip()
|
||||
|
||||
# Проверка наличия массива [] или указателя *
|
||||
is_array = bool(re.search(r'\[.*\]', typename_orig))
|
||||
is_ptr = '*' in typename_orig
|
||||
|
||||
# Убираем все [] и * для получения базового типа
|
||||
typename_base = re.sub(r'\[.*?\]', '', typename_orig).replace('*', '').strip()
|
||||
typedef_maybe = typename_base
|
||||
if typename_base.startswith('struct'):
|
||||
typename_base = 'struct'
|
||||
if typename_base.startswith('union'):
|
||||
typename_base = 'union'
|
||||
|
||||
# Добавляем [] или * к базовому типу для поиска
|
||||
if is_array:
|
||||
typename_base = typename_base + '[]'
|
||||
elif is_ptr:
|
||||
typename_base = typename_base + '*'
|
||||
else:
|
||||
typename_base = typename_base
|
||||
|
||||
if typename_base in type_map:
|
||||
return type_map[typename_base]
|
||||
|
||||
|
||||
if '_iq' in typename_base and '_iqx' in type_map:
|
||||
return type_map['_iqx']
|
||||
|
||||
# Если есть typedef_map — пробуем по нему
|
||||
if typedef_map and typedef_maybe in typedef_map:
|
||||
resolved = typedef_map[typedef_maybe].strip()
|
||||
|
||||
# Убираем const и volatile
|
||||
for qualifier in ('const', 'volatile'):
|
||||
resolved = resolved.replace(qualifier, '')
|
||||
resolved = resolved.strip()
|
||||
|
||||
# Получаем базовый тип из typedef-а
|
||||
base_t = re.sub(r'\[.*?\]', '', resolved).replace('*', '').strip()
|
||||
|
||||
if base_t.startswith('struct'):
|
||||
base_t = 'struct'
|
||||
if base_t.startswith('union'):
|
||||
base_t = 'union'
|
||||
|
||||
if is_array:
|
||||
base_t += '[]'
|
||||
elif is_ptr:
|
||||
base_t += '*'
|
||||
|
||||
# Пробуем по базовому имени
|
||||
if base_t in type_map:
|
||||
return type_map[base_t]
|
||||
if '_iq' in base_t and '_iqx' in type_map:
|
||||
return type_map['_iqx']
|
||||
|
||||
|
||||
return 'pt_unknown'
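# Illustrative examples of the mapping above (no typedef_map passed):
#   map_type_to_pt("unsigned int")    -> 'pt_uint16'
#   map_type_to_pt("char[8]")         -> 'pt_arr_int8'
#   map_type_to_pt("unsigned long *") -> 'pt_ptr_uint32'
#   map_type_to_pt("_iq24")           -> 'pt_int32'   (via the '_iqx' fallback)
#   map_type_to_pt("MyCustomType")    -> 'pt_unknown'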
|
||||
|
||||
|
||||
|
||||
def get_iq_define(vtype):
|
||||
# Убираем все скобки массива, например: _iq[5] → _iq
|
||||
vtype = re.sub(r'\[.*?\]', '', vtype).strip()
|
||||
|
||||
if '_iq' in vtype:
|
||||
# Преобразуем _iqXX в t_iqXX
|
||||
return 't' + vtype[vtype.index('_iq'):]
|
||||
else:
|
||||
return 't_iq_none'
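# Illustrative examples: get_iq_define("_iq24[4]") -> 't_iq24', get_iq_define("float") -> 't_iq_none'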
|
||||
|
||||
def add_new_vars_to_xml(proj_path, xml_rel_path, output_path):
|
||||
"""
|
||||
new_vars — dict: ключ = имя переменной, значение = словарь с info (type, file, extern, static, enable, show_var и т.п.)
|
||||
|
||||
Если переменной нет в XML (в <variables>), добавляем её и сохраняем XML-файл.
|
||||
|
||||
Возвращает True если что-то добавлено и XML перезаписан, иначе False.
|
||||
"""
|
||||
|
||||
# Считываем существующие переменные
|
||||
parsed_vars = {}
|
||||
if os.path.isfile(output_path):
|
||||
with open(output_path, 'r', encoding='utf-8', errors='ignore') as f:
|
||||
for line in f:
|
||||
# {(char *)&some.deep.var.name , pt_uint16 , t_iq15 , "ShortName"},
|
||||
m = re.match(
|
||||
r'{\s*\(char\s*\*\)\s*&([A-Za-z_][\w.]*)\s*,\s*(pt_\w+)\s*,\s*(\w+)\s*,\s*"([^"]+)"',
|
||||
line)
|
||||
if m:
|
||||
full_varname = m.group(1) # e.g., some.deep.var.name
|
||||
pt_type = m.group(2)
|
||||
iq_type = m.group(3)
|
||||
shortname = m.group(4)
|
||||
|
||||
parsed_vars[full_varname] = {
|
||||
'pt_type': pt_type,
|
||||
'iq_type': iq_type,
|
||||
'enable': True,
|
||||
'show_var': True,
|
||||
'shortname': shortname,
|
||||
'return_type': 'int',
|
||||
'type': '', # Можешь дополнить из externs
|
||||
'file': '', # Можешь дополнить из externs
|
||||
'extern': False,
|
||||
'static': False,
|
||||
'name': full_varname # Сохраняем исходное имя переменной
|
||||
}
|
||||
|
||||
if not parsed_vars:
|
||||
print("[INFO] Не удалось найти ни одной переменной в debug_vars.c")
|
||||
return False
|
||||
|
||||
xml_full_path = os.path.join(proj_path, xml_rel_path)
|
||||
xml_full_path = os.path.normpath(xml_full_path)
|
||||
|
||||
tree = ET.parse(xml_full_path)
|
||||
root = tree.getroot()
|
||||
|
||||
vars_section = root.find("variables")
|
||||
if vars_section is None:
|
||||
vars_section = ET.SubElement(root, "variables")
|
||||
|
||||
existing_var_names = {v.attrib['name'] for v in vars_section.findall("var")}
|
||||
added_count = 0
|
||||
|
||||
# 3. Добавляем переменные, которых нет в XML
|
||||
for name, info in parsed_vars.items():
|
||||
if name in existing_var_names:
|
||||
# Уже есть — обновляем enable, если нужно
|
||||
existing_elem = vars_section.find(f"./var[@name='{name}']")
|
||||
if existing_elem is not None:
|
||||
manual_elem = existing_elem.find("manual")
|
||||
if manual_elem is not None and manual_elem.text == "true":
show_elem = existing_elem.find("show_var")
if show_elem is None:
show_elem = ET.SubElement(existing_elem, "show_var")
show_elem.text = "true"
|
||||
enable_elem = existing_elem.find("enable")
|
||||
if enable_elem is None:
|
||||
enable_elem = ET.SubElement(existing_elem, "enable")
|
||||
enable_elem.text = "true"
|
||||
added_count += 1
|
||||
continue
|
||||
var_elem = ET.SubElement(vars_section, "var", {"name": name})
|
||||
manual = ET.SubElement(var_elem, 'manual')
|
||||
manual.text = 'true'
|
||||
for key, val in info.items():
|
||||
elem = ET.SubElement(var_elem, key)
|
||||
if isinstance(val, bool):
|
||||
elem.text = "true" if val else "false"
|
||||
else:
|
||||
elem.text = str(val)
|
||||
added_count += 1
|
||||
|
||||
if added_count > 0:
|
||||
ET.indent(tree, space=" ", level=0)
|
||||
tree.write(xml_full_path, encoding="utf-8", xml_declaration=True)
|
||||
print(f"[INFO] В XML добавлено новых переменных: {added_count}")
|
||||
return True
|
||||
else:
|
||||
print("[INFO] Все переменные уже есть в XML.")
|
||||
return False
|
||||
|
||||
|
||||
def read_vars_from_xml(proj_path, xml_rel_path):
|
||||
xml_full_path = os.path.join(proj_path, xml_rel_path)
|
||||
xml_full_path = os.path.normpath(xml_full_path)
|
||||
|
||||
tree = ET.parse(xml_full_path)
|
||||
root = tree.getroot()
|
||||
|
||||
vars_section = root.find("variables")
|
||||
includes_section = root.find("includes")
|
||||
externs_section = root.find("externs")
|
||||
|
||||
unique_vars = {}
|
||||
vars_need_extern = {}
|
||||
|
||||
# Читаем переменные из <variables>
|
||||
for var in vars_section.findall("var"):
|
||||
name = var.attrib["name"]
|
||||
var_info = {}
|
||||
|
||||
# Обрабатываем дочерние элементы (type, file, extern, static и т.п.)
|
||||
for child in var:
|
||||
text = child.text.strip() if child.text else ""
|
||||
# Конвертируем "true"/"false" в bool для extern и static
|
||||
if child.tag in ("extern", "static"):
|
||||
var_info[child.tag] = (text.lower() == "true")
|
||||
else:
|
||||
var_info[child.tag] = text
|
||||
if child.tag == "enable":
|
||||
var_info["enable"] = (text.lower() == "true")
|
||||
|
||||
# Обрабатываем путь к файлу (если есть)
|
||||
if "file" in var_info:
|
||||
file_rel = var_info["file"]
|
||||
file_full = os.path.normpath(os.path.join(proj_path, file_rel))
|
||||
var_info["file"] = file_full
|
||||
|
||||
unique_vars[name] = var_info
|
||||
|
||||
# Читаем include-файлы (относительные) и преобразуем в полные пути
|
||||
include_files = []
|
||||
for node in includes_section.findall("file"):
|
||||
rel_path = node.text
|
||||
full_path = os.path.normpath(os.path.join(proj_path, rel_path))
|
||||
include_files.append(full_path)
|
||||
|
||||
# Читаем extern переменные из <externs>
|
||||
for var in externs_section.findall("var"):
|
||||
name = var.attrib["name"]
|
||||
type_ = var.find("type").text
|
||||
file_rel = var.find("file").text
|
||||
file_full = os.path.normpath(os.path.join(proj_path, file_rel))
|
||||
vars_need_extern[name] = {
|
||||
"type": type_,
|
||||
"file": file_full
|
||||
}
|
||||
|
||||
return unique_vars, include_files, vars_need_extern
|
||||
|
||||
|
||||
|
||||
|
||||
def generate_vars_file(proj_path, xml_path, output_dir):
|
||||
output_dir = os.path.join(proj_path, output_dir)
|
||||
os.makedirs(output_dir, exist_ok=True)
|
||||
output_path = os.path.join(output_dir, 'debug_vars.c')
|
||||
|
||||
|
||||
# Write newly found variables back into the XML
|
||||
add_new_vars_to_xml(proj_path, xml_path, output_path)
|
||||
# Генерируем новые переменные
|
||||
vars, includes, externs = read_vars_from_xml(proj_path, xml_path)
|
||||
|
||||
# Сортируем новые переменные по алфавиту по имени
|
||||
sorted_new_debug_vars = dict(sorted(vars.items()))
|
||||
|
||||
new_debug_vars = {}
|
||||
|
||||
def is_true(val):
|
||||
# Преобразуем значение к строке, если оно не None
|
||||
# и сравниваем с 'true' в нижнем регистре
|
||||
return str(val).lower() == 'true'
|
||||
|
||||
for vname, info in vars.items():
|
||||
# Проверяем, что show_var и enable включены (строки как строки 'true')
|
||||
if not is_true(info.get('show_var', 'false')):
|
||||
continue
|
||||
if not is_true(info.get('enable', 'false')):
|
||||
continue
|
||||
|
||||
vtype = info["type"]
|
||||
is_extern = info["extern"]
|
||||
is_static = info.get("static", False)
|
||||
if is_static:
|
||||
continue # пропускаем static переменные
|
||||
|
||||
path = info["file"]
|
||||
|
||||
iq_type = info.get('iq_type')
|
||||
if not iq_type:
|
||||
iq_type = get_iq_define(vtype)
|
||||
|
||||
pt_type = info.get('pt_type')
|
||||
if not pt_type:
|
||||
pt_type = map_type_to_pt(vtype, vname)
|
||||
|
||||
# Дополнительные поля, например комментарий
|
||||
comment = info.get("comment", "")
|
||||
|
||||
if pt_type not in ('pt_struct', 'pt_union'):
|
||||
formated_name = f'"{vname}"'
|
||||
# Добавим комментарий после записи, если он есть
|
||||
comment_str = f' // {comment}' if comment else ''
|
||||
line = f'{{(char *)&{vname:<41} , {pt_type:<21} , {iq_type:<21} , {formated_name:<42}}}, \\{comment_str}'
|
||||
new_debug_vars[vname] = line
|
||||
|
||||
else:
|
||||
continue
|
||||
# Если тип переменной — структура, добавляем поля
|
||||
base_type = vtype.split()[0]
|
||||
# Удаляем символы указателей '*' и всю квадратную скобку с содержимым (например [10])
|
||||
base_type = re.sub(r'\*|\[[^\]]*\]', '', base_type).strip()
|
||||
if base_type in all_structs:
|
||||
add_struct_fields(new_debug_vars, vname, base_type, all_structs, existing_debug_vars)
|
||||
|
||||
|
||||
# Объединяем все переменные
|
||||
all_debug_lines = new_debug_vars.values()
|
||||
|
||||
out_lines = []
|
||||
out_lines.append("// Этот файл сгенерирован автоматически")
|
||||
out_lines.append(f'#include "debug_tools.h"')
|
||||
|
||||
out_lines.append('\n\n// Инклюды для доступа к переменным')
|
||||
for incf in includes:
|
||||
filename = os.path.basename(incf)
|
||||
out_lines.append(f'#include "{filename}"')
|
||||
|
||||
|
||||
out_lines.append('\n\n// Экстерны для доступа к переменным')
|
||||
for vname, info in externs.items():
|
||||
vtype = info["type"].strip()
|
||||
|
||||
is_static = info.get("static", False) # <-- добавлено
|
||||
if is_static:
|
||||
continue # пропускаем static переменные
|
||||
|
||||
# Попытка выделить размер массива из типа, например int[20]
|
||||
array_match = re.match(r'^(.*?)(\s*\[.*\])$', vtype)
|
||||
if array_match:
|
||||
base_type = array_match.group(1).strip()
|
||||
array_size = array_match.group(2).strip()
|
||||
out_lines.append(f'extern {base_type} {vname}{array_size};')
|
||||
else:
|
||||
# Если не массив — обычный extern
|
||||
out_lines.append(f'extern {vtype} {vname};')
|
||||
|
||||
out_lines.append(f'\n\n// Определение массива с указателями на переменные для отладки')
|
||||
out_lines.append(f'int DebugVar_Qnt = {len(all_debug_lines)};')
|
||||
out_lines.append('#pragma DATA_SECTION(dbg_vars,".dbgvar_info")')
|
||||
out_lines.append('DebugVar_t dbg_vars[] = {\\')
|
||||
out_lines.extend(all_debug_lines)
|
||||
out_lines.append('};')
|
||||
out_lines.append('')
|
||||
# Encoding used when writing the generated file
enc_to_write = 'cp1251'
|
||||
|
||||
#print("== GLOBAL VARS FOUND ==")
|
||||
#for vname, (vtype, path) in vars_in_c.items():
|
||||
#print(f"{vtype:<20} {vname:<40} // {path}")
|
||||
|
||||
|
||||
with open(output_path, 'w', encoding=enc_to_write) as f:
|
||||
f.write('\n'.join(out_lines))
|
||||
|
||||
print(f'File debug_vars.c generated with encoding {enc_to_write}, variables: {len(all_debug_lines)}')
|
||||
|
||||
|
||||
#generate_vars_file("E:/.WORK/TMS/TMS_new_bus/", "Src/DebugTools/vars.xml", "E:/.WORK/TMS/TMS_new_bus/Src/DebugTools/")
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Generate debug_vars.c from project XML and output directory.",
|
||||
epilog="""\
|
||||
Usage example:
|
||||
%(prog)s /absolute/path/to/project /absolute/path/to/project/Src/DebugTools/vars.xml /absolute/path/to/project/Src/DebugTools/
|
||||
""",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
add_help=False
|
||||
)
|
||||
parser.add_argument("proj_path", help="Absolute path to the project root directory")
|
||||
parser.add_argument("xml_path", help="Absolute path to the XML file (must be inside project)")
|
||||
parser.add_argument("output_dir", help="Absolute path to output directory (must be inside project)")
|
||||
parser.add_argument("-h", "--help", action="store_true", help="Show this help message and exit")
|
||||
|
||||
# Show help if requested
|
||||
if "-h" in sys.argv or "--help" in sys.argv:
|
||||
parser.print_help()
|
||||
sys.exit(0)
|
||||
|
||||
# Check minimum args count
|
||||
if len(sys.argv) < 4:
|
||||
print("Error: insufficient arguments.\n")
|
||||
print("Usage example:")
|
||||
print(f" {os.path.basename(sys.argv[0])} /absolute/path/to/project /absolute/path/to/project/Src/DebugTools/vars.xml /absolute/path/to/project/Src/DebugTools/\n")
|
||||
sys.exit(1)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Normalize absolute paths
|
||||
proj_path = os.path.abspath(args.proj_path)
|
||||
xml_path_abs = os.path.abspath(args.xml_path)
|
||||
output_dir_abs = os.path.abspath(args.output_dir)
|
||||
|
||||
# Check proj_path is directory
|
||||
if not os.path.isdir(proj_path):
|
||||
print(f"Error: Project path '{proj_path}' is not a directory or does not exist.")
|
||||
sys.exit(1)
|
||||
# Check xml_path inside proj_path
|
||||
if not xml_path_abs.startswith(proj_path + os.sep):
|
||||
print(f"Error: XML path '{xml_path_abs}' is not inside the project path '{proj_path}'.")
|
||||
sys.exit(1)
|
||||
# Check output_dir inside proj_path
|
||||
if not output_dir_abs.startswith(proj_path + os.sep):
|
||||
print(f"Error: Output directory '{output_dir_abs}' is not inside the project path '{proj_path}'.")
|
||||
sys.exit(1)
|
||||
|
||||
# Convert xml_path and output_dir to relative paths *relative to proj_path*
|
||||
xml_path_rel = os.path.relpath(xml_path_abs, proj_path)
|
||||
output_dir_rel = os.path.relpath(output_dir_abs, proj_path)
|
||||
|
||||
if not os.path.isdir(proj_path):
|
||||
print(f"Error: Project path '{proj_path}' не является директорией или не существует.")
|
||||
sys.exit(1)
|
||||
|
||||
generate_vars_file(proj_path, xml_path_rel, output_dir_rel)
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
def run_generate(proj_path, xml_path, output_dir):
|
||||
import os
|
||||
|
||||
# Normalize absolute paths
|
||||
proj_path = os.path.abspath(proj_path)
|
||||
xml_path_abs = os.path.abspath(xml_path)
|
||||
output_dir_abs = os.path.abspath(output_dir)
|
||||
|
||||
# Проверка валидности путей
|
||||
if not os.path.isdir(proj_path):
|
||||
raise FileNotFoundError(f"Project path '{proj_path}' is not a directory or does not exist.")
|
||||
|
||||
if not xml_path_abs.startswith(proj_path + os.sep):
|
||||
raise ValueError(f"XML path '{xml_path_abs}' is not inside the project path '{proj_path}'.")
|
||||
|
||||
if not output_dir_abs.startswith(proj_path + os.sep):
|
||||
raise ValueError(f"Output directory '{output_dir_abs}' is not inside the project path '{proj_path}'.")
|
||||
|
||||
# Преобразуем к относительным путям относительно проекта
|
||||
xml_path_rel = os.path.relpath(xml_path_abs, proj_path)
|
||||
output_dir_rel = os.path.relpath(output_dir_abs, proj_path)
|
||||
|
||||
# Запускаем генерацию
|
||||
generate_vars_file(proj_path, xml_path_rel, output_dir_rel)
|
||||
Src/parseMakefile.py — new file, 143 lines
@@ -0,0 +1,143 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
def strip_single_line_comments(code):
|
||||
# Удалим // ... до конца строки
|
||||
return re.sub(r'//.*?$', '', code, flags=re.MULTILINE)
|
||||
|
||||
def read_file_try_encodings(filepath):
|
||||
for enc in ['utf-8', 'cp1251']:
|
||||
try:
|
||||
with open(filepath, 'r', encoding=enc) as f:
|
||||
content = f.read()
|
||||
content = strip_single_line_comments(content) # <=== ВАЖНО
|
||||
return content, enc
|
||||
except UnicodeDecodeError:
|
||||
continue
|
||||
raise UnicodeError(f"Failed to read file {filepath} with utf-8 or cp1251 encoding")
|
||||
|
||||
def find_all_includes_recursive(c_files, include_dirs, processed_files=None):
|
||||
"""
|
||||
Рекурсивно ищет все include-файлы начиная с заданных c_files.
|
||||
Возвращает множество ПОЛНЫХ ПУТЕЙ к найденным include-файлам.
|
||||
|
||||
include_dirs — список директорий, в которых ищем include-файлы.
|
||||
processed_files — множество уже обработанных файлов (для избежания циклов).
|
||||
"""
|
||||
if processed_files is None:
|
||||
processed_files = set()
|
||||
|
||||
include_files = set()
|
||||
include_pattern = re.compile(r'#include\s+"([^"]+)"')
|
||||
|
||||
for cfile in c_files:
|
||||
norm_path = os.path.normpath(cfile)
|
||||
if norm_path in processed_files:
|
||||
continue
|
||||
processed_files.add(norm_path)
|
||||
|
||||
content, _ = read_file_try_encodings(cfile)
|
||||
includes = include_pattern.findall(content)
|
||||
for inc in includes:
|
||||
# Ищем полный путь к include-файлу в include_dirs
|
||||
inc_full_path = None
|
||||
for dir_ in include_dirs:
|
||||
candidate = os.path.normpath(os.path.join(dir_, inc))
|
||||
if os.path.isfile(candidate):
|
||||
inc_full_path = os.path.abspath(candidate)
|
||||
break
|
||||
|
||||
if inc_full_path:
|
||||
include_files.add(inc_full_path)
|
||||
|
||||
# Рекурсивный обход вложенных includes
|
||||
if inc_full_path not in processed_files:
|
||||
nested_includes = find_all_includes_recursive(
|
||||
[inc_full_path], include_dirs, processed_files
|
||||
)
|
||||
include_files.update(nested_includes)
|
||||
|
||||
return include_files
|
||||
|
||||
|
||||
def parse_makefile(makefile_path):
|
||||
makefile_dir = os.path.dirname(makefile_path)
|
||||
project_root = os.path.dirname(makefile_dir) # поднялись из Debug
|
||||
|
||||
with open(makefile_path, 'r', encoding='utf-8') as f:
|
||||
lines = f.readlines()
|
||||
|
||||
objs_lines = []
|
||||
collecting = False
|
||||
|
||||
for line in lines:
|
||||
stripped = line.strip()
|
||||
if stripped.startswith("ORDERED_OBJS") and "+=" in stripped:
|
||||
parts = stripped.split("\\")
|
||||
first_part = parts[0]
|
||||
idx = first_part.find("+=")
|
||||
tail = first_part[idx+2:].strip()
|
||||
if tail:
|
||||
objs_lines.append(tail)
|
||||
collecting = True
|
||||
if len(parts) > 1:
|
||||
for p in parts[1:]:
|
||||
p = p.strip()
|
||||
if p:
|
||||
objs_lines.append(p)
|
||||
continue
|
||||
|
||||
if collecting:
|
||||
if stripped.endswith("\\"):
|
||||
objs_lines.append(stripped[:-1].strip())
|
||||
else:
|
||||
objs_lines.append(stripped)
|
||||
collecting = False
|
||||
|
||||
objs_str = ' '.join(objs_lines)
|
||||
|
||||
objs_str = re.sub(r"\$\([^)]+\)", "", objs_str)
|
||||
|
||||
objs = []
|
||||
for part in objs_str.split():
|
||||
part = part.strip()
|
||||
if part.startswith('"') and part.endswith('"'):
|
||||
part = part[1:-1]
|
||||
if part:
|
||||
objs.append(part)
|
||||
|
||||
c_files = []
|
||||
include_dirs = set()
|
||||
|
||||
for obj_path in objs:
|
||||
if "DebugTools" in obj_path:
|
||||
continue
|
||||
if "v120" in obj_path:
|
||||
continue
|
||||
|
||||
if obj_path.startswith("Debug\\") or obj_path.startswith("Debug/"):
|
||||
rel_path = obj_path.replace("Debug\\", "Src\\").replace("Debug/", "Src/")
|
||||
else:
|
||||
rel_path = obj_path
|
||||
|
||||
abs_path = os.path.normpath(os.path.join(project_root, rel_path))
|
||||
|
||||
root, ext = os.path.splitext(abs_path)
|
||||
if ext.lower() == ".obj":
|
||||
c_path = root + ".c"
|
||||
else:
|
||||
c_path = abs_path
|
||||
|
||||
# Сохраняем только .c файлы
|
||||
if c_path.lower().endswith(".c"):
|
||||
c_files.append(c_path)
|
||||
dir_path = os.path.dirname(c_path)
|
||||
if dir_path and "DebugTools" not in dir_path:
|
||||
include_dirs.add(dir_path)
|
||||
|
||||
|
||||
h_files = find_all_includes_recursive(c_files, include_dirs)
|
||||
|
||||
|
||||
return sorted(c_files), sorted(h_files), sorted(include_dirs)
|
||||
Src/scanVars.py — new file, 874 lines
@@ -0,0 +1,874 @@
|
||||
# build command
|
||||
# pyinstaller --onefile scanVars.py --add-binary "F:\Work\Projects\TMS\TMS_new_bus\Src\DebugTools/build/libclang.dll;." --distpath . --workpath ./build --specpath ./build
|
||||
# start script
|
||||
# scanVars.exe F:\Work\Projects\TMS\TMS_new_bus\ F:\Work\Projects\TMS\TMS_new_bus\Debug\makefile
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import clang.cindex
|
||||
from clang import cindex
|
||||
import xml.etree.ElementTree as ET
|
||||
from xml.dom import minidom
|
||||
from parseMakefile import parse_makefile
|
||||
from collections import deque
|
||||
import argparse
|
||||
BITFIELD_WIDTHS = set(range(1, 33))  # allowed bitfield widths 1..32
|
||||
|
||||
|
||||
# Укажи полный путь к libclang.dll — поменяй на свой путь или оставь относительный
|
||||
dll_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "build/libclang.dll")
|
||||
|
||||
if hasattr(sys, '_MEIPASS'):
|
||||
dll_path = os.path.join(sys._MEIPASS, "libclang.dll")
|
||||
cindex.Config.set_library_file(dll_path)
|
||||
else:
|
||||
cindex.Config.set_library_file(r"build\libclang.dll") # путь для запуска без упаковки
|
||||
|
||||
index = cindex.Index.create()
|
||||
PRINT_LEVEL = 2
|
||||
|
||||
PRINT_ERROR = 1 # 0 = ничего, 1 = ошибки, 2 = статус, 3 = отладка
|
||||
PRINT_STATUS = 2 # 0 = ничего, 1 = ошибки, 2 = статус, 3 = отладка
|
||||
PRINT_DEBUG = 3 # 0 = ничего, 1 = ошибки, 2 = статус, 3 = отладка
|
||||
|
||||
def optional_printf(level, msg):
|
||||
"""
|
||||
Prints the message if the given level is less than or equal to the current PRINT_LEVEL.
:param level: int — message importance (1 = errors, 2 = status, 3 = debug)
:param msg: str — message text
|
||||
"""
|
||||
if level <= PRINT_LEVEL:
|
||||
print(msg)
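# Illustrative example: with PRINT_LEVEL = 2, optional_printf(PRINT_STATUS, "...") is printed,
# while optional_printf(PRINT_DEBUG, "...") is suppressed.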
|
||||
|
||||
def get_canonical_typedef_file(var_type, include_dirs):
|
||||
"""
|
||||
Рекурсивно спускаемся к базовому типу, чтобы найти typedef-декларацию,
|
||||
возвращаем файл заголовка, если он есть и в include_dirs.
|
||||
"""
|
||||
# unwrap array, pointer, typedef, etc, пока не дойдём до базового типа
|
||||
t = var_type
|
||||
while True:
|
||||
# если массив, достаём тип элементов
|
||||
if t.kind == clang.cindex.TypeKind.CONSTANTARRAY or t.kind == clang.cindex.TypeKind.INCOMPLETEARRAY:
|
||||
t = t.element_type
|
||||
continue
|
||||
# если указатель — достаём тип, на который он указывает
|
||||
if t.kind == clang.cindex.TypeKind.POINTER:
|
||||
t = t.get_pointee()
|
||||
continue
|
||||
# если typedef — unwrap до underlying type
|
||||
if t.get_declaration().kind == clang.cindex.CursorKind.TYPEDEF_DECL:
|
||||
typedef_decl = t.get_declaration()
|
||||
if typedef_decl and typedef_decl.location and typedef_decl.location.file:
|
||||
typedef_header = str(typedef_decl.location.file)
|
||||
# Проверяем, внутри ли include_dirs
|
||||
path_abs = os.path.abspath(typedef_header)
|
||||
for inc in include_dirs:
|
||||
try:
|
||||
inc_abs = os.path.abspath(inc)
|
||||
if os.path.commonpath([path_abs, inc_abs]) == inc_abs and typedef_header.endswith('.h'):
|
||||
return os.path.normpath(typedef_header)
|
||||
except ValueError:
|
||||
continue
|
||||
# Если не нашли, пытаемся получить underlying type дальше
|
||||
t = t.get_canonical() # underlying type без typedef-ов
|
||||
continue
|
||||
# Если дошли до типа без typedef-а — возвращаем None
|
||||
break
|
||||
return None
|
||||
|
||||
def analyze_variables_across_files(c_files, h_files, include_dirs):
|
||||
optional_printf(PRINT_STATUS, "Starting analysis of variables across files...")
|
||||
index = clang.cindex.Index.create()
|
||||
args = [f"-I{inc}" for inc in include_dirs]
|
||||
|
||||
unique_vars = {} # имя переменной → словарь с инфой
|
||||
h_files_needed = set()
|
||||
vars_need_extern = {} # имя переменной → словарь без поля 'extern'
|
||||
|
||||
def is_inside_includes(path, include_dirs):
|
||||
path_abs = os.path.abspath(path)
|
||||
for inc in include_dirs:
|
||||
try:
|
||||
inc_abs = os.path.abspath(inc)
|
||||
if os.path.commonpath([path_abs, inc_abs]) == inc_abs:
|
||||
return True
|
||||
except ValueError:
|
||||
continue
|
||||
return False
|
||||
|
||||
|
||||
def parse_file(file_path):
|
||||
optional_printf(PRINT_DEBUG, f"\tParsing file: {file_path}")
|
||||
try:
|
||||
tu = index.parse(file_path, args=args)
|
||||
except Exception as e:
|
||||
optional_printf(PRINT_ERROR, f"\t\tFailed to parse {file_path}: {e}")
|
||||
return []
|
||||
|
||||
vars_in_file = []
|
||||
|
||||
def visit(node):
|
||||
def is_system_var(var_name: str) -> bool:
|
||||
# Проверяем, начинается ли имя с "_" и содержит заглавные буквы или служебные символы
|
||||
return bool(re.match(r"^_[_A-Z]", var_name))
|
||||
|
||||
if node.kind == clang.cindex.CursorKind.VAR_DECL:
|
||||
if node.semantic_parent.kind == clang.cindex.CursorKind.TRANSLATION_UNIT:
|
||||
is_extern = (node.storage_class == clang.cindex.StorageClass.EXTERN)
|
||||
is_static = (node.storage_class == clang.cindex.StorageClass.STATIC)
|
||||
var_type = node.type.spelling
|
||||
|
||||
# Проверка, есть ли определение
|
||||
definition = node.get_definition()
|
||||
if is_extern and definition is None:
|
||||
# Переменная extern без определения — игнорируем
|
||||
return
|
||||
|
||||
if is_system_var(node.spelling):
|
||||
return # игнорируем только явно известные служебные переменные
|
||||
if node.spelling == 'HUGE': # another runtime/service variable that is hard to detect generically
|
||||
return
|
||||
|
||||
# Проверяем, является ли тип указателем на функцию
|
||||
# Признак: в типе есть '(' и ')' и '*', например: "void (*)(int)"
|
||||
if "(" in var_type and "*" in var_type and ")" in var_type:
|
||||
# Пропускаем указатели на функции
|
||||
return
|
||||
|
||||
vars_in_file.append({
|
||||
"name": node.spelling,
|
||||
"type": var_type,
|
||||
"extern": is_extern,
|
||||
"static": is_static,
|
||||
"file": file_path
|
||||
})
|
||||
|
||||
# Если переменная extern и находится в .h — добавляем в includes
|
||||
if is_extern and file_path.endswith('.h') and is_inside_includes(file_path, include_dirs):
|
||||
h_files_needed.add(os.path.normpath(file_path))
|
||||
|
||||
# Добавляем файл с typedef, если есть
|
||||
typedef_header = get_canonical_typedef_file(node.type, include_dirs)
|
||||
if typedef_header:
|
||||
h_files_needed.add(typedef_header)
|
||||
|
||||
|
||||
|
||||
for child in node.get_children():
|
||||
visit(child)
|
||||
|
||||
visit(tu.cursor)
|
||||
return vars_in_file
|
||||
|
||||
optional_printf(PRINT_STATUS, "Parsing header files (.h)...")
|
||||
for h in h_files:
|
||||
vars_in_h = parse_file(h)
|
||||
for v in vars_in_h:
|
||||
name = v["name"]
|
||||
if name not in unique_vars:
|
||||
unique_vars[name] = {
|
||||
"type": v["type"],
|
||||
"extern": v["extern"],
|
||||
"static": v["static"],
|
||||
"file": v["file"]
|
||||
}
|
||||
|
||||
optional_printf(PRINT_STATUS, "Parsing source files (.c)...")
|
||||
for c in c_files:
|
||||
vars_in_c = parse_file(c)
|
||||
for v in vars_in_c:
|
||||
name = v["name"]
|
||||
if name in unique_vars:
|
||||
unique_vars[name].update({
|
||||
"type": v["type"],
|
||||
"extern": v["extern"],
|
||||
"static": v["static"],
|
||||
"file": v["file"]
|
||||
})
|
||||
else:
|
||||
unique_vars[name] = {
|
||||
"type": v["type"],
|
||||
"extern": v["extern"],
|
||||
"static": v["static"],
|
||||
"file": v["file"]
|
||||
}
|
||||
|
||||
optional_printf(PRINT_STATUS, "Checking which variables need explicit extern declaration...")
|
||||
for name, info in unique_vars.items():
|
||||
if not info["extern"] and not info["static"] and info["file"].endswith('.c'):
|
||||
extern_declared = False
|
||||
for h in h_files_needed:
|
||||
if h in unique_vars and unique_vars[h]["name"] == name and unique_vars[h]["extern"]:
|
||||
extern_declared = True
|
||||
break
|
||||
if not extern_declared:
|
||||
vars_need_extern[name] = {
|
||||
"type": info["type"],
|
||||
"file": info["file"]
|
||||
}
|
||||
|
||||
optional_printf(PRINT_STATUS, "Analysis complete.")
|
||||
optional_printf(PRINT_STATUS, f"\tTotal unique variables found: {len(unique_vars)}")
|
||||
optional_printf(PRINT_STATUS, f"\tHeader files with extern variables and declarations: {len(h_files_needed)}")
|
||||
optional_printf(PRINT_STATUS, f"\tVariables that need explicit extern declaration: {len(vars_need_extern)}\n")
|
||||
|
||||
return unique_vars, list(h_files_needed), vars_need_extern
|
||||
|
||||
|
||||
def resolve_typedef(typedefs, typename):
|
||||
"""
|
||||
Рекурсивно раскрывает typedef, пока не дойдёт до "примитивного" типа.
|
||||
Если typename нет в typedefs — возвращаем typename как есть.
|
||||
"""
|
||||
seen = set()
|
||||
current = typename
|
||||
while current in typedefs and current not in seen:
|
||||
seen.add(current)
|
||||
current = typedefs[current]
|
||||
return current
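# Illustrative example: with typedefs = {'Uint16': 'unsigned int'},
# resolve_typedef(typedefs, 'Uint16') -> 'unsigned int'; unknown names are returned unchanged.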
|
||||
|
||||
|
||||
def strip_ptr_and_array(typename):
|
||||
"""
|
||||
Убирает указатели и массивные скобки из типа,
|
||||
чтобы найти базовый тип (например, для typedef или struct).
|
||||
"""
|
||||
if not isinstance(typename, str):
|
||||
return typename
|
||||
|
||||
# Убираем [] и всё, что внутри скобок
|
||||
typename = re.sub(r'\[.*?\]', '', typename)
|
||||
|
||||
# Убираем звёздочки и пробелы рядом
|
||||
typename = typename.replace('*', '').strip()
|
||||
|
||||
return typename
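# Illustrative example: strip_ptr_and_array("T_data *[4]") -> 'T_data'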
|
||||
|
||||
def analyze_typedefs_and_struct(typedefs, structs):
|
||||
optional_printf(PRINT_STATUS, "Resolving typedefs and expanding struct field types...")
|
||||
|
||||
def simplify_type_name(typename):
|
||||
# Убираем ключевые слова типа "struct ", "union ", "enum " для поиска в typedefs и structs
|
||||
if isinstance(typename, str):
|
||||
for prefix in ("struct ", "union ", "enum "):
|
||||
if typename.startswith(prefix):
|
||||
return typename[len(prefix):]
|
||||
return typename
|
||||
|
||||
def resolve_typedef_rec(typename, depth=0):
|
||||
if depth > 50:
|
||||
optional_printf(PRINT_ERROR, f"Possible typedef recursion limit reached on '{typename}'")
|
||||
return typename
|
||||
|
||||
if not isinstance(typename, str):
|
||||
return typename
|
||||
|
||||
simple_name = typename
|
||||
|
||||
if simple_name in typedefs:
|
||||
underlying = typedefs[simple_name]
|
||||
|
||||
# Если раскрытие не меняет результат — считаем раскрытие завершённым
|
||||
if normalize_type_name(underlying) == normalize_type_name(typename):
|
||||
return underlying
|
||||
|
||||
return resolve_typedef_rec(underlying, depth + 1)
|
||||
else:
|
||||
return typename
|
||||
|
||||
|
||||
def resolve_struct_fields(fields, depth=0):
|
||||
if depth > 50:
|
||||
optional_printf(PRINT_ERROR, f"Possible struct recursion limit reached")
|
||||
return fields
|
||||
|
||||
if not isinstance(fields, dict):
|
||||
return fields
|
||||
|
||||
resolved_fields = {}
|
||||
|
||||
for fname, ftype in fields.items():
|
||||
base_type = strip_ptr_and_array(ftype)
|
||||
original_type = ftype # Сохраняем оригинальный вид типа
|
||||
|
||||
if base_type in structs:
|
||||
# Рекурсивно раскрываем вложенную структуру
|
||||
nested = resolve_struct_fields(structs[base_type], depth + 1)
|
||||
nested["__type__"] = original_type
|
||||
resolved_fields[fname] = nested
|
||||
else:
|
||||
resolved_fields[fname] = original_type # сохраняем оригинал
|
||||
|
||||
return resolved_fields
|
||||
|
||||
|
||||
""" # Сначала раскрываем typedef в именах структур и в полях
|
||||
substituted_structs = {}
|
||||
for sname, fields in structs.items():
|
||||
resolved_sname = resolve_typedef_rec(sname) # раскрываем имя структуры
|
||||
substituted_fields = {}
|
||||
for fname, ftype in fields.items():
|
||||
resolved_type = resolve_typedef_rec(ftype) # раскрываем тип поля
|
||||
substituted_fields[fname] = resolved_type
|
||||
substituted_structs[resolved_sname] = substituted_fields """
|
||||
|
||||
# Теперь раскрываем вложенные структуры
|
||||
resolved_structs = {}
|
||||
for sname, fields in structs.items():
|
||||
if "(unnamed" in sname:
|
||||
optional_printf(4, f" Skipping anonymous struct/union: {sname}")
|
||||
continue
|
||||
if sname == 'T_project':
|
||||
a = 1
|
||||
resolved_fields = resolve_struct_fields(fields)
|
||||
resolved_structs[sname] = resolved_fields
|
||||
optional_printf(PRINT_DEBUG, f"\tStruct {sname} resolved")
|
||||
|
||||
# Раскрываем typedef'ы в отдельном шаге
|
||||
resolved_typedefs = {}
|
||||
for tname in typedefs:
|
||||
resolved = resolve_typedef_rec(tname)
|
||||
resolved_typedefs[tname] = resolved
|
||||
optional_printf(4, f"\tTypedef {tname} resolved")
|
||||
|
||||
return resolved_typedefs, resolved_structs
|
||||
|
||||
def normalize_type_name(type_name: str) -> str:
|
||||
# Приводим тип к виду "union (unnamed union at ...)" или "struct (unnamed struct at ...)"
|
||||
m = re.match(r'^(union|struct) \((unnamed)( union| struct)? at .+\)$', type_name)
|
||||
if m:
|
||||
kind = m.group(1)
|
||||
unnamed = m.group(2)
|
||||
extra = m.group(3)
|
||||
if extra is None:
|
||||
type_name = f"{kind} ({unnamed} {kind} at {type_name.split(' at ')[1]}"
|
||||
return type_name
|
||||
|
||||
def contains_anywhere_in_node(node, target: str) -> bool:
|
||||
"""
|
||||
Рекурсивно ищет target во всех строковых значениях текущего узла и его потомков.
|
||||
"""
|
||||
for attr in dir(node):
|
||||
try:
|
||||
val = getattr(node, attr)
|
||||
if isinstance(val, str) and target in val:
|
||||
return True
|
||||
elif hasattr(val, 'spelling') and target in val.spelling:
|
||||
return True
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
for child in node.get_children():
|
||||
if contains_anywhere_in_node(child, target):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def analyze_typedefs_and_structs_across_files(c_files, include_dirs):
|
||||
optional_printf(PRINT_STATUS, "Starting analysis of typedefs and structs across files...")
|
||||
index = clang.cindex.Index.create()
|
||||
args = [f"-I{inc}" for inc in include_dirs]
|
||||
|
||||
unique_typedefs_raw = {}
|
||||
unique_structs_raw = {}
|
||||
|
||||
def parse_file(file_path):
|
||||
optional_printf(PRINT_DEBUG, f"\tParsing file: {file_path}")
|
||||
try:
|
||||
tu = index.parse(file_path, args=args)
|
||||
except Exception as e:
|
||||
optional_printf(PRINT_ERROR, f"\t\tFailed to parse {file_path}: {e}")
|
||||
return {}, {}
|
||||
|
||||
typedefs = {}
|
||||
structs = {}
|
||||
|
||||
def visit(node):
|
||||
if node.kind == clang.cindex.CursorKind.TYPEDEF_DECL:
|
||||
name = node.spelling
|
||||
underlying = node.underlying_typedef_type.spelling
|
||||
typedefs[name] = underlying
|
||||
|
||||
elif node.kind in (clang.cindex.CursorKind.STRUCT_DECL, clang.cindex.CursorKind.UNION_DECL):
|
||||
prefix = "struct " if node.kind == clang.cindex.CursorKind.STRUCT_DECL else "union "
|
||||
|
||||
raw_name = node.spelling
|
||||
normalized_name = normalize_type_name(raw_name)
|
||||
|
||||
# struct_name всегда с префиксом
|
||||
if node.spelling and "unnamed" not in normalized_name:
|
||||
struct_name = f"{prefix}{normalized_name}"
|
||||
else:
|
||||
struct_name = normalized_name
|
||||
|
||||
# Поиск typedef, соответствующего этой структуре
|
||||
typedef_name_for_struct = None
|
||||
for tname, underlying in typedefs.items():
|
||||
if normalize_type_name(underlying) == struct_name:
|
||||
typedef_name_for_struct = tname
|
||||
break
|
||||
|
||||
# Если нашли typedef → заменим struct_name на него
|
||||
final_name = typedef_name_for_struct if typedef_name_for_struct else struct_name
|
||||
|
||||
fields = {}
|
||||
for c in node.get_children():
|
||||
if c.kind == clang.cindex.CursorKind.FIELD_DECL:
|
||||
ftype = c.type.spelling
|
||||
bit_width = c.get_bitfield_width()
|
||||
if bit_width > 0:
|
||||
ftype += f" (bitfield:{bit_width})"
|
||||
fields[c.spelling] = ftype
|
||||
|
||||
if fields:
|
||||
structs[final_name] = fields
|
||||
|
||||
# Если это был struct с typedef, удалим старое имя (например, struct TS_project)
|
||||
if typedef_name_for_struct and struct_name in structs:
|
||||
del structs[struct_name]
|
||||
|
||||
for child in node.get_children():
|
||||
visit(child)
|
||||
|
||||
|
||||
|
||||
|
||||
visit(tu.cursor)
|
||||
return typedefs, structs
|
||||
|
||||
for c_file in c_files:
|
||||
typedefs_in_file, structs_in_file = parse_file(c_file)
|
||||
for name, underlying in typedefs_in_file.items():
|
||||
if name not in unique_typedefs_raw:
|
||||
unique_typedefs_raw[name] = underlying
|
||||
for sname, fields in structs_in_file.items():
|
||||
if sname not in unique_structs_raw:
|
||||
unique_structs_raw[sname] = fields
|
||||
|
||||
# Теперь раскроем typedef и структуры, учитывая вложения
|
||||
resolved_typedefs, resolved_structs = analyze_typedefs_and_struct(unique_typedefs_raw, unique_structs_raw)
|
||||
|
||||
optional_printf(PRINT_STATUS, "Analysis complete.")
|
||||
optional_printf(PRINT_STATUS, f"\tTotal unique typedefs found: {len(resolved_typedefs)}")
|
||||
optional_printf(PRINT_STATUS, f"\tTotal unique structs found: {len(resolved_structs)}\n")
|
||||
|
||||
return resolved_typedefs, resolved_structs
|
||||
|
||||
|
||||
def safe_parse_xml(xml_path):
|
||||
"""
|
||||
Безопасно парсит XML-файл.
|
||||
|
||||
Возвращает кортеж (root, tree) или (None, None) при ошибках.
|
||||
"""
|
||||
if not xml_path or not os.path.isfile(xml_path):
|
||||
print(f"Файл '{xml_path}' не найден или путь пустой")
|
||||
return None, None
|
||||
|
||||
try:
|
||||
if os.path.getsize(xml_path) == 0:
|
||||
return None, None
|
||||
|
||||
tree = ET.parse(xml_path)
|
||||
root = tree.getroot()
|
||||
return root, tree
|
||||
|
||||
except ET.ParseError as e:
|
||||
print(f"Ошибка парсинга XML файла '{xml_path}': {e}")
|
||||
return None, None
|
||||
except Exception as e:
|
||||
print(f"Неожиданная ошибка при чтении XML файла '{xml_path}': {e}")
|
||||
return None, None
|
||||
|
||||
def read_vars_from_xml(xml_path):
|
||||
xml_full_path = os.path.normpath(xml_path)
|
||||
vars_data = {}
|
||||
|
||||
if not os.path.exists(xml_full_path):
|
||||
return vars_data # пусто, если файла нет
|
||||
|
||||
root, tree = safe_parse_xml(xml_full_path)
|
||||
if root is None:
|
||||
return vars_data
|
||||
|
||||
vars_elem = root.find('variables')
|
||||
if vars_elem is None:
|
||||
return vars_data
|
||||
|
||||
for var_elem in vars_elem.findall('var'):
|
||||
name = var_elem.get('name')
|
||||
if not name:
|
||||
continue
|
||||
|
||||
def get_bool(tag, default='false'):
|
||||
return var_elem.findtext(tag, default).lower() == 'true'
|
||||
|
||||
vars_data[name] = {
|
||||
'show_var': get_bool('show_var'),
|
||||
'enable': get_bool('enable'),
|
||||
'shortname': var_elem.findtext('shortname', name),
|
||||
'pt_type': var_elem.findtext('pt_type', ''),
|
||||
'iq_type': var_elem.findtext('iq_type', ''),
|
||||
'return_type': var_elem.findtext('return_type', 'int'),
|
||||
'type': var_elem.findtext('type', 'unknown'),
|
||||
'file': var_elem.findtext('file', ''),
|
||||
'extern': get_bool('extern'),
|
||||
'static': get_bool('static'),
|
||||
}
|
||||
|
||||
return vars_data
|
||||
|
||||
|
||||
def generate_xml_output(proj_path, xml_path, unique_vars, h_files_needed, vars_need_extern, structs_xml_path=None, makefile_path=None):
|
||||
|
||||
xml_full_path = os.path.normpath(xml_path)
|
||||
|
||||
# Проверяем, существует ли файл, только тогда читаем из него
|
||||
existing_vars_data = {}
|
||||
if os.path.isfile(xml_full_path):
|
||||
existing_vars_data = read_vars_from_xml(xml_full_path)
|
||||
|
||||
# --- Новый блок: формируем атрибуты корневого тега ---
|
||||
analysis_attrs = {"proj_path": proj_path}
|
||||
if makefile_path:
|
||||
analysis_attrs["makefile_path"] = makefile_path
|
||||
if structs_xml_path:
|
||||
analysis_attrs["structs_path"] = structs_xml_path
|
||||
|
||||
root = ET.Element("analysis", attrib=analysis_attrs)
|
||||
|
||||
vars_elem = ET.SubElement(root, "variables")
|
||||
|
||||
# Объединяем старые и новые переменные
|
||||
combined_vars = {}
|
||||
if existing_vars_data:
|
||||
combined_vars.update(existing_vars_data)
|
||||
|
||||
for name, info in unique_vars.items():
|
||||
if name not in combined_vars:
|
||||
combined_vars[name] = {
|
||||
'show_var': info.get('enable', False),
|
||||
'enable': info.get('enable', False),
|
||||
'shortname': info.get('shortname', name),
|
||||
'pt_type': info.get('pt_type', ''),
|
||||
'iq_type': info.get('iq_type', ''),
|
||||
'return_type': info.get('return_type', 'int'),
|
||||
'type': info.get('type', 'unknown'),
|
||||
'file': info.get('file', ''),
|
||||
'extern': info.get('extern', False),
|
||||
'static': info.get('static', False),
|
||||
}
|
||||
else:
|
||||
# При необходимости можно обновить поля, например:
|
||||
# combined_vars[name].update(info)
|
||||
pass
|
||||
|
||||
# Записываем переменные с новыми полями
|
||||
for name, info in combined_vars.items():
|
||||
var_elem = ET.SubElement(vars_elem, "var", name=name)
|
||||
ET.SubElement(var_elem, "show_var").text = str(info.get('show_var', False)).lower()
|
||||
ET.SubElement(var_elem, "enable").text = str(info.get('enable', False)).lower()
|
||||
ET.SubElement(var_elem, "shortname").text = info.get('shortname', name)
|
||||
ET.SubElement(var_elem, "pt_type").text = info.get('pt_type', '')
|
||||
ET.SubElement(var_elem, "iq_type").text = info.get('iq_type', '')
|
||||
ET.SubElement(var_elem, "return_type").text = info.get('return_type', 'int')
|
||||
|
||||
ET.SubElement(var_elem, "type").text = info.get('type', 'unknown')
|
||||
rel_file = os.path.relpath(info.get('file', ''), proj_path) if info.get('file') else ''
|
||||
ET.SubElement(var_elem, "file").text = rel_file.replace("\\", "/") if rel_file else ''
|
||||
ET.SubElement(var_elem, "extern").text = str(info.get('extern', False)).lower()
|
||||
ET.SubElement(var_elem, "static").text = str(info.get('static', False)).lower()
|
||||
|
||||
# Секция includes (файлы)
|
||||
includes_elem = ET.SubElement(root, "includes")
|
||||
for path in h_files_needed:
|
||||
rel_path = os.path.relpath(path, proj_path)
|
||||
includes_elem_file = ET.SubElement(includes_elem, "file")
|
||||
includes_elem_file.text = rel_path.replace("\\", "/")
|
||||
|
||||
# Секция externs (переменные с extern)
|
||||
externs_elem = ET.SubElement(root, "externs")
|
||||
for name, info in vars_need_extern.items():
|
||||
var_elem = ET.SubElement(externs_elem, "var", name=name)
|
||||
ET.SubElement(var_elem, "type").text = info.get("type", "unknown")
|
||||
rel_file = os.path.relpath(info.get("file", ""), proj_path)
|
||||
ET.SubElement(var_elem, "file").text = rel_file.replace("\\", "/")
|
||||
|
||||
# Форматирование с отступами
|
||||
rough_string = ET.tostring(root, encoding="utf-8")
|
||||
reparsed = minidom.parseString(rough_string)
|
||||
pretty_xml = reparsed.toprettyxml(indent=" ")
|
||||
|
||||
with open(xml_full_path, "w", encoding="utf-8") as f:
|
||||
f.write(pretty_xml)
|
||||
|
||||
optional_printf(PRINT_STATUS, f"[XML] Variables saved to {xml_full_path}")
|
||||
|
||||
|
||||
def write_typedefs_and_structs_to_xml(proj_path, xml_path, typedefs, structs):
    def create_struct_element(parent_elem, struct_name, fields):
        struct_elem = ET.SubElement(parent_elem, "struct", name=struct_name)

        for field_name, field_type in fields.items():
            if isinstance(field_type, dict):
                # Nested structure
                nested_elem = ET.SubElement(struct_elem, "field", name=field_name)
                # Keep the original type string (e.g. T_innerStruct[4], T_innerStruct*)
                if "__type__" in field_type:
                    nested_elem.set("type", field_type["__type__"])
                else:
                    nested_elem.set("type", "anonymous")

                # Recursively add the fields of the nested structure
                create_struct_element(nested_elem, field_name, {
                    k: v for k, v in field_type.items() if k != "__type__"
                })
            else:
                # Primitive field
                ET.SubElement(struct_elem, "field", name=field_name, type=field_type)

    # Full path to the xml file
    xml_full_path = os.path.normpath(xml_path)
    root = ET.Element("analysis")

    # <structs>
    structs_elem = ET.SubElement(root, "structs")
    for struct_name, fields in sorted(structs.items()):
        create_struct_element(structs_elem, struct_name, fields)

    # <typedefs>
    typedefs_elem = ET.SubElement(root, "typedefs")
    for name, underlying in sorted(typedefs.items()):
        ET.SubElement(typedefs_elem, "typedef", name=name, type=underlying)

    # Convert to a pretty-printed XML string
    rough_string = ET.tostring(root, encoding="utf-8")
    reparsed = minidom.parseString(rough_string)
    pretty_xml = reparsed.toprettyxml(indent=" ")

    with open(xml_full_path, "w", encoding="utf-8") as f:
        f.write(pretty_xml)

    print(f"[XML] Typedefs and structs saved to: {xml_full_path}")

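A hedged usage sketch with toy data, mainly to show the expected argument shapes (typedefs: alias to underlying type; structs: name to field dict, nested dicts allowed); the paths are hypothetical and the file is written at xml_path:

from scanVars import write_typedefs_and_structs_to_xml

write_typedefs_and_structs_to_xml(
    "/abs/project",                        # proj_path
    "/abs/project/Debug/structs.xml",      # xml_path (output file)
    {"T_counter": "unsigned int"},         # typedefs
    {"T_point": {"x": "int", "y": "int"}}  # structs
)
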
def topo_sort(graph):
    indegree = {}
    for node in graph:
        indegree.setdefault(node, 0)
        for neigh in graph[node]:
            indegree[neigh] = indegree.get(neigh, 0) + 1

    queue = deque([node for node in indegree if indegree[node] == 0])
    sorted_list = []

    while queue:
        node = queue.popleft()
        sorted_list.append(node)
        for neigh in graph.get(node, []):
            indegree[neigh] -= 1
            if indegree[neigh] == 0:
                queue.append(neigh)

    if len(sorted_list) != len(indegree):
        print("Warning: include graph has cycles or disconnected components.")
    return sorted_list

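A toy illustration of the ordering this Kahn-style sort produces (hypothetical header names); each key maps a file to the set of files it includes, and includers come out before the headers they include:

from scanVars import topo_sort

demo_graph = {"a.h": {"b.h"}, "b.h": {"c.h"}, "c.h": set()}
print(topo_sort(demo_graph))  # ['a.h', 'b.h', 'c.h']
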
def get_sorted_headers(c_files, h_files, include_dirs):
    index = clang.cindex.Index.create()
    args = [f"-I{inc}" for inc in include_dirs]

    # Build the include-dependency graph for header files
    include_graph = {}

    # Walk all sources and headers to obtain the complete graph
    all_files_to_parse = set(c_files) | set(h_files)

    for f in all_files_to_parse:
        try:
            tu = index.parse(f, args=args)
        except Exception as e:
            print(f"Failed to parse {f}: {e}")
            continue
        for include in tu.get_includes():
            inc_file = str(include.include)
            src_file = str(include.source)
            if not inc_file or not src_file:
                continue
            # Only header files from h_files are of interest here
            if src_file not in include_graph:
                include_graph[src_file] = set()
            include_graph[src_file].add(inc_file)

    # Keep only headers from h_files, so that we get their mutual dependencies
    h_files_set = set(h_files)
    filtered_graph = {}
    for src, incs in include_graph.items():
        if src in h_files_set:
            # Keep only dependencies that are also in h_files
            filtered_graph[src] = set(filter(lambda x: x in h_files_set, incs))

    # Topologically sort the headers
    sorted_h_files = topo_sort(filtered_graph)

    # Headers that did not make it into the graph (no dependencies) are appended at the end
    missing_headers = h_files_set - set(sorted_h_files)
    sorted_h_files.extend(sorted(missing_headers))

    return sorted_h_files

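Argument shape only (hypothetical paths); a real call needs libclang and parseable sources, so this is a sketch rather than something to run as-is:

from scanVars import get_sorted_headers

headers = get_sorted_headers(
    c_files=["/proj/src/main.c"],
    h_files=["/proj/inc/app.h", "/proj/inc/board.h"],
    include_dirs=["/proj/inc"],
)
# Returns h_files reordered so includers come first; headers with no edges are appended alphabetically.
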
def build_include_graph(tu):
    # Returns a dict: key is a file, value is the set of files it includes
    graph = {}
    for include in tu.get_includes():
        included_file = str(include.include)
        including_file = str(include.source)
        if including_file not in graph:
            graph[including_file] = set()
        graph[including_file].add(included_file)
    return graph

def topo_sort(graph):
    # Note: this redefinition shadows the topo_sort defined earlier in the file;
    # the logic is identical apart from the local deque import.
    from collections import deque

    indegree = {}
    for node in graph:
        indegree.setdefault(node, 0)
        for neigh in graph[node]:
            indegree[neigh] = indegree.get(neigh, 0) + 1

    queue = deque([node for node in indegree if indegree[node] == 0])
    sorted_list = []

    while queue:
        node = queue.popleft()
        sorted_list.append(node)
        for neigh in graph.get(node, []):
            indegree[neigh] -= 1
            if indegree[neigh] == 0:
                queue.append(neigh)

    if len(sorted_list) != len(indegree):
        # A cycle or missing files
        print("Warning: include graph has cycles or disconnected components.")
    return sorted_list

def main():
    sys.stdout.reconfigure(line_buffering=True)
    global PRINT_LEVEL

    parser = argparse.ArgumentParser(
        description="Analyze C project variables, typedefs, and structs using Clang.",
        epilog="""\
Usage example:
  %(prog)s /path/to/project /path/to/Makefile /absolute/path/to/output_vars.xml
""",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        add_help=False
    )
    parser.add_argument("proj_path", help="Absolute path to the project root directory")
    parser.add_argument("makefile_path", help="Absolute path to the makefile to parse")
    parser.add_argument("output_xml", help="Absolute path to output XML file for variables")

    parser.add_argument("-h", "--help", action="store_true", help="Show this help message and exit")
    parser.add_argument("-v", "--verbose", type=int, choices=range(0, 6), default=2,
                        help="Set verbosity level from 0 (quiet) to 5 (most detailed), default=2")

    if "-h" in sys.argv or "--help" in sys.argv:
        parser.print_help()
        print("\nUsage example:")
        print(f"  {os.path.basename(sys.argv[0])} /path/to/project /path/to/Makefile /absolute/path/to/output_vars.xml")
        sys.exit(0)

    if len(sys.argv) < 4:
        print("Error: insufficient arguments.\n")
        print("Usage example:")
        print(f"  {os.path.basename(sys.argv[0])} /path/to/project /path/to/Makefile /absolute/path/to/output_vars.xml")
        sys.exit(1)

    args = parser.parse_args()

    PRINT_LEVEL = args.verbose

    proj_path = os.path.normpath(args.proj_path)
    makefile_path = os.path.normpath(args.makefile_path)
    output_xml = os.path.normpath(args.output_xml)

    # All three paths must be absolute
    for path, name in [(proj_path, "Project path"), (makefile_path, "Makefile path"), (output_xml, "Output XML path")]:
        if not os.path.isabs(path):
            print(f"Error: {name} '{path}' is not an absolute path.")
            sys.exit(1)

    if not os.path.isdir(proj_path):
        print(f"Error: Project path '{proj_path}' is not a directory or does not exist.")
        sys.exit(1)
    if not os.path.isfile(makefile_path):
        print(f"Error: Makefile path '{makefile_path}' does not exist.")
        sys.exit(1)

    c_files, h_files, include_dirs = parse_makefile(makefile_path)

    vars, includes, externs = analyze_variables_across_files(c_files, h_files, include_dirs)
    typedefs, structs = analyze_typedefs_and_structs_across_files(c_files, include_dirs)

    vars = dict(sorted(vars.items()))
    includes = get_sorted_headers(c_files, includes, include_dirs)
    externs = dict(sorted(externs.items()))
    typedefs = dict(sorted(typedefs.items()))
    structs = dict(sorted(structs.items()))

    # structs.xml is written next to output_xml
    structs_xml = os.path.join(os.path.dirname(output_xml), "structs.xml")

    # Write the structs to structs_xml
    write_typedefs_and_structs_to_xml(proj_path, structs_xml, typedefs, structs)

    # Pass the structs.xml path (relative to proj_path) into vars.xml;
    # generate_xml_output also receives the makefile path for the same reason
    generate_xml_output(proj_path, output_xml, vars, includes, externs, structs_xml, makefile_path)


if __name__ == "__main__":
    main()

def run_scan(proj_path, makefile_path, output_xml, verbose=2):
    global PRINT_LEVEL
    PRINT_LEVEL = verbose

    proj_path = os.path.normpath(proj_path)
    makefile_path = os.path.normpath(makefile_path)
    output_xml = os.path.normpath(output_xml)

    # All three paths must be absolute
    for path, name in [(proj_path, "Project path"), (makefile_path, "Makefile path"), (output_xml, "Output XML path")]:
        if not os.path.isabs(path):
            raise ValueError(f"{name} '{path}' is not an absolute path.")

    if not os.path.isdir(proj_path):
        raise FileNotFoundError(f"Project path '{proj_path}' is not a directory or does not exist.")
    if not os.path.isfile(makefile_path):
        raise FileNotFoundError(f"Makefile path '{makefile_path}' does not exist.")

    c_files, h_files, include_dirs = parse_makefile(makefile_path)

    vars, includes, externs = analyze_variables_across_files(c_files, h_files, include_dirs)
    typedefs, structs = analyze_typedefs_and_structs_across_files(c_files, include_dirs)

    vars = dict(sorted(vars.items()))
    includes = get_sorted_headers(c_files, includes, include_dirs)
    externs = dict(sorted(externs.items()))
    typedefs = dict(sorted(typedefs.items()))
    structs = dict(sorted(structs.items()))

    print("[XML] Creating structs.xml...")
    structs_xml = os.path.join(os.path.dirname(output_xml), "structs.xml")
    write_typedefs_and_structs_to_xml(proj_path, structs_xml, typedefs, structs)

    print("[XML] Creating vars.xml...")
    generate_xml_output(proj_path, output_xml, vars, includes, externs, structs_xml, makefile_path)
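run_scan is the importable counterpart of main() that the GUI calls; a usage sketch with hypothetical absolute paths (run_scan raises if any of them is relative or missing):

from scanVars import run_scan

run_scan(
    proj_path="C:/work/my_dsp_project",
    makefile_path="C:/work/my_dsp_project/Debug/makefile",
    output_xml="C:/work/my_dsp_project/Debug/vars.xml",
    verbose=2,
)
# Side effects: writes vars.xml at output_xml and structs.xml next to it.
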
250
Src/setupVars.py
Normal file
@@ -0,0 +1,250 @@
import sys
import os
import re
import xml.etree.ElementTree as ET
from generateVars import map_type_to_pt, get_iq_define, type_map
from enum import IntEnum
from scanVars import *
from generateVars import *


def make_absolute_path(path, base_path):
    if not os.path.isabs(path) and os.path.isdir(base_path):
        try:
            return os.path.abspath(os.path.join(base_path, path))
        except Exception:
            pass  # In case os.path.join or abspath fails
    elif os.path.isabs(path):
        return os.path.abspath(path)
    # Fallback: return the path unchanged (also covers the exception case above,
    # which previously fell through and returned None)
    return path

def make_relative_path(abs_path, base_path):
    abs_path = os.path.abspath(abs_path)
    base_path = os.path.abspath(base_path)

    # Split into directory components
    abs_parts = abs_path.split(os.sep)
    base_parts = base_path.split(os.sep)

    # Check whether base_path is a true directory-wise prefix of the path
    if abs_parts[:len(base_parts)] == base_parts:
        rel_parts = abs_parts[len(base_parts):]
        return "/".join(rel_parts)

    # Otherwise fall back to relpath
    try:
        return os.path.relpath(abs_path, base_path).replace("\\", "/")
    except Exception:
        return abs_path.replace("\\", "/")

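A quick round-trip of the two helpers above, assuming a Windows-style layout and an existing base directory (hypothetical paths, shown only for the intended relationship between the functions):

from setupVars import make_absolute_path, make_relative_path

base = "C:/work/proj"                                      # hypothetical project root
rel = make_relative_path("C:/work/proj/src/main.c", base)  # -> "src/main.c" (forward slashes)
absolute = make_absolute_path(rel, base)                   # back to an absolute path, provided base exists
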
def parse_vars(filename, typedef_map=None):
    root, tree = safe_parse_xml(filename)
    if root is None:
        return []

    if typedef_map is None:
        typedef_map = {}

    vars_list = []
    variables_elem = root.find('variables')
    if variables_elem is not None:
        for var in variables_elem.findall('var'):
            name = var.attrib.get('name', '')
            var_type = var.findtext('type', 'unknown').strip()

            # Derive pt_type and iq_type when the XML does not provide them
            pt_type = var.findtext('pt_type')
            if not pt_type:
                pt_type = map_type_to_pt(var_type, name, typedef_map)

            iq_type = var.findtext('iq_type')
            if not iq_type:
                iq_type = get_iq_define(var_type)

            vars_list.append({
                'name': name,
                'show_var': var.findtext('show_var', 'false'),
                'enable': var.findtext('enable', 'false'),
                'shortname': var.findtext('shortname', name),
                'pt_type': pt_type,
                'iq_type': iq_type,
                'return_type': var.findtext('return_type', 'int'),
                'type': var_type,
                'file': var.findtext('file', ''),
                'extern': var.findtext('extern', 'false') == 'true',
                'static': var.findtext('static', 'false') == 'true',
            })

    return vars_list

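A minimal usage sketch of parse_vars (hypothetical path; an absent file simply yields an empty list), showing the per-entry keys built above:

from setupVars import parse_vars

entries = parse_vars("/abs/path/vars.xml")  # hypothetical path
for e in entries[:3]:
    print(e['name'], e['type'], e['pt_type'], e['file'])
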
# 2. Parse structSup.xml
def parse_structs(filename):
    root, tree = safe_parse_xml(filename)
    if root is None:
        return {}, {}

    structs = {}
    typedef_map = {}

    def parse_struct_element(elem):
        fields = {}

        for field in elem.findall("field"):
            fname = field.attrib.get("name")
            ftype = field.attrib.get("type", "")

            # Check for a nested structure
            nested_struct_elem = field.find("struct")

            if nested_struct_elem is not None:
                # Recursively parse the nested structure and store it as a sub-dict
                nested_fields = parse_struct_element(nested_struct_elem)

                # Wrap it in a dict with a 'type' key to keep the type name from the XML
                fields[fname] = {
                    'type': ftype,      # e.g. "BENDER_ERROR"
                    **nested_fields     # expanded fields of the nested structure
                }
            else:
                # Plain field
                fields[fname] = ftype

        return fields

    structs_elem = root.find("structs")
    if structs_elem is not None:
        for struct in structs_elem.findall("struct"):
            name = struct.attrib.get("name")
            if name and name not in structs:
                fields = parse_struct_element(struct)
                structs[name] = fields

    # typedefs are copied through unchanged
    typedefs_elem = root.find("typedefs")
    if typedefs_elem is not None:
        for typedef in typedefs_elem.findall("typedef"):
            name = typedef.attrib.get('name')
            target_type = typedef.attrib.get('type')
            if name and target_type:
                typedef_map[name.strip()] = target_type.strip()

    return structs, typedef_map

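Usage sketch for parse_structs (hypothetical path); the commented shape shows how a nested <struct> becomes a sub-dict whose 'type' key keeps the declared type name:

from setupVars import parse_structs

structs, typedef_map = parse_structs("/abs/path/structs.xml")  # hypothetical path; ({}, {}) if missing
# Illustrative shape of one entry:
# structs["T_driver"] == {"status": "unsigned int",
#                         "error": {"type": "BENDER_ERROR", "code": "int"}}
# typedef_map maps alias -> underlying type, e.g. {"T_counter": "unsigned int"}
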
def safe_parse_xml(xml_path):
    """
    Safely parses an XML file.

    Returns a (root, tree) tuple, or (None, None) on any error.
    """
    if not xml_path or not os.path.isfile(xml_path):
        # print(f"File '{xml_path}' not found or the path is empty")
        return None, None

    try:
        if os.path.getsize(xml_path) == 0:
            return None, None

        tree = ET.parse(xml_path)
        root = tree.getroot()
        return root, tree

    except ET.ParseError as e:
        print(f"Ошибка парсинга XML файла '{xml_path}': {e}")
        return None, None
    except Exception as e:
        print(f"Неожиданная ошибка при чтении XML файла '{xml_path}': {e}")
        return None, None

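safe_parse_xml is the shared entry point of the parsers above; a small sketch (hypothetical path) of how callers are expected to check the result:

from setupVars import safe_parse_xml

root, tree = safe_parse_xml("/abs/path/vars.xml")  # hypothetical path
if root is not None:
    # the GUI reads proj_path / makefile_path / structs_path from these root attributes
    print(root.tag, root.attrib.get("proj_path", ""))
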
def expand_struct_recursively(prefix, type_str, structs, typedefs, var_attrs, depth=0):
    if depth > 10:
        return []

    # type_str may already be a struct field dict
    if isinstance(type_str, dict):
        fields = type_str
    else:
        base_type = strip_ptr_and_array(type_str)
        fields = structs.get(base_type)
        if not isinstance(fields, dict):
            return []

    children = []
    for field_name, field_value in fields.items():
        # Skip the 'type' key: it only stores the struct's own type name
        if field_name == 'type':
            continue

        full_name = f"{prefix}.{field_name}"

        if isinstance(field_value, dict):
            # Nested structure: take its type name from the 'type' key, or an empty string
            type_name = field_value.get('type', '')
            child = {
                'name': full_name,
                'type': type_name,
                'pt_type': '',
                'file': var_attrs.get('file'),
                'extern': var_attrs.get('extern'),
                'static': var_attrs.get('static'),
            }
            # Recursively expand the nested fields
            subchildren = expand_struct_recursively(full_name, field_value, structs, typedefs, var_attrs, depth + 1)
            if subchildren:
                child['children'] = subchildren
        else:
            # Plain field: the value is the type string
            # Skip function pointers
            if isinstance(field_value, str) and "(" in field_value and "*" in field_value and ")" in field_value:
                continue

            child = {
                'name': full_name,
                'type': field_value,
                'pt_type': '',
                'file': var_attrs.get('file'),
                'extern': var_attrs.get('extern'),
                'static': var_attrs.get('static'),
            }

        children.append(child)

    return children

def expand_vars(vars_list, structs, typedefs):
    """
    Expands structures and arrays of structures into trees.
    """
    expanded = []

    for var in vars_list:
        pt_type = var.get('pt_type', '')
        raw_type = var.get('type', '')
        base_type = strip_ptr_and_array(raw_type)

        fields = structs.get(base_type)

        if pt_type.startswith('pt_arr_') and isinstance(fields, dict):
            new_var = var.copy()
            new_var['children'] = expand_struct_recursively(var['name'], raw_type, structs, typedefs, var)
            expanded.append(new_var)

        elif pt_type == 'pt_struct' and isinstance(fields, dict):
            new_var = var.copy()
            new_var['children'] = expand_struct_recursively(var['name'], raw_type, structs, typedefs, var)
            expanded.append(new_var)

        elif pt_type == 'pt_union' and isinstance(fields, dict):
            new_var = var.copy()
            new_var['children'] = expand_struct_recursively(var['name'], raw_type, structs, typedefs, var)
            expanded.append(new_var)

        else:
            expanded.append(var)

    return expanded

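How a struct variable fans out into dotted children, assuming strip_ptr_and_array leaves a plain type name unchanged (toy struct table, 'pt_struct' as the pointer-type tag):

from setupVars import expand_vars

demo_structs = {"T_point": {"x": "int", "y": "int"}}
demo_var = {'name': 'origin', 'type': 'T_point', 'pt_type': 'pt_struct',
            'file': 'src/geo.c', 'extern': False, 'static': False}
tree = expand_vars([demo_var], demo_structs, typedefs={})
print([c['name'] for c in tree[0]['children']])  # ['origin.x', 'origin.y']
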
753
Src/setupVars_GUI.py
Normal file
@@ -0,0 +1,753 @@
# build command
# pyinstaller --onefile --name DebugVarEdit --add-binary "build/libclang.dll;build" --distpath ./ --workpath ./build_temp --specpath ./build_temp setupVars_GUI.py

import sys
import os
import subprocess
import xml.etree.ElementTree as ET
from generateVars import type_map
from enum import IntEnum
import threading
from scanVars import run_scan
from generateVars import run_generate
from setupVars import *
from VariableSelector import *

from PySide6.QtWidgets import (
    QApplication, QWidget, QTableWidget, QTableWidgetItem,
    QCheckBox, QComboBox, QLineEdit, QVBoxLayout, QHBoxLayout, QPushButton,
    QCompleter, QAbstractItemView, QLabel, QMessageBox, QFileDialog, QTextEdit,
    QDialog, QTreeWidget, QTreeWidgetItem, QSizePolicy, QHeaderView
)
from PySide6.QtGui import QTextCursor, QKeyEvent
from PySide6.QtCore import Qt, QProcess, QObject, Signal, QTimer

class rows(IntEnum):
    No = 0
    include = 1
    name = 2
    type = 3
    pt_type = 4
    iq_type = 5
    ret_type = 6
    short_name = 7

class EmittingStream(QObject):
    text_written = Signal(str)

    def __init__(self):
        super().__init__()
        self._buffer = ""

    def write(self, text):
        self._buffer += text
        while '\n' in self._buffer:
            line, self._buffer = self._buffer.split('\n', 1)
            # Emit the line without the trailing '\n'
            self.text_written.emit(line)

    def flush(self):
        if self._buffer:
            self.text_written.emit(self._buffer)
            self._buffer = ""

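A minimal sketch of the stdout-redirection pattern this class enables (mirroring what update_vars_data wires up further down); collecting lines into a list stands in for the GUI slot:

import sys
from setupVars_GUI import EmittingStream

captured = []
stream = EmittingStream()
stream.text_written.connect(captured.append)   # in the app this slot is proc_win.append_text
old_stdout, sys.stdout = sys.stdout, stream
try:
    print("scanning...")                       # buffered by write(), re-emitted line by line
finally:
    sys.stdout = old_stdout
    stream.flush()
# captured == ["scanning..."]
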
class ProcessOutputWindowDummy(QWidget):
    def __init__(self, on_done_callback):
        super().__init__()
        self.setWindowTitle("Поиск переменных...")
        self.resize(600, 400)

        self.layout = QVBoxLayout(self)
        self.output_edit = QTextEdit()
        self.output_edit.setReadOnly(True)
        self.layout.addWidget(self.output_edit)

        self.btn_close = QPushButton("Закрыть")
        self.btn_close.setEnabled(False)
        self.layout.addWidget(self.btn_close)

        self.btn_close.clicked.connect(self.__handle_done)
        self._on_done_callback = on_done_callback

    def __handle_done(self):
        if self._on_done_callback:
            self._on_done_callback()
        self.close()

    def append_text(self, text):
        cursor = self.output_edit.textCursor()
        cursor.movePosition(QTextCursor.End)
        for line in text.splitlines():
            self.output_edit.append(line)
        self.output_edit.setTextCursor(cursor)
        self.output_edit.ensureCursorVisible()

# 3. UI: the variables table
class VarEditor(QWidget):
    def __init__(self):
        super().__init__()
        self.vars_list = []
        self.structs = {}
        self.typedef_map = {}

        self.proj_path = None
        self.xml_path = None
        self.makefile_path = None
        self.structs_path = None
        self.output_path = None
        self._updating = False   # Guard against re-entrant update() calls
        self._resizing = False   # Guard against recursive resize handling
        self.initUI()

    def initUI(self):
        self.setWindowTitle("Variable Editor")

        # --- Input fields for the project and XML paths ---

        # XML Output
        xml_layout = QHBoxLayout()
        xml_layout.addWidget(QLabel("XML Output:"))
        self.xml_output_edit = QLineEdit()
        self.xml_output_edit.returnPressed.connect(self.update)
        self.xml_output_edit.textChanged.connect(self.__on_xml_path_changed)
        xml_layout.addWidget(self.xml_output_edit)
        btn_xml_browse = QPushButton("...")
        btn_xml_browse.setFixedWidth(30)
        xml_layout.addWidget(btn_xml_browse)
        btn_xml_browse.clicked.connect(self.__browse_xml_output)

        # Project Path
        proj_layout = QHBoxLayout()
        proj_layout.addWidget(QLabel("Project Path:"))
        self.proj_path_edit = QLineEdit()
        self.proj_path_edit.returnPressed.connect(self.update)
        self.proj_path_edit.textChanged.connect(self.__on_proj_path_changed)
        proj_layout.addWidget(self.proj_path_edit)
        btn_proj_browse = QPushButton("...")
        btn_proj_browse.setFixedWidth(30)
        proj_layout.addWidget(btn_proj_browse)
        btn_proj_browse.clicked.connect(self.__browse_proj_path)

        # Makefile Path
        makefile_layout = QHBoxLayout()
        makefile_layout.addWidget(QLabel("Makefile Path (relative path):"))
        self.makefile_edit = QLineEdit()
        self.makefile_edit.returnPressed.connect(self.update)
        self.makefile_edit.textChanged.connect(self.__on_makefile_path_changed)
        makefile_layout.addWidget(self.makefile_edit)
        btn_makefile_browse = QPushButton("...")
        btn_makefile_browse.setFixedWidth(30)
        makefile_layout.addWidget(btn_makefile_browse)
        btn_makefile_browse.clicked.connect(self.__browse_makefile)

        # Source Output File/Directory
        source_output_layout = QHBoxLayout()
        source_output_layout.addWidget(QLabel("Source Output File:"))
        self.source_output_edit = QLineEdit()
        source_output_layout.addWidget(self.source_output_edit)
        btn_source_output_browse = QPushButton("...")
        btn_source_output_browse.setFixedWidth(30)
        source_output_layout.addWidget(btn_source_output_browse)
        btn_source_output_browse.clicked.connect(self.__browse_source_output)

        self.btn_update_vars = QPushButton("Обновить данные о переменных")
        self.btn_update_vars.clicked.connect(self.update_vars_data)

        # Variables table
        self.table = QTableWidget(len(self.vars_list), 8)
        self.table.setHorizontalHeaderLabels([
            '№',   # row-number column
            'En',
            'Name',
            'Origin Type',
            'Pointer Type',
            'IQ Type',
            'Return Type',
            'Short Name'
        ])
        self.table.setEditTriggers(QAbstractItemView.AllEditTriggers)

        # Build button
        btn_save = QPushButton("Build")
        btn_save.clicked.connect(self.save_build)

        # Button for adding variables
        self.btn_add_vars = QPushButton("Add Variables")
        self.btn_add_vars.clicked.connect(self.__open_variable_selector)

        # Main layout
        layout = QVBoxLayout()
        layout.addLayout(xml_layout)
        layout.addLayout(proj_layout)
        layout.addLayout(makefile_layout)
        layout.addWidget(self.btn_update_vars)
        layout.addWidget(self.table)
        layout.addWidget(self.btn_add_vars)
        layout.addLayout(source_output_layout)
        layout.addWidget(btn_save)

        header = self.table.horizontalHeader()
        # The last column stretches to fill the remaining width; the others stay interactive
        for col in range(self.table.columnCount()):
            if col == self.table.columnCount() - 1:
                header.setSectionResizeMode(col, QHeaderView.Stretch)
            else:
                header.setSectionResizeMode(col, QHeaderView.Interactive)

        parent_widget = self.table.parentWidget()
        if parent_widget:
            # Queried but not used further
            w = parent_widget.width()
            h = parent_widget.height()
            viewport_width = self.table.viewport().width()

        # Fixed widths for the narrow columns
        self.table.setColumnWidth(rows.No, 30)
        self.table.setColumnWidth(rows.include, 30)
        self.table.setColumnWidth(rows.pt_type, 85)
        self.table.setColumnWidth(rows.iq_type, 85)
        self.table.setColumnWidth(rows.ret_type, 85)

        self.table.setColumnWidth(rows.name, 300)
        self.table.setColumnWidth(rows.type, 100)

        self.table.horizontalHeader().sectionResized.connect(self.on_section_resized)

        self.setLayout(layout)

    def on_section_resized(self, logicalIndex, oldSize, newSize):
        if self._resizing:
            return  # prevent recursion

        min_width = 50
        delta = newSize - oldSize
        right_index = logicalIndex + 1

        if right_index >= self.table.columnCount():
            # Rightmost column has no neighbour; just clamp it to the minimum width
            if newSize < min_width:
                self._resizing = True
                self.table.setColumnWidth(logicalIndex, min_width)
                self._resizing = False
            return

        self._resizing = True
        try:
            right_width = self.table.columnWidth(right_index)
            new_right_width = right_width - delta

            # If the neighbour would drop below the minimum width, adjust the left column instead
            if new_right_width < min_width:
                new_right_width = min_width
                newSize = oldSize + (right_width - min_width)
                self.table.setColumnWidth(logicalIndex, newSize)

            self.table.setColumnWidth(right_index, new_right_width)
        finally:
            self._resizing = False

    def get_xml_path(self):
        xml_path = self.xml_output_edit.text().strip()
        return xml_path

    def get_proj_path(self):
        proj_path = self.proj_path_edit.text().strip()
        return proj_path

    def get_makefile_path(self):
        proj_path = self.get_proj_path()
        makefile_path = make_absolute_path(self.makefile_edit.text().strip(), proj_path)
        return makefile_path

    def get_struct_path(self):
        proj_path = self.get_proj_path()
        xml_path = self.get_xml_path()
        root, tree = safe_parse_xml(xml_path)
        if root is None:
            return
        # --- structs_path from the root attribute ---
        structs_path = root.attrib.get('structs_path', '').strip()
        structs_path_full = make_absolute_path(structs_path, proj_path)
        if structs_path_full and os.path.isfile(structs_path_full):
            structs_path = structs_path_full
        else:
            structs_path = None
        return structs_path

    def get_output_path(self):
        output_path = os.path.abspath(self.source_output_edit.text().strip())
        return output_path

    def update_all_paths(self):
        self.proj_path = self.get_proj_path()
        self.xml_path = self.get_xml_path()
        self.makefile_path = self.get_makefile_path()
        self.structs_path = self.get_struct_path()
        self.output_path = self.get_output_path()

    def update_vars_data(self):
        self.update_all_paths()

        if not self.proj_path or not self.xml_path:
            QMessageBox.warning(self, "Ошибка", "Укажите пути проекта и XML.")
            return

        if not os.path.isfile(self.makefile_path):
            QMessageBox.warning(self, "Ошибка", f"Makefile не найден:\n{self.makefile_path}")
            return

        # Window with a close button that shows the scanner output
        self.proc_win = ProcessOutputWindowDummy(self.__after_scanvars_finished)
        self.proc_win.show()

        self.emitting_stream = EmittingStream()
        self.emitting_stream.text_written.connect(self.proc_win.append_text)

        def run_scan_wrapper():
            try:
                old_stdout = sys.stdout
                sys.stdout = self.emitting_stream

                run_scan(self.proj_path, self.makefile_path, self.xml_path)

            except Exception as e:
                self.emitting_stream.text_written.emit(f"\n[ОШИБКА] {e}")
            finally:
                sys.stdout = old_stdout
                self.emitting_stream.text_written.emit("\n--- Анализ завершён ---")
                self.proc_win.btn_close.setEnabled(True)

        threading.Thread(target=run_scan_wrapper, daemon=True).start()

    def save_build(self):
        vars_out = []
        for row in range(self.table.rowCount()):
            include_cb = self.table.cellWidget(row, rows.include)
            if not include_cb.isChecked():
                continue
            name_edit = self.table.cellWidget(row, rows.name)
            pt_type_combo = self.table.cellWidget(row, rows.pt_type)
            iq_combo = self.table.cellWidget(row, rows.iq_type)
            ret_combo = self.table.cellWidget(row, rows.ret_type)
            short_name_edit = self.table.cellWidget(row, rows.short_name)

            var_data = {
                'name': name_edit.text(),
                'type': 'pt_' + pt_type_combo.currentText(),
                'iq_type': iq_combo.currentText(),
                'return_type': ret_combo.currentText() if ret_combo.currentText() else 'int',
                'short_name': short_name_edit.text(),
            }
            vars_out.append(var_data)

        self.update_all_paths()

        if not self.proj_path or not self.xml_path or not self.output_path:
            QMessageBox.warning(self, "Ошибка", "Заполните все пути: проект, XML и output.")
            return

        try:
            run_generate(self.proj_path, self.xml_path, self.output_path)
            QMessageBox.information(self, "Готово", "Файл debug_vars.c успешно сгенерирован.")
            self.update()
        except Exception as e:
            QMessageBox.critical(self, "Ошибка при генерации", str(e))

    def update(self):
        if self._updating:
            return  # Already updating, bail out to avoid recursion
        self._updating = True

        self.update_all_paths()
        try:
            if self.xml_path and not os.path.isfile(self.xml_path):
                return

            try:
                root, tree = safe_parse_xml(self.xml_path)
                if root is None:
                    return

                if not self.proj_path:
                    # If the field is empty, try to take the path from the XML
                    proj_path_from_xml = root.attrib.get('proj_path', '').strip()
                    if proj_path_from_xml and os.path.isdir(proj_path_from_xml):
                        self.proj_path = proj_path_from_xml
                        self.proj_path_edit.setText(proj_path_from_xml)
                    else:
                        QMessageBox.warning(
                            self,
                            "Внимание",
                            "Путь к проекту (proj_path) не найден или не существует.\n"
                            "Пожалуйста, укажите его вручную в поле 'Project Path'."
                        )
                else:
                    if not os.path.isdir(self.proj_path):
                        QMessageBox.warning(
                            self,
                            "Внимание",
                            f"Указанный путь к проекту не существует:\n{self.proj_path}\n"
                            "Пожалуйста, исправьте путь в поле 'Project Path'."
                        )

                if not self.makefile_path:
                    # --- makefile_path from the root attribute ---
                    makefile_path = root.attrib.get('makefile_path', '').strip()
                    makefile_path_full = make_absolute_path(makefile_path, self.proj_path)
                    if makefile_path_full and os.path.isfile(makefile_path_full):
                        self.makefile_edit.setText(makefile_path)
                    else:
                        self.makefile_path = None

                if not self.structs_path:
                    # --- structs_path from the root attribute ---
                    structs_path = root.attrib.get('structs_path', '').strip()
                    structs_path_full = make_absolute_path(structs_path, self.proj_path)
                    if structs_path_full and os.path.isfile(structs_path_full):
                        self.structs_path = structs_path_full
                        self.structs, self.typedef_map = parse_structs(structs_path_full)
                    else:
                        self.structs_path = None

                self.vars_list = parse_vars(self.xml_path, self.typedef_map)
                self.update_table()
            except Exception as e:
                QMessageBox.warning(self, "Ошибка", f"Ошибка при чтении XML:\n{e}")

        finally:
            self._updating = False  # Release the guard on the way out

    def __browse_proj_path(self):
        dir_path = QFileDialog.getExistingDirectory(self, "Выберите папку проекта")
        if dir_path:
            self.proj_path_edit.setText(dir_path)
            self.proj_path = dir_path

            if self.makefile_path and self.proj_path:
                path = make_relative_path(self.makefile_path, self.proj_path)
                self.makefile_edit.setText(path)
                self.makefile_path = path

    def __browse_xml_output(self):
        file_path, _ = QFileDialog.getSaveFileName(
            self,
            "Выберите XML файл",
            filter="XML files (*.xml);;All Files (*)"
        )
        self.xml_output_edit.setText(file_path)
        self.xml_path = file_path

    def keyPressEvent(self, event: QKeyEvent):
        if event.key() == Qt.Key_Delete:
            self.delete_selected_rows()
        else:
            super().keyPressEvent(event)

    def __browse_makefile(self):
        file_path, _ = QFileDialog.getOpenFileName(
            self, "Выберите Makefile", filter="Makefile (makefile);;All Files (*)"
        )
        if file_path and self.proj_path:
            path = make_relative_path(file_path, self.proj_path)
        else:
            path = file_path
        self.makefile_edit.setText(path)
        self.makefile_path = path

    def __browse_source_output(self):
        dir_path = QFileDialog.getExistingDirectory(self, "Выберите папку для debug_vars.c")
        if dir_path:
            self.source_output_edit.setText(dir_path)
            self.output_path = dir_path
        else:
            self.output_path = ''

    def __on_xml_path_changed(self):
        self.xml_path = self.get_xml_path()
        self.update()

    def __on_proj_path_changed(self):
        self.proj_path = self.get_proj_path()
        self.update()

    def __on_makefile_path_changed(self):
        self.makefile_path = self.get_makefile_path()
        if self.makefile_path and self.proj_path:
            path = make_relative_path(self.makefile_path, self.proj_path)
            self.makefile_edit.setText(path)
        self.update()

    def __after_scanvars_finished(self):
        self.update_all_paths()
        if not os.path.isfile(self.xml_path):
            QMessageBox.critical(self, "Ошибка", f"Файл не найден: {self.xml_path}")
            return

        try:
            self.makefile_path = None
            self.structs_path = None
            self.proj_path = None
            self.update()

        except Exception as e:
            QMessageBox.critical(self, "Ошибка", f"Не удалось загрузить переменные:\n{e}")

    def delete_selected_rows(self):
        selected_rows = sorted(set(index.row() for index in self.table.selectedIndexes()), reverse=True)
        if not selected_rows:
            return

        # Table rows correspond to vars with show_var == 'true'; flip the selected ones back to 'false'
        filtered_vars = [v for v in self.vars_list if v.get('show_var', 'false') == 'true']
        for row in selected_rows:
            if 0 <= row < len(filtered_vars):
                var_to_remove = filtered_vars[row]
                for v in self.vars_list:
                    if v['name'] == var_to_remove['name']:
                        v['show_var'] = 'false'
                        break

        self.update_table()

    def __open_variable_selector(self):
        if not self.vars_list:
            QMessageBox.warning(self, "Нет переменных", "Сначала загрузите или обновите переменные.")
            return

        dlg = VariableSelectorDialog(self.vars_list, self.structs, self.typedef_map, self.xml_path, self)
        if dlg.exec():
            self.write_to_xml()
            self.update()

    def update_table(self):
        self.type_options = list(dict.fromkeys(type_map.values()))
        self.display_type_options = [t.replace('pt_', '') for t in self.type_options]
        iq_types = ['iq_none', 'iq'] + [f'iq{i}' for i in range(1, 31)]
        filtered_vars = [v for v in self.vars_list if v.get('show_var', 'false') == 'true']
        self.table.setRowCount(len(filtered_vars))
        self.table.verticalHeader().setVisible(False)

        for row, var in enumerate(filtered_vars):
            # Row number in the No column (0)
            no_item = QTableWidgetItem(str(row))
            no_item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsEnabled)  # read-only
            self.table.setItem(row, rows.No, no_item)

            cb = QCheckBox()
            enable_str = var.get('enable', 'false')
            cb.setChecked(enable_str.lower() == 'true')
            self.table.setCellWidget(row, rows.include, cb)

            name_edit = QLineEdit(var['name'])
            if var['type'] in self.structs:
                completer = QCompleter(self.structs[var['type']].keys())
                completer.setCaseSensitivity(Qt.CaseInsensitive)
                name_edit.setCompleter(completer)
            self.table.setCellWidget(row, rows.name, name_edit)

            # Type (origin)
            origin_type = var.get('type', '').strip()
            origin_item = QTableWidgetItem(origin_type)
            origin_item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsEnabled)  # read-only
            self.table.setItem(row, rows.type, origin_item)

            pt_type_combo = QComboBox()
            pt_type_combo.addItems(self.display_type_options)
            internal_type = var['pt_type'].replace('pt_', '')
            if internal_type in self.display_type_options:
                pt_type_combo.setCurrentText(internal_type)
            else:
                pt_type_combo.addItem(internal_type)
                pt_type_combo.setCurrentText(internal_type)
            self.table.setCellWidget(row, rows.pt_type, pt_type_combo)

            iq_combo = QComboBox()
            iq_combo.addItems(iq_types)
            iq_type = var['iq_type'].replace('t_', '')
            if iq_type in iq_types:
                iq_combo.setCurrentText(iq_type)
            else:
                iq_combo.addItem(iq_type)
                iq_combo.setCurrentText(iq_type)
            self.table.setCellWidget(row, rows.iq_type, iq_combo)

            ret_combo = QComboBox()
            ret_combo.addItems(iq_types)
            self.table.setCellWidget(row, rows.ret_type, ret_combo)

            short_name_edit = QLineEdit(var['name'])
            self.table.setCellWidget(row, rows.short_name, short_name_edit)

            cb.stateChanged.connect(self.write_to_xml)
            name_edit.textChanged.connect(self.write_to_xml)
            pt_type_combo.currentTextChanged.connect(self.write_to_xml)
            iq_combo.currentTextChanged.connect(self.write_to_xml)
            ret_combo.currentTextChanged.connect(self.write_to_xml)
            short_name_edit.textChanged.connect(self.write_to_xml)

        self.write_to_xml()

    def read_table(self):
        vars_data = []
        for row in range(self.table.rowCount()):
            cb = self.table.cellWidget(row, rows.include)
            name_edit = self.table.cellWidget(row, rows.name)
            pt_type_combo = self.table.cellWidget(row, rows.pt_type)
            iq_combo = self.table.cellWidget(row, rows.iq_type)
            ret_combo = self.table.cellWidget(row, rows.ret_type)
            short_name_edit = self.table.cellWidget(row, rows.short_name)
            origin_item = self.table.item(row, rows.type)

            vars_data.append({
                'show_var': True,
                'enable': cb.isChecked() if cb else False,
                'name': name_edit.text() if name_edit else '',
                'pt_type': 'pt_' + pt_type_combo.currentText() if pt_type_combo else '',
                'iq_type': iq_combo.currentText() if iq_combo else '',
                'return_type': ret_combo.currentText() if ret_combo else '',
                'shortname': short_name_edit.text() if short_name_edit else '',
                'type': origin_item.text() if origin_item else '',
            })
        return vars_data

    def write_to_xml(self):
        self.update_all_paths()

        if not self.xml_path or not os.path.isfile(self.xml_path):
            print("XML файл не найден или путь пустой")
            return
        if not self.proj_path or not os.path.isdir(self.proj_path):
            print("Project path не найден или путь пустой")
            return
        if not self.makefile_path or not os.path.isfile(self.makefile_path):
            print("makefile файл не найден или путь пустой")
            return

        try:
            root, tree = safe_parse_xml(self.xml_path)
            if root is None:
                return

            root.set("proj_path", self.proj_path.replace("\\", "/"))

            if self.makefile_path and os.path.isfile(self.makefile_path):
                rel_makefile = make_relative_path(self.makefile_path, self.proj_path)
                root.set("makefile_path", rel_makefile)

            if self.structs_path and os.path.isfile(self.structs_path):
                rel_struct = make_relative_path(self.structs_path, self.proj_path)
                root.set("structs_path", rel_struct)

            vars_elem = root.find('variables')
            if vars_elem is None:
                vars_elem = ET.SubElement(root, 'variables')

            original_info = {}
            for var_elem in vars_elem.findall('var'):
                name = var_elem.attrib.get('name')
                if name:
                    original_info[name] = {
                        'file': var_elem.findtext('file', ''),
                        'extern': var_elem.findtext('extern', ''),
                        'static': var_elem.findtext('static', '')
                    }

            # Variables currently in the table (active/edited)
            table_vars = {v['name']: v for v in self.read_table()}
            # All variables, including new ones that are not in the table
            all_vars_by_name = {v['name']: v for v in self.vars_list}

            # Combined list of variables to write
            all_names = list(all_vars_by_name.keys())
            for name in all_names:
                v = all_vars_by_name[name]
                v_table = table_vars.get(name)
                var_elem = None

                # Look for an existing <var> in the XML
                for ve in vars_elem.findall('var'):
                    if ve.attrib.get('name') == name:
                        var_elem = ve
                        break
                if var_elem is None:
                    var_elem = ET.SubElement(vars_elem, 'var', {'name': name})

                def set_sub_elem_text(parent, tag, text):
                    el = parent.find(tag)
                    if el is None:
                        el = ET.SubElement(parent, tag)
                    el.text = str(text)

                set_sub_elem_text(var_elem, 'show_var', v.get('show_var', 'false'))
                set_sub_elem_text(var_elem, 'enable', v.get('enable', 'false'))

                # Prefer the value from the table when available, otherwise fall back to v
                shortname_val = v_table['shortname'] if v_table and 'shortname' in v_table else v.get('shortname', '')
                pt_type_val = v_table['pt_type'] if v_table and 'pt_type' in v_table else v.get('pt_type', '')
                iq_type_val = v_table['iq_type'] if v_table and 'iq_type' in v_table else v.get('iq_type', '')
                ret_type_val = v_table['return_type'] if v_table and 'return_type' in v_table else v.get('return_type', '')

                set_sub_elem_text(var_elem, 'shortname', shortname_val)
                set_sub_elem_text(var_elem, 'pt_type', pt_type_val)
                set_sub_elem_text(var_elem, 'iq_type', iq_type_val)
                set_sub_elem_text(var_elem, 'return_type', ret_type_val)
                set_sub_elem_text(var_elem, 'type', v.get('type', ''))

                # file/extern/static: prefer the value from v, falling back to original_info from the XML
                file_val = v.get('file') or original_info.get(name, {}).get('file', '')
                extern_val = v.get('extern') or original_info.get(name, {}).get('extern', '')
                static_val = v.get('static') or original_info.get(name, {}).get('static', '')

                set_sub_elem_text(var_elem, 'file', file_val)
                set_sub_elem_text(var_elem, 'extern', extern_val)
                set_sub_elem_text(var_elem, 'static', static_val)

            ET.indent(tree, space=" ", level=0)
            tree.write(self.xml_path, encoding='utf-8', xml_declaration=True)

        except Exception as e:
            print(f"Ошибка при сохранении XML: {e}")

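After a successful save, write_to_xml leaves three path attributes on the document root of vars.xml; a sketch with illustrative values:

# Root attributes written by write_to_xml (values illustrative):
#   proj_path     = "C:/work/my_dsp_project"   (absolute, forward slashes)
#   makefile_path = "Debug/makefile"           (relative to proj_path)
#   structs_path  = "Debug/structs.xml"        (relative to proj_path)
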
if __name__ == "__main__":
    app = QApplication(sys.argv)

    editor = VarEditor()
    editor.resize(900, 600)
    editor.show()

    sys.exit(app.exec())