@@ -52,64 +52,54 @@ def analyze_mode(args):
5252 """Modo análisis: analiza archivos y genera reporte HTML."""
5353
5454 # Buscar archivos en todos los directorios especificados
55+ logger .debug (f"[ANALYZER] args.source_dirs={ args .source_dirs } , args.extensions={ args .extensions } " )
5556 all_files = []
5657 for source_dir in args .source_dirs :
58+ logger .debug (f"[ANALYZER] Buscando archivos en: { source_dir } " )
5759 files = find_source_files (source_dir , extensions = args .extensions )
60+ logger .debug (f"[ANALYZER] Encontrados { len (files )} archivos en { source_dir } " )
5861 all_files .extend (files )
59-
6062 if not all_files :
6163 logger .error (f"[ERROR] No se encontraron archivos con extensiones { args .extensions } " )
6264 return 1
63-
6465 checkpatch_script = args .checkpatch
6566 kernel_root = args .kernel_root
66-
67- # Resetear estructuras globales
6867 reset_analysis ()
69-
70- # Estructura para JSON compatible con autofix
7168 json_data = []
72-
73- # Barra de progreso
7469 total = len (all_files )
7570 completed = 0
7671 lock = threading .Lock ()
77-
def progress_bar(current, total):
    """Return a fixed-width textual progress bar, e.g. ``[####    ] 50.0% (20/40)``.

    Args:
        current: Number of items completed so far.
        total: Total number of items. If ``<= 0``, an empty 0.0% bar is
            returned instead of raising ``ZeroDivisionError``.

    Returns:
        A one-line ``str`` suitable for ``\\r``-style in-place printing.
    """
    bar_len = 40  # fixed bar width in characters
    if total <= 0:
        # Boundary guard: nothing to process -> empty bar at 0%.
        return f"[{' ' * bar_len}] 0.0% ({current}/{total})"
    percent = current / total * 100
    filled = int(bar_len * current / total)
    bar = '#' * filled + ' ' * (bar_len - filled)
    return f"[{bar}] {percent:.1f}% ({current}/{total})"
84-
8578 logger .info (f"[ANALYZER] Analizando { total } archivos con { args .workers } workers..." )
8679 logger .debug (f"[ANALYZER] Archivos a analizar: { [str (f ) for f in all_files [:5 ]]} { '...' if len (all_files ) > 5 else '' } " )
8780
81+ logger .debug (f"[ANALYZER] Lanzando ThreadPoolExecutor con { args .workers } workers" )
8882 with ThreadPoolExecutor (max_workers = args .workers ) as executor :
8983 futures = {executor .submit (analyze_file , f , checkpatch_script , kernel_root ): f for f in all_files }
90-
9184 for future in as_completed (futures ):
9285 file_path = futures [future ]
86+ logger .debug (f"[ANALYZER] Iniciando análisis de: { file_path } " )
9387 try :
9488 errors , warnings , is_correct = future .result ()
95-
96- # Agregar a JSON si tiene issues
89+ logger .debug (f"[ANALYZER] Finalizado análisis de: { file_path } - { len (errors )} errores, { len (warnings )} warnings" )
9790 if errors or warnings :
9891 json_data .append ({
9992 "file" : str (file_path ),
10093 "error" : errors ,
10194 "warning" : warnings
10295 })
103-
104- # Progreso
10596 with lock :
10697 completed += 1
10798 if completed % 10 == 0 or completed == total :
10899 print (f"\r [ANALYZER] Progreso: { progress_bar (completed , total )} " , end = "" )
109- logger .debug (f"[ANALYZER] Analizado { file_path } : { len (errors )} errores, { len (warnings )} warnings" )
110-
111100 except Exception as e :
112101 logger .error (f"\n [ERROR] { file_path } : { e } " )
102+ logger .debug (f"[ANALYZER] Error al analizar { file_path } : { e } " )
113103
114104 print () # Nueva línea después de la barra
115105
@@ -175,10 +165,10 @@ def fix_mode(args):
175165
176166 for entry in files_data :
177167 file_path = Path (entry ["file" ]).resolve ()
178-
168+ logger . debug ( f"[AUTOFIX] Procesando archivo: { file_path } " )
179169 if file_filter and file_filter != file_path :
170+ logger .debug (f"[AUTOFIX] Archivo filtrado: { file_path } (filtro: { file_filter } )" )
180171 continue
181-
182172 # Reunir issues según tipo
183173 issues_to_fix = []
184174 if args .type in ("warning" , "all" ):
@@ -187,31 +177,29 @@ def fix_mode(args):
187177 if args .type in ("error" , "all" ):
188178 for e in entry .get ("error" , []):
189179 issues_to_fix .append ({"type" : "error" , ** e })
190-
180+ logger . debug ( f"[AUTOFIX] Issues a corregir: { len ( issues_to_fix ) } " )
191181 if not issues_to_fix :
182+ logger .debug (f"[AUTOFIX] Ningún issue para corregir en: { file_path } " )
192183 continue
193-
194184 issues_to_fix .sort (key = lambda x : - x ["line" ]) # de abajo hacia arriba
195-
185+ logger . debug ( f"[AUTOFIX] Issues ordenados para aplicar fixes" )
196186 # Aplicar fixes
187+ logger .debug (f"[AUTOFIX] Llamando a apply_fixes para { file_path } " )
197188 fix_results = apply_fixes (file_path , issues_to_fix )
198-
189+ logger . debug ( f"[AUTOFIX] apply_fixes completado para { file_path } " )
199190 file_modified = False
200191 for orig_issue , res in zip (issues_to_fix , fix_results ):
201192 typ = orig_issue ["type" ]
202193 line = orig_issue ["line" ]
203194 message = orig_issue ["message" ]
204195 fixed = res .get ("fixed" , False )
205-
206196 report_data [str (file_path )][typ ].append ({
207197 "line" : line ,
208198 "message" : message ,
209199 "fixed" : fixed
210200 })
211-
212201 if fixed :
213202 file_modified = True
214-
215203 if file_modified :
216204 modified_files .add (str (file_path ))
217205 logger .info (f"[AUTOFIX] - { file_path .relative_to (file_path .parent .parent .parent )} " )
@@ -244,19 +232,20 @@ def fix_mode(args):
244232 # Generar HTML
245233 html_path = Path (args .html )
246234 html_path .parent .mkdir (parents = True , exist_ok = True )
247-
248- # Generar 3 archivos de autofix
235+ logger .debug (f"[AUTOFIX] Generando HTML principal: { html_path } " )
249236 generate_autofix_html (report_data , html_path )
237+ logger .debug (f"[AUTOFIX] Generando HTML detalle motivo: { html_path .parent / 'autofix-detail-reason.html' } " )
250238 generate_autofix_detail_reason_html (report_data , html_path .parent / "autofix-detail-reason.html" )
239+ logger .debug (f"[AUTOFIX] Generando HTML detalle archivo: { html_path .parent / 'autofix-detail-file.html' } " )
251240 generate_autofix_detail_file_html (report_data , html_path .parent / "autofix-detail-file.html" )
252-
253241 # Generar dashboard
254242 dashboard_path = html_path .parent / "dashboard.html"
243+ logger .debug (f"[AUTOFIX] Generando dashboard: { dashboard_path } " )
255244 generate_dashboard_html (dashboard_path )
256-
257245 # Guardar JSON de resultados
258246 json_out_path = Path (args .json_out )
259247 json_out_path .parent .mkdir (parents = True , exist_ok = True )
248+ logger .debug (f"[AUTOFIX] Guardando JSON de resultados: { json_out_path } " )
260249 with open (json_out_path , "w" , encoding = "utf-8" ) as f :
261250 json .dump (report_data , f , indent = 2 , default = str )
262251
@@ -455,19 +444,16 @@ def main():
455444 return analyze_mode (args )
456445
457446 elif args .fix :
458- if not args .json_input :
459- parser .error ("--fix requiere --json-input" )
460447 # Ajustar defaults para fix
448+ args .json_input = args .json_input or "json/checkpatch.json"
461449 args .html = args .html or "html/autofix.html"
462450 args .json_out = args .json_out or "json/fixed.json"
463451 return fix_mode (args )
464452
465453 elif args .compile :
466- if not args .json_input :
467- parser .error ("--compile requiere --json-input" )
468- if not args .kernel_root :
469- parser .error ("--compile requiere --kernel-root" )
470454 # Ajustar defaults para compile
455+ args .json_input = args .json_input or "json/fixed.json"
456+ args .kernel_root = kernel_root
471457 args .html = args .html or "html/compile.html"
472458 args .json_out = args .json_out or "json/compile.json"
473459 return compile_mode (args )
0 commit comments