 from typing import Any
 
 import click
+import yaml
 from pyshark import FileCapture  # type: ignore
 from pyshark.capture.live_capture import LiveCapture, UnknownInterfaceException  # type: ignore
 from pyshark.packet.packet import Packet  # type: ignore
@@ -328,71 +329,36 @@ def on_package(packet: Packet):
 @click.command()
 @click.pass_context
 @run_sync()
-async def update_docs(ctx):
+async def get_device_info(ctx: click.Context):
     """
-    Generates or updates a markdown file with features for all devices.
+    Connects to devices and prints their feature information in YAML format.
     """
-    MARKDOWN_FILE = Path("../SUPPORTED_FEATURES.md")
-
-    def write_markdown_table(product_data: dict[str, dict[str, str]], all_features: set[str]):
-        """Writes the data into a markdown table (products as columns)."""
-        sorted_products = sorted(product_data.keys())
-        sorted_features = sorted(list(all_features))
-
-        header = ["Feature"] + sorted_products
-
-        with open(MARKDOWN_FILE, "w", encoding="utf-8") as f:
-            f.write("| " + " | ".join(header) + " |\n")
-            f.write("|" + "---|" * len(header) + "\n")
+    click.echo("Discovering devices...")
+    context: RoborockContext = await _load_and_discover(ctx)
+    cache_data = context.cache_data()
 
-            # Create data for special rows
-            special_rows = [
-                "Product Nickname",
-                "Protocol Version",
-                "New Feature Info",
-                "New Feature Info Str",
-                "Feature Info",
-            ]
-            for row in special_rows:
-                row_values = [str(product_data[p].get(row, "")) for p in sorted_products]
-                f.write("| " + " | ".join([row] + row_values) + " |\n")
-            for feature in sorted_features:
-                feature_row = [f"`{feature}`"]
-                for product in sorted_products:
-                    feature_row.append(product_data[product].get(feature, ""))
-                f.write("| " + " | ".join(feature_row) + " |\n")
+    home_data = cache_data.home_data
 
-    product_features_map = {}
-    all_feature_names = set()
+    all_devices = home_data.devices + home_data.received_devices
+    if not all_devices:
+        click.echo("No devices found.")
+        return
 
-    context: RoborockContext = ctx.obj
-    login_data = context.login_data()
-    if not login_data.home_data:
-        await _discover(ctx)
-        login_data = context.login_data()
-    home_data = login_data.home_data
+    click.echo(f"Found {len(all_devices)} devices. Fetching data...")
 
-    all_devices = home_data.devices + home_data.received_devices
-    click.echo(f"Found {len(all_devices)} devices. Fetching current data via MQTT...")
+    all_products_data = {}
 
     for device in all_devices:
         click.echo(f" - Processing {device.name} ({device.duid})")
         product_info = home_data.product_map[device.product_id]
         device_data = DeviceData(device, product_info.model)
-        mqtt_client = RoborockMqttClientV1(login_data.user_data, device_data)
+        mqtt_client = RoborockMqttClientV1(cache_data.user_data, device_data)
+
         try:
             init_status_result = await mqtt_client.send_command(
                 RoborockCommand.APP_GET_INIT_STATUS,
             )
-            product_nickname = SHORT_MODEL_TO_ENUM.get(product_info.model.split(".")[-1])
-            device_features = DeviceFeatures.from_feature_flags(
-                new_feature_info=init_status_result.get("new_feature_info"),
-                new_feature_info_str=init_status_result.get("new_feature_info_str"),
-                feature_info=init_status_result.get("feature_info"),
-                product_nickname=product_nickname,
-            )
-            features_dict = asdict(device_features)
-
+            product_nickname = SHORT_MODEL_TO_ENUM.get(product_info.model.split(".")[-1]).name
             current_product_data = {
                 "Protocol Version": device.pv,
                 "Product Nickname": product_nickname,
@@ -401,22 +367,114 @@ def write_markdown_table(product_data: dict[str, dict[str, str]], all_features:
                 "Feature Info": init_status_result.get("feature_info"),
             }
 
-            for feature, is_supported in features_dict.items():
-                all_feature_names.add(feature)
-                current_product_data[feature] = "X" if is_supported else ""
-
-            product_features_map[product_info.model] = current_product_data
+            all_products_data[product_info.model] = current_product_data
 
         except Exception as e:
-            click.echo(f" - Error processing device {device.name}: {e}")
+            click.echo(f" - Error processing device {device.name}: {e}", err=True)
         finally:
             await mqtt_client.async_release()
 
-    if not product_features_map:
-        click.echo("No device data could be gathered. File not updated.")
+    if all_products_data:
+        click.echo("\n--- Device Information (copy to your YAML file) ---\n")
+        # Use yaml.dump to print in a clean, copy-paste friendly format
+        click.echo(yaml.dump(all_products_data, sort_keys=False))
+
+
+@click.command()
+@click.option("--data-file", default="../device_info.yaml", help="Path to the YAML file with device feature data.")
+@click.option("--output-file", default="../SUPPORTED_FEATURES.md", help="Path to the output markdown file.")
+def update_docs(data_file: str, output_file: str):
+    """
+    Generates a markdown file by processing raw feature data from a YAML file.
+    """
+    data_path = Path(data_file)
+    output_path = Path(output_file)
+
+    if not data_path.exists():
+        click.echo(f"Error: Data file not found at '{data_path}'", err=True)
+        return
+
+    click.echo(f"Loading data from {data_path}...")
+    with open(data_path, encoding="utf-8") as f:
+        product_data_from_yaml = yaml.safe_load(f)
+
+    if not product_data_from_yaml:
+        click.echo("No data found in YAML file. Exiting.", err=True)
         return
 
-    click.echo(f"Writing updated data to {MARKDOWN_FILE}...")
+    product_features_map = {}
+    all_feature_names = set()
+
+    # Process the raw data from YAML to build the feature map
+    for model, data in product_data_from_yaml.items():
+        # Reconstruct the DeviceFeatures object from the raw data in the YAML file
+        device_features = DeviceFeatures.from_feature_flags(
+            new_feature_info=data.get("New Feature Info"),
+            new_feature_info_str=data.get("New Feature Info Str"),
+            feature_info=data.get("Feature Info"),
+            product_nickname=data.get("Product Nickname"),
+        )
+        features_dict = asdict(device_features)
+
+        # This dictionary holds this product's column values for the markdown table
+        current_product_data = {
+            "Product Nickname": data.get("Product Nickname", ""),
+            "Protocol Version": data.get("Protocol Version", ""),
+            "New Feature Info": data.get("New Feature Info", ""),
+            "New Feature Info Str": data.get("New Feature Info Str", ""),
+        }
+
+        # Populate features from the calculated DeviceFeatures object
+        for feature, is_supported in features_dict.items():
+            all_feature_names.add(feature)
+            if is_supported:
+                current_product_data[feature] = "X"
+
+        supported_codes = data.get("Feature Info", [])
+        if isinstance(supported_codes, list):
+            for code in supported_codes:
+                feature_name = str(code)
+                all_feature_names.add(feature_name)
+                current_product_data[feature_name] = "X"
+
+        product_features_map[model] = current_product_data
+
+    # --- Helper function to write the markdown table ---
+    def write_markdown_table(product_features: dict[str, dict[str, Any]], all_features: set[str]):
+        """Writes the data into a markdown table (products as columns)."""
+        sorted_products = sorted(product_features.keys())
+        special_rows = [
+            "Product Nickname",
+            "Protocol Version",
+            "New Feature Info",
+            "New Feature Info Str",
+        ]
+        # Regular features are the remaining keys, sorted alphabetically
+        # We filter out the special rows to avoid duplicating them.
+        sorted_features = sorted(list(all_features - set(special_rows)))
+
+        header = ["Feature"] + sorted_products
+
+        click.echo(f"Writing documentation to {output_path}...")
+        with open(output_path, "w", encoding="utf-8") as f:
+            f.write("| " + " | ".join(header) + " |\n")
+            f.write("|" + "---|" * len(header) + "\n")
+
+            # Write the special metadata rows first
+            for row_name in special_rows:
+                row_values = [str(product_features[p].get(row_name, "")) for p in sorted_products]
+                f.write("| " + " | ".join([row_name] + row_values) + " |\n")
+
+            # Write the feature rows
+            for feature in sorted_features:
+                # Wrap feature names in backticks so numeric codes from the Feature Info list render as code
+                display_feature = f"`{feature}`"
+                feature_row = [display_feature]
+                for product in sorted_products:
+                    # Use .get() to place an 'X' or an empty string
+                    feature_row.append(product_features[product].get(feature, ""))
+                f.write("| " + " | ".join(feature_row) + " |\n")
+
     write_markdown_table(product_features_map, all_feature_names)
     click.echo("Done.")
 
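For reference, the YAML file that update_docs reads (and that get_device_info prints for copy-pasting) would look roughly like the sketch below; the model key and every value here are illustrative placeholders rather than data captured from a real device.

roborock.vacuum.example:            # placeholder model id
  Protocol Version: "1.0"           # placeholder value
  Product Nickname: EXAMPLE_NICKNAME
  New Feature Info: 123456789
  New Feature Info Str: "0000000000000000"
  Feature Info:                     # raw feature codes as returned by the device
    - 111
    - 112
    - 125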
@@ -430,6 +488,7 @@ def write_markdown_table(product_data: dict[str, dict[str, str]], all_features:
 cli.add_command(command)
 cli.add_command(parser)
 cli.add_command(session)
+cli.add_command(get_device_info)
 cli.add_command(update_docs)
 
 
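A minimal sketch of how update_docs can be exercised offline with click's test runner; the import path for the cli group is hypothetical, and the dashed command name assumes click's default underscore-to-dash conversion.

from click.testing import CliRunner

from roborock.devtools import cli  # hypothetical import path for the click group wired above

# get_device_info talks to real devices over MQTT, so in practice its YAML output
# is saved by hand to device_info.yaml; update_docs can then run entirely offline.
runner = CliRunner()
result = runner.invoke(
    cli,
    ["update-docs", "--data-file", "device_info.yaml", "--output-file", "SUPPORTED_FEATURES.md"],
)
print(result.output)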