#!/usr/bin/env python

r"""
See class prolog below for details.
"""

import os
import re
import sys
import yaml
import json
import time
import logging
import platform
from errno import EACCES, EPERM
import subprocess
from ssh_utility import SSHRemoteclient
from telnet_utility import TelnetRemoteclient

r"""
User-defined plugin python functions.

Modules are imported from the 'plugins' directory:

plugins
├── file1.py
└── file2.py

Example of how to declare a plugin in YAML:
    - plugin:
      - plugin_name: plugin.foo_func.foo_func_yaml
      - plugin_args:
        - arg1
        - arg2
"""
plugin_dir = 'plugins'
try:
    for module in os.listdir(plugin_dir):
        if module == '__init__.py' or module[-3:] != '.py':
            continue
        plugin_module = "plugins." + module[:-3]
        # To access the module plugin.<module name>.<function>
        # Example: plugin.foo_func.foo_func_yaml()
        try:
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            print("PLUGIN: Module import failed: %s" % module)
            pass
except FileNotFoundError as e:
    print("PLUGIN: %s" % e)
    pass

r"""
Plugin functions can return data or responses to the caller as declared in the
YAML plugin setup.

Example:

    - plugin:
      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
      - plugin_args:
        - ${hostname}
        - ${username}
        - ${password}
        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
    - plugin:
      - plugin_name: plugin.print_vars.print_vars
      - plugin_args:
        - version

Here the "version" variable returned by the first plugin is consumed by a
later plugin in the same YAML block.
"""
global global_log_store_path
global global_plugin_dict
global global_plugin_list

# Hold the plugin return values in a dict and the plugin return variable names in a list.
global_plugin_dict = {}
global_plugin_list = []

# Hold the declared return names whose values are list or dict types.
# This list is consulted when building eval() argument strings so those
# variables can be referenced directly from the plugin dict.
# Example: ['version']
global_plugin_type_list = []

global_log_store_path = ''
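
r"""
For illustration only: a minimal sketch (not part of this tool) of what a
plugin module under plugins/ might look like.  The names foo_func.py and
foo_func_yaml follow the YAML examples above; any function reachable as
plugin.<module>.<function> and returning a printable value can be called the
same way.

    # plugins/foo_func.py
    def foo_func_yaml(arg1, arg2):
        # Example plugin entry point invoked from a YAML plugin block.
        return "%s-%s" % (arg1, arg2)
"""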


class FFDCCollector:

    r"""
    Send commands from the configuration file to the targeted system to
    collect log files, then fetch and store the generated files at the
    specified location.
    """

    def __init__(self,
                 hostname,
                 username,
                 password,
                 ffdc_config,
                 location,
                 remote_type,
                 remote_protocol,
                 env_vars,
                 econfig,
                 log_level):
        r"""
        Description of argument(s):

        hostname            name/ip of the targeted (remote) system
        username            user on the targeted system with access to FFDC files
        password            password for user on targeted system
        ffdc_config         configuration file listing commands and files for FFDC
        location            where to store collected FFDC
        remote_type         OS type of the remote host
        remote_protocol     protocol to use to collect data
        env_vars            user-defined CLI env vars '{"key": "value"}'
        econfig             user-defined env vars YAML file
        log_level           logging level for this run
        """

        self.hostname = hostname
        self.username = username
        self.password = password
        self.ffdc_config = ffdc_config
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ''
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the timestamp is at second granularity, these values are set
        # here so that all files for this run share the same timestamp and
        # are saved in the same directory.
        # self.location == local system for now.
        self.set_ffdc_defaults()

        # Logger for this run.  Must be created after set_ffdc_defaults().
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify top level directory exists for storage.
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load default or user-defined YAML configuration file.
            with open(self.ffdc_config, 'r') as file:
                self.ffdc_actions = yaml.load(file, Loader=yaml.FullLoader)

            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n" % (self.target_type, self.ffdc_config))
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from the user.
        self.logger.info("\n\tENV: User-defined input YAML variables")
        self.env_dict = {}
        self.load_env()

    def verify_script_env(self):

        # Imported here so their versions can be logged.
        import click
        import paramiko

        run_env_ok = True

        redfishtool_version = self.run_tool_cmd('redfishtool -V').split(' ')[2].strip('\n')
        ipmitool_version = self.run_tool_cmd('ipmitool -V').split(' ')[2]

        self.logger.info("\n\t---- Script host environment ----")
        self.logger.info("\t{:<10} {:<10}".format('Script hostname', os.uname()[1]))
        self.logger.info("\t{:<10} {:<10}".format('Script host os', platform.platform()))
        self.logger.info("\t{:<10} {:>10}".format('Python', platform.python_version()))
        self.logger.info("\t{:<10} {:>10}".format('PyYAML', yaml.__version__))
        self.logger.info("\t{:<10} {:>10}".format('click', click.__version__))
        self.logger.info("\t{:<10} {:>10}".format('paramiko', paramiko.__version__))
        self.logger.info("\t{:<10} {:>9}".format('redfishtool', redfishtool_version))
        self.logger.info("\t{:<10} {:>12}".format('ipmitool', ipmitool_version))

        # Compare the installed PyYAML version tuple, e.g. '5.4.1' -> (5, 4, 1).
        if eval(yaml.__version__.replace('.', ',')) < (5, 4, 1):
            self.logger.error("\n\tERROR: Python or python packages do not meet minimum version requirement.")
            self.logger.error("\tERROR: PyYAML version 5.4.1 or higher is needed.\n")
            run_env_ok = False

        self.logger.info("\t---- End script host environment ----")
        return run_env_ok

    def script_logging(self,
                       log_level_attr):
        r"""
        Create the logger for this run.
        """
        self.logger = logging.getLogger()
        self.logger.setLevel(log_level_attr)
        log_file_handler = logging.FileHandler(self.ffdc_dir_path + "collector.log")

        stdout_handler = logging.StreamHandler(sys.stdout)
        self.logger.addHandler(log_file_handler)
        self.logger.addHandler(stdout_handler)

        # Turn off paramiko INFO logging.
        logging.getLogger("paramiko").setLevel(logging.WARNING)

    def target_is_pingable(self):
        r"""
        Check if the target system is ping-able.
        """
        response = os.system("ping -c 1 %s 2>&1 >/dev/null" % self.hostname)
        if response == 0:
            self.logger.info("\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname)
            return True
        else:
            self.logger.error(
                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n" % self.hostname)
            sys.exit(-1)

    def collect_ffdc(self):
        r"""
        Initiate FFDC collection depending on the requested protocol.
        """

        self.logger.info("\n\t---- Start communicating with %s ----" % self.hostname)
        self.start_time = time.time()

        # Find the list of protocols supported for this target type.
        check_protocol_list = []
        config_dict = self.ffdc_actions

        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            for k, v in config_dict[target_type].items():
                if config_dict[target_type][k]['PROTOCOL'][0] not in check_protocol_list:
                    check_protocol_list.append(config_dict[target_type][k]['PROTOCOL'][0])

        self.logger.info("\n\t %s protocol type: %s" % (self.target_type, check_protocol_list))

        verified_working_protocol = self.verify_protocol(check_protocol_list)

        if verified_working_protocol:
            self.logger.info("\n\t---- Completed protocol pre-requisite check ----\n")

        # Verify top level directory exists for storage.
        self.validate_local_store(self.location)

        if ((self.remote_protocol not in verified_working_protocol) and (self.remote_protocol != 'ALL')):
            self.logger.info("\n\tWorking protocol list: %s" % verified_working_protocol)
            self.logger.error(
                '\tERROR: Requested protocol %s is not in the working protocol list.\n'
                % self.remote_protocol)
            sys.exit(-1)
        else:
            self.generate_ffdc(verified_working_protocol)

    def ssh_to_target_system(self):
        r"""
        Open an SSH connection to the targeted system.
        """

        self.ssh_remoteclient = SSHRemoteclient(self.hostname,
                                                self.username,
                                                self.password)

        if self.ssh_remoteclient.ssh_remoteclient_login():
            self.logger.info("\n\t[Check] %s SSH connection established.\t [OK]" % self.hostname)

            # Check the SCP connection.
            # If the SCP connection fails, continue with FFDC generation but
            # skip copying files to the local host.
            self.ssh_remoteclient.scp_connection()
            return True
        else:
            self.logger.info("\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]" % self.hostname)
            return False

    def telnet_to_target_system(self):
        r"""
        Open a telnet connection to the targeted system.
        """
        self.telnet_remoteclient = TelnetRemoteclient(self.hostname,
                                                      self.username,
                                                      self.password)
        if self.telnet_remoteclient.tn_remoteclient_login():
            self.logger.info("\n\t[Check] %s Telnet connection established.\t [OK]" % self.hostname)
            return True
        else:
            self.logger.info("\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]" % self.hostname)
            return False

    def generate_ffdc(self, working_protocol_list):
        r"""
        Determine actions based on remote host type.

        Description of argument(s):
        working_protocol_list    list of confirmed working protocols for connecting to the remote host.
        """

        self.logger.info("\n\t---- Executing commands on " + self.hostname + " ----")
        self.logger.info("\n\tWorking protocol list: %s" % working_protocol_list)

        config_dict = self.ffdc_actions
        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
            global_plugin_dict['global_log_store_path'] = self.ffdc_dir_path
            self.logger.info("\tSystem Type: %s" % target_type)
            for k, v in config_dict[target_type].items():

                if self.remote_protocol not in working_protocol_list \
                        and self.remote_protocol != 'ALL':
                    continue

                protocol = config_dict[target_type][k]['PROTOCOL'][0]

                if protocol not in working_protocol_list:
                    continue

                if protocol in working_protocol_list:
                    if protocol == 'SSH' or protocol == 'SCP':
                        self.protocol_ssh(protocol, target_type, k)
                    elif protocol == 'TELNET':
                        self.protocol_telnet(target_type, k)
                    elif protocol == 'REDFISH' or protocol == 'IPMI' or protocol == 'SHELL':
                        self.protocol_execute(protocol, target_type, k)
                else:
                    self.logger.error("\n\tERROR: %s is not available for %s." % (protocol, self.hostname))

        # Close network connections after collecting all files.
        self.elapsed_time = time.strftime("%H:%M:%S", time.gmtime(time.time() - self.start_time))
        if self.ssh_remoteclient:
            self.ssh_remoteclient.ssh_remoteclient_disconnect()
        if self.telnet_remoteclient:
            self.telnet_remoteclient.tn_remoteclient_disconnect()

    def protocol_ssh(self,
                     protocol,
                     target_type,
                     sub_type):
        r"""
        Perform actions using SSH and SCP protocols.

        Description of argument(s):
        protocol       Protocol to execute.
        target_type    OS type of remote host.
        sub_type       Group type of commands.
        """

        if protocol == 'SCP':
            self.group_copy(self.ffdc_actions[target_type][sub_type])
        else:
            self.collect_and_copy_ffdc(self.ffdc_actions[target_type][sub_type])

    def protocol_telnet(self,
                        target_type,
                        sub_type):
        r"""
        Perform actions using the telnet protocol.

        Description of argument(s):
        target_type    OS type of remote host.
        sub_type       Group type of commands.
        """
        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, 'TELNET'))
        telnet_files_saved = []
        progress_counter = 0
        list_of_commands = self.ffdc_actions[target_type][sub_type]['COMMANDS']
        for index, each_cmd in enumerate(list_of_commands, start=0):
            command_txt, command_timeout = self.unpack_command(each_cmd)
            result = self.telnet_remoteclient.execute_command(command_txt, command_timeout)
            if result:
                try:
                    targ_file = self.ffdc_actions[target_type][sub_type]['FILES'][index]
                except IndexError:
                    targ_file = command_txt
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from telnet %s." % each_cmd)
                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
                targ_file_with_path = (self.ffdc_dir_path
                                       + self.ffdc_prefix
                                       + targ_file)
                # Create a new file for the command output.
                with open(targ_file_with_path, 'w') as fp:
                    fp.write(result)
                telnet_files_saved.append(targ_file)
            progress_counter += 1
            self.print_progress(progress_counter)
        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
        for file in telnet_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")

    def protocol_execute(self,
                         protocol,
                         target_type,
                         sub_type):
        r"""
        Perform actions for a given protocol.

        Description of argument(s):
        protocol       Protocol to execute.
        target_type    OS type of remote host.
        sub_type       Group type of commands.
        """

        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, protocol))
        executed_files_saved = []
        progress_counter = 0
        list_of_cmd = self.get_command_list(self.ffdc_actions[target_type][sub_type])
        for index, each_cmd in enumerate(list_of_cmd, start=0):
            plugin_call = False
            if isinstance(each_cmd, dict):
                if 'plugin' in each_cmd:
                    plugin_call = True
                    # Call the plugin.
                    self.logger.info("\n\t[PLUGIN-START]")
                    result = self.execute_plugin_block(each_cmd['plugin'])
                    self.logger.info("\t[PLUGIN-END]\n")
            else:
                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)

            if not plugin_call:
                result = self.run_tool_cmd(each_cmd)
            if result:
                try:
                    file_name = self.get_file_list(self.ffdc_actions[target_type][sub_type])[index]
                    # If the file is specified as None, skip saving the output.
                    if file_name == "None":
                        continue
                    targ_file = self.yaml_env_and_plugin_vars_populate(file_name)
                except IndexError:
                    targ_file = each_cmd.split('/')[-1]
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from %s." % each_cmd)
                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)

                targ_file_with_path = (self.ffdc_dir_path
                                       + self.ffdc_prefix
                                       + targ_file)

                # Create a new file for the command output.
                with open(targ_file_with_path, 'w') as fp:
                    fp.write(result)
                executed_files_saved.append(targ_file)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

        for file in executed_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")

    def collect_and_copy_ffdc(self,
                              ffdc_actions_for_target_type,
                              form_filename=False):
        r"""
        Send commands in the ffdc_config file to the targeted system.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        form_filename                   if True, prepend self.target_type to the filename.
        """

        # Execute commands, if any.
        self.ssh_execute_ffdc_commands(ffdc_actions_for_target_type,
                                       form_filename)

        # Copy files, if the SCP connection is available.
        if self.ssh_remoteclient.scpclient:
            self.logger.info("\n\n\tCopying FFDC files from remote system %s.\n" % self.hostname)

            # Retrieve files from the target system.
            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
            self.scp_ffdc(self.ffdc_dir_path, self.ffdc_prefix, form_filename, list_of_files)
        else:
            self.logger.info("\n\n\tSkip copying FFDC files from remote system %s.\n" % self.hostname)

    def get_command_list(self,
                         ffdc_actions_for_target_type):
        r"""
        Fetch the list of commands from the configuration file.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
        try:
            list_of_commands = ffdc_actions_for_target_type['COMMANDS']
        except KeyError:
            list_of_commands = []
        return list_of_commands

    def get_file_list(self,
                      ffdc_actions_for_target_type):
        r"""
        Fetch the list of files from the configuration file.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
        try:
            list_of_files = ffdc_actions_for_target_type['FILES']
        except KeyError:
            list_of_files = []
        return list_of_files

    def unpack_command(self,
                       command):
        r"""
        Unpack a command from the config file.

        Description of argument(s):
        command    Command from the config file; either a plain string or a
                   single-entry dict of {command: timeout}.
        """
        if isinstance(command, dict):
            command_txt = next(iter(command))
            command_timeout = next(iter(command.values()))
        elif isinstance(command, str):
            command_txt = command
            # Default command timeout 60 seconds.
            command_timeout = 60

        return command_txt, command_timeout

    def ssh_execute_ffdc_commands(self,
                                  ffdc_actions_for_target_type,
                                  form_filename=False):
        r"""
        Send commands in the ffdc_config file to the targeted system.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        form_filename                   if True, prepend self.target_type to the filename.
        """
        self.logger.info("\n\t[Run] Executing commands on %s using %s"
                         % (self.hostname, ffdc_actions_for_target_type['PROTOCOL'][0]))

        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
        # If the command list is empty, return.
        if not list_of_commands:
            return

        progress_counter = 0
        for command in list_of_commands:
            command_txt, command_timeout = self.unpack_command(command)

            if form_filename:
                command_txt = str(command_txt % self.target_type)

            cmd_exit_code, err, response = \
                self.ssh_remoteclient.execute_command(command_txt, command_timeout)

            if cmd_exit_code:
                self.logger.warning(
                    "\n\t\t[WARN] %s exits with code %s." % (command_txt, str(cmd_exit_code)))
                self.logger.warning("\t\t[WARN] %s " % err)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

    def group_copy(self,
                   ffdc_actions_for_target_type):
        r"""
        SCP a group of files (wild card) from the remote host.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info("\n\tCopying files from remote system %s via SCP.\n" % self.hostname)

            list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
            # If the command list is empty, return.
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                cmd_exit_code, err, response = \
                    self.ssh_remoteclient.execute_command(command)

                # If the file does not exist, take no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    scp_result = \
                        self.ssh_remoteclient.scp_file_from_remote(response.split('\n'),
                                                                   self.ffdc_dir_path)
                    if scp_result:
                        self.logger.info("\t\tSuccessfully copied from " + self.hostname + ':' + command)
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            self.logger.info("\n\n\tSkip copying files from remote system %s.\n" % self.hostname)

    def scp_ffdc(self,
                 targ_dir_path,
                 targ_file_prefix,
                 form_filename,
                 file_list=None,
                 quiet=None):
        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path       The path of the directory to receive the files.
        targ_file_prefix    Prefix which will be prepended to each target file's name.
        form_filename       If True, prepend self.target_type to the filename.
        file_list           A list of files to scp from the targeted system to this system.
        quiet               If True, print a progress bar instead of per-file messages.
        """

        progress_counter = 0
        for filename in file_list:
            if form_filename:
                filename = str(filename % self.target_type)
            source_file_path = filename
            targ_file_path = targ_dir_path + targ_file_prefix + filename.split('/')[-1]

            # If the source file name contains a wild card, copy the filename as is.
            if '*' in source_file_path:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, self.ffdc_dir_path)
            else:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, targ_file_path)

            if not quiet:
                if scp_result:
                    self.logger.info(
                        "\t\tSuccessfully copied from " + self.hostname + ':' + source_file_path + ".\n")
                else:
                    self.logger.info(
                        "\t\tFailed to copy from " + self.hostname + ':' + source_file_path + ".\n")
            else:
                progress_counter += 1
                self.print_progress(progress_counter)

    def set_ffdc_defaults(self):
        r"""
        Set default values for self.ffdc_dir_path and self.ffdc_prefix.
        Collected FFDC files are stored in the directory /self.location/hostname_timestr/.
        Individual FFDC files are named timestr_filename.

        Description of class variables:
        self.ffdc_dir_path    The dir path where collected ffdc data files should be put.
        self.ffdc_prefix      The prefix to be given to each ffdc file name.
        """

        timestr = time.strftime("%Y%m%d-%H%M%S")
        self.ffdc_dir_path = self.location + "/" + self.hostname + "_" + timestr + "/"
        self.ffdc_prefix = timestr + "_"
        self.validate_local_store(self.ffdc_dir_path)

    def validate_local_store(self, dir_path):
        r"""
        Ensure the path exists to store FFDC files locally.

        Description of variable:
        dir_path    The dir path where collected ffdc data files will be stored.
        """

        if not os.path.exists(dir_path):
            try:
                os.makedirs(dir_path, 0o755)
            except (IOError, OSError) as e:
                # PermissionError
                if e.errno == EPERM or e.errno == EACCES:
                    self.logger.error(
                        '\tERROR: os.makedirs %s failed with PermissionError.\n' % dir_path)
                else:
                    self.logger.error(
                        '\tERROR: os.makedirs %s failed with %s.\n' % (dir_path, e.strerror))
                sys.exit(-1)

    def print_progress(self, progress):
        r"""
        Print activity progress (one '+' per completed step).

        Description of variable:
        progress    Progress counter.
        """

        sys.stdout.write("\r\t" + "+" * progress)
        sys.stdout.flush()
        time.sleep(.1)

    def verify_redfish(self):
        r"""
        Verify that the remote host has the Redfish service active.
        """
        redfish_parm = 'redfishtool -r ' \
                       + self.hostname + ' -S Always raw GET /redfish/v1/'
        return self.run_tool_cmd(redfish_parm, True)

    def verify_ipmi(self):
        r"""
        Verify that the remote host has the IPMI LAN service active.
        """
        if self.target_type == 'OPENBMC':
            ipmi_parm = 'ipmitool -I lanplus -C 17 -U ' + self.username + ' -P ' \
                + self.password + ' -H ' + self.hostname + ' power status'
        else:
            ipmi_parm = 'ipmitool -I lanplus -P ' \
                + self.password + ' -H ' + self.hostname + ' power status'

        return self.run_tool_cmd(ipmi_parm, True)

    def run_tool_cmd(self,
                     parms_string,
                     quiet=False):
        r"""
        Run a standard CLI tool or script.

        Description of variable:
        parms_string    Tool command with options.
        quiet           Do not print the tool error message if True.
        """

        result = subprocess.run([parms_string],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True,
                                universal_newlines=True)

        if result.stderr and not quiet:
            self.logger.error('\n\t\tERROR with %s ' % parms_string)
            self.logger.error('\t\t' + result.stderr)

        return result.stdout

    def verify_protocol(self, protocol_list):
        r"""
        Perform a working check for each requested protocol.

        Description of argument(s):
        protocol_list    List of protocols.
        """

        tmp_list = []
        if self.target_is_pingable():
            tmp_list.append("SHELL")

        for protocol in protocol_list:
            if self.remote_protocol != 'ALL':
                if self.remote_protocol != protocol:
                    continue

            # Only check SSH/SCP once for both protocols.
            if protocol == 'SSH' or protocol == 'SCP' and protocol not in tmp_list:
                if self.ssh_to_target_system():
                    # Add only what the user asked for.
                    if self.remote_protocol != 'ALL':
                        tmp_list.append(self.remote_protocol)
                    else:
                        tmp_list.append('SSH')
                        tmp_list.append('SCP')

            if protocol == 'TELNET':
                if self.telnet_to_target_system():
                    tmp_list.append(protocol)

            if protocol == 'REDFISH':
                if self.verify_redfish():
                    tmp_list.append(protocol)
                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [OK]" % self.hostname)
                else:
                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]" % self.hostname)

            if protocol == 'IPMI':
                if self.verify_ipmi():
                    tmp_list.append(protocol)
                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [OK]" % self.hostname)
                else:
                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]" % self.hostname)

        return tmp_list

    def load_env(self):
        r"""
        Load and export user-defined environment variables, then log the
        password-masked result.
        """
        # These env vars can be referenced in YAML at runtime.
        # Example YAML:
        #  -COMMANDS:
        #    - my_command ${hostname} ${username} ${password}
        os.environ['hostname'] = self.hostname
        os.environ['username'] = self.username
        os.environ['password'] = self.password

        # Append default env vars.
        self.env_dict['hostname'] = self.hostname
        self.env_dict['username'] = self.username
        self.env_dict['password'] = self.password

        try:
            tmp_env_dict = {}
            if self.env_vars:
                tmp_env_dict = json.loads(self.env_vars)
                # Export env vars passed on the CLI.
                for key, value in tmp_env_dict.items():
                    os.environ[key] = value
                    self.env_dict[key] = str(value)

            if self.econfig:
                with open(self.econfig, 'r') as file:
                    tmp_env_dict = yaml.load(file, Loader=yaml.FullLoader)
                # Export env vars from the YAML file.
                for key, value in tmp_env_dict['env_params'].items():
                    os.environ[key] = str(value)
                    self.env_dict[key] = str(value)
        except json.decoder.JSONDecodeError as e:
            self.logger.error("\n\tERROR: %s " % e)
            sys.exit(-1)

        # Mask the password so it is not displayed on the console.
        mask_dict = self.env_dict.copy()
        for k, v in mask_dict.items():
            if k.lower().find("password") != -1:
                hidden_text = []
                hidden_text.append(v)
                password_regex = '(' +\
                    '|'.join([re.escape(x) for x in hidden_text]) + ')'
                mask_dict[k] = re.sub(password_regex, "********", v)

        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))

    def execute_python_eval(self, eval_string):
        r"""
        Execute a fully qualified python function using eval().

        Description of argument(s):
        eval_string    The python expression to execute.

        Example:
            eval(plugin.foo_func.foo_func(10))
        """
        try:
            self.logger.info("\tCall func: %s" % eval_string)
            result = eval(eval_string)
            self.logger.info("\treturn: %s" % str(result))
        except (ValueError, SyntaxError, NameError) as e:
            self.logger.error("execute_python_eval: %s" % e)
            result = None
            pass

        return result

    def execute_plugin_block(self, plugin_cmd_list):
        r"""
        Pack the plugin command into a qualified python string object.

        Description of argument(s):
        plugin_cmd_list    Plugin block read from YAML, e.g.
                           [{'plugin_name': 'plugin.foo_func.my_func'},
                            {'plugin_args': [10]}]

        Example:
            - plugin:
              - plugin_name: plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2

            - plugin:
              - plugin_name: result = plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2

            - plugin:
              - plugin_name: result1,result2 = plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2
        """
        try:
            plugin_name = plugin_cmd_list[0]['plugin_name']
            # An '=' separator means the plugin function returns a result.
            if ' = ' in plugin_name:
                # Ex. ['result', 'plugin.foo_func.my_func']
                plugin_name_args = plugin_name.split(' = ')
                # Record the declared plugin return names.
                for arg in plugin_name_args:
                    if arg == plugin_name_args[-1]:
                        plugin_name = arg
                    else:
                        plugin_resp = arg.split(',')
                        # ['result1','result2']
                        for x in plugin_resp:
                            global_plugin_list.append(x)
                            global_plugin_dict[x] = ""

            # Walk the plugin args ['arg1', 'arg2'].
            # Handle the case where the YAML 'plugin_args' statement is not declared.
            if any('plugin_args' in d for d in plugin_cmd_list):
                plugin_args = plugin_cmd_list[1]['plugin_args']
                if plugin_args:
                    plugin_args = self.yaml_args_populate(plugin_args)
                else:
                    plugin_args = []
            else:
                plugin_args = self.yaml_args_populate([])

            # Pack the args arg1, arg2, ... argn into a
            # "arg1","arg2","argn" string used as params for the function call.
            parm_args_str = self.yaml_args_string(plugin_args)
            if parm_args_str:
                plugin_func = plugin_name + '(' + parm_args_str + ')'
            else:
                plugin_func = plugin_name + '()'

            # Execute the plugin function.
            if global_plugin_dict:
                resp = self.execute_python_eval(plugin_func)
                self.response_args_data(resp)
            else:
                resp = self.execute_python_eval(plugin_func)
            return resp
        except Exception as e:
            self.logger.error("execute_plugin_block: %s" % e)
            pass

    def response_args_data(self, plugin_resp):
        r"""
        Parse the plugin function response and update the global plugin dict.

        Description of argument(s):
        plugin_resp    Response data from the plugin function.
        """
        resp_list = []
        resp_data = ""
        # There is nothing to update from the plugin response.
        if len(global_plugin_list) == 0 or plugin_resp == 'None':
            return

        if isinstance(plugin_resp, str):
            resp_data = plugin_resp.strip('\r\n\t')
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, bytes):
            resp_data = str(plugin_resp, 'UTF-8').strip('\r\n\t')
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, tuple):
            if len(global_plugin_list) == 1:
                resp_list.append(plugin_resp)
            else:
                resp_list = list(plugin_resp)
                resp_list = [x.strip('\r\n\t') for x in resp_list]
        elif isinstance(plugin_resp, list):
            if len(global_plugin_list) == 1:
                resp_list.append([x.strip('\r\n\t') for x in plugin_resp])
            else:
                resp_list = [x.strip('\r\n\t') for x in plugin_resp]
        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
            resp_list.append(plugin_resp)

        for idx, item in enumerate(resp_list, start=0):
            # Exit the loop once all declared return names are consumed.
            if idx >= len(global_plugin_list):
                break
            # Find the name of the return variable in the list and
            # update the global plugin return dictionary.
            try:
                dict_idx = global_plugin_list[idx]
                global_plugin_dict[dict_idx] = item
            except (IndexError, ValueError) as e:
                self.logger.warn("\tresponse_args_data: %s" % e)
                pass

        # Done updating the plugin dict; regardless of pass or fail,
        # clear all the list elements.
        global_plugin_list.clear()

    def yaml_args_string(self, plugin_args):
        r"""
        Pack the args into a string.

        Description of argument(s):
        plugin_args    arg list ['arg1', 'arg2', 'argn']
        """
        args_str = ''
        for args in plugin_args:
            if args:
                if isinstance(args, (int, float)):
                    args_str += str(args)
                elif args in global_plugin_type_list:
                    args_str += str(global_plugin_dict[args])
                else:
                    args_str += '"' + str(args.strip('\r\n\t')) + '"'
            # Skip the comma after the last list element.
            if args != plugin_args[-1]:
                args_str += ","
        return args_str

    def yaml_args_populate(self, yaml_arg_list):
        r"""
        Decode ${MY_VAR} and load env data when read from YAML.

        Description of argument(s):
        yaml_arg_list    arg list read from YAML

        Example:
            - plugin_args:
              - arg1
              - arg2

            yaml_arg_list: [arg1, arg2]
        """
        # Get the env loaded keys as a list, e.g. ['hostname', 'username', 'password'].
        env_vars_list = list(self.env_dict)

        if isinstance(yaml_arg_list, list):
            tmp_list = []
            for arg in yaml_arg_list:
                if isinstance(arg, (int, float)):
                    tmp_list.append(arg)
                    continue
                elif isinstance(arg, str):
                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
                    tmp_list.append(arg_str)
                else:
                    tmp_list.append(arg)

            # Return the populated list.
            return tmp_list

    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
        r"""
        Update ${MY_VAR} and plugin variables in a string read from YAML.

        Description of argument(s):
        yaml_arg_str    arg string read from YAML

        Example:
            - cat ${MY_VAR}
            - ls -AX my_plugin_var
        """
        # Parse the string for env vars.
        try:
            # Example of matching env vars: ['username', 'password', 'hostname'].
            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works well.
            var_name_regex = '\\$\\{([^\\}]+)\\}'
            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
            for var in env_var_names_list:
                env_var = os.environ[var]
                env_replace = '${' + var + '}'
                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
        except Exception as e:
            self.logger.error("yaml_env_vars_populate: %s" % e)
            pass

        # Parse the string for plugin vars.
        try:
            # Example of plugin vars: ['my_username', 'my_data'].
            plugin_var_name_list = global_plugin_dict.keys()
            for var in plugin_var_name_list:
                # Skip env vars already populated by the block above.
                if var in env_var_names_list:
                    continue
                # If this plugin var exists but has an empty value in the dict,
                # don't replace it.  This is either a YAML plugin statement used
                # incorrectly or a user-added plugin var that is not populated.
                if yaml_arg_str in global_plugin_dict:
                    if isinstance(global_plugin_dict[var], (list, dict)):
                        # List or dict data types can't be substituted into the
                        # string; they are used directly in the eval() call.
                        global_plugin_type_list.append(var)
                    else:
                        yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
                # Just a string like a filename or command.
                else:
                    yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
        except (IndexError, ValueError) as e:
            self.logger.error("yaml_plugin_vars_populate: %s" % e)
            pass

        return yaml_arg_str
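
r"""
For illustration only: a minimal sketch of how a caller might drive this
class.  The module name (ffdc_collector), host name, credentials, and config
file path below are hypothetical placeholders; in practice the CLI wrapper
supplies these values from its command-line options.

    from ffdc_collector import FFDCCollector

    collector = FFDCCollector(hostname='bmc.example.com',
                              username='root',
                              password='xxxxxx',
                              ffdc_config='ffdc_config.yaml',
                              location='/tmp/ffdc',
                              remote_type='OPENBMC',
                              remote_protocol='ALL',
                              env_vars=None,
                              econfig=None,
                              log_level='INFO')
    collector.collect_ffdc()
"""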