xref: /openbmc/openbmc-test-automation/ffdc/ffdc_collector.py (revision 4adbff6ed3ddb675e9641bf2049568f82039806a)
1#!/usr/bin/env python
2
3r"""
4See class prolog below for details.
5"""
6
7import os
8import re
9import sys
10import yaml
11import json
12import time
13import logging
14import platform
15from errno import EACCES, EPERM
16import subprocess
17from ssh_utility import SSHRemoteclient
18from telnet_utility import TelnetRemoteclient
19
20r"""
21User-defined plugin python functions.
22
23Modules are imported from the plugins directory:
24
25plugins
26├── file1.py
27└── file2.py
28
29Example of how to define a plugin in YAML:
30 - plugin:
31   - plugin_name: plugin.foo_func.foo_func_yaml
32     - plugin_args:
33       - arg1
34       - arg2
35"""
36plugin_dir = 'plugins'
37try:
38    for module in os.listdir(plugin_dir):
39        if module == '__init__.py' or module[-3:] != '.py':
40            continue
41        plugin_module = "plugins." + module[:-3]
42        # To access the module plugin.<module name>.<function>
43        # Example: plugin.foo_func.foo_func_yaml()
44        try:
45            plugin = __import__(plugin_module, globals(), locals(), [], 0)
46        except Exception as e:
47            print("PLUGIN: Module import failed: %s" % module)
48            pass
49except FileNotFoundError as e:
50    print("PLUGIN: %s" % e)
51    pass
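
# Illustrative sketch (hypothetical module contents): a minimal plugin such as
# plugins/foo_func.py, matching the YAML example above, might look like
#
#     def foo_func_yaml(arg1, arg2):
#         # Return a string so a YAML block can capture it in a variable.
#         return "%s-%s" % (arg1, arg2)
#
# and is then referenced from YAML as plugin.foo_func.foo_func_yaml.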
52
53r"""
54Some plugin functions return data or a response to the caller, which can be
55assigned to a variable in the YAML plugin setup.
56
57Example:
58
59    - plugin:
60      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
61      - plugin_args:
62        - ${hostname}
63        - ${username}
64        - ${password}
65        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
66    - plugin:
67      - plugin_name: plugin.print_vars.print_vars
68      - plugin_args:
69        - version
70
71where the "version" variable returned by the first plugin is consumed by a
72subsequent plugin in the YAML block.
73
74"""
75global global_log_store_path
76global global_plugin_dict
77global global_plugin_list
78# Hold the plugin return values in dict and plugin return vars in list.
79global_plugin_dict = {}
80global_plugin_list = []
81# Hold the declared plugin return variable names when the returned value is a list or dict.
82# Refer to this name list to look up the plugin dict when building eval() function args.
83# Example: ['version']
84global_plugin_type_list = []
85global_log_store_path = ''
86
87# Plugin error state defaults.
88plugin_error_dict = {
89    'exit_on_error': False,
90    'continue_on_error': False,
91}
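
# Illustrative sketch (hypothetical plugin and value) of how the bookkeeping
# above is used: for a YAML block such as
#
#     - plugin:
#       - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
#       - plugin_args:
#         - ${hostname}
#
# 'version' is first appended to global_plugin_list, and once the plugin has
# run, its return value is stored in global_plugin_dict['version'] where later
# YAML statements can reference it.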
92
93
94class FFDCCollector:
95
96    r"""
97    Execute commands from configuration file to collect log files.
98    Fetch and store generated files at the specified location.
99
100    """
101
102    def __init__(self,
103                 hostname,
104                 username,
105                 password,
106                 ffdc_config,
107                 location,
108                 remote_type,
109                 remote_protocol,
110                 env_vars,
111                 econfig,
112                 log_level):
113        r"""
114        Description of argument(s):
115
116        hostname            name/ip of the targeted (remote) system
117        username            user on the targeted system with access to FFDC files
118        password            password for user on targeted system
119        ffdc_config         configuration file listing commands and files for FFDC
120        location            where to store collected FFDC
121        remote_type         os type of the remote host
122        remote_protocol     Protocol to use to collect data
123        env_vars            user-defined CLI env vars '{"key" : "value"}'
124        econfig             user-defined env vars YAML file
125        log_level           log level for the script logger (e.g. DEBUG, INFO)

126        """
127
128        self.hostname = hostname
129        self.username = username
130        self.password = password
131        self.ffdc_config = ffdc_config
132        self.location = location + "/" + remote_type.upper()
133        self.ssh_remoteclient = None
134        self.telnet_remoteclient = None
135        self.ffdc_dir_path = ""
136        self.ffdc_prefix = ""
137        self.target_type = remote_type.upper()
138        self.remote_protocol = remote_protocol.upper()
139        self.env_vars = env_vars
140        self.econfig = econfig
141        self.start_time = 0
142        self.elapsed_time = ''
143        self.logger = None
144
145        # Set prefix values for scp files and directory.
146        # Since the time stamp is at second granularity, these values are set here
147        # to be sure that all files for this run will have the same timestamp
148        # and they will be saved in the same directory.
149        # self.location == local system for now
150        self.set_ffdc_defaults()
151
152        # Logger for this run.  Needs to be created after set_ffdc_defaults().
153        self.script_logging(getattr(logging, log_level.upper()))
154
155        # Verify top level directory exists for storage
156        self.validate_local_store(self.location)
157
158        if self.verify_script_env():
159            # Load the default or user-defined YAML configuration file.
160            with open(self.ffdc_config, 'r') as file:
161                self.ffdc_actions = yaml.load(file, Loader=yaml.FullLoader)
162
163            if self.target_type not in self.ffdc_actions.keys():
164                self.logger.error(
165                    "\n\tERROR: %s is not listed in %s.\n\n" % (self.target_type, self.ffdc_config))
166                sys.exit(-1)
167        else:
168            sys.exit(-1)
169
170        # Load ENV vars from the user.
171        self.logger.info("\n\tENV: User-defined input YAML variables")
172        self.env_dict = {}
173        self.load_env()
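
    # Illustrative usage sketch (hypothetical host, credentials and paths; not
    # executed here):
    #
    #     collector = FFDCCollector('1.2.3.4', 'root', '0penBmc',
    #                               'ffdc_config.yaml', '/tmp/ffdc',
    #                               'OPENBMC', 'ALL', None, None, 'INFO')
    #     collector.collect_ffdc()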
174
175    def verify_script_env(self):
176
177        # Imported here only to log their versions.
178        import click
179        import paramiko
180
181        run_env_ok = True
182
183        redfishtool_version = self.run_tool_cmd('redfishtool -V').split(' ')[2].strip('\n')
184        ipmitool_version = self.run_tool_cmd('ipmitool -V').split(' ')[2]
185
186        self.logger.info("\n\t---- Script host environment ----")
187        self.logger.info("\t{:<10}  {:<10}".format('Script hostname', os.uname()[1]))
188        self.logger.info("\t{:<10}  {:<10}".format('Script host os', platform.platform()))
189        self.logger.info("\t{:<10}  {:>10}".format('Python', platform.python_version()))
190        self.logger.info("\t{:<10}  {:>10}".format('PyYAML', yaml.__version__))
191        self.logger.info("\t{:<10}  {:>10}".format('click', click.__version__))
192        self.logger.info("\t{:<10}  {:>10}".format('paramiko', paramiko.__version__))
193        self.logger.info("\t{:<10}  {:>9}".format('redfishtool', redfishtool_version))
194        self.logger.info("\t{:<10}  {:>12}".format('ipmitool', ipmitool_version))
195
196        if tuple(map(int, yaml.__version__.split('.')[:3])) < (5, 4, 1):
197            self.logger.error("\n\tERROR: Python or python packages do not meet minimum version requirement.")
198            self.logger.error("\tERROR: PyYAML version 5.4.1 or higher is needed.\n")
199            run_env_ok = False
200
201        self.logger.info("\t---- End script host environment ----")
202        return run_env_ok
203
204    def script_logging(self,
205                       log_level_attr):
206        r"""
207        Create logger
208
209        """
210        self.logger = logging.getLogger()
211        self.logger.setLevel(log_level_attr)
212        log_file_handler = logging.FileHandler(self.ffdc_dir_path + "collector.log")
213
214        stdout_handler = logging.StreamHandler(sys.stdout)
215        self.logger.addHandler(log_file_handler)
216        self.logger.addHandler(stdout_handler)
217
218        # Turn off paramiko INFO logging
219        logging.getLogger("paramiko").setLevel(logging.WARNING)
220
221    def target_is_pingable(self):
222        r"""
223        Check if target system is ping-able.
224
225        """
226        response = os.system("ping -c 1 %s >/dev/null 2>&1" % self.hostname)
227        if response == 0:
228            self.logger.info("\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname)
229            return True
230        else:
231            self.logger.error(
232                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n" % self.hostname)
233            sys.exit(-1)
234
235    def collect_ffdc(self):
236        r"""
237        Initiate FFDC Collection depending on requested protocol.
238
239        """
240
241        self.logger.info("\n\t---- Start communicating with %s ----" % self.hostname)
242        self.start_time = time.time()
243
244        # Find the list of protocols supported for this target type.
245        check_protocol_list = []
246        config_dict = self.ffdc_actions
247
248        for target_type in config_dict.keys():
249            if self.target_type != target_type:
250                continue
251
252            for k, v in config_dict[target_type].items():
253                if config_dict[target_type][k]['PROTOCOL'][0] not in check_protocol_list:
254                    check_protocol_list.append(config_dict[target_type][k]['PROTOCOL'][0])
255
256        self.logger.info("\n\t %s protocol type: %s" % (self.target_type, check_protocol_list))
257
258        verified_working_protocol = self.verify_protocol(check_protocol_list)
259
260        if verified_working_protocol:
261            self.logger.info("\n\t---- Completed protocol pre-requisite check ----\n")
262
263        # Verify top level directory exists for storage
264        self.validate_local_store(self.location)
265
266        if ((self.remote_protocol not in verified_working_protocol) and (self.remote_protocol != 'ALL')):
267            self.logger.info("\n\tWorking protocol list: %s" % verified_working_protocol)
268            self.logger.error(
269                '\tERROR: Requested protocol %s is not in working protocol list.\n'
270                % self.remote_protocol)
271            sys.exit(-1)
272        else:
273            self.generate_ffdc(verified_working_protocol)
274
275    def ssh_to_target_system(self):
276        r"""
277        Open a ssh connection to targeted system.
278
279        """
280
281        self.ssh_remoteclient = SSHRemoteclient(self.hostname,
282                                                self.username,
283                                                self.password)
284
285        if self.ssh_remoteclient.ssh_remoteclient_login():
286            self.logger.info("\n\t[Check] %s SSH connection established.\t [OK]" % self.hostname)
287
288            # Check scp connection.
289            # If scp connection fails,
290            # continue with FFDC generation but skip scp files to local host.
291            self.ssh_remoteclient.scp_connection()
292            return True
293        else:
294            self.logger.info("\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]" % self.hostname)
295            return False
296
297    def telnet_to_target_system(self):
298        r"""
299        Open a telnet connection to targeted system.
300        """
301        self.telnet_remoteclient = TelnetRemoteclient(self.hostname,
302                                                      self.username,
303                                                      self.password)
304        if self.telnet_remoteclient.tn_remoteclient_login():
305            self.logger.info("\n\t[Check] %s Telnet connection established.\t [OK]" % self.hostname)
306            return True
307        else:
308            self.logger.info("\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]" % self.hostname)
309            return False
310
311    def generate_ffdc(self, working_protocol_list):
312        r"""
313        Determine actions based on remote host type
314
315        Description of argument(s):
316        working_protocol_list    list of confirmed working protocols to connect to remote host.
317        """
318
319        self.logger.info("\n\t---- Executing commands on " + self.hostname + " ----")
320        self.logger.info("\n\tWorking protocol list: %s" % working_protocol_list)
321
322        config_dict = self.ffdc_actions
323        for target_type in config_dict.keys():
324            if self.target_type != target_type:
325                continue
326
327            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
328            global_plugin_dict['global_log_store_path'] = self.ffdc_dir_path
329            self.logger.info("\tSystem Type: %s" % target_type)
330            for k, v in config_dict[target_type].items():
331
332                if self.remote_protocol not in working_protocol_list \
333                        and self.remote_protocol != 'ALL':
334                    continue
335
336                protocol = config_dict[target_type][k]['PROTOCOL'][0]
337
341                if protocol in working_protocol_list:
342                    if protocol == 'SSH' or protocol == 'SCP':
343                        self.protocol_ssh(protocol, target_type, k)
344                    elif protocol == 'TELNET':
345                        self.protocol_telnet(target_type, k)
346                    elif protocol == 'REDFISH' or protocol == 'IPMI' or protocol == 'SHELL':
347                        self.protocol_execute(protocol, target_type, k)
348                else:
349                    self.logger.error("\n\tERROR: %s is not available for %s." % (protocol, self.hostname))
350
351        # Close network connection after collecting all files
352        self.elapsed_time = time.strftime("%H:%M:%S", time.gmtime(time.time() - self.start_time))
353        if self.ssh_remoteclient:
354            self.ssh_remoteclient.ssh_remoteclient_disconnect()
355        if self.telnet_remoteclient:
356            self.telnet_remoteclient.tn_remoteclient_disconnect()
357
358    def protocol_ssh(self,
359                     protocol,
360                     target_type,
361                     sub_type):
362        r"""
363        Perform actions using SSH and SCP protocols.
364
365        Description of argument(s):
366        protocol            Protocol to execute.
367        target_type         OS Type of remote host.
368        sub_type            Group type of commands.
369        """
370
371        if protocol == 'SCP':
372            self.group_copy(self.ffdc_actions[target_type][sub_type])
373        else:
374            self.collect_and_copy_ffdc(self.ffdc_actions[target_type][sub_type])
375
376    def protocol_telnet(self,
377                        target_type,
378                        sub_type):
379        r"""
380        Perform actions using telnet protocol.
381        Description of argument(s):
382        target_type          OS Type of remote host.
383        """
384        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, 'TELNET'))
385        telnet_files_saved = []
386        progress_counter = 0
387        list_of_commands = self.ffdc_actions[target_type][sub_type]['COMMANDS']
388        for index, each_cmd in enumerate(list_of_commands, start=0):
389            command_txt, command_timeout = self.unpack_command(each_cmd)
390            result = self.telnet_remoteclient.execute_command(command_txt, command_timeout)
391            if result:
392                try:
393                    targ_file = self.ffdc_actions[target_type][sub_type]['FILES'][index]
394                except IndexError:
395                    targ_file = command_txt
396                    self.logger.warning(
397                        "\n\t[WARN] Missing filename to store data from telnet %s." % each_cmd)
398                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
399                targ_file_with_path = (self.ffdc_dir_path
400                                       + self.ffdc_prefix
401                                       + targ_file)
402                # Write the command output to a new file.
403                with open(targ_file_with_path, 'w') as fp:
404                    fp.write(result)
406                    telnet_files_saved.append(targ_file)
407            progress_counter += 1
408            self.print_progress(progress_counter)
409        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
410        for file in telnet_files_saved:
411            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
412
413    def protocol_execute(self,
414                         protocol,
415                         target_type,
416                         sub_type):
417        r"""
418        Perform actions for a given protocol.
419
420        Description of argument(s):
421        protocol            Protocol to execute.
422        target_type         OS Type of remote host.
423        sub_type            Group type of commands.
424        """
425
426        self.logger.info("\n\t[Run] Executing commands to %s using %s" % (self.hostname, protocol))
427        executed_files_saved = []
428        progress_counter = 0
429        list_of_cmd = self.get_command_list(self.ffdc_actions[target_type][sub_type])
430        for index, each_cmd in enumerate(list_of_cmd, start=0):
431            plugin_call = False
432            if isinstance(each_cmd, dict):
433                if 'plugin' in each_cmd:
434                    # If the error flag is set and the plugin explicitly
435                    # requested to skip execution on error.
436                    if plugin_error_dict['exit_on_error'] and \
437                            self.plugin_error_check(each_cmd['plugin']):
438                        self.logger.info("\n\t[PLUGIN-ERROR] exit_on_error: %s" %
439                                         plugin_error_dict['exit_on_error'])
440                        self.logger.info("\t[PLUGIN-SKIP] %s" %
441                                         each_cmd['plugin'][0])
442                        continue
443                    plugin_call = True
444                    # call the plugin
445                    self.logger.info("\n\t[PLUGIN-START]")
446                    result = self.execute_plugin_block(each_cmd['plugin'])
447                    self.logger.info("\t[PLUGIN-END]\n")
448            else:
449                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
450
451            if not plugin_call:
452                result = self.run_tool_cmd(each_cmd)
453            if result:
454                try:
455                    file_name = self.get_file_list(self.ffdc_actions[target_type][sub_type])[index]
456                    # If file is specified as None.
457                    if file_name == "None":
458                        continue
459                    targ_file = self.yaml_env_and_plugin_vars_populate(file_name)
460                except IndexError:
461                    targ_file = each_cmd.split('/')[-1]
462                    self.logger.warning(
463                        "\n\t[WARN] Missing filename to store data from %s." % each_cmd)
464                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
465
466                targ_file_with_path = (self.ffdc_dir_path
467                                       + self.ffdc_prefix
468                                       + targ_file)
469
470                # Write the command output to a new file.
471                with open(targ_file_with_path, 'w') as fp:
472                    fp.write(result)
474                    executed_files_saved.append(targ_file)
475
476            progress_counter += 1
477            self.print_progress(progress_counter)
478
479        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
480
481        for file in executed_files_saved:
482            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
483
484    def collect_and_copy_ffdc(self,
485                              ffdc_actions_for_target_type,
486                              form_filename=False):
487        r"""
488        Send commands in ffdc_config file to targeted system.
489
490        Description of argument(s):
491        ffdc_actions_for_target_type     commands and files for the selected remote host type.
492        form_filename                    if true, pre-pend self.target_type to filename
493        """
494
495        # Executing commands, if any
496        self.ssh_execute_ffdc_commands(ffdc_actions_for_target_type,
497                                       form_filename)
498
499        # Copying files
500        if self.ssh_remoteclient.scpclient:
501            self.logger.info("\n\n\tCopying FFDC files from remote system %s.\n" % self.hostname)
502
503            # Retrieving files from target system
504            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
505            self.scp_ffdc(self.ffdc_dir_path, self.ffdc_prefix, form_filename, list_of_files)
506        else:
507            self.logger.info("\n\n\tSkip copying FFDC files from remote system %s.\n" % self.hostname)
508
509    def get_command_list(self,
510                         ffdc_actions_for_target_type):
511        r"""
512        Fetch list of commands from configuration file
513
514        Description of argument(s):
515        ffdc_actions_for_target_type    commands and files for the selected remote host type.
516        """
517        try:
518            list_of_commands = ffdc_actions_for_target_type['COMMANDS']
519        except KeyError:
520            list_of_commands = []
521        return list_of_commands
522
523    def get_file_list(self,
524                      ffdc_actions_for_target_type):
525        r"""
526        Fetch list of files from configuration file
527
528        Description of argument(s):
529        ffdc_actions_for_target_type    commands and files for the selected remote host type.
530        """
531        try:
532            list_of_files = ffdc_actions_for_target_type['FILES']
533        except KeyError:
534            list_of_files = []
535        return list_of_files
536
537    def unpack_command(self,
538                       command):
539        r"""
540        Unpack command from config file
541
542        Description of argument(s):
543        command    Command from config file.
544        """
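        # Illustrative examples (hypothetical commands) of the two accepted
        # command forms and how they unpack:
        #
        #     {'obmcutil state': 120}   ->  ('obmcutil state', 120)
        #     'cat /etc/os-release'     ->  ('cat /etc/os-release', 60)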
545        if isinstance(command, dict):
546            command_txt = next(iter(command))
547            command_timeout = next(iter(command.values()))
548        elif isinstance(command, str):
549            command_txt = command
550            # Default command timeout 60 seconds
551            command_timeout = 60
552
553        return command_txt, command_timeout
554
555    def ssh_execute_ffdc_commands(self,
556                                  ffdc_actions_for_target_type,
557                                  form_filename=False):
558        r"""
559        Send commands in ffdc_config file to targeted system.
560
561        Description of argument(s):
562        ffdc_actions_for_target_type    commands and files for the selected remote host type.
563        form_filename                    if true, pre-pend self.target_type to filename
564        """
565        self.logger.info("\n\t[Run] Executing commands on %s using %s"
566                         % (self.hostname, ffdc_actions_for_target_type['PROTOCOL'][0]))
567
568        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
569        # If the command list is empty, return.
570        if not list_of_commands:
571            return
572
573        progress_counter = 0
574        for command in list_of_commands:
575            command_txt, command_timeout = self.unpack_command(command)
576
577            if form_filename:
578                command_txt = str(command_txt % self.target_type)
579
580            cmd_exit_code, err, response = \
581                self.ssh_remoteclient.execute_command(command_txt, command_timeout)
582
583            if cmd_exit_code:
584                self.logger.warning(
585                    "\n\t\t[WARN] %s exits with code %s." % (command_txt, str(cmd_exit_code)))
586                self.logger.warning("\t\t[WARN] %s " % err)
587
588            progress_counter += 1
589            self.print_progress(progress_counter)
590
591        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
592
593    def group_copy(self,
594                   ffdc_actions_for_target_type):
595        r"""
596        Copy a group of files (wildcard) from the remote host via scp.
597
598        Description of argument(s):
599        ffdc_actions_for_target_type    commands and files for the selected remote host type.
600        """
601
602        if self.ssh_remoteclient.scpclient:
603            self.logger.info("\n\tCopying files from remote system %s via SCP.\n" % self.hostname)
604
605            list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
606            # If the command list is empty, return.
607            if not list_of_commands:
608                return
609
610            for command in list_of_commands:
611                try:
612                    command = self.yaml_env_and_plugin_vars_populate(command)
613                except IndexError:
614                    self.logger.error("\t\tInvalid command %s" % command)
615                    continue
616
617                cmd_exit_code, err, response = \
618                    self.ssh_remoteclient.execute_command(command)
619
620                # If the file does not exist, take no action.
621                # cmd_exit_code is ignored for this scenario.
622                if response:
623                    scp_result = \
624                        self.ssh_remoteclient.scp_file_from_remote(response.split('\n'),
625                                                                   self.ffdc_dir_path)
626                    if scp_result:
627                        self.logger.info("\t\tSuccessfully copied from " + self.hostname + ':' + command)
628                else:
629                    self.logger.info("\t\t%s has no result" % command)
630
631        else:
632            self.logger.info("\n\n\tSkip copying files from remote system %s.\n" % self.hostname)
633
634    def scp_ffdc(self,
635                 targ_dir_path,
636                 targ_file_prefix,
637                 form_filename,
638                 file_list=None,
639                 quiet=None):
640        r"""
641        SCP all files in file_list to the indicated directory on the local system.
642
643        Description of argument(s):
644        targ_dir_path                   The path of the directory to receive the files.
645        targ_file_prefix                Prefix which will be pre-pended to each
646                                        target file's name.
647        file_list                       A list of files to scp from the targeted system to this system
648
649        """
650
651        progress_counter = 0
652        for filename in file_list:
653            if form_filename:
654                filename = str(filename % self.target_type)
655            source_file_path = filename
656            targ_file_path = targ_dir_path + targ_file_prefix + filename.split('/')[-1]
657
658            # If the source file name contains a wildcard, copy the file name as is.
659            if '*' in source_file_path:
660                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, self.ffdc_dir_path)
661            else:
662                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, targ_file_path)
663
664            if not quiet:
665                if scp_result:
666                    self.logger.info(
667                        "\t\tSuccessfully copied from " + self.hostname + ':' + source_file_path + ".\n")
668                else:
669                    self.logger.info(
670                        "\t\tFail to copy from " + self.hostname + ':' + source_file_path + ".\n")
671            else:
672                progress_counter += 1
673                self.print_progress(progress_counter)
674
675    def set_ffdc_defaults(self):
676        r"""
677        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
678        Collected FFDC files will be stored in the directory <self.location>/<hostname>_<timestr>/.
679        Individual FFDC files will be named <timestr>_<filename>.
680
681        Description of class variables:
682        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.
683
684        self.ffdc_prefix    The prefix to be given to each ffdc file name.
685
686        """
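        # Illustrative example (hypothetical host, time and location, assuming
        # an OPENBMC target): for hostname 1.2.3.4 collected at
        # 2021-06-01 10:20:30, the defaults set below would be roughly
        #
        #     self.ffdc_dir_path == '<location>/OPENBMC/1.2.3.4_20210601-102030/'
        #     self.ffdc_prefix   == '20210601-102030_'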
687
688        timestr = time.strftime("%Y%m%d-%H%M%S")
689        self.ffdc_dir_path = self.location + "/" + self.hostname + "_" + timestr + "/"
690        self.ffdc_prefix = timestr + "_"
691        self.validate_local_store(self.ffdc_dir_path)
692
693    def validate_local_store(self, dir_path):
694        r"""
695        Ensure path exists to store FFDC files locally.
696
697        Description of variable:
698        dir_path  The dir path where collected ffdc data files will be stored.
699
700        """
701
702        if not os.path.exists(dir_path):
703            try:
704                os.makedirs(dir_path, 0o755)
705            except (IOError, OSError) as e:
706                # PermissionError
707                if e.errno == EPERM or e.errno == EACCES:
708                    self.logger.error(
709                        '\tERROR: os.makedirs %s failed with PermissionError.\n' % dir_path)
710                else:
711                    self.logger.error(
712                        '\tERROR: os.makedirs %s failed with %s.\n' % (dir_path, e.strerror))
713                sys.exit(-1)
714
715    def print_progress(self, progress):
716        r"""
717        Print activity progress as a growing row of '+' characters.
718
719        Description of variable:
720        progress  Progress counter.
721
722        """
723
724        sys.stdout.write("\r\t" + "+" * progress)
725        sys.stdout.flush()
726        time.sleep(.1)
727
728    def verify_redfish(self):
729        r"""
730        Verify remote host has redfish service active
731
732        """
733        redfish_parm = 'redfishtool -r ' \
734                       + self.hostname + ' -S Always raw GET /redfish/v1/'
735        return self.run_tool_cmd(redfish_parm, True)
736
737    def verify_ipmi(self):
738        r"""
739        Verify remote host has IPMI LAN service active
740
741        """
742        if self.target_type == 'OPENBMC':
743            ipmi_parm = 'ipmitool -I lanplus -C 17  -U ' + self.username + ' -P ' \
744                + self.password + ' -H ' + self.hostname + ' power status'
745        else:
746            ipmi_parm = 'ipmitool -I lanplus  -P ' \
747                + self.password + ' -H ' + self.hostname + ' power status'
748
749        return self.run_tool_cmd(ipmi_parm, True)
750
751    def run_tool_cmd(self,
752                     parms_string,
753                     quiet=False):
754        r"""
755        Run CLI standard tool or scripts.
756
757        Description of variable:
758        parms_string         The complete tool command line to run.
759        quiet                If True, do not print the tool's error message.
760        """
761
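        # Illustrative example (hypothetical host): a caller could fetch the
        # Redfish service root from the script host with something like
        #
        #     self.run_tool_cmd('redfishtool -r 1.2.3.4 -S Always raw GET /redfish/v1/', quiet=True)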
762        result = subprocess.run(parms_string,
763                                stdout=subprocess.PIPE,
764                                stderr=subprocess.PIPE,
765                                shell=True,
766                                universal_newlines=True)
767
768        if result.stderr and not quiet:
769            self.logger.error('\n\t\tERROR with %s ' % parms_string)
770            self.logger.error('\t\t' + result.stderr)
771
772        return result.stdout
773
774    def verify_protocol(self, protocol_list):
775        r"""
776        Perform protocol working check.
777
778        Description of argument(s):
779        protocol_list        List of protocols to check.
780        """
781
782        tmp_list = []
783        if self.target_is_pingable():
784            tmp_list.append("SHELL")
785
786        for protocol in protocol_list:
787            if self.remote_protocol != 'ALL':
788                if self.remote_protocol != protocol:
789                    continue
790
791            # Only check SSH/SCP once for both protocols
792            if (protocol == 'SSH' or protocol == 'SCP') and protocol not in tmp_list:
793                if self.ssh_to_target_system():
794                    # Add only what user asked.
795                    if self.remote_protocol != 'ALL':
796                        tmp_list.append(self.remote_protocol)
797                    else:
798                        tmp_list.append('SSH')
799                        tmp_list.append('SCP')
800
801            if protocol == 'TELNET':
802                if self.telnet_to_target_system():
803                    tmp_list.append(protocol)
804
805            if protocol == 'REDFISH':
806                if self.verify_redfish():
807                    tmp_list.append(protocol)
808                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [OK]" % self.hostname)
809                else:
810                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]" % self.hostname)
811
812            if protocol == 'IPMI':
813                if self.verify_ipmi():
814                    tmp_list.append(protocol)
815                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [OK]" % self.hostname)
816                else:
817                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]" % self.hostname)
818
819        return tmp_list
820
821    def load_env(self):
822        r"""
823        Load user-defined environment variables and export them for use in the YAML configuration at runtime.
824
825        """
826        # These env vars can be referenced in the YAML configuration and are resolved at runtime.
827        # Example YAML:
828        # -COMMANDS:
829        #    - my_command ${hostname}  ${username}   ${password}
830        os.environ['hostname'] = self.hostname
831        os.environ['username'] = self.username
832        os.environ['password'] = self.password
833
834        # Append default Env.
835        self.env_dict['hostname'] = self.hostname
836        self.env_dict['username'] = self.username
837        self.env_dict['password'] = self.password
838
839        try:
840            tmp_env_dict = {}
841            if self.env_vars:
842                tmp_env_dict = json.loads(self.env_vars)
843                # Export the user-supplied CLI env vars.
844                for key, value in tmp_env_dict.items():
845                    os.environ[key] = value
846                    self.env_dict[key] = str(value)
847
848            if self.econfig:
849                with open(self.econfig, 'r') as file:
850                    tmp_env_dict = yaml.load(file, Loader=yaml.FullLoader)
851                # Export ENV vars.
852                for key, value in tmp_env_dict['env_params'].items():
853                    os.environ[key] = str(value)
854                    self.env_dict[key] = str(value)
855        except json.decoder.JSONDecodeError as e:
856            self.logger.error("\n\tERROR: %s " % e)
857            sys.exit(-1)
858
859        # Mask passwords so they are not displayed on the console.
860        mask_dict = self.env_dict.copy()
861        for k, v in mask_dict.items():
862            if k.lower().find("password") != -1:
863                hidden_text = []
864                hidden_text.append(v)
865                password_regex = '(' +\
866                    '|'.join([re.escape(x) for x in hidden_text]) + ')'
867                mask_dict[k] = re.sub(password_regex, "********", v)
868
869        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
870
871    def execute_python_eval(self, eval_string):
872        r"""
873        Execute qualified python function using eval.
874
875        Description of argument(s):
876        eval_string        Execute the python object.
877
878        Example:
879                eval(plugin.foo_func.foo_func(10))
880        """
881        try:
882            self.logger.info("\tCall func: %s" % eval_string)
883            result = eval(eval_string)
884            self.logger.info("\treturn: %s" % str(result))
885        except (ValueError, SyntaxError, NameError) as e:
886            self.logger.error("\tERROR: execute_python_eval: %s" % e)
887            # Set the plugin error state.
888            plugin_error_dict['exit_on_error'] = True
889            result = None  # Avoid an UnboundLocalError on the return below.
890
891        return result
892
893    def execute_plugin_block(self, plugin_cmd_list):
894        r"""
895        Pack the plugin command into a qualified python string object.
896
897        Description of argument(s):
898        plugin_cmd_list       Plugin block read from YAML
899                              [{'plugin_name': 'plugin.foo_func.my_func'},
900                               {'plugin_args': [10]}]
901
902        Example:
903            - plugin:
904              - plugin_name: plugin.foo_func.my_func
905              - plugin_args:
906                - arg1
907                - arg2
908
909            - plugin:
910              - plugin_name: result = plugin.foo_func.my_func
911              - plugin_args:
912                - arg1
913                - arg2
914
915            - plugin:
916              - plugin_name: result1,result2 = plugin.foo_func.my_func
917              - plugin_args:
918                - arg1
919                - arg2
920        """
921        try:
922            plugin_name = plugin_cmd_list[0]['plugin_name']
923            # An '=' separator means the plugin function returns a result.
924            if ' = ' in plugin_name:
925                # Ex. ['result', 'plugin.foo_func.my_func']
926                plugin_name_args = plugin_name.split(' = ')
927                # plugin func return data.
928                for arg in plugin_name_args:
929                    if arg == plugin_name_args[-1]:
930                        plugin_name = arg
931                    else:
932                        plugin_resp = arg.split(',')
933                        # ['result1','result2']
934                        for x in plugin_resp:
935                            global_plugin_list.append(x)
936                            global_plugin_dict[x] = ""
937
938            # Walk the plugin args ['arg1','arg2'].
939            # Handle the case where the YAML 'plugin_args' statement is not declared.
940            if any('plugin_args' in d for d in plugin_cmd_list):
941                plugin_args = plugin_cmd_list[1]['plugin_args']
942                if plugin_args:
943                    plugin_args = self.yaml_args_populate(plugin_args)
944                else:
945                    plugin_args = []
946            else:
947                plugin_args = self.yaml_args_populate([])
948
949            # Pack the args arg1, arg2, ..., argn into the string
950            # "arg1","arg2","argn" to use as params for the function call.
951            parm_args_str = self.yaml_args_string(plugin_args)
952            if parm_args_str:
953                plugin_func = plugin_name + '(' + parm_args_str + ')'
954            else:
955                plugin_func = plugin_name + '()'
956
957            # Execute plugin function.
958            if global_plugin_dict:
959                resp = self.execute_python_eval(plugin_func)
960                self.response_args_data(resp)
961            else:
962                resp = self.execute_python_eval(plugin_func)
963            return resp
964        except Exception as e:
965            # Set the plugin error state.
966            plugin_error_dict['exit_on_error'] = True
967            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
968            pass
969
970    def response_args_data(self, plugin_resp):
971        r"""
972        Parse the plugin function response.
973
974        plugin_resp       Response data from plugin function.
975        """
976        resp_list = []
977        resp_data = ""
978        # There is nothing to update if no plugin return variables were declared.
979        if len(global_plugin_list) == 0 or plugin_resp == 'None':
980            return
981
982        if isinstance(plugin_resp, str):
983            resp_data = plugin_resp.strip('\r\n\t')
984            resp_list.append(resp_data)
985        elif isinstance(plugin_resp, bytes):
986            resp_data = str(plugin_resp, 'UTF-8').strip('\r\n\t')
987            resp_list.append(resp_data)
988        elif isinstance(plugin_resp, tuple):
989            if len(global_plugin_list) == 1:
990                resp_list.append(plugin_resp)
991            else:
992                resp_list = list(plugin_resp)
993                resp_list = [x.strip('\r\n\t') for x in resp_list]
994        elif isinstance(plugin_resp, list):
995            if len(global_plugin_list) == 1:
996                resp_list.append([x.strip('\r\n\t') for x in plugin_resp])
997            else:
998                resp_list = [x.strip('\r\n\t') for x in plugin_resp]
999        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
1000            resp_list.append(plugin_resp)
1001
1002        for idx, item in enumerate(resp_list, start=0):
1003            # Exit loop
1004            if idx >= len(global_plugin_list):
1005                break
1006            # Find the index of the return func in the list and
1007            # update the global func return dictionary.
1008            try:
1009                dict_idx = global_plugin_list[idx]
1010                global_plugin_dict[dict_idx] = item
1011            except (IndexError, ValueError) as e:
1012                self.logger.warning("\tWARN: response_args_data: %s" % e)
1013                pass
1014
1015        # Done updating the plugin dict; regardless of pass or fail,
1016        # clear all the list elements.
1017        global_plugin_list.clear()
1018
1019    def yaml_args_string(self, plugin_args):
1020        r"""
1021        Pack the args into string.
1022
1023        plugin_args            arg list ['arg1','arg2','argn']
1024        """
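        # Illustrative example (hypothetical args): ['arg1', 10, 'arg2'] is
        # packed into the string  "arg1",10,"arg2"  for the eval() call.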
1025        args_str = ''
1026        for args in plugin_args:
1027            if args:
1028                if isinstance(args, (int, float)):
1029                    args_str += str(args)
1030                elif args in global_plugin_type_list:
1031                    args_str += str(global_plugin_dict[args])
1032                else:
1033                    args_str += '"' + str(args.strip('\r\n\t')) + '"'
1034            # Append a comma after every element except the last.
1035            if args != plugin_args[-1]:
1036                args_str += ","
1037        return args_str
1038
1039    def yaml_args_populate(self, yaml_arg_list):
1040        r"""
1041        Decode ${MY_VAR} and load env data when read from YAML.
1042
1043        Description of argument(s):
1044        yaml_arg_list         arg list read from YAML
1045
1046        Example:
1047          - plugin_args:
1048            - arg1
1049            - arg2
1050
1051                  yaml_arg_list:  [arg1, arg2]
1052        """
1053        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1054        env_vars_list = list(self.env_dict)
1055
1056        if isinstance(yaml_arg_list, list):
1057            tmp_list = []
1058            for arg in yaml_arg_list:
1059                if isinstance(arg, (int, float)):
1060                    tmp_list.append(arg)
1061                    continue
1062                elif isinstance(arg, str):
1063                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1064                    tmp_list.append(arg_str)
1065                else:
1066                    tmp_list.append(arg)
1067
1068            # return populated list.
1069            return tmp_list
1070
1071    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
1072        r"""
1073        Update ${MY_VAR} and my_plugin_vars
1074
1075        Description of argument(s):
1076        yaml_arg_str         arg string read from YAML
1077
1078        Example:
1079            - cat ${MY_VAR}
1080            - ls -AX my_plugin_var
1081        """
1082        # Parse the string for env vars.
1083        try:
1084            # Example, list of matching env vars ['username', 'password', 'hostname']
1085            # Extra escape \ for special symbols; the pattern '\$\{([^\}]+)\}' works as well.
1086            var_name_regex = '\\$\\{([^\\}]+)\\}'
1087            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
1088            for var in env_var_names_list:
1089                env_var = os.environ[var]
1090                env_replace = '${' + var + '}'
1091                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
1092        except Exception as e:
1093            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
1094            pass
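        # Illustrative example (hypothetical value): with os.environ['hostname']
        # set to '1.2.3.4', the block above turns the YAML string
        #     'ping -c 4 ${hostname}'
        # into
        #     'ping -c 4 1.2.3.4'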
1095
1096        # Parse the string for plugin vars.
1097        try:
1098            # Example, list of plugin vars ['my_username', 'my_data']
1099            plugin_var_name_list = global_plugin_dict.keys()
1100            for var in plugin_var_name_list:
1101                # Skip env vars already populated by the block above.
1102                if var in env_var_names_list:
1103                    continue
1104                # If this plugin var exists but has an empty value in the dict, don't replace it.
1105                # This is either a YAML plugin statement used incorrectly or
1106                # a user-added plugin var which has not been populated.
1107                if yaml_arg_str in global_plugin_dict:
1108                    if isinstance(global_plugin_dict[var], (list, dict)):
1109                        # List data type or dict can't be replaced, use directly
1110                        # in eval function call.
1111                        global_plugin_type_list.append(var)
1112                    else:
1113                        yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
1114                # Just a string like filename or command.
1115                else:
1116                    yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
1117        except (IndexError, ValueError) as e:
1118            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
1119            pass
1120
1121        return yaml_arg_str
1122
1123    def plugin_error_check(self, plugin_dict):
1124        r"""
1125        Process the plugin_error directive declared in a plugin block.
1126
1127        Description of argument(s):
1128        plugin_dict        Plugin block (list of dicts) read from YAML.
1129        """
1130        if any('plugin_error' in d for d in plugin_dict):
1131            for d in plugin_dict:
1132                if 'plugin_error' in d:
1133                    value = d['plugin_error']
1134                    # Return whether the referenced error flag has been set by a plugin.
1135                    return plugin_error_dict[value]
1136