#!/usr/bin/env python

r"""
See class prolog below for details.
"""

import os
import re
import sys
import yaml
import json
import time
import logging
import platform
from errno import EACCES, EPERM
import subprocess
from ssh_utility import SSHRemoteclient
from telnet_utility import TelnetRemoteclient

20r"""
21User define plugins python functions.
22
23It will imports files from directory plugins
24
25plugins
26├── file1.py
27└── file2.py
28
29Example how to define in YAML:
30 - plugin:
31   - plugin_name: plugin.foo_func.foo_func_yaml
32     - plugin_args:
33       - arg1
34       - arg2
35"""
abs_path = os.path.abspath(os.path.dirname(sys.argv[0]))
plugin_dir = abs_path + '/plugins'
try:
    for module in os.listdir(plugin_dir):
        if module == '__init__.py' or module[-3:] != '.py':
            continue
        plugin_module = "plugins." + module[:-3]
        # To access the module plugin.<module name>.<function>
        # Example: plugin.foo_func.foo_func_yaml()
        try:
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            print("PLUGIN: Module import failed: %s" % module)
            pass
except FileNotFoundError as e:
    print("PLUGIN: %s" % e)
    pass

54r"""
55This is for plugin functions returning data or responses to the caller
56in YAML plugin setup.
57
58Example:
59
60    - plugin:
61      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
62      - plugin_args:
63        - ${hostname}
64        - ${username}
65        - ${password}
66        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
67     - plugin:
68        - plugin_name: plugin.print_vars.print_vars
69        - plugin_args:
70          - version
71
72where first plugin "version" var is used by another plugin in the YAML
73block or plugin
74
75"""
global global_log_store_path
global global_plugin_dict
global global_plugin_list
# Hold the plugin return values in a dict and the plugin return var names in a list.
global_plugin_dict = {}
global_plugin_list = []
# Hold the declared return var names whose returned values are a list or dict.
# This name list is used to look up the plugin dict when building eval() args.
# Example: ['version']
global_plugin_type_list = []
global_log_store_path = ''
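
# Illustrative state (assumed values) after a YAML statement such as
#   - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
# has been parsed and executed:
#   global_plugin_list -> ['version']   (cleared once the dict is updated)
#   global_plugin_dict -> {'version': '7.2'}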

# Plugin error state defaults.
plugin_error_dict = {
    'exit_on_error': False,
    'continue_on_error': False,
}
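
# Hedged example of the YAML directive that plugin_error_check() looks for;
# the value must be one of the keys defined above:
#   - plugin:
#     - plugin_name: plugin.foo_func.my_func
#     - plugin_error: exit_on_error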


class FFDCCollector:

    r"""
    Execute commands from configuration file to collect log files.
    Fetch and store generated files at the specified location.

    """

    def __init__(self,
                 hostname,
                 username,
                 password,
                 ffdc_config,
                 location,
                 remote_type,
                 remote_protocol,
                 env_vars,
                 econfig,
                 log_level):
        r"""
        Description of argument(s):

        hostname            Name/IP of the targeted (remote) system.
        username            User on the targeted system with access to FFDC files.
        password            Password for the user on the targeted system.
        ffdc_config         Configuration file listing commands and files for FFDC.
        location            Where to store collected FFDC.
        remote_type         OS type of the remote host.
        remote_protocol     Protocol to use to collect data.
        env_vars            User-defined CLI env vars '{"key" : "value"}'.
        econfig             User-defined env vars YAML file.
        log_level           Log level for the script logger (e.g. INFO, DEBUG).

        """
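
        # Example (hypothetical values) of how the CLI front end might create
        # the collector:
        #   FFDCCollector('1.2.3.4', 'root', '0penBmc', 'ffdc_config.yaml',
        #                 '/tmp/ffdc', 'OPENBMC', 'ALL', None, None, 'INFO')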

        self.hostname = hostname
        self.username = username
        self.password = password
        self.ffdc_config = ffdc_config
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ''
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the timestamp is at second granularity, these values are set here
        # so that all files for this run have the same timestamp
        # and are saved in the same directory.
        # self.location == local system for now
        self.set_ffdc_defaults()

        # Logger for this run.  Must be created after set_ffdc_defaults().
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify that the top level directory exists for storage.
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load the default or user-defined YAML configuration file.
            with open(self.ffdc_config, 'r') as file:
                self.ffdc_actions = yaml.load(file, Loader=yaml.FullLoader)

            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n" % (self.target_type, self.ffdc_config))
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from the user.
        self.logger.info("\n\tENV: User-defined input YAML variables")
        self.env_dict = {}
        self.load_env()

    def verify_script_env(self):

        # Imported here only so their versions can be logged.
        import click
        import paramiko

        run_env_ok = True

        redfishtool_version = self.run_tool_cmd('redfishtool -V').split(' ')[2].strip('\n')
        ipmitool_version = self.run_tool_cmd('ipmitool -V').split(' ')[2]

        self.logger.info("\n\t---- Script host environment ----")
        self.logger.info("\t{:<10}  {:<10}".format('Script hostname', os.uname()[1]))
        self.logger.info("\t{:<10}  {:<10}".format('Script host os', platform.platform()))
        self.logger.info("\t{:<10}  {:>10}".format('Python', platform.python_version()))
        self.logger.info("\t{:<10}  {:>10}".format('PyYAML', yaml.__version__))
        self.logger.info("\t{:<10}  {:>10}".format('click', click.__version__))
        self.logger.info("\t{:<10}  {:>10}".format('paramiko', paramiko.__version__))
        self.logger.info("\t{:<10}  {:>9}".format('redfishtool', redfishtool_version))
        self.logger.info("\t{:<10}  {:>12}".format('ipmitool', ipmitool_version))

        if eval(yaml.__version__.replace('.', ',')) < (5, 4, 1):
            self.logger.error("\n\tERROR: Python or python packages do not meet minimum version requirement.")
            self.logger.error("\tERROR: PyYAML version 5.4.1 or higher is needed.\n")
            run_env_ok = False

        self.logger.info("\t---- End script host environment ----")
        return run_env_ok

    def script_logging(self,
                       log_level_attr):
        r"""
        Create logger

        """
        self.logger = logging.getLogger()
        self.logger.setLevel(log_level_attr)
        log_file_handler = logging.FileHandler(self.ffdc_dir_path + "collector.log")

        stdout_handler = logging.StreamHandler(sys.stdout)
        self.logger.addHandler(log_file_handler)
        self.logger.addHandler(stdout_handler)

        # Turn off paramiko INFO logging
        logging.getLogger("paramiko").setLevel(logging.WARNING)

    def target_is_pingable(self):
        r"""
        Check if target system is ping-able.

        """
        response = os.system("ping -c 1 %s > /dev/null 2>&1" % self.hostname)
        if response == 0:
            self.logger.info("\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname)
            return True
        else:
            self.logger.error(
                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n" % self.hostname)
            sys.exit(-1)

    def collect_ffdc(self):
        r"""
        Initiate FFDC collection depending on the requested protocol.

        """

        self.logger.info("\n\t---- Start communicating with %s ----" % self.hostname)
        self.start_time = time.time()

        # Find the list of protocols supported for this target type.
        check_protocol_list = []
        config_dict = self.ffdc_actions

        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            for k, v in config_dict[target_type].items():
                if config_dict[target_type][k]['PROTOCOL'][0] not in check_protocol_list:
                    check_protocol_list.append(config_dict[target_type][k]['PROTOCOL'][0])

        self.logger.info("\n\t %s protocol type: %s" % (self.target_type, check_protocol_list))

        verified_working_protocol = self.verify_protocol(check_protocol_list)

        if verified_working_protocol:
            self.logger.info("\n\t---- Completed protocol pre-requisite check ----\n")

        # Verify that the top level directory exists for storage.
        self.validate_local_store(self.location)

        if ((self.remote_protocol not in verified_working_protocol) and (self.remote_protocol != 'ALL')):
            self.logger.info("\n\tWorking protocol list: %s" % verified_working_protocol)
            self.logger.error(
                '\tERROR: Requested protocol %s is not in working protocol list.\n'
                % self.remote_protocol)
            sys.exit(-1)
        else:
            self.generate_ffdc(verified_working_protocol)

    def ssh_to_target_system(self):
        r"""
        Open an SSH connection to the targeted system.

        """

        self.ssh_remoteclient = SSHRemoteclient(self.hostname,
                                                self.username,
                                                self.password)

        if self.ssh_remoteclient.ssh_remoteclient_login():
            self.logger.info("\n\t[Check] %s SSH connection established.\t [OK]" % self.hostname)

            # Check the scp connection.
            # If the scp connection fails,
            # continue with FFDC generation but skip scp of files to the local host.
            self.ssh_remoteclient.scp_connection()
            return True
        else:
            self.logger.info("\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]" % self.hostname)
            return False

    def telnet_to_target_system(self):
        r"""
        Open a telnet connection to the targeted system.
        """
        self.telnet_remoteclient = TelnetRemoteclient(self.hostname,
                                                      self.username,
                                                      self.password)
        if self.telnet_remoteclient.tn_remoteclient_login():
            self.logger.info("\n\t[Check] %s Telnet connection established.\t [OK]" % self.hostname)
            return True
        else:
            self.logger.info("\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]" % self.hostname)
            return False

    def generate_ffdc(self, working_protocol_list):
        r"""
        Determine actions based on remote host type.

        Description of argument(s):
        working_protocol_list    List of confirmed working protocols to connect to remote host.
        """

        self.logger.info("\n\t---- Executing commands on " + self.hostname + " ----")
        self.logger.info("\n\tWorking protocol list: %s" % working_protocol_list)

        config_dict = self.ffdc_actions
        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
            global_plugin_dict['global_log_store_path'] = self.ffdc_dir_path
            self.logger.info("\tSystem Type: %s" % target_type)
            for k, v in config_dict[target_type].items():

                if self.remote_protocol not in working_protocol_list \
                        and self.remote_protocol != 'ALL':
                    continue

                protocol = config_dict[target_type][k]['PROTOCOL'][0]

                if protocol in working_protocol_list:
                    if protocol == 'SSH' or protocol == 'SCP':
                        self.protocol_ssh(protocol, target_type, k)
                    elif protocol == 'TELNET':
                        self.protocol_telnet(target_type, k)
                    elif protocol == 'REDFISH' or protocol == 'IPMI' or protocol == 'SHELL':
                        self.protocol_execute(protocol, target_type, k)
                else:
                    self.logger.error("\n\tERROR: %s is not available for %s." % (protocol, self.hostname))

        # Close network connections after collecting all files.
        self.elapsed_time = time.strftime("%H:%M:%S", time.gmtime(time.time() - self.start_time))
        if self.ssh_remoteclient:
            self.ssh_remoteclient.ssh_remoteclient_disconnect()
        if self.telnet_remoteclient:
            self.telnet_remoteclient.tn_remoteclient_disconnect()

    def protocol_ssh(self,
                     protocol,
                     target_type,
                     sub_type):
        r"""
        Perform actions using SSH and SCP protocols.

        Description of argument(s):
        protocol            Protocol to execute.
        target_type         OS Type of remote host.
        sub_type            Group type of commands.
        """

        if protocol == 'SCP':
            self.group_copy(self.ffdc_actions[target_type][sub_type])
        else:
            self.collect_and_copy_ffdc(self.ffdc_actions[target_type][sub_type])

    def protocol_telnet(self,
                        target_type,
                        sub_type):
        r"""
        Perform actions using telnet protocol.

        Description of argument(s):
        target_type          OS Type of remote host.
        sub_type             Group type of commands.
        """
        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, 'TELNET'))
        telnet_files_saved = []
        progress_counter = 0
        list_of_commands = self.ffdc_actions[target_type][sub_type]['COMMANDS']
        for index, each_cmd in enumerate(list_of_commands, start=0):
            command_txt, command_timeout = self.unpack_command(each_cmd)
            result = self.telnet_remoteclient.execute_command(command_txt, command_timeout)
            if result:
                try:
                    targ_file = self.ffdc_actions[target_type][sub_type]['FILES'][index]
                except IndexError:
                    targ_file = command_txt
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from telnet %s." % each_cmd)
                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
                targ_file_with_path = (self.ffdc_dir_path
                                       + self.ffdc_prefix
                                       + targ_file)
                # Create a new file and write the command output to it.
                with open(targ_file_with_path, 'w') as fp:
                    fp.write(result)
                    telnet_files_saved.append(targ_file)
            progress_counter += 1
            self.print_progress(progress_counter)
        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
        for file in telnet_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")

    def protocol_execute(self,
                         protocol,
                         target_type,
                         sub_type):
        r"""
        Perform actions for a given protocol.

        Description of argument(s):
        protocol            Protocol to execute.
        target_type         OS Type of remote host.
        sub_type            Group type of commands.
        """

        self.logger.info("\n\t[Run] Executing commands to %s using %s" % (self.hostname, protocol))
        executed_files_saved = []
        progress_counter = 0
        list_of_cmd = self.get_command_list(self.ffdc_actions[target_type][sub_type])
        for index, each_cmd in enumerate(list_of_cmd, start=0):
            plugin_call = False
            if isinstance(each_cmd, dict):
                if 'plugin' in each_cmd:
                    # If the error flag is set and the plugin explicitly
                    # requested to skip execution on error.
                    if plugin_error_dict['exit_on_error'] and \
                            self.plugin_error_check(each_cmd['plugin']):
                        self.logger.info("\n\t[PLUGIN-ERROR] exit_on_error: %s" %
                                         plugin_error_dict['exit_on_error'])
                        self.logger.info("\t[PLUGIN-SKIP] %s" %
                                         each_cmd['plugin'][0])
                        continue
                    plugin_call = True
                    # Call the plugin.
                    self.logger.info("\n\t[PLUGIN-START]")
                    result = self.execute_plugin_block(each_cmd['plugin'])
                    self.logger.info("\t[PLUGIN-END]\n")
            else:
                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)

            if not plugin_call:
                result = self.run_tool_cmd(each_cmd)
            if result:
                try:
                    file_name = self.get_file_list(self.ffdc_actions[target_type][sub_type])[index]
                    # If the file is specified as None, skip saving the output.
                    if file_name == "None":
                        continue
                    targ_file = self.yaml_env_and_plugin_vars_populate(file_name)
                except IndexError:
                    targ_file = each_cmd.split('/')[-1]
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from %s." % each_cmd)
                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)

                targ_file_with_path = (self.ffdc_dir_path
                                       + self.ffdc_prefix
                                       + targ_file)

                # Create a new file and write the result to it.
                with open(targ_file_with_path, 'w') as fp:
                    if isinstance(result, dict):
                        fp.write(json.dumps(result))
                    else:
                        fp.write(result)
                    executed_files_saved.append(targ_file)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

        for file in executed_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")

    def collect_and_copy_ffdc(self,
                              ffdc_actions_for_target_type,
                              form_filename=False):
        r"""
        Send commands in ffdc_config file to targeted system.

        Description of argument(s):
        ffdc_actions_for_target_type     commands and files for the selected remote host type.
        form_filename                    if true, pre-pend self.target_type to filename
        """

        # Executing commands, if any
        self.ssh_execute_ffdc_commands(ffdc_actions_for_target_type,
                                       form_filename)

        # Copying files
        if self.ssh_remoteclient.scpclient:
            self.logger.info("\n\n\tCopying FFDC files from remote system %s.\n" % self.hostname)

            # Retrieving files from target system
            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
            self.scp_ffdc(self.ffdc_dir_path, self.ffdc_prefix, form_filename, list_of_files)
        else:
            self.logger.info("\n\n\tSkip copying FFDC files from remote system %s.\n" % self.hostname)

    def get_command_list(self,
                         ffdc_actions_for_target_type):
        r"""
        Fetch list of commands from configuration file

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
        try:
            list_of_commands = ffdc_actions_for_target_type['COMMANDS']
        except KeyError:
            list_of_commands = []
        return list_of_commands

    def get_file_list(self,
                      ffdc_actions_for_target_type):
        r"""
        Fetch list of files from configuration file

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
        try:
            list_of_files = ffdc_actions_for_target_type['FILES']
        except KeyError:
            list_of_files = []
        return list_of_files

    def unpack_command(self,
                       command):
        r"""
        Unpack command from config file

        Description of argument(s):
        command    Command from config file.
        """
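        # Both YAML spellings below are handled; a minimal sketch with
        # hypothetical commands:
        #   COMMANDS:
        #     - cat /etc/os-release        ->  ('cat /etc/os-release', 60)
        #     - obmcutil state: 120        ->  ('obmcutil state', 120)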
        if isinstance(command, dict):
            command_txt = next(iter(command))
            command_timeout = next(iter(command.values()))
        elif isinstance(command, str):
            command_txt = command
            # Default command timeout 60 seconds
            command_timeout = 60

        return command_txt, command_timeout

    def ssh_execute_ffdc_commands(self,
                                  ffdc_actions_for_target_type,
                                  form_filename=False):
        r"""
        Send commands in ffdc_config file to targeted system.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        form_filename                   if true, pre-pend self.target_type to filename
        """
        self.logger.info("\n\t[Run] Executing commands on %s using %s"
                         % (self.hostname, ffdc_actions_for_target_type['PROTOCOL'][0]))

        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
        # If the command list is empty, return.
        if not list_of_commands:
            return

        progress_counter = 0
        for command in list_of_commands:
            command_txt, command_timeout = self.unpack_command(command)

            if form_filename:
                command_txt = str(command_txt % self.target_type)

            cmd_exit_code, err, response = \
                self.ssh_remoteclient.execute_command(command_txt, command_timeout)

            if cmd_exit_code:
                self.logger.warning(
                    "\n\t\t[WARN] %s exits with code %s." % (command_txt, str(cmd_exit_code)))
                self.logger.warning("\t\t[WARN] %s " % err)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

    def group_copy(self,
                   ffdc_actions_for_target_type):
        r"""
        scp group of files (wild card) from remote host.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info("\n\tCopying files from remote system %s via SCP.\n" % self.hostname)

            list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
            # If the command list is empty, return.
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                cmd_exit_code, err, response = \
                    self.ssh_remoteclient.execute_command(command)

                # If the file does not exist, take no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    scp_result = \
                        self.ssh_remoteclient.scp_file_from_remote(response.split('\n'),
                                                                   self.ffdc_dir_path)
                    if scp_result:
                        self.logger.info("\t\tSuccessfully copied from " + self.hostname + ':' + command)
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            self.logger.info("\n\n\tSkip copying files from remote system %s.\n" % self.hostname)

    def scp_ffdc(self,
                 targ_dir_path,
                 targ_file_prefix,
                 form_filename,
                 file_list=None,
                 quiet=None):
        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be pre-pended to each
                                        target file's name.
        form_filename                   If true, pre-pend self.target_type to each filename.
        file_list                       A list of files to scp from the targeted system to this system.
        quiet                           If true, print a progress counter instead of per-file messages.

        """

        progress_counter = 0
        for filename in file_list:
            if form_filename:
                filename = str(filename % self.target_type)
            source_file_path = filename
            targ_file_path = targ_dir_path + targ_file_prefix + filename.split('/')[-1]

            # If the source file name contains a wild card, copy the file(s) as is.
            if '*' in source_file_path:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, self.ffdc_dir_path)
            else:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, targ_file_path)

            if not quiet:
                if scp_result:
                    self.logger.info(
                        "\t\tSuccessfully copied from " + self.hostname + ':' + source_file_path + ".\n")
                else:
                    self.logger.info(
                        "\t\tFailed to copy from " + self.hostname + ':' + source_file_path + ".\n")
            else:
                progress_counter += 1
                self.print_progress(progress_counter)

    def set_ffdc_defaults(self):
        r"""
        Set default values for self.ffdc_dir_path and self.ffdc_prefix.
        Collected FFDC files will be stored in directory /self.location/hostname_timestr/.
        Individual FFDC files will be named timestr_filename.

        Description of class variables:
        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.

        self.ffdc_prefix    The prefix to be given to each ffdc file name.

        """

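        # Example layout with assumed values:
        #   self.location      -> /tmp/ffdc/OPENBMC
        #   self.ffdc_dir_path -> /tmp/ffdc/OPENBMC/wsbmc01_20230415-103022/
        #   self.ffdc_prefix   -> 20230415-103022_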
        timestr = time.strftime("%Y%m%d-%H%M%S")
        self.ffdc_dir_path = self.location + "/" + self.hostname + "_" + timestr + "/"
        self.ffdc_prefix = timestr + "_"
        self.validate_local_store(self.ffdc_dir_path)

    def validate_local_store(self, dir_path):
        r"""
        Ensure path exists to store FFDC files locally.

        Description of variable:
        dir_path  The dir path where collected ffdc data files will be stored.

        """

        if not os.path.exists(dir_path):
            try:
                os.makedirs(dir_path, 0o755)
            except (IOError, OSError) as e:
                # PermissionError
                if e.errno == EPERM or e.errno == EACCES:
                    self.logger.error(
                        '\tERROR: os.makedirs %s failed with PermissionError.\n' % dir_path)
                else:
                    self.logger.error(
                        '\tERROR: os.makedirs %s failed with %s.\n' % (dir_path, e.strerror))
                sys.exit(-1)

    def print_progress(self, progress):
        r"""
        Print a '+' for each unit of activity progress.

        Description of variable:
        progress  Progress counter.

        """

        sys.stdout.write("\r\t" + "+" * progress)
        sys.stdout.flush()
        time.sleep(.1)

    def verify_redfish(self):
        r"""
        Verify remote host has redfish service active

        """
        redfish_parm = 'redfishtool -r ' \
                       + self.hostname + ' -S Always raw GET /redfish/v1/'
        return self.run_tool_cmd(redfish_parm, True)

    def verify_ipmi(self):
        r"""
        Verify remote host has IPMI LAN service active

        """
        if self.target_type == 'OPENBMC':
            ipmi_parm = 'ipmitool -I lanplus -C 17  -U ' + self.username + ' -P ' \
                + self.password + ' -H ' + self.hostname + ' power status'
        else:
            ipmi_parm = 'ipmitool -I lanplus  -P ' \
                + self.password + ' -H ' + self.hostname + ' power status'

        return self.run_tool_cmd(ipmi_parm, True)

    def run_tool_cmd(self,
                     parms_string,
                     quiet=False):
        r"""
        Run CLI standard tool or scripts.

        Description of variable:
        parms_string         tool command options.
        quiet                do not print tool error message if True
        """
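        # Hedged usage sketch (hypothetical host); parms_string is passed
        # straight to the shell and stdout is returned as a string:
        #   stdout = self.run_tool_cmd('redfishtool -r 1.2.3.4 raw GET /redfish/v1/')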

        result = subprocess.run([parms_string],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True,
                                universal_newlines=True)

        if result.stderr and not quiet:
            self.logger.error('\n\t\tERROR with %s ' % parms_string)
            self.logger.error('\t\t' + result.stderr)

        return result.stdout

    def verify_protocol(self, protocol_list):
        r"""
        Perform protocol working check.

        Description of argument(s):
        protocol_list        List of protocols.
        """

        tmp_list = []
        if self.target_is_pingable():
            tmp_list.append("SHELL")

        for protocol in protocol_list:
            if self.remote_protocol != 'ALL':
                if self.remote_protocol != protocol:
                    continue

            # Only check SSH/SCP once for both protocols.
            if (protocol == 'SSH' or protocol == 'SCP') and protocol not in tmp_list:
                if self.ssh_to_target_system():
                    # Add only what the user asked for.
                    if self.remote_protocol != 'ALL':
                        tmp_list.append(self.remote_protocol)
                    else:
                        tmp_list.append('SSH')
                        tmp_list.append('SCP')

            if protocol == 'TELNET':
                if self.telnet_to_target_system():
                    tmp_list.append(protocol)

            if protocol == 'REDFISH':
                if self.verify_redfish():
                    tmp_list.append(protocol)
                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [OK]" % self.hostname)
                else:
                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]" % self.hostname)

            if protocol == 'IPMI':
                if self.verify_ipmi():
                    tmp_list.append(protocol)
                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [OK]" % self.hostname)
                else:
                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]" % self.hostname)

        return tmp_list

    def load_env(self):
        r"""
        Load user-supplied environment variables and export them for use
        in YAML commands and plugins.

        """
        # These are the env vars a user can reference in YAML; they are loaded at runtime.
        # Example YAML:
        # -COMMANDS:
        #    - my_command ${hostname}  ${username}   ${password}
        os.environ['hostname'] = self.hostname
        os.environ['username'] = self.username
        os.environ['password'] = self.password

        # Append the default env vars.
        self.env_dict['hostname'] = self.hostname
        self.env_dict['username'] = self.username
        self.env_dict['password'] = self.password

        try:
            tmp_env_dict = {}
            if self.env_vars:
                tmp_env_dict = json.loads(self.env_vars)
                # Export the user CLI ENV vars.
                for key, value in tmp_env_dict.items():
                    os.environ[key] = value
                    self.env_dict[key] = str(value)

            if self.econfig:
                with open(self.econfig, 'r') as file:
                    tmp_env_dict = yaml.load(file, Loader=yaml.FullLoader)
                # Export ENV vars.
                for key, value in tmp_env_dict['env_params'].items():
                    os.environ[key] = str(value)
                    self.env_dict[key] = str(value)
        except json.decoder.JSONDecodeError as e:
            self.logger.error("\n\tERROR: %s " % e)
            sys.exit(-1)

        # This is to mask the password from displaying on the console.
        mask_dict = self.env_dict.copy()
        for k, v in mask_dict.items():
            if k.lower().find("password") != -1:
                hidden_text = []
                hidden_text.append(v)
                password_regex = '(' +\
                    '|'.join([re.escape(x) for x in hidden_text]) + ')'
                mask_dict[k] = re.sub(password_regex, "********", v)

        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))

    def execute_python_eval(self, eval_string):
        r"""
        Execute a qualified python function using eval.

        Description of argument(s):
        eval_string        Execute the python object.

        Example:
                eval(plugin.foo_func.foo_func(10))
        """
        # Default to None so the caller gets a defined value even if eval fails.
        result = None
        try:
            self.logger.info("\tExecuting plugin func()")
            self.logger.debug("\tCall func: %s" % eval_string)
            result = eval(eval_string)
            self.logger.info("\treturn: %s" % str(result))
        except (ValueError, SyntaxError, NameError) as e:
            self.logger.error("\tERROR: execute_python_eval: %s" % e)
            # Set the plugin error state.
            plugin_error_dict['exit_on_error'] = True
            pass

        return result

    def execute_plugin_block(self, plugin_cmd_list):
        r"""
        Pack the plugin command into a qualified python string object.

        Description of argument(s):
        plugin_cmd_list       Plugin block read from YAML
                              [{'plugin_name': 'plugin.foo_func.my_func'},
                               {'plugin_args': [10]}]

        Example:
            - plugin:
              - plugin_name: plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2

            - plugin:
              - plugin_name: result = plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2

            - plugin:
              - plugin_name: result1,result2 = plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2
        """
        try:
            plugin_name = plugin_cmd_list[0]['plugin_name']
            # An equals separator means the plugin function returns a result.
            if ' = ' in plugin_name:
                # Ex. ['result', 'plugin.foo_func.my_func']
                plugin_name_args = plugin_name.split(' = ')
                # The plugin func returns data.
                for arg in plugin_name_args:
                    if arg == plugin_name_args[-1]:
                        plugin_name = arg
                    else:
                        plugin_resp = arg.split(',')
                        # ['result1','result2']
                        for x in plugin_resp:
                            global_plugin_list.append(x)
                            global_plugin_dict[x] = ""

            # Walk the plugin args ['arg1','arg2'].
            # Handle the case where the YAML 'plugin_args' statement is not declared.
            if any('plugin_args' in d for d in plugin_cmd_list):
                plugin_args = plugin_cmd_list[1]['plugin_args']
                if plugin_args:
                    plugin_args = self.yaml_args_populate(plugin_args)
                else:
                    plugin_args = []
            else:
                plugin_args = self.yaml_args_populate([])

            # Pack the args arg1, arg2, .... argn into
            # "arg1","arg2","argn"  string as params for function.
            parm_args_str = self.yaml_args_string(plugin_args)
            if parm_args_str:
                plugin_func = plugin_name + '(' + parm_args_str + ')'
            else:
                plugin_func = plugin_name + '()'

            # Execute the plugin function.
            if global_plugin_dict:
                resp = self.execute_python_eval(plugin_func)
                self.response_args_data(resp)
            else:
                resp = self.execute_python_eval(plugin_func)
            return resp
        except Exception as e:
            # Set the plugin error state.
            plugin_error_dict['exit_on_error'] = True
            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
            pass

    def response_args_data(self, plugin_resp):
        r"""
        Parse the plugin function response.

        Description of argument(s):
        plugin_resp       Response data from plugin function.
        """
        resp_list = []
        resp_data = ""
        # There is nothing to update in the plugin response.
        if len(global_plugin_list) == 0 or plugin_resp == 'None':
            return

        if isinstance(plugin_resp, str):
            resp_data = plugin_resp.strip('\r\n\t')
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, bytes):
            resp_data = str(plugin_resp, 'UTF-8').strip('\r\n\t')
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, tuple):
            if len(global_plugin_list) == 1:
                resp_list.append(plugin_resp)
            else:
                resp_list = list(plugin_resp)
                resp_list = [x.strip('\r\n\t') for x in resp_list]
        elif isinstance(plugin_resp, list):
            if len(global_plugin_list) == 1:
                resp_list.append([x.strip('\r\n\t') for x in plugin_resp])
            else:
                resp_list = [x.strip('\r\n\t') for x in plugin_resp]
        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
            resp_list.append(plugin_resp)

        for idx, item in enumerate(resp_list, start=0):
            # Exit the loop once all declared return vars are filled.
            if idx >= len(global_plugin_list):
                break
            # Find the name of the return var at this index and
            # update the global plugin return dictionary.
            try:
                dict_idx = global_plugin_list[idx]
                global_plugin_dict[dict_idx] = item
            except (IndexError, ValueError) as e:
                self.logger.warning("\tWARN: response_args_data: %s" % e)
                pass

        # Done updating the plugin dict; whether it passed or failed,
        # clear all the list elements.
        global_plugin_list.clear()

    def yaml_args_string(self, plugin_args):
        r"""
        Pack the args into a string.

        Description of argument(s):
        plugin_args            arg list ['arg1', 'arg2', 'argn']
        """
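        # Sketch of the packing performed below (assumed inputs):
        #   ['arg1', 'arg2']   ->  '"arg1","arg2"'
        #   [10, 'text']       ->  '10,"text"'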
        args_str = ''
        for args in plugin_args:
            if args:
                if isinstance(args, (int, float)):
                    args_str += str(args)
                elif args in global_plugin_type_list:
                    args_str += str(global_plugin_dict[args])
                else:
                    args_str += '"' + str(args.strip('\r\n\t')) + '"'
            # Add a comma after every element except the last.
            if args != plugin_args[-1]:
                args_str += ","
        return args_str

    def yaml_args_populate(self, yaml_arg_list):
        r"""
        Decode ${MY_VAR} and load env data when read from YAML.

        Description of argument(s):
        yaml_arg_list         arg list read from YAML

        Example:
          - plugin_args:
            - arg1
            - arg2

                  yaml_arg_list:  [arg1, arg2]
        """
        # Get the env loaded keys as list ['hostname', 'username', 'password'].
        env_vars_list = list(self.env_dict)

        if isinstance(yaml_arg_list, list):
            tmp_list = []
            for arg in yaml_arg_list:
                if isinstance(arg, (int, float)):
                    tmp_list.append(arg)
                    continue
                elif isinstance(arg, str):
                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
                    tmp_list.append(arg_str)
                else:
                    tmp_list.append(arg)

            # Return the populated list.
            return tmp_list

    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
        r"""
        Replace ${MY_VAR} env references and plugin variable names with their values.

        Description of argument(s):
        yaml_arg_str         arg string read from YAML

        Example:
            - cat ${MY_VAR}
            - ls -AX my_plugin_var
        """
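        # Sketch with assumed values: if os.environ['hostname'] == '1.2.3.4' and
        # global_plugin_dict == {'version': '7.2'}, then
        #   'cat ${hostname}'   ->  'cat 1.2.3.4'
        #   'ls -AX version'    ->  'ls -AX 7.2'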
        # Parse the string for env vars.
        try:
            # Example, list of matching env vars: ['username', 'password', 'hostname'].
            # Extra escapes are needed for the special symbols; '\$\{([^\}]+)\}' works well.
            var_name_regex = '\\$\\{([^\\}]+)\\}'
            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
            for var in env_var_names_list:
                env_var = os.environ[var]
                env_replace = '${' + var + '}'
                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
        except Exception as e:
            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
            pass

        # Parse the string for plugin vars.
        try:
            # Example, list of plugin vars: ['my_username', 'my_data'].
            plugin_var_name_list = global_plugin_dict.keys()
            for var in plugin_var_name_list:
                # Skip env vars already handled in the block above.
                if var in env_var_names_list:
                    continue
                # If this plugin var exists but has an empty value in the dict, don't replace it.
                # This is either a YAML plugin statement used incorrectly or
                # a user-added plugin var which is not populated.
                if yaml_arg_str in global_plugin_dict:
                    if isinstance(global_plugin_dict[var], (list, dict)):
                        # List or dict data types can't be replaced inline; they are
                        # used directly in the eval() function call.
                        global_plugin_type_list.append(var)
                    else:
                        yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
                # Just a string like a filename or command.
                else:
                    yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
        except (IndexError, ValueError) as e:
            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
            pass

        return yaml_arg_str

    def plugin_error_check(self, plugin_dict):
        r"""
        Process the plugin error directive from a YAML plugin block.

        Description of argument(s):
        plugin_dict        List of dictionaries from the YAML plugin block.
        """
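        # Sketch of the list this method receives when a YAML block declares a
        # plugin_error directive (values map to keys of plugin_error_dict):
        #   [{'plugin_name': 'plugin.foo_func.my_func'},
        #    {'plugin_error': 'exit_on_error'}]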
        if any('plugin_error' in d for d in plugin_dict):
            for d in plugin_dict:
                if 'plugin_error' in d:
                    value = d['plugin_error']
                    # Return whether the referenced error flag is currently set.
                    return plugin_error_dict[value]
