#!/usr/bin/env python3

r"""
See the class prologue below for details.
"""

import os
import re
import sys
import yaml
import json
import time
import logging
import platform
from errno import EACCES, EPERM
import subprocess

sys.path.extend([f'./{name[0]}' for name in os.walk(".") if os.path.isdir(name[0])])
from ssh_utility import SSHRemoteclient
from telnet_utility import TelnetRemoteclient

r"""
User-defined plugin Python functions.

Plugin modules are imported from the plugins directory:

plugins
├── file1.py
└── file2.py

Example of how to declare a plugin in YAML:
    - plugin:
      - plugin_name: plugin.foo_func.foo_func_yaml
      - plugin_args:
        - arg1
        - arg2
"""
plugin_dir = __file__.split(__file__.split("/")[-1])[0] + '/plugins'
sys.path.append(plugin_dir)
try:
    for module in os.listdir(plugin_dir):
        if module == '__init__.py' or module[-3:] != '.py':
            continue
        plugin_module = "plugins." + module[:-3]
        # To access the module plugin.<module name>.<function>
        # Example: plugin.foo_func.foo_func_yaml()
        try:
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            print("PLUGIN: Module import failed: %s" % module)
            pass
except FileNotFoundError as e:
    print("PLUGIN: %s" % e)
    pass

r"""
Plugin functions can return data or responses to the caller, and the returned
values can be consumed by later plugin blocks in the YAML setup.

Example:

    - plugin:
      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
      - plugin_args:
        - ${hostname}
        - ${username}
        - ${password}
        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
    - plugin:
      - plugin_name: plugin.print_vars.print_vars
      - plugin_args:
        - version

Here the "version" variable returned by the first plugin is used by the
next plugin block in the YAML.
"""
global global_log_store_path
global global_plugin_dict
global global_plugin_list

# Hold the plugin return values in a dict and the plugin return variable
# names in a list.  The dict is referenced and updated while parsing,
# whereas the list holds the variables from the current plugin block that
# still need processing.
global_plugin_dict = {}
global_plugin_list = []

# Hold the declared plugin return variable names when the function's return
# value is a list or dict.  This name list is used to look up the plugin
# dict when building eval() function arguments.
# Example: ['version']
global_plugin_type_list = []

# Path where logs are to be stored or written.
global_log_store_path = ''

# Plugin error state defaults.
plugin_error_dict = {
    'exit_on_error': False,
    'continue_on_error': False,
}


class ffdc_collector:

    r"""
    Execute commands from a configuration file to collect log files.
    Fetch and store generated files at the specified location.

    """

    def __init__(self,
                 hostname,
                 username,
                 password,
                 ffdc_config,
                 location,
                 remote_type,
                 remote_protocol,
                 env_vars,
                 econfig,
                 log_level):
        r"""
        Description of argument(s):

        hostname            name/ip of the targeted (remote) system
        username            user on the targeted system with access to FFDC files
        password            password for user on targeted system
        ffdc_config         configuration file listing commands and files for FFDC
        location            where to store collected FFDC
        remote_type         os type of the remote host
        remote_protocol     protocol to use to collect data
        env_vars            user-defined CLI env vars '{"key" : "value"}'
        econfig             user-defined env vars YAML file
        log_level           log level for the script logger

        """
        self.hostname = hostname
        self.username = username
        self.password = password
        self.ffdc_config = ffdc_config
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ''
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set
        # here so that all files for this run share the same timestamp and are
        # saved in the same directory.
        # self.location == local system for now
        self.set_ffdc_default_store_path()

        # Logger for this run.  Must be called after set_ffdc_default_store_path().
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify top level directory exists for storage
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load the default or user-defined YAML configuration file.
            with open(self.ffdc_config, 'r') as file:
                try:
                    self.ffdc_actions = yaml.load(file, Loader=yaml.FullLoader)
                except yaml.YAMLError as e:
                    self.logger.error(e)
                    sys.exit(-1)

            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n" % (self.target_type, self.ffdc_config))
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from user.
        self.logger.info("\n\tENV: User-defined input YAML variables")
        self.env_dict = {}
        self.load_env()

    def verify_script_env(self):

        # Imported here only to log their versions.
        import click
        import paramiko

        run_env_ok = True

        redfishtool_version = self.run_tool_cmd('redfishtool -V').split(' ')[2].strip('\n')
        ipmitool_version = self.run_tool_cmd('ipmitool -V').split(' ')[2]

        self.logger.info("\n\t---- Script host environment ----")
        self.logger.info("\t{:<10}  {:<10}".format('Script hostname', os.uname()[1]))
        self.logger.info("\t{:<10}  {:<10}".format('Script host os', platform.platform()))
        self.logger.info("\t{:<10}  {:>10}".format('Python', platform.python_version()))
        self.logger.info("\t{:<10}  {:>10}".format('PyYAML', yaml.__version__))
        self.logger.info("\t{:<10}  {:>10}".format('click', click.__version__))
        self.logger.info("\t{:<10}  {:>10}".format('paramiko', paramiko.__version__))
        self.logger.info("\t{:<10}  {:>9}".format('redfishtool', redfishtool_version))
        self.logger.info("\t{:<10}  {:>12}".format('ipmitool', ipmitool_version))

        # Compare the PyYAML version as a tuple of ints rather than eval()'ing
        # the version string.
        if tuple(int(x) for x in yaml.__version__.split('.')[:3]) < (5, 4, 1):
            self.logger.error("\n\tERROR: Python or python packages do not meet minimum version requirement.")
            self.logger.error("\tERROR: PyYAML version 5.4.1 or higher is needed.\n")
            run_env_ok = False

        self.logger.info("\t---- End script host environment ----")
        return run_env_ok

    def script_logging(self,
                       log_level_attr):
        r"""
        Create the logger for this run.

        """
        self.logger = logging.getLogger()
        self.logger.setLevel(log_level_attr)
        log_file_handler = logging.FileHandler(self.ffdc_dir_path + "collector.log")

        stdout_handler = logging.StreamHandler(sys.stdout)
        self.logger.addHandler(log_file_handler)
        self.logger.addHandler(stdout_handler)

        # Turn off paramiko INFO logging
        logging.getLogger("paramiko").setLevel(logging.WARNING)

    def target_is_pingable(self):
        r"""
        Check if the target system is ping-able.

        """
        response = os.system("ping -c 1 %s >/dev/null 2>&1" % self.hostname)
        if response == 0:
            self.logger.info("\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname)
            return True
        else:
            self.logger.error(
                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n" % self.hostname)
            sys.exit(-1)

    def collect_ffdc(self):
        r"""
        Initiate FFDC collection depending on the requested protocol.

        """

        self.logger.info("\n\t---- Start communicating with %s ----" % self.hostname)
        self.start_time = time.time()

        # Find the protocols supported for this target type.
        check_protocol_list = []
        config_dict = self.ffdc_actions

        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            for k, v in config_dict[target_type].items():
                if config_dict[target_type][k]['PROTOCOL'][0] not in check_protocol_list:
                    check_protocol_list.append(config_dict[target_type][k]['PROTOCOL'][0])

        self.logger.info("\n\t %s protocol type: %s" % (self.target_type, check_protocol_list))

        verified_working_protocol = self.verify_protocol(check_protocol_list)

        if verified_working_protocol:
            self.logger.info("\n\t---- Completed protocol pre-requisite check ----\n")

        # Verify top level directory exists for storage
        self.validate_local_store(self.location)

        if ((self.remote_protocol not in verified_working_protocol) and (self.remote_protocol != 'ALL')):
            self.logger.info("\n\tWorking protocol list: %s" % verified_working_protocol)
            self.logger.error(
                '\tERROR: Requested protocol %s is not in working protocol list.\n'
                % self.remote_protocol)
            sys.exit(-1)
        else:
            self.generate_ffdc(verified_working_protocol)

    def ssh_to_target_system(self):
        r"""
        Open an SSH connection to the targeted system.

        """

        self.ssh_remoteclient = SSHRemoteclient(self.hostname,
                                                self.username,
                                                self.password)

        if self.ssh_remoteclient.ssh_remoteclient_login():
            self.logger.info("\n\t[Check] %s SSH connection established.\t [OK]" % self.hostname)

            # Check the scp connection.
            # If the scp connection fails, continue with FFDC generation but
            # skip scp of files to the local host.
            self.ssh_remoteclient.scp_connection()
            return True
        else:
            self.logger.info("\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]" % self.hostname)
            return False

    def telnet_to_target_system(self):
        r"""
        Open a telnet connection to the targeted system.
        """
        self.telnet_remoteclient = TelnetRemoteclient(self.hostname,
                                                      self.username,
                                                      self.password)
        if self.telnet_remoteclient.tn_remoteclient_login():
            self.logger.info("\n\t[Check] %s Telnet connection established.\t [OK]" % self.hostname)
            return True
        else:
            self.logger.info("\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]" % self.hostname)
            return False

    def generate_ffdc(self, working_protocol_list):
        r"""
        Determine actions based on the remote host type.

        Description of argument(s):
        working_protocol_list    list of confirmed working protocols to connect to remote host.
        """

        self.logger.info("\n\t---- Executing commands on " + self.hostname + " ----")
        self.logger.info("\n\tWorking protocol list: %s" % working_protocol_list)

        config_dict = self.ffdc_actions
        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
            global_plugin_dict['global_log_store_path'] = self.ffdc_dir_path
            self.logger.info("\tSystem Type: %s" % target_type)
            for k, v in config_dict[target_type].items():

                if self.remote_protocol not in working_protocol_list \
                        and self.remote_protocol != 'ALL':
                    continue

                protocol = config_dict[target_type][k]['PROTOCOL'][0]

                if protocol in working_protocol_list:
                    if protocol == 'SSH' or protocol == 'SCP':
                        self.protocol_ssh(protocol, target_type, k)
                    elif protocol == 'TELNET':
                        self.protocol_telnet(target_type, k)
                    elif protocol == 'REDFISH' or protocol == 'IPMI' or protocol == 'SHELL':
                        self.protocol_execute(protocol, target_type, k)
                else:
                    self.logger.error("\n\tERROR: %s is not available for %s." % (protocol, self.hostname))

        # Close network connections after collecting all files.
        self.elapsed_time = time.strftime("%H:%M:%S", time.gmtime(time.time() - self.start_time))
        if self.ssh_remoteclient:
            self.ssh_remoteclient.ssh_remoteclient_disconnect()
        if self.telnet_remoteclient:
            self.telnet_remoteclient.tn_remoteclient_disconnect()

    def protocol_ssh(self,
                     protocol,
                     target_type,
                     sub_type):
        r"""
        Perform actions using SSH and SCP protocols.

        Description of argument(s):
        protocol            Protocol to execute.
        target_type         OS Type of remote host.
        sub_type            Group type of commands.
        """

        if protocol == 'SCP':
            self.group_copy(self.ffdc_actions[target_type][sub_type])
        else:
            self.collect_and_copy_ffdc(self.ffdc_actions[target_type][sub_type])

    def protocol_telnet(self,
                        target_type,
                        sub_type):
        r"""
        Perform actions using the telnet protocol.

        Description of argument(s):
        target_type         OS Type of remote host.
        sub_type            Group type of commands.
        """
        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, 'TELNET'))
        telnet_files_saved = []
        progress_counter = 0
        list_of_commands = self.ffdc_actions[target_type][sub_type]['COMMANDS']
        for index, each_cmd in enumerate(list_of_commands, start=0):
            command_txt, command_timeout = self.unpack_command(each_cmd)
            result = self.telnet_remoteclient.execute_command(command_txt, command_timeout)
            if result:
                try:
                    targ_file = self.ffdc_actions[target_type][sub_type]['FILES'][index]
                except IndexError:
                    targ_file = command_txt
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from telnet %s." % each_cmd)
                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
                targ_file_with_path = (self.ffdc_dir_path
                                       + self.ffdc_prefix
                                       + targ_file)
                # Create a new file and write the command output to it.
                with open(targ_file_with_path, 'w') as fp:
                    fp.write(result)
                telnet_files_saved.append(targ_file)
            progress_counter += 1
            self.print_progress(progress_counter)
        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
        for file in telnet_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")

    def protocol_execute(self,
                         protocol,
                         target_type,
                         sub_type):
        r"""
        Perform actions for a given protocol.

        Description of argument(s):
        protocol            Protocol to execute.
        target_type         OS Type of remote host.
        sub_type            Group type of commands.
        """

        self.logger.info("\n\t[Run] Executing commands to %s using %s" % (self.hostname, protocol))
        executed_files_saved = []
        progress_counter = 0
        list_of_cmd = self.get_command_list(self.ffdc_actions[target_type][sub_type])
        for index, each_cmd in enumerate(list_of_cmd, start=0):
            plugin_call = False
            if isinstance(each_cmd, dict):
                if 'plugin' in each_cmd:
                    # If the error flag is set and the plugin explicitly
                    # requested to skip execution on error.
                    if plugin_error_dict['exit_on_error'] and \
                            self.plugin_error_check(each_cmd['plugin']):
                        self.logger.info("\n\t[PLUGIN-ERROR] exit_on_error: %s" %
                                         plugin_error_dict['exit_on_error'])
                        self.logger.info("\t[PLUGIN-SKIP] %s" %
                                         each_cmd['plugin'][0])
                        continue
                    plugin_call = True
                    # Call the plugin.
                    self.logger.info("\n\t[PLUGIN-START]")
                    result = self.execute_plugin_block(each_cmd['plugin'])
                    self.logger.info("\t[PLUGIN-END]\n")
            else:
                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)

            if not plugin_call:
                result = self.run_tool_cmd(each_cmd)
            if result:
                try:
                    file_name = self.get_file_list(self.ffdc_actions[target_type][sub_type])[index]
                    # If the file is specified as None, skip storing the output.
                    if file_name == "None":
                        continue
                    targ_file = self.yaml_env_and_plugin_vars_populate(file_name)
                except IndexError:
                    targ_file = each_cmd.split('/')[-1]
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from %s." % each_cmd)
                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)

                targ_file_with_path = (self.ffdc_dir_path
                                       + self.ffdc_prefix
                                       + targ_file)

                # Create a new file and write the result to it.
                with open(targ_file_with_path, 'w') as fp:
                    if isinstance(result, dict):
                        fp.write(json.dumps(result))
                    else:
                        fp.write(result)
                executed_files_saved.append(targ_file)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

        for file in executed_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")

    def collect_and_copy_ffdc(self,
                              ffdc_actions_for_target_type,
                              form_filename=False):
        r"""
        Send commands in the ffdc_config file to the targeted system.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        form_filename                   if true, prepend self.target_type to the filename
        """

        # Execute commands, if any.
        self.ssh_execute_ffdc_commands(ffdc_actions_for_target_type,
                                       form_filename)

        # Copy files.
        if self.ssh_remoteclient.scpclient:
            self.logger.info("\n\n\tCopying FFDC files from remote system %s.\n" % self.hostname)

            # Retrieve files from the target system.
            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
            self.scp_ffdc(self.ffdc_dir_path, self.ffdc_prefix, form_filename, list_of_files)
        else:
            self.logger.info("\n\n\tSkip copying FFDC files from remote system %s.\n" % self.hostname)

    def get_command_list(self,
                         ffdc_actions_for_target_type):
        r"""
        Fetch the list of commands from the configuration file.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
        try:
            list_of_commands = ffdc_actions_for_target_type['COMMANDS']
        except KeyError:
            list_of_commands = []
        return list_of_commands

    def get_file_list(self,
                      ffdc_actions_for_target_type):
        r"""
        Fetch the list of files from the configuration file.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
        try:
            list_of_files = ffdc_actions_for_target_type['FILES']
        except KeyError:
            list_of_files = []
        return list_of_files
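    # For reference, a sketch of the YAML block these helpers read from.
    # The keys COMMANDS, FILES and PROTOCOL are the ones this code accesses;
    # the sub-type name, command and file shown are illustrative only:
    #
    #   OPENBMC:
    #       GENERAL:
    #           COMMANDS:
    #               - 'cat /etc/os-release'
    #           FILES:
    #               - 'OS_RELEASE.txt'
    #           PROTOCOL:
    #               - 'SSH'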

    def unpack_command(self,
                       command):
        r"""
        Unpack a command from the config file.

        Description of argument(s):
        command    Command from config file.
        """
        if isinstance(command, dict):
            command_txt = next(iter(command))
            command_timeout = next(iter(command.values()))
        elif isinstance(command, str):
            command_txt = command
            # Default command timeout 60 seconds
            command_timeout = 60

        return command_txt, command_timeout
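    # Illustrative examples of the two accepted YAML command forms
    # (the commands themselves are hypothetical):
    #
    #   unpack_command('uname -a')
    #       returns ('uname -a', 60)            # default 60 second timeout
    #   unpack_command({'cat /var/log/messages': 120})
    #       returns ('cat /var/log/messages', 120)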

    def ssh_execute_ffdc_commands(self,
                                  ffdc_actions_for_target_type,
                                  form_filename=False):
        r"""
        Send commands in the ffdc_config file to the targeted system.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        form_filename                   if true, prepend self.target_type to the filename
        """
        self.logger.info("\n\t[Run] Executing commands on %s using %s"
                         % (self.hostname, ffdc_actions_for_target_type['PROTOCOL'][0]))

        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
        # If the command list is empty, return.
        if not list_of_commands:
            return

        progress_counter = 0
        for command in list_of_commands:
            command_txt, command_timeout = self.unpack_command(command)

            if form_filename:
                command_txt = str(command_txt % self.target_type)

            cmd_exit_code, err, response = \
                self.ssh_remoteclient.execute_command(command_txt, command_timeout)

            if cmd_exit_code:
                self.logger.warning(
                    "\n\t\t[WARN] %s exits with code %s." % (command_txt, str(cmd_exit_code)))
                self.logger.warning("\t\t[WARN] %s " % err)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

    def group_copy(self,
                   ffdc_actions_for_target_type):
        r"""
        scp a group of files (wild card) from the remote host.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info("\n\tCopying files from remote system %s via SCP.\n" % self.hostname)

            list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
            # If the command list is empty, return.
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                cmd_exit_code, err, response = \
                    self.ssh_remoteclient.execute_command(command)

                # If the file does not exist, the code takes no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    scp_result = \
                        self.ssh_remoteclient.scp_file_from_remote(response.split('\n'),
                                                                   self.ffdc_dir_path)
                    if scp_result:
                        self.logger.info("\t\tSuccessfully copied from " + self.hostname + ':' + command)
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            self.logger.info("\n\n\tSkip copying files from remote system %s.\n" % self.hostname)

    def scp_ffdc(self,
                 targ_dir_path,
                 targ_file_prefix,
                 form_filename,
                 file_list=None,
                 quiet=None):
        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be prepended to each
                                        target file's name.
        form_filename                   If true, prepend self.target_type to the filename.
        file_list                       A list of files to scp from the targeted system to this system.

        """

        progress_counter = 0
        for filename in file_list:
            if form_filename:
                filename = str(filename % self.target_type)
            source_file_path = filename
            targ_file_path = targ_dir_path + targ_file_prefix + filename.split('/')[-1]

            # If the source file name contains a wild card, copy the filename as is.
            if '*' in source_file_path:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, self.ffdc_dir_path)
            else:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, targ_file_path)

            if not quiet:
                if scp_result:
                    self.logger.info(
                        "\t\tSuccessfully copied from " + self.hostname + ':' + source_file_path + ".\n")
                else:
                    self.logger.info(
                        "\t\tFailed to copy from " + self.hostname + ':' + source_file_path + ".\n")
            else:
                progress_counter += 1
                self.print_progress(progress_counter)

    def set_ffdc_default_store_path(self):
        r"""
        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
        Collected ffdc files will be stored in the dir /self.location/hostname_timestr/.
        Individual ffdc files will be named timestr_filename.

        Description of class variables:
        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.

        self.ffdc_prefix    The prefix to be given to each ffdc file name.

        """

        timestr = time.strftime("%Y%m%d-%H%M%S")
        self.ffdc_dir_path = self.location + "/" + self.hostname + "_" + timestr + "/"
        self.ffdc_prefix = timestr + "_"
        self.validate_local_store(self.ffdc_dir_path)
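    # Illustrative example of the values produced here, assuming a location
    # of '/tmp/logs/OPENBMC', a hostname of '1.2.3.4' and a run starting at
    # 2021-06-01 10:20:30:
    #
    #   self.ffdc_dir_path = '/tmp/logs/OPENBMC/1.2.3.4_20210601-102030/'
    #   self.ffdc_prefix   = '20210601-102030_'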

    # Need to verify the local store path exists prior to instantiating this class.
    # This class method is used to share the same code between the CLI input parm
    # and Robot Framework "${EXECDIR}/logs" before referencing this class.
    @classmethod
    def validate_local_store(cls, dir_path):
        r"""
        Ensure the path exists to store FFDC files locally.

        Description of variable:
        dir_path  The dir path where collected ffdc data files will be stored.

        """

        if not os.path.exists(dir_path):
            try:
                os.makedirs(dir_path, 0o755)
            except (IOError, OSError) as e:
                # This is a classmethod and may be called before the instance
                # logger exists, so report errors via print().
                # PermissionError
                if e.errno == EPERM or e.errno == EACCES:
                    print(
                        '\tERROR: os.makedirs %s failed with PermissionError.\n' % dir_path)
                else:
                    print(
                        '\tERROR: os.makedirs %s failed with %s.\n' % (dir_path, e.strerror))
                sys.exit(-1)

    def print_progress(self, progress):
        r"""
        Print a '+' for each activity progress step.

        Description of variable:
        progress  Progress counter.

        """

        sys.stdout.write("\r\t" + "+" * progress)
        sys.stdout.flush()
        time.sleep(.1)

    def verify_redfish(self):
        r"""
        Verify the remote host has the redfish service active.

        """
        redfish_parm = 'redfishtool -r ' \
                       + self.hostname + ' -S Always raw GET /redfish/v1/'
        return self.run_tool_cmd(redfish_parm, True)

    def verify_ipmi(self):
        r"""
        Verify the remote host has the IPMI LAN service active.

        """
        if self.target_type == 'OPENBMC':
            ipmi_parm = 'ipmitool -I lanplus -C 17  -U ' + self.username + ' -P ' \
                + self.password + ' -H ' + self.hostname + ' power status'
        else:
            ipmi_parm = 'ipmitool -I lanplus  -P ' \
                + self.password + ' -H ' + self.hostname + ' power status'

        return self.run_tool_cmd(ipmi_parm, True)

    def run_tool_cmd(self,
                     parms_string,
                     quiet=False):
        r"""
        Run a CLI standard tool or script.

        Description of variable:
        parms_string         tool command options.
        quiet                do not print the tool error message if True.
        """

        result = subprocess.run([parms_string],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True,
                                universal_newlines=True)

        if result.stderr and not quiet:
            self.logger.error('\n\t\tERROR with %s ' % parms_string)
            self.logger.error('\t\t' + result.stderr)

        return result.stdout
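    # Illustrative usage: stdout is returned as a string; stderr, if any,
    # is logged unless quiet=True.
    #
    #   version_text = self.run_tool_cmd('ipmitool -V', quiet=True)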

    def verify_protocol(self, protocol_list):
        r"""
        Perform a protocol working check.

        Description of argument(s):
        protocol_list        List of protocols.
        """

        tmp_list = []
        if self.target_is_pingable():
            tmp_list.append("SHELL")

        for protocol in protocol_list:
            if self.remote_protocol != 'ALL':
                if self.remote_protocol != protocol:
                    continue

            # Only check SSH/SCP once for both protocols.
            if (protocol == 'SSH' or protocol == 'SCP') and protocol not in tmp_list:
                if self.ssh_to_target_system():
                    # Add only what the user asked for.
                    if self.remote_protocol != 'ALL':
                        tmp_list.append(self.remote_protocol)
                    else:
                        tmp_list.append('SSH')
                        tmp_list.append('SCP')

            if protocol == 'TELNET':
                if self.telnet_to_target_system():
                    tmp_list.append(protocol)

            if protocol == 'REDFISH':
                if self.verify_redfish():
                    tmp_list.append(protocol)
                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [OK]" % self.hostname)
                else:
                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]" % self.hostname)

            if protocol == 'IPMI':
                if self.verify_ipmi():
                    tmp_list.append(protocol)
                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [OK]" % self.hostname)
                else:
                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]" % self.hostname)

        return tmp_list

    def load_env(self):
        r"""
        Load user-supplied environment variables (CLI JSON string and econfig
        YAML file) and export them for use in the YAML configuration.

        """
        # These env vars can be referenced in the YAML config and are loaded
        # at runtime.
        # Example YAML:
        # -COMMANDS:
        #    - my_command ${hostname}  ${username}   ${password}
        os.environ['hostname'] = self.hostname
        os.environ['username'] = self.username
        os.environ['password'] = self.password

        # Append default Env.
        self.env_dict['hostname'] = self.hostname
        self.env_dict['username'] = self.username
        self.env_dict['password'] = self.password

        try:
            tmp_env_dict = {}
            if self.env_vars:
                tmp_env_dict = json.loads(self.env_vars)
                # Export ENV vars default.
                for key, value in tmp_env_dict.items():
                    os.environ[key] = value
                    self.env_dict[key] = str(value)

            if self.econfig:
                with open(self.econfig, 'r') as file:
                    try:
                        tmp_env_dict = yaml.load(file, Loader=yaml.FullLoader)
                    except yaml.YAMLError as e:
                        self.logger.error(e)
                        sys.exit(-1)
                # Export ENV vars.
                for key, value in tmp_env_dict['env_params'].items():
                    os.environ[key] = str(value)
                    self.env_dict[key] = str(value)
        except json.decoder.JSONDecodeError as e:
            self.logger.error("\n\tERROR: %s " % e)
            sys.exit(-1)

        # Mask the password so it is not displayed on the console.
        mask_dict = self.env_dict.copy()
        for k, v in mask_dict.items():
            if k.lower().find("password") != -1:
                hidden_text = []
                hidden_text.append(v)
                password_regex = '(' +\
                    '|'.join([re.escape(x) for x in hidden_text]) + ')'
                mask_dict[k] = re.sub(password_regex, "********", v)

        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
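    # Illustrative input forms consumed here (key names are hypothetical,
    # but 'env_params' is the top-level key this code expects in econfig):
    #
    #   env_vars CLI JSON string:   '{"my_key" : "my_value"}'
    #
    #   econfig YAML file:
    #       env_params:
    #           my_tool_path: '/usr/local/bin'
    #           my_user: 'admin'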

    def execute_python_eval(self, eval_string):
        r"""
        Execute a qualified python function string using eval().

        Description of argument(s):
        eval_string        The python function call string to execute.

        Example:
                eval("plugin.foo_func.foo_func(10)")
        """
        try:
            self.logger.info("\tExecuting plugin func()")
            self.logger.debug("\tCall func: %s" % eval_string)
            result = eval(eval_string)
            self.logger.info("\treturn: %s" % str(result))
        except (ValueError,
                SyntaxError,
                NameError,
                AttributeError,
                TypeError) as e:
            self.logger.error("\tERROR: execute_python_eval: %s" % e)
            # Set the plugin error state.
            plugin_error_dict['exit_on_error'] = True
            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
            return 'PLUGIN_EVAL_ERROR'

        return result
    def execute_plugin_block(self, plugin_cmd_list):
        r"""
        Pack the plugin command into a qualified python string object.

        Description of argument(s):
        plugin_cmd_list       Plugin block read from YAML
                              [{'plugin_name': 'plugin.foo_func.my_func'},
                               {'plugin_args': [10]}]

        Example:
            - plugin:
              - plugin_name: plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2

            - plugin:
              - plugin_name: result = plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2

            - plugin:
              - plugin_name: result1,result2 = plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2
        """
        try:
            idx = self.key_index_list_dict('plugin_name', plugin_cmd_list)
            plugin_name = plugin_cmd_list[idx]['plugin_name']
            # An equals separator means the plugin function returns a result.
            if ' = ' in plugin_name:
                # Ex. ['result', 'plugin.foo_func.my_func']
                plugin_name_args = plugin_name.split(' = ')
                # The plugin function returns data.
                for arg in plugin_name_args:
                    if arg == plugin_name_args[-1]:
                        plugin_name = arg
                    else:
                        plugin_resp = arg.split(',')
                        # ['result1','result2']
                        for x in plugin_resp:
                            global_plugin_list.append(x)
                            global_plugin_dict[x] = ""

            # Walk the plugin args ['arg1','arg2'].
            # Check whether the YAML plugin statement declares 'plugin_args'.
            if any('plugin_args' in d for d in plugin_cmd_list):
                idx = self.key_index_list_dict('plugin_args', plugin_cmd_list)
                plugin_args = plugin_cmd_list[idx]['plugin_args']
                if plugin_args:
                    plugin_args = self.yaml_args_populate(plugin_args)
                else:
                    plugin_args = []
            else:
                plugin_args = self.yaml_args_populate([])

            # Pack the args arg1, arg2, .... argn into a
            # "arg1","arg2","argn" string as params for the function.
            parm_args_str = self.yaml_args_string(plugin_args)
            if parm_args_str:
                plugin_func = plugin_name + '(' + parm_args_str + ')'
            else:
                plugin_func = plugin_name + '()'

            # Execute the plugin function.
            if global_plugin_dict:
                resp = self.execute_python_eval(plugin_func)
                # Update the plugin vars dict if there are any.
                if resp != 'PLUGIN_EVAL_ERROR':
                    self.response_args_data(resp)
            else:
                resp = self.execute_python_eval(plugin_func)
        except Exception as e:
            # Set the plugin error state.
            plugin_error_dict['exit_on_error'] = True
            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
            # resp may not have been assigned if the exception occurred before
            # the eval call; treat this the same as an eval error.
            resp = 'PLUGIN_EVAL_ERROR'

        # There is a real error executing the plugin function.
        if resp == 'PLUGIN_EVAL_ERROR':
            return resp

        # Check if plugin_expects_return (int, string, list, dict, etc.)
        if any('plugin_expects_return' in d for d in plugin_cmd_list):
            idx = self.key_index_list_dict('plugin_expects_return', plugin_cmd_list)
            plugin_expects = plugin_cmd_list[idx]['plugin_expects_return']
            if plugin_expects:
                if resp:
                    if self.plugin_expect_type(plugin_expects, resp) == 'INVALID':
                        self.logger.error("\tWARN: Plugin error check skipped")
                    elif not self.plugin_expect_type(plugin_expects, resp):
                        self.logger.error("\tERROR: Plugin expects return data: %s"
                                          % plugin_expects)
                        plugin_error_dict['exit_on_error'] = True
                elif not resp:
                    self.logger.error("\tERROR: Plugin func failed to return data")
                    plugin_error_dict['exit_on_error'] = True

        return resp

    def response_args_data(self, plugin_resp):
        r"""
        Parse the plugin function response and update the plugin return variables.

        Description of argument(s):
        plugin_resp       Response data from the plugin function.
        """
        resp_list = []
        resp_data = ""

        # There is nothing to update from the plugin response.
        if len(global_plugin_list) == 0 or plugin_resp == 'None':
            return

        if isinstance(plugin_resp, str):
            resp_data = plugin_resp.strip('\r\n\t')
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, bytes):
            resp_data = str(plugin_resp, 'UTF-8').strip('\r\n\t')
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, tuple):
            if len(global_plugin_list) == 1:
                resp_list.append(plugin_resp)
            else:
                resp_list = list(plugin_resp)
                resp_list = [x.strip('\r\n\t') for x in resp_list]
        elif isinstance(plugin_resp, list):
            if len(global_plugin_list) == 1:
                resp_list.append([x.strip('\r\n\t') for x in plugin_resp])
            else:
                resp_list = [x.strip('\r\n\t') for x in plugin_resp]
        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
            resp_list.append(plugin_resp)

        # Iterate if there is a list of plugin return vars to update.
        for idx, item in enumerate(resp_list, start=0):
            # Exit the loop once all return variables have been updated.
            if idx >= len(global_plugin_list):
                break
            # Find the return variable name at this index and update the
            # global plugin return dictionary.
            try:
                dict_idx = global_plugin_list[idx]
                global_plugin_dict[dict_idx] = item
            except (IndexError, ValueError) as e:
                self.logger.warning("\tWARN: response_args_data: %s" % e)

        # Done updating the plugin dict irrespective of pass or fail;
        # clear the list elements for the next plugin block execution.
        global_plugin_list.clear()
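    # Illustrative example (variable and function names are hypothetical):
    # for a YAML declaration 'ip,netmask = plugin.network.get_info',
    # global_plugin_list is ['ip', 'netmask']; a returned tuple
    # ('9.9.9.9', '255.255.255.0') updates global_plugin_dict to
    # {'ip': '9.9.9.9', 'netmask': '255.255.255.0'}.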

    def yaml_args_string(self, plugin_args):
        r"""
        Pack the args into a string.

        plugin_args            arg list ['arg1', 'arg2', 'argn']
        """
        args_str = ''
        for args in plugin_args:
            if args:
                if isinstance(args, (int, float)):
                    args_str += str(args)
                elif args in global_plugin_type_list:
                    args_str += str(global_plugin_dict[args])
                else:
                    args_str += '"' + str(args.strip('\r\n\t')) + '"'
            # Add a comma after every element except the last.
            if args != plugin_args[-1]:
                args_str += ","
        return args_str

    def yaml_args_populate(self, yaml_arg_list):
        r"""
        Decode env and plugin vars and populate the args.

        Description of argument(s):
        yaml_arg_list         arg list read from YAML

        Example:
          - plugin_args:
            - arg1
            - arg2

                  yaml_arg_list:  [arg1, arg2]
        """
        # Get the env loaded keys as a list, e.g. ['hostname', 'username', 'password'].
        env_vars_list = list(self.env_dict)

        if isinstance(yaml_arg_list, list):
            tmp_list = []
            for arg in yaml_arg_list:
                if isinstance(arg, (int, float)):
                    tmp_list.append(arg)
                    continue
                elif isinstance(arg, str):
                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
                    tmp_list.append(arg_str)
                else:
                    tmp_list.append(arg)

            # Return the populated list.
            return tmp_list

    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
        r"""
        Update ${MY_VAR} and plugin vars in the string.

        Description of argument(s):
        yaml_arg_str         arg string read from YAML.

        Example:
            - cat ${MY_VAR}
            - ls -AX my_plugin_var
        """
        # Parse the string for env vars ${env_vars}.
        try:
            # Example, list of matching env vars ['username', 'password', 'hostname']
            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works well.
            var_name_regex = '\\$\\{([^\\}]+)\\}'
            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
            for var in env_var_names_list:
                env_var = os.environ[var]
                env_replace = '${' + var + '}'
                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
        except Exception as e:
            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
            pass

        # Parse the string for plugin vars.
        try:
            # Example, list of plugin vars ['my_username', 'my_data']
            plugin_var_name_list = global_plugin_dict.keys()
            for var in plugin_var_name_list:
                # Skip env vars already populated by the block above.
                if var in env_var_names_list:
                    continue
                # If this plugin var exists but is empty in the dict, don't replace.
                # This is either a YAML plugin statement incorrectly used or
                # the user added a plugin var which is not going to be populated.
                if yaml_arg_str in global_plugin_dict:
                    if isinstance(global_plugin_dict[var], (list, dict)):
                        # List or dict data types can't be string-replaced; they
                        # are used directly in the eval() function call.
                        global_plugin_type_list.append(var)
                    else:
                        yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
                # Just a string like a filename or command.
                else:
                    yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
        except (IndexError, ValueError) as e:
            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
            pass

        return yaml_arg_str
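    # Illustrative example (values are hypothetical): with
    # os.environ['hostname'] = '1.2.3.4' and
    # global_plugin_dict = {'version': '7.1'}, the YAML string
    #   'redfishtool -r ${hostname} raw GET /redfish/v1/'
    # becomes 'redfishtool -r 1.2.3.4 raw GET /redfish/v1/', and
    #   'echo version' becomes 'echo 7.1'.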

    def plugin_error_check(self, plugin_dict):
        r"""
        Process the plugin error directive.

        Description of argument(s):
        plugin_dict        Plugin block list of dicts read from YAML.
        """
        if any('plugin_error' in d for d in plugin_dict):
            for d in plugin_dict:
                if 'plugin_error' in d:
                    value = d['plugin_error']
                    # Return whether the referenced error state has been set by a plugin.
                    return plugin_error_dict[value]
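        # Illustrative YAML directive handled here (the plugin name is
        # hypothetical; the value must be a key of plugin_error_dict):
        #   - plugin:
        #     - plugin_name: plugin.foo_func.my_func
        #     - plugin_error: exit_on_error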

    def key_index_list_dict(self, key, list_dict):
        r"""
        Iterate over a list of dictionaries and return the index of the first
        dictionary containing the key.

        Description of argument(s):
        key           Valid key in a dict.
        list_dict     List of dictionaries.
        """
        for i, d in enumerate(list_dict):
            if key in d.keys():
                return i

    def plugin_expect_type(self, type, data):
        r"""
        Plugin expect directive type check.
        """
        if type == 'int':
            return isinstance(data, int)
        elif type == 'float':
            return isinstance(data, float)
        elif type == 'str':
            return isinstance(data, str)
        elif type == 'list':
            return isinstance(data, list)
        elif type == 'dict':
            return isinstance(data, dict)
        elif type == 'tuple':
            return isinstance(data, tuple)
        else:
            self.logger.info("\tInvalid data type requested: %s" % type)
            return 'INVALID'