#!/usr/bin/env python3

r"""
See class prolog below for details.
"""

import os
import re
import sys
import yaml
import json
import time
import logging
import platform
from errno import EACCES, EPERM
import subprocess
from ssh_utility import SSHRemoteclient
from telnet_utility import TelnetRemoteclient

r"""
User-defined python plugin functions.

Python modules are imported from the plugins directory:

plugins
├── file1.py
└── file2.py

Example of how to declare a plugin in YAML:
 - plugin:
   - plugin_name: plugin.foo_func.foo_func_yaml
     - plugin_args:
       - arg1
       - arg2
"""
abs_path = os.path.abspath(os.path.dirname(sys.argv[0]))
plugin_dir = abs_path + '/plugins'
try:
    for module in os.listdir(plugin_dir):
        if module == '__init__.py' or module[-3:] != '.py':
            continue
        plugin_module = "plugins." + module[:-3]
        # To access the module plugin.<module name>.<function>
        # Example: plugin.foo_func.foo_func_yaml()
        try:
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            print("PLUGIN: Module import failed: %s" % module)
            pass
except FileNotFoundError as e:
    print("PLUGIN: %s" % e)
    pass
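
r"""
Illustrative sketch (not shipped with this script): a minimal plugin module
that the YAML example above could reference.  The file name, function name
and arguments are assumptions for illustration only.

    # plugins/foo_func.py
    def foo_func_yaml(arg1, arg2):
        # Any plain python is allowed; the returned value can be captured
        # in YAML with "plugin_name: my_var = plugin.foo_func.foo_func_yaml".
        return "%s-%s" % (arg1, arg2)
"""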

r"""
This is for plugin functions returning data or responses to the caller
in YAML plugin setup.

Example:

    - plugin:
      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
      - plugin_args:
        - ${hostname}
        - ${username}
        - ${password}
        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
     - plugin:
        - plugin_name: plugin.print_vars.print_vars
        - plugin_args:
          - version

where the "version" var returned by the first plugin is consumed by another
plugin later in the YAML block.

"""
global global_log_store_path
global global_plugin_dict
global global_plugin_list

# Hold the plugin return values in a dict and plugin return vars in a list.
# The dict is used to reference and update vars during parser processing,
# whereas the list holds the current vars from the plugin block which still
# need processing.
global_plugin_dict = {}
global_plugin_list = []

# Hold the declared plugin return variable names whose values are a list or dict.
# Refer to this name list to look up the plugin dict when building eval() args.
# Example: ['version']
global_plugin_type_list = []

# Path where logs are to be stored or written.
global_log_store_path = ''

# Plugin error state defaults.
plugin_error_dict = {
    'exit_on_error': False,
    'continue_on_error': False,
}
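
r"""
Illustrative sketch of how a YAML plugin block could reference these error
states (an assumption inferred from plugin_error_check() and
protocol_execute(); adapt to your actual config).  plugin_error_check()
looks up the named key in plugin_error_dict, and protocol_execute() skips
the plugin when 'exit_on_error' was set by an earlier failure:

    - plugin:
      - plugin_name: plugin.foo_func.foo_func_yaml
      - plugin_args:
        - arg1
      - plugin_error: exit_on_error
"""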


class FFDCCollector:

    r"""
    Execute commands from configuration file to collect log files.
    Fetch and store generated files at the specified location.

    """
    def __init__(self,
                 hostname,
                 username,
                 password,
                 ffdc_config,
                 location,
                 remote_type,
                 remote_protocol,
                 env_vars,
                 econfig,
                 log_level):
        r"""
        Description of argument(s):

        hostname            name/ip of the targeted (remote) system
        username            user on the targeted system with access to FFDC files
        password            password for user on targeted system
        ffdc_config         configuration file listing commands and files for FFDC
        location            where to store collected FFDC
        remote_type         os type of the remote host
        remote_protocol     protocol to use to collect data
        env_vars            user-defined CLI env vars '{"key" : "value"}'
        econfig             user-defined env vars YAML file
        log_level           log level for the script logger

        """

        self.hostname = hostname
        self.username = username
        self.password = password
        self.ffdc_config = ffdc_config
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ''
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set
        # here to be sure that all files for this run will have the same
        # timestamps and will be saved in the same directory.
        # self.location == local system for now
        self.set_ffdc_defaults()

        # Logger for this run.  Needs to be created after set_ffdc_defaults().
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify top level directory exists for storage
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load default or user-defined YAML configuration file.
            with open(self.ffdc_config, 'r') as file:
                try:
                    self.ffdc_actions = yaml.load(file, Loader=yaml.FullLoader)
                except yaml.YAMLError as e:
                    self.logger.error(e)
                    sys.exit(-1)

            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n" % (self.target_type, self.ffdc_config))
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from user.
        self.logger.info("\n\tENV: User-defined input YAML variables")
        self.env_dict = {}
        self.load_env()

    def verify_script_env(self):

        # Imported here so their versions can be logged.
        import click
        import paramiko

        run_env_ok = True

        redfishtool_version = self.run_tool_cmd('redfishtool -V').split(' ')[2].strip('\n')
        ipmitool_version = self.run_tool_cmd('ipmitool -V').split(' ')[2]

        self.logger.info("\n\t---- Script host environment ----")
        self.logger.info("\t{:<10}  {:<10}".format('Script hostname', os.uname()[1]))
        self.logger.info("\t{:<10}  {:<10}".format('Script host os', platform.platform()))
        self.logger.info("\t{:<10}  {:>10}".format('Python', platform.python_version()))
        self.logger.info("\t{:<10}  {:>10}".format('PyYAML', yaml.__version__))
        self.logger.info("\t{:<10}  {:>10}".format('click', click.__version__))
        self.logger.info("\t{:<10}  {:>10}".format('paramiko', paramiko.__version__))
        self.logger.info("\t{:<10}  {:>9}".format('redfishtool', redfishtool_version))
        self.logger.info("\t{:<10}  {:>12}".format('ipmitool', ipmitool_version))

        # Compare the PyYAML version without using eval() on the version string.
        yaml_version = tuple(int(x) for x in yaml.__version__.split('.')[:3] if x.isdigit())
        if yaml_version < (5, 4, 1):
            self.logger.error("\n\tERROR: Python or python packages do not meet minimum version requirement.")
            self.logger.error("\tERROR: PyYAML version 5.4.1 or higher is needed.\n")
            run_env_ok = False

        self.logger.info("\t---- End script host environment ----")
        return run_env_ok

    def script_logging(self,
                       log_level_attr):
        r"""
        Create logger

        """
        self.logger = logging.getLogger()
        self.logger.setLevel(log_level_attr)
        log_file_handler = logging.FileHandler(self.ffdc_dir_path + "collector.log")

        stdout_handler = logging.StreamHandler(sys.stdout)
        self.logger.addHandler(log_file_handler)
        self.logger.addHandler(stdout_handler)

        # Turn off paramiko INFO logging
        logging.getLogger("paramiko").setLevel(logging.WARNING)

    def target_is_pingable(self):
        r"""
        Check if the target system is ping-able.

        """
        response = os.system("ping -c 1 %s > /dev/null 2>&1" % self.hostname)
        if response == 0:
            self.logger.info("\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname)
            return True
        else:
            self.logger.error(
                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n" % self.hostname)
            sys.exit(-1)

    def collect_ffdc(self):
        r"""
        Initiate FFDC collection depending on the requested protocol.

        """

        self.logger.info("\n\t---- Start communicating with %s ----" % self.hostname)
        self.start_time = time.time()

        # Find the list of protocols supported for this target type.
        check_protocol_list = []
        config_dict = self.ffdc_actions

        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            for k, v in config_dict[target_type].items():
                if config_dict[target_type][k]['PROTOCOL'][0] not in check_protocol_list:
                    check_protocol_list.append(config_dict[target_type][k]['PROTOCOL'][0])

        self.logger.info("\n\t %s protocol type: %s" % (self.target_type, check_protocol_list))

        verified_working_protocol = self.verify_protocol(check_protocol_list)

        if verified_working_protocol:
            self.logger.info("\n\t---- Completed protocol pre-requisite check ----\n")

        # Verify top level directory exists for storage
        self.validate_local_store(self.location)

        if ((self.remote_protocol not in verified_working_protocol) and (self.remote_protocol != 'ALL')):
            self.logger.info("\n\tWorking protocol list: %s" % verified_working_protocol)
            self.logger.error(
                '\tERROR: Requested protocol %s is not in working protocol list.\n'
                % self.remote_protocol)
            sys.exit(-1)
        else:
            self.generate_ffdc(verified_working_protocol)

    def ssh_to_target_system(self):
        r"""
        Open an SSH connection to the targeted system.

        """

        self.ssh_remoteclient = SSHRemoteclient(self.hostname,
                                                self.username,
                                                self.password)

        if self.ssh_remoteclient.ssh_remoteclient_login():
            self.logger.info("\n\t[Check] %s SSH connection established.\t [OK]" % self.hostname)

            # Check the scp connection.
            # If the scp connection fails,
            # continue with FFDC generation but skip scp of files to the local host.
            self.ssh_remoteclient.scp_connection()
            return True
        else:
            self.logger.info("\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]" % self.hostname)
            return False

    def telnet_to_target_system(self):
        r"""
        Open a telnet connection to the targeted system.
        """
        self.telnet_remoteclient = TelnetRemoteclient(self.hostname,
                                                      self.username,
                                                      self.password)
        if self.telnet_remoteclient.tn_remoteclient_login():
            self.logger.info("\n\t[Check] %s Telnet connection established.\t [OK]" % self.hostname)
            return True
        else:
            self.logger.info("\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]" % self.hostname)
            return False

    def generate_ffdc(self, working_protocol_list):
        r"""
        Determine actions based on remote host type.

        Description of argument(s):
        working_protocol_list    list of confirmed working protocols to connect to remote host.
        """

        self.logger.info("\n\t---- Executing commands on " + self.hostname + " ----")
        self.logger.info("\n\tWorking protocol list: %s" % working_protocol_list)

        config_dict = self.ffdc_actions
        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
            global_plugin_dict['global_log_store_path'] = self.ffdc_dir_path
            self.logger.info("\tSystem Type: %s" % target_type)
            for k, v in config_dict[target_type].items():

                if self.remote_protocol not in working_protocol_list \
                        and self.remote_protocol != 'ALL':
                    continue

                protocol = config_dict[target_type][k]['PROTOCOL'][0]

                if protocol in working_protocol_list:
                    if protocol in ('SSH', 'SCP'):
                        self.protocol_ssh(protocol, target_type, k)
                    elif protocol == 'TELNET':
                        self.protocol_telnet(target_type, k)
                    elif protocol in ('REDFISH', 'IPMI', 'SHELL'):
                        self.protocol_execute(protocol, target_type, k)
                else:
                    self.logger.error("\n\tERROR: %s is not available for %s." % (protocol, self.hostname))

        # Close network connections after collecting all files.
        self.elapsed_time = time.strftime("%H:%M:%S", time.gmtime(time.time() - self.start_time))
        if self.ssh_remoteclient:
            self.ssh_remoteclient.ssh_remoteclient_disconnect()
        if self.telnet_remoteclient:
            self.telnet_remoteclient.tn_remoteclient_disconnect()

    def protocol_ssh(self,
                     protocol,
                     target_type,
                     sub_type):
        r"""
        Perform actions using SSH and SCP protocols.

        Description of argument(s):
        protocol            Protocol to execute.
        target_type         OS Type of remote host.
        sub_type            Group type of commands.
        """

        if protocol == 'SCP':
            self.group_copy(self.ffdc_actions[target_type][sub_type])
        else:
            self.collect_and_copy_ffdc(self.ffdc_actions[target_type][sub_type])

    def protocol_telnet(self,
                        target_type,
                        sub_type):
        r"""
        Perform actions using the telnet protocol.

        Description of argument(s):
        target_type          OS Type of remote host.
        sub_type             Group type of commands.
        """
        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, 'TELNET'))
        telnet_files_saved = []
        progress_counter = 0
        list_of_commands = self.ffdc_actions[target_type][sub_type]['COMMANDS']
        for index, each_cmd in enumerate(list_of_commands, start=0):
            command_txt, command_timeout = self.unpack_command(each_cmd)
            result = self.telnet_remoteclient.execute_command(command_txt, command_timeout)
            if result:
                try:
                    targ_file = self.ffdc_actions[target_type][sub_type]['FILES'][index]
                except IndexError:
                    targ_file = command_txt
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from telnet %s." % each_cmd)
                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
                targ_file_with_path = (self.ffdc_dir_path
                                       + self.ffdc_prefix
                                       + targ_file)
                # Creates a new file; the with statement closes it on exit.
                with open(targ_file_with_path, 'w') as fp:
                    fp.write(result)
                    telnet_files_saved.append(targ_file)
            progress_counter += 1
            self.print_progress(progress_counter)
        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
        for file in telnet_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")

    def protocol_execute(self,
                         protocol,
                         target_type,
                         sub_type):
        r"""
        Perform actions for a given protocol.

        Description of argument(s):
        protocol            Protocol to execute.
        target_type         OS Type of remote host.
        sub_type            Group type of commands.
        """

        self.logger.info("\n\t[Run] Executing commands to %s using %s" % (self.hostname, protocol))
        executed_files_saved = []
        progress_counter = 0
        list_of_cmd = self.get_command_list(self.ffdc_actions[target_type][sub_type])
        for index, each_cmd in enumerate(list_of_cmd, start=0):
            plugin_call = False
            if isinstance(each_cmd, dict):
                if 'plugin' in each_cmd:
                    # If the error state is set and the plugin explicitly
                    # requested to skip execution on error, skip it.
                    if plugin_error_dict['exit_on_error'] and \
                            self.plugin_error_check(each_cmd['plugin']):
                        self.logger.info("\n\t[PLUGIN-ERROR] exit_on_error: %s" %
                                         plugin_error_dict['exit_on_error'])
                        self.logger.info("\t[PLUGIN-SKIP] %s" %
                                         each_cmd['plugin'][0])
                        continue
                    plugin_call = True
                    # Call the plugin.
                    self.logger.info("\n\t[PLUGIN-START]")
                    result = self.execute_plugin_block(each_cmd['plugin'])
                    self.logger.info("\t[PLUGIN-END]\n")
            else:
                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)

            if not plugin_call:
                result = self.run_tool_cmd(each_cmd)
            if result:
                try:
                    file_name = self.get_file_list(self.ffdc_actions[target_type][sub_type])[index]
                    # If the file is specified as None, skip storing the result.
                    if file_name == "None":
                        continue
                    targ_file = self.yaml_env_and_plugin_vars_populate(file_name)
                except IndexError:
                    targ_file = each_cmd.split('/')[-1]
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from %s." % each_cmd)
                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)

                targ_file_with_path = (self.ffdc_dir_path
                                       + self.ffdc_prefix
                                       + targ_file)

                # Creates a new file; the with statement closes it on exit.
                with open(targ_file_with_path, 'w') as fp:
                    if isinstance(result, dict):
                        fp.write(json.dumps(result))
                    else:
                        fp.write(result)
                    executed_files_saved.append(targ_file)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

        for file in executed_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")

    def collect_and_copy_ffdc(self,
                              ffdc_actions_for_target_type,
                              form_filename=False):
        r"""
        Send commands in ffdc_config file to targeted system.

        Description of argument(s):
        ffdc_actions_for_target_type     commands and files for the selected remote host type.
        form_filename                    if true, pre-pend self.target_type to filename
        """

        # Executing commands, if any
        self.ssh_execute_ffdc_commands(ffdc_actions_for_target_type,
                                       form_filename)

        # Copying files
        if self.ssh_remoteclient.scpclient:
            self.logger.info("\n\n\tCopying FFDC files from remote system %s.\n" % self.hostname)

            # Retrieving files from target system
            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
            self.scp_ffdc(self.ffdc_dir_path, self.ffdc_prefix, form_filename, list_of_files)
        else:
            self.logger.info("\n\n\tSkip copying FFDC files from remote system %s.\n" % self.hostname)

    def get_command_list(self,
                         ffdc_actions_for_target_type):
        r"""
        Fetch list of commands from configuration file

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
        try:
            list_of_commands = ffdc_actions_for_target_type['COMMANDS']
        except KeyError:
            list_of_commands = []
        return list_of_commands

    def get_file_list(self,
                      ffdc_actions_for_target_type):
        r"""
        Fetch list of files from configuration file

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
        try:
            list_of_files = ffdc_actions_for_target_type['FILES']
        except KeyError:
            list_of_files = []
        return list_of_files
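
    # Illustrative sketch of the YAML layout these two helpers expect.
    # The keys (PROTOCOL, COMMANDS, FILES, target type) are grounded in the
    # code above; the group name and values are assumptions for illustration:
    #
    #   OPENBMC:
    #       GENERAL:
    #           PROTOCOL:
    #               - 'SSH'
    #           COMMANDS:
    #               - 'cat /etc/os-release'
    #           FILES:
    #               - 'OS_info.txt'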

    def unpack_command(self,
                       command):
        r"""
        Unpack command from config file

        Description of argument(s):
        command    Command from config file.
        """
        if isinstance(command, dict):
            command_txt = next(iter(command))
            command_timeout = next(iter(command.values()))
        elif isinstance(command, str):
            command_txt = command
            # Default command timeout 60 seconds
            command_timeout = 60

        return command_txt, command_timeout
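
    # Illustrative sketch of the two command forms unpack_command() accepts
    # (the YAML command strings shown are assumptions for illustration only):
    #
    #   COMMANDS:
    #       - 'obmcutil state'              # plain string, 60 second default timeout
    #       - 'nvram --print-config': 120   # single-key dict, value is the timeout in seconds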

    def ssh_execute_ffdc_commands(self,
                                  ffdc_actions_for_target_type,
                                  form_filename=False):
        r"""
        Send commands in the ffdc_config file to the targeted system.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        form_filename                   if True, pre-pend self.target_type to filename
        """
        self.logger.info("\n\t[Run] Executing commands on %s using %s"
                         % (self.hostname, ffdc_actions_for_target_type['PROTOCOL'][0]))

        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
        # If the command list is empty, return.
        if not list_of_commands:
            return

        progress_counter = 0
        for command in list_of_commands:
            command_txt, command_timeout = self.unpack_command(command)

            if form_filename:
                command_txt = str(command_txt % self.target_type)

            cmd_exit_code, err, response = \
                self.ssh_remoteclient.execute_command(command_txt, command_timeout)

            if cmd_exit_code:
                self.logger.warning(
                    "\n\t\t[WARN] %s exits with code %s." % (command_txt, str(cmd_exit_code)))
                self.logger.warning("\t\t[WARN] %s " % err)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

    def group_copy(self,
                   ffdc_actions_for_target_type):
        r"""
        scp a group of files (wild card) from the remote host.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info("\n\tCopying files from remote system %s via SCP.\n" % self.hostname)

            list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
            # If the command list is empty, return.
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                cmd_exit_code, err, response = \
                    self.ssh_remoteclient.execute_command(command)

                # If the file does not exist, the code takes no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    scp_result = \
                        self.ssh_remoteclient.scp_file_from_remote(response.split('\n'),
                                                                   self.ffdc_dir_path)
                    if scp_result:
                        self.logger.info("\t\tSuccessfully copied from " + self.hostname + ':' + command)
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            self.logger.info("\n\n\tSkip copying files from remote system %s.\n" % self.hostname)

    def scp_ffdc(self,
                 targ_dir_path,
                 targ_file_prefix,
                 form_filename,
                 file_list=None,
                 quiet=None):
        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be pre-pended to each
                                        target file's name.
        file_list                       A list of files to scp from the targeted system to this system.

        """

        progress_counter = 0
        for filename in file_list:
            if form_filename:
                filename = str(filename % self.target_type)
            source_file_path = filename
            targ_file_path = targ_dir_path + targ_file_prefix + filename.split('/')[-1]

            # If the source file name contains a wild card, copy the file as is.
            if '*' in source_file_path:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, self.ffdc_dir_path)
            else:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, targ_file_path)

            if not quiet:
                if scp_result:
                    self.logger.info(
                        "\t\tSuccessfully copied from " + self.hostname + ':' + source_file_path + ".\n")
                else:
                    self.logger.info(
                        "\t\tFailed to copy from " + self.hostname + ':' + source_file_path + ".\n")
            else:
                progress_counter += 1
                self.print_progress(progress_counter)

    def set_ffdc_defaults(self):
        r"""
        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
        Collected ffdc file will be stored in dir /self.location/hostname_timestr/.
        Individual ffdc file will have timestr_filename.

        Description of class variables:
        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.

        self.ffdc_prefix    The prefix to be given to each ffdc file name.

        """

        timestr = time.strftime("%Y%m%d-%H%M%S")
        self.ffdc_dir_path = self.location + "/" + self.hostname + "_" + timestr + "/"
        self.ffdc_prefix = timestr + "_"
        self.validate_local_store(self.ffdc_dir_path)

    def validate_local_store(self, dir_path):
        r"""
        Ensure path exists to store FFDC files locally.

        Description of variable:
        dir_path  The dir path where collected ffdc data files will be stored.

        """

        if not os.path.exists(dir_path):
            try:
                os.makedirs(dir_path, 0o755)
            except (IOError, OSError) as e:
                # PermissionError
                if e.errno == EPERM or e.errno == EACCES:
                    self.logger.error(
                        '\tERROR: os.makedirs %s failed with PermissionError.\n' % dir_path)
                else:
                    self.logger.error(
                        '\tERROR: os.makedirs %s failed with %s.\n' % (dir_path, e.strerror))
                sys.exit(-1)

    def print_progress(self, progress):
        r"""
        Print activity progress as a growing row of '+' characters.

        Description of variable:
        progress  Progress counter.

        """

        sys.stdout.write("\r\t" + "+" * progress)
        sys.stdout.flush()
        time.sleep(.1)

    def verify_redfish(self):
        r"""
        Verify the remote host has the Redfish service active.

        """
        redfish_parm = 'redfishtool -r ' \
                       + self.hostname + ' -S Always raw GET /redfish/v1/'
        return self.run_tool_cmd(redfish_parm, True)

    def verify_ipmi(self):
        r"""
        Verify the remote host has the IPMI LAN service active.

        """
        if self.target_type == 'OPENBMC':
            ipmi_parm = 'ipmitool -I lanplus -C 17  -U ' + self.username + ' -P ' \
                + self.password + ' -H ' + self.hostname + ' power status'
        else:
            ipmi_parm = 'ipmitool -I lanplus  -P ' \
                + self.password + ' -H ' + self.hostname + ' power status'

        return self.run_tool_cmd(ipmi_parm, True)

    def run_tool_cmd(self,
                     parms_string,
                     quiet=False):
        r"""
        Run CLI standard tool or scripts.

        Description of variable:
        parms_string         tool command options.
        quiet                do not print tool error message if True
        """

        result = subprocess.run([parms_string],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True,
                                universal_newlines=True)

        if result.stderr and not quiet:
            self.logger.error('\n\t\tERROR with %s ' % parms_string)
            self.logger.error('\t\t' + result.stderr)

        return result.stdout

    def verify_protocol(self, protocol_list):
        r"""
        Perform a protocol working check.

        Description of argument(s):
        protocol_list        List of protocols.
        """

        tmp_list = []
        if self.target_is_pingable():
            tmp_list.append("SHELL")

        for protocol in protocol_list:
            if self.remote_protocol != 'ALL':
                if self.remote_protocol != protocol:
                    continue

            # Only check SSH/SCP once for both protocols.
            if protocol in ('SSH', 'SCP') and protocol not in tmp_list:
                if self.ssh_to_target_system():
                    # Add only what the user asked for.
                    if self.remote_protocol != 'ALL':
                        tmp_list.append(self.remote_protocol)
                    else:
                        tmp_list.append('SSH')
                        tmp_list.append('SCP')

            if protocol == 'TELNET':
                if self.telnet_to_target_system():
                    tmp_list.append(protocol)

            if protocol == 'REDFISH':
                if self.verify_redfish():
                    tmp_list.append(protocol)
                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [OK]" % self.hostname)
                else:
                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]" % self.hostname)

            if protocol == 'IPMI':
                if self.verify_ipmi():
                    tmp_list.append(protocol)
                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [OK]" % self.hostname)
                else:
                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]" % self.hostname)

        return tmp_list

    def load_env(self):
        r"""
        Load user-defined environment variables and export them for YAML use.

        """
        # These env vars can be referenced in YAML and are loaded at runtime.
        # Example YAML:
        # -COMMANDS:
        #    - my_command ${hostname}  ${username}   ${password}
        os.environ['hostname'] = self.hostname
        os.environ['username'] = self.username
        os.environ['password'] = self.password

        # Append default Env.
        self.env_dict['hostname'] = self.hostname
        self.env_dict['username'] = self.username
        self.env_dict['password'] = self.password

        try:
            tmp_env_dict = {}
            if self.env_vars:
                tmp_env_dict = json.loads(self.env_vars)
                # Export ENV vars default.
                for key, value in tmp_env_dict.items():
                    os.environ[key] = value
                    self.env_dict[key] = str(value)

            if self.econfig:
                with open(self.econfig, 'r') as file:
                    try:
                        tmp_env_dict = yaml.load(file, Loader=yaml.FullLoader)
                    except yaml.YAMLError as e:
                        self.logger.error(e)
                        sys.exit(-1)
                # Export ENV vars.
                for key, value in tmp_env_dict['env_params'].items():
                    os.environ[key] = str(value)
                    self.env_dict[key] = str(value)
        except json.decoder.JSONDecodeError as e:
            self.logger.error("\n\tERROR: %s " % e)
            sys.exit(-1)

        # This is to mask the password from displaying on the console.
        mask_dict = self.env_dict.copy()
        for k, v in mask_dict.items():
            if k.lower().find("password") != -1:
                hidden_text = []
                hidden_text.append(v)
                password_regex = '(' +\
                    '|'.join([re.escape(x) for x in hidden_text]) + ')'
                mask_dict[k] = re.sub(password_regex, "********", v)

        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
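
    # Illustrative sketch of the two user inputs load_env() accepts (the
    # key/value pairs are assumptions for illustration only):
    #
    #   CLI env_vars string, parsed with json.loads():
    #       '{"my_id": "1234", "my_key": "abc"}'
    #
    #   econfig YAML file, which must carry a top-level 'env_params' mapping:
    #       env_params:
    #           my_id: 1234
    #           my_key: abc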

    def execute_python_eval(self, eval_string):
        r"""
        Execute a qualified python function string using eval().

        Description of argument(s):
        eval_string        The python function call string to evaluate.

        Example:
                eval(plugin.foo_func.foo_func(10))
        """
        try:
            self.logger.info("\tExecuting plugin func()")
            self.logger.debug("\tCall func: %s" % eval_string)
            result = eval(eval_string)
            self.logger.info("\treturn: %s" % str(result))
        except (ValueError,
                SyntaxError,
                NameError,
                AttributeError,
                TypeError) as e:
            self.logger.error("\tERROR: execute_python_eval: %s" % e)
            # Set the plugin error state.
            plugin_error_dict['exit_on_error'] = True
            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
            return 'PLUGIN_EVAL_ERROR'

        return result

    def execute_plugin_block(self, plugin_cmd_list):
        r"""
        Pack the plugin command into a qualified python string object.

        Description of argument(s):
        plugin_cmd_list       Plugin block read from YAML
                              [{'plugin_name': 'plugin.foo_func.my_func'},
                               {'plugin_args': [10]}]

        Example:
            - plugin:
              - plugin_name: plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2

            - plugin:
              - plugin_name: result = plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2

            - plugin:
              - plugin_name: result1,result2 = plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2
        """
        # Default to the error sentinel so an exception before execution
        # still returns a defined value below.
        resp = 'PLUGIN_EVAL_ERROR'
        try:
            idx = self.key_index_list_dict('plugin_name', plugin_cmd_list)
            plugin_name = plugin_cmd_list[idx]['plugin_name']
            # An equals separator means the plugin function returns a result.
            if ' = ' in plugin_name:
                # Ex. ['result', 'plugin.foo_func.my_func']
                plugin_name_args = plugin_name.split(' = ')
                # Plugin func returns data.
                for arg in plugin_name_args:
                    if arg == plugin_name_args[-1]:
                        plugin_name = arg
                    else:
                        plugin_resp = arg.split(',')
                        # ['result1','result2']
                        for x in plugin_resp:
                            global_plugin_list.append(x)
                            global_plugin_dict[x] = ""

            # Walk the plugin args ['arg1','arg2']
            # if the YAML plugin statement 'plugin_args' is declared.
            if any('plugin_args' in d for d in plugin_cmd_list):
                idx = self.key_index_list_dict('plugin_args', plugin_cmd_list)
                plugin_args = plugin_cmd_list[idx]['plugin_args']
                if plugin_args:
                    plugin_args = self.yaml_args_populate(plugin_args)
                else:
                    plugin_args = []
            else:
                plugin_args = self.yaml_args_populate([])

            # Pack the args arg1, arg2, .... argn into
            # "arg1","arg2","argn"  string as params for the function.
            parm_args_str = self.yaml_args_string(plugin_args)
            if parm_args_str:
                plugin_func = plugin_name + '(' + parm_args_str + ')'
            else:
                plugin_func = plugin_name + '()'

            # Execute the plugin function.
            if global_plugin_dict:
                resp = self.execute_python_eval(plugin_func)
                # Update the plugin vars dict if there are any.
                if resp != 'PLUGIN_EVAL_ERROR':
                    self.response_args_data(resp)
            else:
                resp = self.execute_python_eval(plugin_func)
        except Exception as e:
            # Set the plugin error state.
            plugin_error_dict['exit_on_error'] = True
            self.logger.error("\tERROR: execute_plugin_block: %s" % e)

        # There was a real error executing the plugin function.
        if resp == 'PLUGIN_EVAL_ERROR':
            return resp

        # Check if plugin_expects_return (int, str, list, dict etc.) is declared.
        if any('plugin_expects_return' in d for d in plugin_cmd_list):
            idx = self.key_index_list_dict('plugin_expects_return', plugin_cmd_list)
            plugin_expects = plugin_cmd_list[idx]['plugin_expects_return']
            if plugin_expects:
                if resp:
                    if self.plugin_expect_type(plugin_expects, resp) == 'INVALID':
                        self.logger.error("\tWARN: Plugin error check skipped")
                    elif not self.plugin_expect_type(plugin_expects, resp):
                        self.logger.error("\tERROR: Plugin expects return data: %s"
                                          % plugin_expects)
                        plugin_error_dict['exit_on_error'] = True
                elif not resp:
                    self.logger.error("\tERROR: Plugin func failed to return data")
                    plugin_error_dict['exit_on_error'] = True

        return resp
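
    # Illustrative sketch of the optional 'plugin_expects_return' directive
    # checked above; the plugin name and arguments are assumptions:
    #
    #   - plugin:
    #     - plugin_name: data = plugin.foo_func.my_func
    #     - plugin_args:
    #       - arg1
    #     - plugin_expects_return: dict
    #
    # Valid type names are the ones handled by plugin_expect_type():
    # int, float, str, list, dict, tuple.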

    def response_args_data(self, plugin_resp):
        r"""
        Parse the plugin function response and update the plugin return variables.

        Description of argument(s):
        plugin_resp       Response data from plugin function.
        """
        resp_list = []
        resp_data = ""

        # There is nothing to update from the plugin response.
        if len(global_plugin_list) == 0 or plugin_resp == 'None':
            return

        if isinstance(plugin_resp, str):
            resp_data = plugin_resp.strip('\r\n\t')
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, bytes):
            resp_data = str(plugin_resp, 'UTF-8').strip('\r\n\t')
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, tuple):
            if len(global_plugin_list) == 1:
                resp_list.append(plugin_resp)
            else:
                resp_list = list(plugin_resp)
                resp_list = [x.strip('\r\n\t') for x in resp_list]
        elif isinstance(plugin_resp, list):
            if len(global_plugin_list) == 1:
                resp_list.append([x.strip('\r\n\t') for x in plugin_resp])
            else:
                resp_list = [x.strip('\r\n\t') for x in plugin_resp]
        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
            resp_list.append(plugin_resp)

        # Iterate if there is a list of plugin return vars to update.
        for idx, item in enumerate(resp_list, start=0):
            # Exit the loop once all expected return vars are updated.
            if idx >= len(global_plugin_list):
                break
            # Find the index of the return var in the list and
            # update the global func return dictionary.
            try:
                dict_idx = global_plugin_list[idx]
                global_plugin_dict[dict_idx] = item
            except (IndexError, ValueError) as e:
                self.logger.warning("\tWARN: response_args_data: %s" % e)

        # Done updating the plugin dict irrespective of pass or fail;
        # clear all the list elements for the next plugin block execution.
        global_plugin_list.clear()

    def yaml_args_string(self, plugin_args):
        r"""
        Pack the args into a string.

        plugin_args            arg list ['arg1', 'arg2', 'argn']
        """
        args_str = ''
        for args in plugin_args:
            if args:
                if isinstance(args, (int, float)):
                    args_str += str(args)
                elif args in global_plugin_type_list:
                    args_str += str(global_plugin_dict[args])
                else:
                    args_str += '"' + str(args.strip('\r\n\t')) + '"'
            # Add a comma after every element except the last one.
            if args != plugin_args[-1]:
                args_str += ","
        return args_str

    def yaml_args_populate(self, yaml_arg_list):
        r"""
        Decode env and plugin vars and populate the args.

        Description of argument(s):
        yaml_arg_list         arg list read from YAML

        Example:
          - plugin_args:
            - arg1
            - arg2

                  yaml_arg_list:  [arg1, arg2]
        """
        # Get the env loaded keys as a list ['hostname', 'username', 'password'].
        env_vars_list = list(self.env_dict)

        if isinstance(yaml_arg_list, list):
            tmp_list = []
            for arg in yaml_arg_list:
                if isinstance(arg, (int, float)):
                    tmp_list.append(arg)
                    continue
                elif isinstance(arg, str):
                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
                    tmp_list.append(arg_str)
                else:
                    tmp_list.append(arg)

            # Return the populated list.
            return tmp_list

    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
        r"""
        Update ${MY_VAR} and plugin vars.

        Description of argument(s):
        yaml_arg_str         arg string read from YAML.

        Example:
            - cat ${MY_VAR}
            - ls -AX my_plugin_var
        """
        # Parse the string for env vars ${env_vars}.
        env_var_names_list = []
        try:
            # Example, list of matching env vars ['username', 'password', 'hostname']
            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works well.
            var_name_regex = '\\$\\{([^\\}]+)\\}'
            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
            for var in env_var_names_list:
                env_var = os.environ[var]
                env_replace = '${' + var + '}'
                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
        except Exception as e:
            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)

        # Parse the string for plugin vars.
        try:
            # Example, list of plugin vars ['my_username', 'my_data']
            plugin_var_name_list = global_plugin_dict.keys()
            for var in plugin_var_name_list:
                # Skip any var already populated by the env var block above.
                if var in env_var_names_list:
                    continue
                # If this plugin var exists but is empty in the dict, don't replace.
                # This is either a YAML plugin statement incorrectly used or
                # the user added a plugin var which is not going to be populated.
                if yaml_arg_str in global_plugin_dict:
                    if isinstance(global_plugin_dict[var], (list, dict)):
                        # List or dict data types can't be replaced; use them
                        # directly in the eval() function call.
                        global_plugin_type_list.append(var)
                    else:
                        yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
                # Just a string like a filename or command.
                else:
                    yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
        except (IndexError, ValueError) as e:
            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)

        return yaml_arg_str

    def plugin_error_check(self, plugin_dict):
        r"""
        Process the plugin error directive.

        Description of argument(s):
        plugin_dict        Plugin block read from YAML.
        """
        if any('plugin_error' in d for d in plugin_dict):
            for d in plugin_dict:
                if 'plugin_error' in d:
                    value = d['plugin_error']
                    # Return whether the named error state is set by a plugin.
                    return plugin_error_dict[value]

    def key_index_list_dict(self, key, list_dict):
        r"""
        Iterate over a list of dictionaries and return the index where the key is found.

        Description of argument(s):
        key           Valid key in a dict.
        list_dict     List of dictionaries.
        """
        for i, d in enumerate(list_dict):
            if key in d.keys():
                return i

    def plugin_expect_type(self, type, data):
        r"""
        Plugin expect directive type check.
        """
        if type == 'int':
            return isinstance(data, int)
        elif type == 'float':
            return isinstance(data, float)
        elif type == 'str':
            return isinstance(data, str)
        elif type == 'list':
            return isinstance(data, list)
        elif type == 'dict':
            return isinstance(data, dict)
        elif type == 'tuple':
            return isinstance(data, tuple)
        else:
            self.logger.info("\tInvalid data type requested: %s" % type)
            return 'INVALID'
