xref: /openbmc/openbmc-test-automation/ffdc/ffdc_collector.py (revision c754b43f563d43399575778a28217af52a7f8993)
1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7import json
8import logging
9import os
10import platform
11import re
12import subprocess
13import sys
14import time
15from errno import EACCES, EPERM
16
17import yaml
18
# Make this script's directory and every sub-directory importable so that
# utility modules anywhere under the script tree resolve on import.
script_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(script_dir)
# Walk path and append to sys.path
for root, dirs, files in os.walk(script_dir):
    # Renamed loop variable: "dir" shadowed the builtin of the same name.
    for dir_name in dirs:
        sys.path.append(os.path.join(root, dir_name))
25
26from ssh_utility import SSHRemoteclient  # NOQA
27from telnet_utility import TelnetRemoteclient  # NOQA
28
29r"""
User-defined plugin Python functions.

It imports files from the plugins directory:
33
34plugins
35├── file1.py
36└── file2.py
37
38Example how to define in YAML:
39 - plugin:
40   - plugin_name: plugin.foo_func.foo_func_yaml
41     - plugin_args:
42       - arg1
43       - arg2
44"""
# Directory holding the user-defined plugin modules (<script dir>/plugins).
# Built with os.path instead of slicing __file__ on "/": the old form
# produced a doubled slash and broke on non-"/" separators.
plugin_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "plugins")
sys.path.append(plugin_dir)
try:
    for module in os.listdir(plugin_dir):
        # Skip the package marker and anything that is not a .py module.
        if module == "__init__.py" or module[-3:] != ".py":
            continue
        plugin_module = "plugins." + module[:-3]
        # To access the module plugin.<module name>.<function>
        # Example: plugin.foo_func.foo_func_yaml()
        try:
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            # A broken plugin must not abort FFDC collection.
            print("PLUGIN: Module import failed: %s" % module)
except FileNotFoundError as e:
    # A missing plugins directory is not fatal; collection works without it.
    print("PLUGIN: %s" % e)
62
63r"""
64This is for plugin functions returning data or responses to the caller
65in YAML plugin setup.
66
67Example:
68
69    - plugin:
70      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
71      - plugin_args:
72        - ${hostname}
73        - ${username}
74        - ${password}
75        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
76     - plugin:
77        - plugin_name: plugin.print_vars.print_vars
78        - plugin_args:
79          - version
80
where the "version" variable set by the first plugin is consumed by
another plugin later in the same YAML block.
83
84"""
# NOTE: the former module-level "global" statements were removed; "global"
# is a no-op at module scope since these names are module-level already.

# Hold the plugin return values in dict and plugin return vars in list.
# Dict is to reference and update vars processing in parser where as
# list is for current vars from the plugin block which needs processing.
global_plugin_dict = {}
global_plugin_list = []

# Hold the plugin return names declared when a function returns a
# list or dict.
# Refer to this name list to look up the plugin dict for eval() args.
# Example: ['version']
global_plugin_type_list = []

# Path where logs are to be stored or written.
global_log_store_path = ""

# Plugin error state defaults.
plugin_error_dict = {
    "exit_on_error": False,
    "continue_on_error": False,
}
109
110
111class ffdc_collector:
112    r"""
113    Execute commands from configuration file to collect log files.
114    Fetch and store generated files at the specified location.
115
116    """
117
    def __init__(
        self,
        hostname,
        username,
        password,
        port_ssh,
        port_https,
        port_ipmi,
        ffdc_config,
        location,
        remote_type,
        remote_protocol,
        env_vars,
        econfig,
        log_level,
    ):
        r"""
        Initialize the collector, create the local store directory, set up
        logging, and load the YAML configuration.

        Exits the process (sys.exit(-1)) when the environment check fails,
        the YAML cannot be parsed, or the target type is not configured.

        Description of argument(s):

        hostname            Name/ip of the targeted (remote) system
        username            User on the targeted system with access to
                            FFDC files
        password            Password for user on targeted system
        port_ssh            SSH port value. By default 22
        port_https          HTTPS port value. By default 443
        port_ipmi           IPMI port value. By default 623
        ffdc_config         Configuration file listing commands and files
                            for FFDC
        location            Where to store collected FFDC
        remote_type         OS type of the remote host
        remote_protocol     Protocol to use to collect data
        env_vars            User define CLI env vars '{"key : "value"}'
        econfig             User define env vars YAML file
        log_level           Logging level name (e.g. "INFO", "DEBUG");
                            resolved against the logging module.

        """

        self.hostname = hostname
        self.username = username
        self.password = password
        # Ports are kept as strings; they are interpolated into commands.
        self.port_ssh = str(port_ssh)
        self.port_https = str(port_https)
        self.port_ipmi = str(port_ipmi)
        self.ffdc_config = ffdc_config
        # Collected data lands under <location>/<REMOTE_TYPE>/.
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ""
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set
        # here to be sure that all files for this run will have same timestamps
        # and they will be saved in the same directory.
        # self.location == local system for now
        self.set_ffdc_default_store_path()

        # Logger for this run.  Need to be after set_ffdc_default_store_path()
        # because the log file lives inside the run's FFDC directory.
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify top level directory exists for storage
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load default or user define YAML configuration file.
            with open(self.ffdc_config, "r") as file:
                try:
                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
                except yaml.YAMLError as e:
                    self.logger.error(e)
                    sys.exit(-1)

            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n"
                    % (self.target_type, self.ffdc_config)
                )
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from user.
        self.logger.info("\n\tENV: User define input YAML variables")
        self.env_dict = {}
        self.load_env()
209
210    def verify_script_env(self):
211        # Import to log version
212        import click
213        import paramiko
214
215        run_env_ok = True
216
217        try:
218            redfishtool_version = (
219                self.run_tool_cmd("redfishtool -V").split(" ")[2].strip("\n")
220            )
221        except Exception as e:
222            self.logger.error("\tEXCEPTION redfishtool: %s", e)
223            redfishtool_version = "Not Installed (optional)"
224
225        try:
226            ipmitool_version = self.run_tool_cmd("ipmitool -V").split(" ")[2]
227        except Exception as e:
228            self.logger.error("\tEXCEPTION ipmitool: %s", e)
229            ipmitool_version = "Not Installed (optional)"
230
231        self.logger.info("\n\t---- Script host environment ----")
232        self.logger.info(
233            "\t{:<10}  {:<10}".format("Script hostname", os.uname()[1])
234        )
235        self.logger.info(
236            "\t{:<10}  {:<10}".format("Script host os", platform.platform())
237        )
238        self.logger.info(
239            "\t{:<10}  {:>10}".format("Python", platform.python_version())
240        )
241        self.logger.info("\t{:<10}  {:>10}".format("PyYAML", yaml.__version__))
242        self.logger.info("\t{:<10}  {:>10}".format("click", click.__version__))
243        self.logger.info(
244            "\t{:<10}  {:>10}".format("paramiko", paramiko.__version__)
245        )
246        self.logger.info(
247            "\t{:<10}  {:>9}".format("redfishtool", redfishtool_version)
248        )
249        self.logger.info(
250            "\t{:<10}  {:>12}".format("ipmitool", ipmitool_version)
251        )
252
253        if eval(yaml.__version__.replace(".", ",")) < (5, 3, 0):
254            self.logger.error(
255                "\n\tERROR: Python or python packages do not meet minimum"
256                " version requirement."
257            )
258            self.logger.error(
259                "\tERROR: PyYAML version 5.3.0 or higher is needed.\n"
260            )
261            run_env_ok = False
262
263        self.logger.info("\t---- End script host environment ----")
264        return run_env_ok
265
266    def script_logging(self, log_level_attr):
267        r"""
268        Create logger
269
270        """
271        self.logger = logging.getLogger()
272        self.logger.setLevel(log_level_attr)
273        log_file_handler = logging.FileHandler(
274            self.ffdc_dir_path + "collector.log"
275        )
276
277        stdout_handler = logging.StreamHandler(sys.stdout)
278        self.logger.addHandler(log_file_handler)
279        self.logger.addHandler(stdout_handler)
280
281        # Turn off paramiko INFO logging
282        logging.getLogger("paramiko").setLevel(logging.WARNING)
283
284    def target_is_pingable(self):
285        r"""
286        Check if target system is ping-able.
287
288        """
289        response = os.system("ping -c 1 %s  2>&1 >/dev/null" % self.hostname)
290        if response == 0:
291            self.logger.info(
292                "\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname
293            )
294            return True
295        else:
296            self.logger.error(
297                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n"
298                % self.hostname
299            )
300            sys.exit(-1)
301
302    def collect_ffdc(self):
303        r"""
304        Initiate FFDC Collection depending on requested protocol.
305
306        """
307
308        self.logger.info(
309            "\n\t---- Start communicating with %s ----" % self.hostname
310        )
311        self.start_time = time.time()
312
313        # Find the list of target and protocol supported.
314        check_protocol_list = []
315        config_dict = self.ffdc_actions
316
317        for target_type in config_dict.keys():
318            if self.target_type != target_type:
319                continue
320
321            for k, v in config_dict[target_type].items():
322                if (
323                    config_dict[target_type][k]["PROTOCOL"][0]
324                    not in check_protocol_list
325                ):
326                    check_protocol_list.append(
327                        config_dict[target_type][k]["PROTOCOL"][0]
328                    )
329
330        self.logger.info(
331            "\n\t %s protocol type: %s"
332            % (self.target_type, check_protocol_list)
333        )
334
335        verified_working_protocol = self.verify_protocol(check_protocol_list)
336
337        if verified_working_protocol:
338            self.logger.info(
339                "\n\t---- Completed protocol pre-requisite check ----\n"
340            )
341
342        # Verify top level directory exists for storage
343        self.validate_local_store(self.location)
344
345        if (self.remote_protocol not in verified_working_protocol) and (
346            self.remote_protocol != "ALL"
347        ):
348            self.logger.info(
349                "\n\tWorking protocol list: %s" % verified_working_protocol
350            )
351            self.logger.error(
352                "\tERROR: Requested protocol %s is not in working protocol"
353                " list.\n" % self.remote_protocol
354            )
355            sys.exit(-1)
356        else:
357            self.generate_ffdc(verified_working_protocol)
358
359    def ssh_to_target_system(self):
360        r"""
361        Open a ssh connection to targeted system.
362
363        """
364
365        self.ssh_remoteclient = SSHRemoteclient(
366            self.hostname, self.username, self.password, self.port_ssh
367        )
368
369        if self.ssh_remoteclient.ssh_remoteclient_login():
370            self.logger.info(
371                "\n\t[Check] %s SSH connection established.\t [OK]"
372                % self.hostname
373            )
374
375            # Check scp connection.
376            # If scp connection fails,
377            # continue with FFDC generation but skip scp files to local host.
378            self.ssh_remoteclient.scp_connection()
379            return True
380        else:
381            self.logger.info(
382                "\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]"
383                % self.hostname
384            )
385            return False
386
387    def telnet_to_target_system(self):
388        r"""
389        Open a telnet connection to targeted system.
390        """
391        self.telnet_remoteclient = TelnetRemoteclient(
392            self.hostname, self.username, self.password
393        )
394        if self.telnet_remoteclient.tn_remoteclient_login():
395            self.logger.info(
396                "\n\t[Check] %s Telnet connection established.\t [OK]"
397                % self.hostname
398            )
399            return True
400        else:
401            self.logger.info(
402                "\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]"
403                % self.hostname
404            )
405            return False
406
407    def generate_ffdc(self, working_protocol_list):
408        r"""
409        Determine actions based on remote host type
410
411        Description of argument(s):
412        working_protocol_list    List of confirmed working protocols to
413                                 connect to remote host.
414        """
415
416        self.logger.info(
417            "\n\t---- Executing commands on " + self.hostname + " ----"
418        )
419        self.logger.info(
420            "\n\tWorking protocol list: %s" % working_protocol_list
421        )
422
423        config_dict = self.ffdc_actions
424        for target_type in config_dict.keys():
425            if self.target_type != target_type:
426                continue
427
428            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
429            global_plugin_dict["global_log_store_path"] = self.ffdc_dir_path
430            self.logger.info("\tSystem Type: %s" % target_type)
431            for k, v in config_dict[target_type].items():
432                if (
433                    self.remote_protocol not in working_protocol_list
434                    and self.remote_protocol != "ALL"
435                ):
436                    continue
437
438                protocol = config_dict[target_type][k]["PROTOCOL"][0]
439
440                if protocol not in working_protocol_list:
441                    continue
442
443                if protocol in working_protocol_list:
444                    if protocol == "SSH" or protocol == "SCP":
445                        self.protocol_ssh(protocol, target_type, k)
446                    elif protocol == "TELNET":
447                        self.protocol_telnet(target_type, k)
448                    elif (
449                        protocol == "REDFISH"
450                        or protocol == "IPMI"
451                        or protocol == "SHELL"
452                    ):
453                        self.protocol_execute(protocol, target_type, k)
454                else:
455                    self.logger.error(
456                        "\n\tERROR: %s is not available for %s."
457                        % (protocol, self.hostname)
458                    )
459
460        # Close network connection after collecting all files
461        self.elapsed_time = time.strftime(
462            "%H:%M:%S", time.gmtime(time.time() - self.start_time)
463        )
464        if self.ssh_remoteclient:
465            self.ssh_remoteclient.ssh_remoteclient_disconnect()
466        if self.telnet_remoteclient:
467            self.telnet_remoteclient.tn_remoteclient_disconnect()
468
469    def protocol_ssh(self, protocol, target_type, sub_type):
470        r"""
471        Perform actions using SSH and SCP protocols.
472
473        Description of argument(s):
474        protocol            Protocol to execute.
475        target_type         OS Type of remote host.
476        sub_type            Group type of commands.
477        """
478
479        if protocol == "SCP":
480            self.group_copy(self.ffdc_actions[target_type][sub_type])
481        else:
482            self.collect_and_copy_ffdc(
483                self.ffdc_actions[target_type][sub_type]
484            )
485
486    def protocol_telnet(self, target_type, sub_type):
487        r"""
488        Perform actions using telnet protocol.
489        Description of argument(s):
490        target_type          OS Type of remote host.
491        """
492        self.logger.info(
493            "\n\t[Run] Executing commands on %s using %s"
494            % (self.hostname, "TELNET")
495        )
496        telnet_files_saved = []
497        progress_counter = 0
498        list_of_commands = self.ffdc_actions[target_type][sub_type]["COMMANDS"]
499        for index, each_cmd in enumerate(list_of_commands, start=0):
500            command_txt, command_timeout = self.unpack_command(each_cmd)
501            result = self.telnet_remoteclient.execute_command(
502                command_txt, command_timeout
503            )
504            if result:
505                try:
506                    targ_file = self.ffdc_actions[target_type][sub_type][
507                        "FILES"
508                    ][index]
509                except IndexError:
510                    targ_file = command_txt
511                    self.logger.warning(
512                        "\n\t[WARN] Missing filename to store data from"
513                        " telnet %s." % each_cmd
514                    )
515                    self.logger.warning(
516                        "\t[WARN] Data will be stored in %s." % targ_file
517                    )
518                targ_file_with_path = (
519                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
520                )
521                # Creates a new file
522                with open(targ_file_with_path, "w") as fp:
523                    fp.write(result)
524                    fp.close
525                    telnet_files_saved.append(targ_file)
526            progress_counter += 1
527            self.print_progress(progress_counter)
528        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
529        for file in telnet_files_saved:
530            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
531
    def protocol_execute(self, protocol, target_type, sub_type):
        r"""
        Perform actions for a given protocol.

        Runs each configured command (or plugin block) and writes its
        output to the FILES entry at the same index under the run's FFDC
        directory.  A FILES entry of "None" suppresses saving; a missing
        entry falls back to the last path component of the command.

        Description of argument(s):
        protocol            Protocol to execute.
        target_type         OS Type of remote host.
        sub_type            Group type of commands.
        """

        self.logger.info(
            "\n\t[Run] Executing commands to %s using %s"
            % (self.hostname, protocol)
        )
        executed_files_saved = []
        progress_counter = 0
        list_of_cmd = self.get_command_list(
            self.ffdc_actions[target_type][sub_type]
        )
        for index, each_cmd in enumerate(list_of_cmd, start=0):
            plugin_call = False
            # Dict entries are plugin invocations; plain strings are shell
            # tool commands.
            if isinstance(each_cmd, dict):
                if "plugin" in each_cmd:
                    # If the error is set and plugin explicitly
                    # requested to skip execution on error..
                    if plugin_error_dict[
                        "exit_on_error"
                    ] and self.plugin_error_check(each_cmd["plugin"]):
                        self.logger.info(
                            "\n\t[PLUGIN-ERROR] exit_on_error: %s"
                            % plugin_error_dict["exit_on_error"]
                        )
                        self.logger.info(
                            "\t[PLUGIN-SKIP] %s" % each_cmd["plugin"][0]
                        )
                        continue
                    plugin_call = True
                    # call the plugin
                    self.logger.info("\n\t[PLUGIN-START]")
                    result = self.execute_plugin_block(each_cmd["plugin"])
                    self.logger.info("\t[PLUGIN-END]\n")
            else:
                # Expand ${env}/plugin variables in the command string.
                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)

            if not plugin_call:
                result = self.run_tool_cmd(each_cmd)
            # NOTE(review): "result" persists across loop iterations; a dict
            # entry without a "plugin" key would also reach run_tool_cmd()
            # with a dict argument — confirm both are intended.
            if result:
                try:
                    file_name = self.get_file_list(
                        self.ffdc_actions[target_type][sub_type]
                    )[index]
                    # If file is specified as None.
                    if file_name == "None":
                        continue
                    targ_file = self.yaml_env_and_plugin_vars_populate(
                        file_name
                    )
                except IndexError:
                    # No filename configured at this index; derive one from
                    # the command itself.
                    targ_file = each_cmd.split("/")[-1]
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from %s."
                        % each_cmd
                    )
                    self.logger.warning(
                        "\t[WARN] Data will be stored in %s." % targ_file
                    )

                targ_file_with_path = (
                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
                )

                # Creates a new file
                with open(targ_file_with_path, "w") as fp:
                    # Plugin results may be dicts; serialize them as JSON.
                    if isinstance(result, dict):
                        fp.write(json.dumps(result))
                    else:
                        fp.write(result)
                    fp.close  # NOTE(review): missing "()"; harmless no-op — the with block closes fp.
                    executed_files_saved.append(targ_file)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

        for file in executed_files_saved:
            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
619
620    def collect_and_copy_ffdc(
621        self, ffdc_actions_for_target_type, form_filename=False
622    ):
623        r"""
624        Send commands in ffdc_config file to targeted system.
625
626        Description of argument(s):
627        ffdc_actions_for_target_type     Commands and files for the selected
628                                         remote host type.
629        form_filename                    If true, pre-pend self.target_type to
630                                         filename
631        """
632
633        # Executing commands, if any
634        self.ssh_execute_ffdc_commands(
635            ffdc_actions_for_target_type, form_filename
636        )
637
638        # Copying files
639        if self.ssh_remoteclient.scpclient:
640            self.logger.info(
641                "\n\n\tCopying FFDC files from remote system %s.\n"
642                % self.hostname
643            )
644
645            # Retrieving files from target system
646            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
647            self.scp_ffdc(
648                self.ffdc_dir_path,
649                self.ffdc_prefix,
650                form_filename,
651                list_of_files,
652            )
653        else:
654            self.logger.info(
655                "\n\n\tSkip copying FFDC files from remote system %s.\n"
656                % self.hostname
657            )
658
659    def get_command_list(self, ffdc_actions_for_target_type):
660        r"""
661        Fetch list of commands from configuration file
662
663        Description of argument(s):
664        ffdc_actions_for_target_type    Commands and files for the selected
665                                        remote host type.
666        """
667        try:
668            list_of_commands = ffdc_actions_for_target_type["COMMANDS"]
669        except KeyError:
670            list_of_commands = []
671        return list_of_commands
672
673    def get_file_list(self, ffdc_actions_for_target_type):
674        r"""
675        Fetch list of commands from configuration file
676
677        Description of argument(s):
678        ffdc_actions_for_target_type    Commands and files for the selected
679                                        remote host type.
680        """
681        try:
682            list_of_files = ffdc_actions_for_target_type["FILES"]
683        except KeyError:
684            list_of_files = []
685        return list_of_files
686
687    def unpack_command(self, command):
688        r"""
689        Unpack command from config file
690
691        Description of argument(s):
692        command    Command from config file.
693        """
694        if isinstance(command, dict):
695            command_txt = next(iter(command))
696            command_timeout = next(iter(command.values()))
697        elif isinstance(command, str):
698            command_txt = command
699            # Default command timeout 60 seconds
700            command_timeout = 60
701
702        return command_txt, command_timeout
703
704    def ssh_execute_ffdc_commands(
705        self, ffdc_actions_for_target_type, form_filename=False
706    ):
707        r"""
708        Send commands in ffdc_config file to targeted system.
709
710        Description of argument(s):
711        ffdc_actions_for_target_type    Commands and files for the selected
712                                        remote host type.
713        form_filename                   If true, pre-pend self.target_type to
714                                        filename
715        """
716        self.logger.info(
717            "\n\t[Run] Executing commands on %s using %s"
718            % (self.hostname, ffdc_actions_for_target_type["PROTOCOL"][0])
719        )
720
721        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
722        # If command list is empty, returns
723        if not list_of_commands:
724            return
725
726        progress_counter = 0
727        for command in list_of_commands:
728            command_txt, command_timeout = self.unpack_command(command)
729
730            if form_filename:
731                command_txt = str(command_txt % self.target_type)
732
733            (
734                cmd_exit_code,
735                err,
736                response,
737            ) = self.ssh_remoteclient.execute_command(
738                command_txt, command_timeout
739            )
740
741            if cmd_exit_code:
742                self.logger.warning(
743                    "\n\t\t[WARN] %s exits with code %s."
744                    % (command_txt, str(cmd_exit_code))
745                )
746                self.logger.warning("\t\t[WARN] %s " % err)
747
748            progress_counter += 1
749            self.print_progress(progress_counter)
750
751        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
752
    def group_copy(self, ffdc_actions_for_target_type):
        r"""
        scp group of files (wild card) from remote host.

        Each configured command is executed remotely and its stdout is
        treated as a newline-separated list of file paths to copy back.

        Description of argument(s):
        ffdc_actions_for_target_type   Commands and files for the selected
                                       remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info(
                "\n\tCopying files from remote system %s via SCP.\n"
                % self.hostname
            )

            list_of_commands = self.get_command_list(
                ffdc_actions_for_target_type
            )
            # If command list is empty, returns
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    # Expand ${env}/plugin variables in the command string.
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                (
                    cmd_exit_code,
                    err,
                    response,
                ) = self.ssh_remoteclient.execute_command(command)

                # If file does not exist, code take no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    # Response lines are the remote paths to fetch.
                    scp_result = self.ssh_remoteclient.scp_file_from_remote(
                        response.split("\n"), self.ffdc_dir_path
                    )
                    if scp_result:
                        self.logger.info(
                            "\t\tSuccessfully copied from "
                            + self.hostname
                            + ":"
                            + command
                        )
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            # No SCP session was established earlier; skip silently.
            self.logger.info(
                "\n\n\tSkip copying files from remote system %s.\n"
                % self.hostname
            )
809
810    def scp_ffdc(
811        self,
812        targ_dir_path,
813        targ_file_prefix,
814        form_filename,
815        file_list=None,
816        quiet=None,
817    ):
818        r"""
819        SCP all files in file_dict to the indicated directory on the local
820        system.
821
822        Description of argument(s):
823        targ_dir_path                   The path of the directory to receive
824                                        the files.
825        targ_file_prefix                Prefix which will be prepended to each
826                                        target file's name.
827        file_dict                       A dictionary of files to scp from
828                                        targeted system to this system
829
830        """
831
832        progress_counter = 0
833        for filename in file_list:
834            if form_filename:
835                filename = str(filename % self.target_type)
836            source_file_path = filename
837            targ_file_path = (
838                targ_dir_path + targ_file_prefix + filename.split("/")[-1]
839            )
840
841            # If source file name contains wild card, copy filename as is.
842            if "*" in source_file_path:
843                scp_result = self.ssh_remoteclient.scp_file_from_remote(
844                    source_file_path, self.ffdc_dir_path
845                )
846            else:
847                scp_result = self.ssh_remoteclient.scp_file_from_remote(
848                    source_file_path, targ_file_path
849                )
850
851            if not quiet:
852                if scp_result:
853                    self.logger.info(
854                        "\t\tSuccessfully copied from "
855                        + self.hostname
856                        + ":"
857                        + source_file_path
858                        + ".\n"
859                    )
860                else:
861                    self.logger.info(
862                        "\t\tFail to copy from "
863                        + self.hostname
864                        + ":"
865                        + source_file_path
866                        + ".\n"
867                    )
868            else:
869                progress_counter += 1
870                self.print_progress(progress_counter)
871
872    def set_ffdc_default_store_path(self):
873        r"""
874        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
875        Collected ffdc file will be stored in dir
876        /self.location/hostname_timestr/.
877        Individual ffdc file will have timestr_filename.
878
879        Description of class variables:
880        self.ffdc_dir_path  The dir path where collected ffdc data files
881                            should be put.
882
883        self.ffdc_prefix    The prefix to be given to each ffdc file name.
884
885        """
886
887        timestr = time.strftime("%Y%m%d-%H%M%S")
888        self.ffdc_dir_path = (
889            self.location + "/" + self.hostname + "_" + timestr + "/"
890        )
891        self.ffdc_prefix = timestr + "_"
892        self.validate_local_store(self.ffdc_dir_path)
893
894    # Need to verify local store path exists prior to instantiate this class.
895    # This class method is used to share the same code between CLI input parm
896    # and Robot Framework "${EXECDIR}/logs" before referencing this class.
897    @classmethod
898    def validate_local_store(cls, dir_path):
899        r"""
900        Ensure path exists to store FFDC files locally.
901
902        Description of variable:
903        dir_path  The dir path where collected ffdc data files will be stored.
904
905        """
906
907        if not os.path.exists(dir_path):
908            try:
909                os.makedirs(dir_path, 0o755)
910            except (IOError, OSError) as e:
911                # PermissionError
912                if e.errno == EPERM or e.errno == EACCES:
913                    self.logger.error(
914                        "\tERROR: os.makedirs %s failed with"
915                        " PermissionError.\n" % dir_path
916                    )
917                else:
918                    self.logger.error(
919                        "\tERROR: os.makedirs %s failed with %s.\n"
920                        % (dir_path, e.strerror)
921                    )
922                sys.exit(-1)
923
924    def print_progress(self, progress):
925        r"""
926        Print activity progress +
927
928        Description of variable:
929        progress  Progress counter.
930
931        """
932
933        sys.stdout.write("\r\t" + "+" * progress)
934        sys.stdout.flush()
935        time.sleep(0.1)
936
937    def verify_redfish(self):
938        r"""
939        Verify remote host has redfish service active
940
941        """
942        redfish_parm = (
943            "redfishtool -r "
944            + self.hostname
945            + ":"
946            + self.port_https
947            + " -S Always raw GET /redfish/v1/"
948        )
949        return self.run_tool_cmd(redfish_parm, True)
950
951    def verify_ipmi(self):
952        r"""
953        Verify remote host has IPMI LAN service active
954
955        """
956        if self.target_type == "OPENBMC":
957            ipmi_parm = (
958                "ipmitool -I lanplus -C 17  -U "
959                + self.username
960                + " -P "
961                + self.password
962                + " -H "
963                + self.hostname
964                + " -p "
965                + str(self.port_ipmi)
966                + " power status"
967            )
968        else:
969            ipmi_parm = (
970                "ipmitool -I lanplus  -P "
971                + self.password
972                + " -H "
973                + self.hostname
974                + " -p "
975                + str(self.port_ipmi)
976                + " power status"
977            )
978
979        return self.run_tool_cmd(ipmi_parm, True)
980
981    def run_tool_cmd(self, parms_string, quiet=False):
982        r"""
983        Run CLI standard tool or scripts.
984
985        Description of variable:
986        parms_string         tool command options.
987        quiet                do not print tool error message if True
988        """
989
990        result = subprocess.run(
991            [parms_string],
992            stdout=subprocess.PIPE,
993            stderr=subprocess.PIPE,
994            shell=True,
995            universal_newlines=True,
996        )
997
998        if result.stderr and not quiet:
999            self.logger.error("\n\t\tERROR with %s " % parms_string)
1000            self.logger.error("\t\t" + result.stderr)
1001
1002        return result.stdout
1003
1004    def verify_protocol(self, protocol_list):
1005        r"""
1006        Perform protocol working check.
1007
1008        Description of argument(s):
1009        protocol_list        List of protocol.
1010        """
1011
1012        tmp_list = []
1013        if self.target_is_pingable():
1014            tmp_list.append("SHELL")
1015
1016        for protocol in protocol_list:
1017            if self.remote_protocol != "ALL":
1018                if self.remote_protocol != protocol:
1019                    continue
1020
1021            # Only check SSH/SCP once for both protocols
1022            if (
1023                protocol == "SSH"
1024                or protocol == "SCP"
1025                and protocol not in tmp_list
1026            ):
1027                if self.ssh_to_target_system():
1028                    # Add only what user asked.
1029                    if self.remote_protocol != "ALL":
1030                        tmp_list.append(self.remote_protocol)
1031                    else:
1032                        tmp_list.append("SSH")
1033                        tmp_list.append("SCP")
1034
1035            if protocol == "TELNET":
1036                if self.telnet_to_target_system():
1037                    tmp_list.append(protocol)
1038
1039            if protocol == "REDFISH":
1040                if self.verify_redfish():
1041                    tmp_list.append(protocol)
1042                    self.logger.info(
1043                        "\n\t[Check] %s Redfish Service.\t\t [OK]"
1044                        % self.hostname
1045                    )
1046                else:
1047                    self.logger.info(
1048                        "\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]"
1049                        % self.hostname
1050                    )
1051
1052            if protocol == "IPMI":
1053                if self.verify_ipmi():
1054                    tmp_list.append(protocol)
1055                    self.logger.info(
1056                        "\n\t[Check] %s IPMI LAN Service.\t\t [OK]"
1057                        % self.hostname
1058                    )
1059                else:
1060                    self.logger.info(
1061                        "\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]"
1062                        % self.hostname
1063                    )
1064
1065        return tmp_list
1066
1067    def load_env(self):
1068        r"""
1069        Perform protocol working check.
1070
1071        """
1072        # This is for the env vars a user can use in YAML to load
1073        # it at runtime.
1074        # Example YAML:
1075        # -COMMANDS:
1076        #    - my_command ${hostname}  ${username}   ${password}
1077        os.environ["hostname"] = self.hostname
1078        os.environ["username"] = self.username
1079        os.environ["password"] = self.password
1080        os.environ["port_ssh"] = self.port_ssh
1081        os.environ["port_https"] = self.port_https
1082        os.environ["port_ipmi"] = self.port_ipmi
1083
1084        # Append default Env.
1085        self.env_dict["hostname"] = self.hostname
1086        self.env_dict["username"] = self.username
1087        self.env_dict["password"] = self.password
1088        self.env_dict["port_ssh"] = self.port_ssh
1089        self.env_dict["port_https"] = self.port_https
1090        self.env_dict["port_ipmi"] = self.port_ipmi
1091
1092        try:
1093            tmp_env_dict = {}
1094            if self.env_vars:
1095                tmp_env_dict = json.loads(self.env_vars)
1096                # Export ENV vars default.
1097                for key, value in tmp_env_dict.items():
1098                    os.environ[key] = value
1099                    self.env_dict[key] = str(value)
1100
1101            if self.econfig:
1102                with open(self.econfig, "r") as file:
1103                    try:
1104                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
1105                    except yaml.YAMLError as e:
1106                        self.logger.error(e)
1107                        sys.exit(-1)
1108                # Export ENV vars.
1109                for key, value in tmp_env_dict["env_params"].items():
1110                    os.environ[key] = str(value)
1111                    self.env_dict[key] = str(value)
1112        except json.decoder.JSONDecodeError as e:
1113            self.logger.error("\n\tERROR: %s " % e)
1114            sys.exit(-1)
1115
1116        # This to mask the password from displaying on the console.
1117        mask_dict = self.env_dict.copy()
1118        for k, v in mask_dict.items():
1119            if k.lower().find("password") != -1:
1120                hidden_text = []
1121                hidden_text.append(v)
1122                password_regex = (
1123                    "(" + "|".join([re.escape(x) for x in hidden_text]) + ")"
1124                )
1125                mask_dict[k] = re.sub(password_regex, "********", v)
1126
1127        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
1128
1129    def execute_python_eval(self, eval_string):
1130        r"""
1131        Execute qualified python function string using eval.
1132
1133        Description of argument(s):
1134        eval_string        Execute the python object.
1135
1136        Example:
1137                eval(plugin.foo_func.foo_func(10))
1138        """
1139        try:
1140            self.logger.info("\tExecuting plugin func()")
1141            self.logger.debug("\tCall func: %s" % eval_string)
1142            result = eval(eval_string)
1143            self.logger.info("\treturn: %s" % str(result))
1144        except (
1145            ValueError,
1146            SyntaxError,
1147            NameError,
1148            AttributeError,
1149            TypeError,
1150        ) as e:
1151            self.logger.error("\tERROR: execute_python_eval: %s" % e)
1152            # Set the plugin error state.
1153            plugin_error_dict["exit_on_error"] = True
1154            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
1155            return "PLUGIN_EVAL_ERROR"
1156
1157        return result
1158
1159    def execute_plugin_block(self, plugin_cmd_list):
1160        r"""
1161        Pack the plugin command to qualifed python string object.
1162
1163        Description of argument(s):
1164        plugin_list_dict      Plugin block read from YAML
1165                              [{'plugin_name': 'plugin.foo_func.my_func'},
1166                               {'plugin_args': [10]}]
1167
1168        Example:
1169            - plugin:
1170              - plugin_name: plugin.foo_func.my_func
1171              - plugin_args:
1172                - arg1
1173                - arg2
1174
1175            - plugin:
1176              - plugin_name: result = plugin.foo_func.my_func
1177              - plugin_args:
1178                - arg1
1179                - arg2
1180
1181            - plugin:
1182              - plugin_name: result1,result2 = plugin.foo_func.my_func
1183              - plugin_args:
1184                - arg1
1185                - arg2
1186        """
1187        try:
1188            idx = self.key_index_list_dict("plugin_name", plugin_cmd_list)
1189            plugin_name = plugin_cmd_list[idx]["plugin_name"]
1190            # Equal separator means plugin function returns result.
1191            if " = " in plugin_name:
1192                # Ex. ['result', 'plugin.foo_func.my_func']
1193                plugin_name_args = plugin_name.split(" = ")
1194                # plugin func return data.
1195                for arg in plugin_name_args:
1196                    if arg == plugin_name_args[-1]:
1197                        plugin_name = arg
1198                    else:
1199                        plugin_resp = arg.split(",")
1200                        # ['result1','result2']
1201                        for x in plugin_resp:
1202                            global_plugin_list.append(x)
1203                            global_plugin_dict[x] = ""
1204
1205            # Walk the plugin args ['arg1,'arg2']
1206            # If the YAML plugin statement 'plugin_args' is not declared.
1207            if any("plugin_args" in d for d in plugin_cmd_list):
1208                idx = self.key_index_list_dict("plugin_args", plugin_cmd_list)
1209                plugin_args = plugin_cmd_list[idx]["plugin_args"]
1210                if plugin_args:
1211                    plugin_args = self.yaml_args_populate(plugin_args)
1212                else:
1213                    plugin_args = []
1214            else:
1215                plugin_args = self.yaml_args_populate([])
1216
1217            # Pack the args arg1, arg2, .... argn into
1218            # "arg1","arg2","argn"  string as params for function.
1219            parm_args_str = self.yaml_args_string(plugin_args)
1220            if parm_args_str:
1221                plugin_func = plugin_name + "(" + parm_args_str + ")"
1222            else:
1223                plugin_func = plugin_name + "()"
1224
1225            # Execute plugin function.
1226            if global_plugin_dict:
1227                resp = self.execute_python_eval(plugin_func)
1228                # Update plugin vars dict if there is any.
1229                if resp != "PLUGIN_EVAL_ERROR":
1230                    self.response_args_data(resp)
1231            else:
1232                resp = self.execute_python_eval(plugin_func)
1233        except Exception as e:
1234            # Set the plugin error state.
1235            plugin_error_dict["exit_on_error"] = True
1236            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
1237            pass
1238
1239        # There is a real error executing the plugin function.
1240        if resp == "PLUGIN_EVAL_ERROR":
1241            return resp
1242
1243        # Check if plugin_expects_return (int, string, list,dict etc)
1244        if any("plugin_expects_return" in d for d in plugin_cmd_list):
1245            idx = self.key_index_list_dict(
1246                "plugin_expects_return", plugin_cmd_list
1247            )
1248            plugin_expects = plugin_cmd_list[idx]["plugin_expects_return"]
1249            if plugin_expects:
1250                if resp:
1251                    if (
1252                        self.plugin_expect_type(plugin_expects, resp)
1253                        == "INVALID"
1254                    ):
1255                        self.logger.error("\tWARN: Plugin error check skipped")
1256                    elif not self.plugin_expect_type(plugin_expects, resp):
1257                        self.logger.error(
1258                            "\tERROR: Plugin expects return data: %s"
1259                            % plugin_expects
1260                        )
1261                        plugin_error_dict["exit_on_error"] = True
1262                elif not resp:
1263                    self.logger.error(
1264                        "\tERROR: Plugin func failed to return data"
1265                    )
1266                    plugin_error_dict["exit_on_error"] = True
1267
1268        return resp
1269
1270    def response_args_data(self, plugin_resp):
1271        r"""
1272        Parse the plugin function response and update plugin return variable.
1273
1274        plugin_resp       Response data from plugin function.
1275        """
1276        resp_list = []
1277        resp_data = ""
1278
1279        # There is nothing to update the plugin response.
1280        if len(global_plugin_list) == 0 or plugin_resp == "None":
1281            return
1282
1283        if isinstance(plugin_resp, str):
1284            resp_data = plugin_resp.strip("\r\n\t")
1285            resp_list.append(resp_data)
1286        elif isinstance(plugin_resp, bytes):
1287            resp_data = str(plugin_resp, "UTF-8").strip("\r\n\t")
1288            resp_list.append(resp_data)
1289        elif isinstance(plugin_resp, tuple):
1290            if len(global_plugin_list) == 1:
1291                resp_list.append(plugin_resp)
1292            else:
1293                resp_list = list(plugin_resp)
1294                resp_list = [x.strip("\r\n\t") for x in resp_list]
1295        elif isinstance(plugin_resp, list):
1296            if len(global_plugin_list) == 1:
1297                resp_list.append([x.strip("\r\n\t") for x in plugin_resp])
1298            else:
1299                resp_list = [x.strip("\r\n\t") for x in plugin_resp]
1300        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
1301            resp_list.append(plugin_resp)
1302
1303        # Iterate if there is a list of plugin return vars to update.
1304        for idx, item in enumerate(resp_list, start=0):
1305            # Exit loop, done required loop.
1306            if idx >= len(global_plugin_list):
1307                break
1308            # Find the index of the return func in the list and
1309            # update the global func return dictionary.
1310            try:
1311                dict_idx = global_plugin_list[idx]
1312                global_plugin_dict[dict_idx] = item
1313            except (IndexError, ValueError) as e:
1314                self.logger.warn("\tWARN: response_args_data: %s" % e)
1315                pass
1316
1317        # Done updating plugin dict irrespective of pass or failed,
1318        # clear all the list element for next plugin block execute.
1319        global_plugin_list.clear()
1320
1321    def yaml_args_string(self, plugin_args):
1322        r"""
1323        Pack the args into string.
1324
1325        plugin_args            arg list ['arg1','arg2,'argn']
1326        """
1327        args_str = ""
1328        for args in plugin_args:
1329            if args:
1330                if isinstance(args, (int, float)):
1331                    args_str += str(args)
1332                elif args in global_plugin_type_list:
1333                    args_str += str(global_plugin_dict[args])
1334                else:
1335                    args_str += '"' + str(args.strip("\r\n\t")) + '"'
1336            # Skip last list element.
1337            if args != plugin_args[-1]:
1338                args_str += ","
1339        return args_str
1340
1341    def yaml_args_populate(self, yaml_arg_list):
1342        r"""
1343        Decode env and plugin vars and populate.
1344
1345        Description of argument(s):
1346        yaml_arg_list         arg list read from YAML
1347
1348        Example:
1349          - plugin_args:
1350            - arg1
1351            - arg2
1352
1353                  yaml_arg_list:  [arg2, arg2]
1354        """
1355        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1356        env_vars_list = list(self.env_dict)
1357
1358        if isinstance(yaml_arg_list, list):
1359            tmp_list = []
1360            for arg in yaml_arg_list:
1361                if isinstance(arg, (int, float)):
1362                    tmp_list.append(arg)
1363                    continue
1364                elif isinstance(arg, str):
1365                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1366                    tmp_list.append(arg_str)
1367                else:
1368                    tmp_list.append(arg)
1369
1370            # return populated list.
1371            return tmp_list
1372
    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
        r"""
        Update ${MY_VAR} and plugin vars.

        Substitutes ${env_var} references from os.environ, then replaces
        plugin return-variable names with their recorded values, and
        returns the resulting string.

        Description of argument(s):
        yaml_arg_str         arg string read from YAML.

        Example:
            - cat ${MY_VAR}
            - ls -AX my_plugin_var
        """
        # Parse the string for env vars ${env_vars}.
        try:
            # Example, list of matching
            # env vars ['username', 'password', 'hostname']
            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works good.
            var_name_regex = "\\$\\{([^\\}]+)\\}"
            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
            for var in env_var_names_list:
                # KeyError from os.environ (unknown var) is caught below and
                # leaves the ${...} reference unreplaced.
                env_var = os.environ[var]
                env_replace = "${" + var + "}"
                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
        except Exception as e:
            # NOTE(review): if this fires before env_var_names_list is
            # assigned, the loop below would raise NameError on it —
            # confirm this path cannot occur in practice.
            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
            pass

        # Parse the string for plugin vars.
        try:
            # Example, list of plugin vars ['my_username', 'my_data']
            plugin_var_name_list = global_plugin_dict.keys()
            for var in plugin_var_name_list:
                # skip env var list already populated above code block list.
                if var in env_var_names_list:
                    continue
                # If this plugin var exist but empty in dict, don't replace.
                # This is either a YAML plugin statement incorrectly used or
                # user added a plugin var which is not going to be populated.
                if yaml_arg_str in global_plugin_dict:
                    # NOTE(review): this membership test uses the WHOLE arg
                    # string as the dict key (not "var") — presumably
                    # intended for args consisting of a single plugin var;
                    # verify against YAML usage.
                    if isinstance(global_plugin_dict[var], (list, dict)):
                        # List data type or dict can't be replaced, use
                        # directly in eval function call.
                        global_plugin_type_list.append(var)
                    else:
                        yaml_arg_str = yaml_arg_str.replace(
                            str(var), str(global_plugin_dict[var])
                        )
                # Just a string like filename or command.
                else:
                    yaml_arg_str = yaml_arg_str.replace(
                        str(var), str(global_plugin_dict[var])
                    )
        except (IndexError, ValueError) as e:
            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
            pass

        return yaml_arg_str
1429
1430    def plugin_error_check(self, plugin_dict):
1431        r"""
1432        Plugin error dict processing.
1433
1434        Description of argument(s):
1435        plugin_dict        Dictionary of plugin error.
1436        """
1437        if any("plugin_error" in d for d in plugin_dict):
1438            for d in plugin_dict:
1439                if "plugin_error" in d:
1440                    value = d["plugin_error"]
1441                    # Reference if the error is set or not by plugin.
1442                    return plugin_error_dict[value]
1443
1444    def key_index_list_dict(self, key, list_dict):
1445        r"""
1446        Iterate list of dictionary and return index if the key match is found.
1447
1448        Description of argument(s):
1449        key           Valid Key in a dict.
1450        list_dict     list of dictionary.
1451        """
1452        for i, d in enumerate(list_dict):
1453            if key in d.keys():
1454                return i
1455
1456    def plugin_expect_type(self, type, data):
1457        r"""
1458        Plugin expect directive type check.
1459        """
1460        if type == "int":
1461            return isinstance(data, int)
1462        elif type == "float":
1463            return isinstance(data, float)
1464        elif type == "str":
1465            return isinstance(data, str)
1466        elif type == "list":
1467            return isinstance(data, list)
1468        elif type == "dict":
1469            return isinstance(data, dict)
1470        elif type == "tuple":
1471            return isinstance(data, tuple)
1472        else:
1473            self.logger.info("\tInvalid data type requested: %s" % type)
1474            return "INVALID"
1475