xref: /openbmc/openbmc-test-automation/ffdc/ffdc_collector.py (revision ac15572023ce70ddb104603b0d09a9277a5168d7)
1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7import json
8import logging
9import os
10import platform
11import re
12import subprocess
13import sys
14import time
15from errno import EACCES, EPERM
16
17import yaml
18
19script_dir = os.path.dirname(os.path.abspath(__file__))
20sys.path.append(script_dir)
21# Walk path and append to sys.path
22for root, dirs, files in os.walk(script_dir):
23    for dir in dirs:
24        sys.path.append(os.path.join(root, dir))
25
26from ssh_utility import SSHRemoteclient  # NOQA
27from telnet_utility import TelnetRemoteclient  # NOQA
28
29r"""
30User-defined plugin python functions.
31
32Plugin modules are imported from the plugins directory:
33
34plugins
35├── file1.py
36└── file2.py
37
38Example of how to call a plugin from YAML:
39 - plugin:
40   - plugin_name: plugin.foo_func.foo_func_yaml
41   - plugin_args:
42     - arg1
43     - arg2
44"""
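# A plugin module under the plugins directory is plain Python.  For
# illustration only, a hypothetical plugins/foo_func.py matching the YAML
# example above could contain:
#
#     def foo_func_yaml(*args):
#         # Return collected data back to the YAML plugin block.
#         return "some data"
#
# It is then callable from YAML as plugin.foo_func.foo_func_yaml.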
45plugin_dir = os.path.join(script_dir, "plugins")
46sys.path.append(plugin_dir)
47try:
48    for module in os.listdir(plugin_dir):
49        if module == "__init__.py" or module[-3:] != ".py":
50            continue
51        plugin_module = "plugins." + module[:-3]
52        # To access the module plugin.<module name>.<function>
53        # Example: plugin.foo_func.foo_func_yaml()
54        try:
55            plugin = __import__(plugin_module, globals(), locals(), [], 0)
56        except Exception as e:
57            print("PLUGIN: Exception: %s" % e)
58            print("PLUGIN: Module import failed: %s" % module)
59            pass
60except FileNotFoundError as e:
61    print("PLUGIN: %s" % e)
62    pass
63
64r"""
65This supports plugin functions that return data or responses to the
66caller in the YAML plugin setup.
67
68Example:
69
70    - plugin:
71      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
72      - plugin_args:
73        - ${hostname}
74        - ${username}
75        - ${password}
76        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
77    - plugin:
78      - plugin_name: plugin.print_vars.print_vars
79      - plugin_args:
80        - version
81
82Here the "version" variable returned by the first plugin is consumed by
83the second plugin block in the YAML.
84
85"""
86global global_log_store_path
87global global_plugin_dict
88global global_plugin_list
89
90# Hold plugin return values in a dict and plugin return variable names in a
91# list.  The dict is used to reference and update variables during parsing,
92# whereas the list holds the current plugin block vars to be processed.
93global_plugin_dict = {}
94global_plugin_list = []
95
96# Hold the declared plugin return variable names whose returned values are
97# of type list or dict.
98# Refer to this name list to look up the plugin dict when building eval() args.
99# Example: ['version']
100global_plugin_type_list = []
101
102# Path where logs are to be stored or written.
103global_log_store_path = ""
104
105# Plugin error state defaults.
106plugin_error_dict = {
107    "exit_on_error": False,
108    "continue_on_error": False,
109}
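
# A YAML plugin block can reference these keys through a 'plugin_error'
# directive (see plugin_error_check below).  An illustrative block:
#   - plugin:
#     - plugin_name: plugin.foo_func.my_func
#     - plugin_error: exit_on_error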
110
111
112class ffdc_collector:
113    r"""
114    Execute commands from configuration file to collect log files.
115    Fetch and store generated files at the specified location.
116
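    Example usage (a minimal sketch; the host, credentials, config path and
    log location below are illustrative assumptions, not defaults):

        collector = ffdc_collector("hostname", "username", "password",
                                   22, 443, 623, "ffdc_config.yaml",
                                   "/tmp/ffdc", "OPENBMC", "ALL",
                                   None, None, "INFO")
        collector.collect_ffdc()
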
117    """
118
119    def __init__(
120        self,
121        hostname,
122        username,
123        password,
124        port_ssh,
125        port_https,
126        port_ipmi,
127        ffdc_config,
128        location,
129        remote_type,
130        remote_protocol,
131        env_vars,
132        econfig,
133        log_level,
134    ):
135        r"""
136        Description of argument(s):
137
138        hostname            Name/ip of the targeted (remote) system
139        username            User on the targeted system with access to
140                            FFDC files
141        password            Password for user on targeted system
142        port_ssh            SSH port value. By default 22
143        port_https          HTTPS port value. By default 443
144        port_ipmi           IPMI port value. By default 623
145        ffdc_config         Configuration file listing commands and files
146                            for FFDC
147        location            Where to store collected FFDC
148        remote_type         OS type of the remote host
149        remote_protocol     Protocol to use to collect data
150        env_vars            User-defined CLI env vars '{"key" : "value"}'
151        econfig             User-defined env vars YAML file
152        log_level           Logging level for this script (e.g. DEBUG, INFO)
153        """
154
155        self.hostname = hostname
156        self.username = username
157        self.password = password
158        self.port_ssh = str(port_ssh)
159        self.port_https = str(port_https)
160        self.port_ipmi = str(port_ipmi)
161        self.ffdc_config = ffdc_config
162        self.location = location + "/" + remote_type.upper()
163        self.ssh_remoteclient = None
164        self.telnet_remoteclient = None
165        self.ffdc_dir_path = ""
166        self.ffdc_prefix = ""
167        self.target_type = remote_type.upper()
168        self.remote_protocol = remote_protocol.upper()
169        self.env_vars = env_vars
170        self.econfig = econfig
171        self.start_time = 0
172        self.elapsed_time = ""
173        self.logger = None
174
175        # Set prefix values for scp files and directory.
176        # Since the time stamp is at second granularity, these values are set
177        # here to be sure that all files for this run will have the same
178        # timestamp and will be saved in the same directory.
179        # self.location == local system for now
180        self.set_ffdc_default_store_path()
181
182        # Logger for this run.  Must run after set_ffdc_default_store_path().
183        self.script_logging(getattr(logging, log_level.upper()))
184
185        # Verify top level directory exists for storage
186        self.validate_local_store(self.location)
187
188        if self.verify_script_env():
189            # Load the default or user-defined YAML configuration file.
190            with open(self.ffdc_config, "r") as file:
191                try:
192                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
193                except yaml.YAMLError as e:
194                    self.logger.error(e)
195                    sys.exit(-1)
196
197            if self.target_type not in self.ffdc_actions.keys():
198                self.logger.error(
199                    "\n\tERROR: %s is not listed in %s.\n\n"
200                    % (self.target_type, self.ffdc_config)
201                )
202                sys.exit(-1)
203        else:
204            sys.exit(-1)
205
206        # Load ENV vars from user.
207        self.logger.info("\n\tENV: User-defined input YAML variables")
208        self.env_dict = {}
209        self.load_env()
210
211    def verify_script_env(self):
212        # Imported here only to log package versions.
213        import click
214        import paramiko
215
216        run_env_ok = True
217
218        try:
219            redfishtool_version = (
220                self.run_tool_cmd("redfishtool -V").split(" ")[2].strip("\n")
221            )
222        except Exception as e:
223            self.logger.error("\tEXCEPTION redfishtool: %s", e)
224            redfishtool_version = "Not Installed (optional)"
225
226        try:
227            ipmitool_version = self.run_tool_cmd("ipmitool -V").split(" ")[2]
228        except Exception as e:
229            self.logger.error("\tEXCEPTION ipmitool: %s", e)
230            ipmitool_version = "Not Installed (optional)"
231
232        self.logger.info("\n\t---- Script host environment ----")
233        self.logger.info(
234            "\t{:<10}  {:<10}".format("Script hostname", os.uname()[1])
235        )
236        self.logger.info(
237            "\t{:<10}  {:<10}".format("Script host os", platform.platform())
238        )
239        self.logger.info(
240            "\t{:<10}  {:>10}".format("Python", platform.python_version())
241        )
242        self.logger.info("\t{:<10}  {:>10}".format("PyYAML", yaml.__version__))
243        self.logger.info("\t{:<10}  {:>10}".format("click", click.__version__))
244        self.logger.info(
245            "\t{:<10}  {:>10}".format("paramiko", paramiko.__version__)
246        )
247        self.logger.info(
248            "\t{:<10}  {:>9}".format("redfishtool", redfishtool_version)
249        )
250        self.logger.info(
251            "\t{:<10}  {:>12}".format("ipmitool", ipmitool_version)
252        )
253
254        if tuple(map(int, yaml.__version__.split(".")[:3])) < (5, 3, 0):
255            self.logger.error(
256                "\n\tERROR: Python or python packages do not meet minimum"
257                " version requirement."
258            )
259            self.logger.error(
260                "\tERROR: PyYAML version 5.3.0 or higher is needed.\n"
261            )
262            run_env_ok = False
263
264        self.logger.info("\t---- End script host environment ----")
265        return run_env_ok
266
267    def script_logging(self, log_level_attr):
268        r"""
269        Create logger
270
271        """
272        self.logger = logging.getLogger()
273        self.logger.setLevel(log_level_attr)
274        log_file_handler = logging.FileHandler(
275            self.ffdc_dir_path + "collector.log"
276        )
277
278        stdout_handler = logging.StreamHandler(sys.stdout)
279        self.logger.addHandler(log_file_handler)
280        self.logger.addHandler(stdout_handler)
281
282        # Turn off paramiko INFO logging
283        logging.getLogger("paramiko").setLevel(logging.WARNING)
284
285    def target_is_pingable(self):
286        r"""
287        Check if target system is ping-able.
288
289        """
290        response = os.system("ping -c 1 %s >/dev/null 2>&1" % self.hostname)
291        if response == 0:
292            self.logger.info(
293                "\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname
294            )
295            return True
296        else:
297            self.logger.error(
298                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n"
299                % self.hostname
300            )
301            sys.exit(-1)
302
303    def collect_ffdc(self):
304        r"""
305        Initiate FFDC Collection depending on requested protocol.
306
307        """
308
309        self.logger.info(
310            "\n\t---- Start communicating with %s ----" % self.hostname
311        )
312        self.start_time = time.time()
313
314        # Find the list of protocols supported for this target type.
315        check_protocol_list = []
316        config_dict = self.ffdc_actions
317
318        for target_type in config_dict.keys():
319            if self.target_type != target_type:
320                continue
321
322            for k, v in config_dict[target_type].items():
323                if (
324                    config_dict[target_type][k]["PROTOCOL"][0]
325                    not in check_protocol_list
326                ):
327                    check_protocol_list.append(
328                        config_dict[target_type][k]["PROTOCOL"][0]
329                    )
330
331        self.logger.info(
332            "\n\t %s protocol type: %s"
333            % (self.target_type, check_protocol_list)
334        )
335
336        verified_working_protocol = self.verify_protocol(check_protocol_list)
337
338        if verified_working_protocol:
339            self.logger.info(
340                "\n\t---- Completed protocol pre-requisite check ----\n"
341            )
342
343        # Verify top level directory exists for storage
344        self.validate_local_store(self.location)
345
346        if (self.remote_protocol not in verified_working_protocol) and (
347            self.remote_protocol != "ALL"
348        ):
349            self.logger.info(
350                "\n\tWorking protocol list: %s" % verified_working_protocol
351            )
352            self.logger.error(
353                "\tERROR: Requested protocol %s is not in working protocol"
354                " list.\n" % self.remote_protocol
355            )
356            sys.exit(-1)
357        else:
358            self.generate_ffdc(verified_working_protocol)
359
360    def ssh_to_target_system(self):
361        r"""
362        Open a ssh connection to targeted system.
363
364        """
365
366        self.ssh_remoteclient = SSHRemoteclient(
367            self.hostname, self.username, self.password, self.port_ssh
368        )
369
370        if self.ssh_remoteclient.ssh_remoteclient_login():
371            self.logger.info(
372                "\n\t[Check] %s SSH connection established.\t [OK]"
373                % self.hostname
374            )
375
376            # Check scp connection.
377            # If scp connection fails,
378            # continue with FFDC generation but skip scp files to local host.
379            self.ssh_remoteclient.scp_connection()
380            return True
381        else:
382            self.logger.info(
383                "\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]"
384                % self.hostname
385            )
386            return False
387
388    def telnet_to_target_system(self):
389        r"""
390        Open a telnet connection to targeted system.
391        """
392        self.telnet_remoteclient = TelnetRemoteclient(
393            self.hostname, self.username, self.password
394        )
395        if self.telnet_remoteclient.tn_remoteclient_login():
396            self.logger.info(
397                "\n\t[Check] %s Telnet connection established.\t [OK]"
398                % self.hostname
399            )
400            return True
401        else:
402            self.logger.info(
403                "\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]"
404                % self.hostname
405            )
406            return False
407
408    def generate_ffdc(self, working_protocol_list):
409        r"""
410        Determine actions based on remote host type
411
412        Description of argument(s):
413        working_protocol_list    List of confirmed working protocols to
414                                 connect to remote host.
415        """
416
417        self.logger.info(
418            "\n\t---- Executing commands on " + self.hostname + " ----"
419        )
420        self.logger.info(
421            "\n\tWorking protocol list: %s" % working_protocol_list
422        )
423
424        config_dict = self.ffdc_actions
425        for target_type in config_dict.keys():
426            if self.target_type != target_type:
427                continue
428
429            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
430            global_plugin_dict["global_log_store_path"] = self.ffdc_dir_path
431            self.logger.info("\tSystem Type: %s" % target_type)
432            for k, v in config_dict[target_type].items():
433                if (
434                    self.remote_protocol not in working_protocol_list
435                    and self.remote_protocol != "ALL"
436                ):
437                    continue
438
439                protocol = config_dict[target_type][k]["PROTOCOL"][0]
440
441                if protocol not in working_protocol_list:
442                    continue
443
444                if protocol in working_protocol_list:
445                    if protocol == "SSH" or protocol == "SCP":
446                        self.protocol_ssh(protocol, target_type, k)
447                    elif protocol == "TELNET":
448                        self.protocol_telnet(target_type, k)
449                    elif (
450                        protocol == "REDFISH"
451                        or protocol == "IPMI"
452                        or protocol == "SHELL"
453                    ):
454                        self.protocol_execute(protocol, target_type, k)
455                else:
456                    self.logger.error(
457                        "\n\tERROR: %s is not available for %s."
458                        % (protocol, self.hostname)
459                    )
460
461        # Close network connection after collecting all files
462        self.elapsed_time = time.strftime(
463            "%H:%M:%S", time.gmtime(time.time() - self.start_time)
464        )
465        if self.ssh_remoteclient:
466            self.ssh_remoteclient.ssh_remoteclient_disconnect()
467        if self.telnet_remoteclient:
468            self.telnet_remoteclient.tn_remoteclient_disconnect()
469
470    def protocol_ssh(self, protocol, target_type, sub_type):
471        r"""
472        Perform actions using SSH and SCP protocols.
473
474        Description of argument(s):
475        protocol            Protocol to execute.
476        target_type         OS Type of remote host.
477        sub_type            Group type of commands.
478        """
479
480        if protocol == "SCP":
481            self.group_copy(self.ffdc_actions[target_type][sub_type])
482        else:
483            self.collect_and_copy_ffdc(
484                self.ffdc_actions[target_type][sub_type]
485            )
486
487    def protocol_telnet(self, target_type, sub_type):
488        r"""
489        Perform actions using telnet protocol.
490        Description of argument(s):
491        target_type          OS Type of remote host.
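        sub_type             Group type of commands.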
492        """
493        self.logger.info(
494            "\n\t[Run] Executing commands on %s using %s"
495            % (self.hostname, "TELNET")
496        )
497        telnet_files_saved = []
498        progress_counter = 0
499        list_of_commands = self.ffdc_actions[target_type][sub_type]["COMMANDS"]
500        for index, each_cmd in enumerate(list_of_commands, start=0):
501            command_txt, command_timeout = self.unpack_command(each_cmd)
502            result = self.telnet_remoteclient.execute_command(
503                command_txt, command_timeout
504            )
505            if result:
506                try:
507                    targ_file = self.ffdc_actions[target_type][sub_type][
508                        "FILES"
509                    ][index]
510                except IndexError:
511                    targ_file = command_txt
512                    self.logger.warning(
513                        "\n\t[WARN] Missing filename to store data from"
514                        " telnet %s." % each_cmd
515                    )
516                    self.logger.warning(
517                        "\t[WARN] Data will be stored in %s." % targ_file
518                    )
519                targ_file_with_path = (
520                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
521                )
522                # Creates a new file
523                with open(targ_file_with_path, "w") as fp:
524                    fp.write(result)
525                    # fp is closed automatically by the "with" block.
526                    telnet_files_saved.append(targ_file)
527            progress_counter += 1
528            self.print_progress(progress_counter)
529        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
530        for file in telnet_files_saved:
531            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
532
533    def protocol_execute(self, protocol, target_type, sub_type):
534        r"""
535        Perform actions for a given protocol.
536
537        Description of argument(s):
538        protocol            Protocol to execute.
539        target_type         OS Type of remote host.
540        sub_type            Group type of commands.
541        """
542
543        self.logger.info(
544            "\n\t[Run] Executing commands to %s using %s"
545            % (self.hostname, protocol)
546        )
547        executed_files_saved = []
548        progress_counter = 0
549        list_of_cmd = self.get_command_list(
550            self.ffdc_actions[target_type][sub_type]
551        )
552        for index, each_cmd in enumerate(list_of_cmd, start=0):
553            plugin_call = False
554            if isinstance(each_cmd, dict):
555                if "plugin" in each_cmd:
556                    # If the error is set and plugin explicitly
557                    # requested to skip execution on error..
558                    if plugin_error_dict[
559                        "exit_on_error"
560                    ] and self.plugin_error_check(each_cmd["plugin"]):
561                        self.logger.info(
562                            "\n\t[PLUGIN-ERROR] exit_on_error: %s"
563                            % plugin_error_dict["exit_on_error"]
564                        )
565                        self.logger.info(
566                            "\t[PLUGIN-SKIP] %s" % each_cmd["plugin"][0]
567                        )
568                        continue
569                    plugin_call = True
570                    # call the plugin
571                    self.logger.info("\n\t[PLUGIN-START]")
572                    result = self.execute_plugin_block(each_cmd["plugin"])
573                    self.logger.info("\t[PLUGIN-END]\n")
574            else:
575                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
576
577            if not plugin_call:
578                result = self.run_tool_cmd(each_cmd)
579            if result:
580                try:
581                    file_name = self.get_file_list(
582                        self.ffdc_actions[target_type][sub_type]
583                    )[index]
584                    # If file is specified as None.
585                    if file_name == "None":
586                        continue
587                    targ_file = self.yaml_env_and_plugin_vars_populate(
588                        file_name
589                    )
590                except IndexError:
591                    targ_file = each_cmd.split("/")[-1]
592                    self.logger.warning(
593                        "\n\t[WARN] Missing filename to store data from %s."
594                        % each_cmd
595                    )
596                    self.logger.warning(
597                        "\t[WARN] Data will be stored in %s." % targ_file
598                    )
599
600                targ_file_with_path = (
601                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
602                )
603
604                # Creates a new file
605                with open(targ_file_with_path, "w") as fp:
606                    if isinstance(result, dict):
607                        fp.write(json.dumps(result))
608                    else:
609                        fp.write(result)
610                    # fp is closed automatically by the "with" block.
611                    executed_files_saved.append(targ_file)
612
613            progress_counter += 1
614            self.print_progress(progress_counter)
615
616        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
617
618        for file in executed_files_saved:
619            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
620
621    def collect_and_copy_ffdc(
622        self, ffdc_actions_for_target_type, form_filename=False
623    ):
624        r"""
625        Send commands in ffdc_config file to targeted system.
626
627        Description of argument(s):
628        ffdc_actions_for_target_type     Commands and files for the selected
629                                         remote host type.
630        form_filename                    If true, pre-pend self.target_type to
631                                         filename
632        """
633
634        # Executing commands, if any
635        self.ssh_execute_ffdc_commands(
636            ffdc_actions_for_target_type, form_filename
637        )
638
639        # Copying files
640        if self.ssh_remoteclient.scpclient:
641            self.logger.info(
642                "\n\n\tCopying FFDC files from remote system %s.\n"
643                % self.hostname
644            )
645
646            # Retrieving files from target system
647            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
648            self.scp_ffdc(
649                self.ffdc_dir_path,
650                self.ffdc_prefix,
651                form_filename,
652                list_of_files,
653            )
654        else:
655            self.logger.info(
656                "\n\n\tSkip copying FFDC files from remote system %s.\n"
657                % self.hostname
658            )
659
660    def get_command_list(self, ffdc_actions_for_target_type):
661        r"""
662        Fetch list of commands from configuration file
663
664        Description of argument(s):
665        ffdc_actions_for_target_type    Commands and files for the selected
666                                        remote host type.
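
        The expected YAML shape (sub-type name and values are illustrative,
        not taken from a shipped config):

            OPENBMC:
                GENERAL:
                    PROTOCOL:
                        - 'SSH'
                    COMMANDS:
                        - 'cat /etc/os-release'
                    FILES:
                        - 'os_release.txt'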
667        """
668        try:
669            list_of_commands = ffdc_actions_for_target_type["COMMANDS"]
670        except KeyError:
671            list_of_commands = []
672        return list_of_commands
673
674    def get_file_list(self, ffdc_actions_for_target_type):
675        r"""
676        Fetch list of files from configuration file
677
678        Description of argument(s):
679        ffdc_actions_for_target_type    Commands and files for the selected
680                                        remote host type.
681        """
682        try:
683            list_of_files = ffdc_actions_for_target_type["FILES"]
684        except KeyError:
685            list_of_files = []
686        return list_of_files
687
688    def unpack_command(self, command):
689        r"""
690        Unpack command from config file
691
692        Description of argument(s):
693        command    Command from config file.
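
        Example (commands illustrative; a plain string gets the default
        60 second timeout, a dict supplies its own):
            "cat /etc/os-release"        -> ("cat /etc/os-release", 60)
            {"cat /etc/os-release": 30}  -> ("cat /etc/os-release", 30)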
694        """
695        if isinstance(command, dict):
696            command_txt = next(iter(command))
697            command_timeout = next(iter(command.values()))
698        elif isinstance(command, str):
699            command_txt = command
700            # Default command timeout 60 seconds
701            command_timeout = 60
702
703        return command_txt, command_timeout
704
705    def ssh_execute_ffdc_commands(
706        self, ffdc_actions_for_target_type, form_filename=False
707    ):
708        r"""
709        Send commands in ffdc_config file to targeted system.
710
711        Description of argument(s):
712        ffdc_actions_for_target_type    Commands and files for the selected
713                                        remote host type.
714        form_filename                   If true, pre-pend self.target_type to
715                                        filename
716        """
717        self.logger.info(
718            "\n\t[Run] Executing commands on %s using %s"
719            % (self.hostname, ffdc_actions_for_target_type["PROTOCOL"][0])
720        )
721
722        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
723        # If the command list is empty, return.
724        if not list_of_commands:
725            return
726
727        progress_counter = 0
728        for command in list_of_commands:
729            command_txt, command_timeout = self.unpack_command(command)
730
731            if form_filename:
732                command_txt = str(command_txt % self.target_type)
733
734            (
735                cmd_exit_code,
736                err,
737                response,
738            ) = self.ssh_remoteclient.execute_command(
739                command_txt, command_timeout
740            )
741
742            if cmd_exit_code:
743                self.logger.warning(
744                    "\n\t\t[WARN] %s exits with code %s."
745                    % (command_txt, str(cmd_exit_code))
746                )
747                self.logger.warning("\t\t[WARN] %s " % err)
748
749            progress_counter += 1
750            self.print_progress(progress_counter)
751
752        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
753
754    def group_copy(self, ffdc_actions_for_target_type):
755        r"""
756        scp group of files (wild card) from remote host.
757
758        Description of argument(s):
759        ffdc_actions_for_target_type   Commands and files for the selected
760                                       remote host type.
761        """
762
763        if self.ssh_remoteclient.scpclient:
764            self.logger.info(
765                "\n\tCopying files from remote system %s via SCP.\n"
766                % self.hostname
767            )
768
769            list_of_commands = self.get_command_list(
770                ffdc_actions_for_target_type
771            )
772            # If the command list is empty, return.
773            if not list_of_commands:
774                return
775
776            for command in list_of_commands:
777                try:
778                    command = self.yaml_env_and_plugin_vars_populate(command)
779                except IndexError:
780                    self.logger.error("\t\tInvalid command %s" % command)
781                    continue
782
783                (
784                    cmd_exit_code,
785                    err,
786                    response,
787                ) = self.ssh_remoteclient.execute_command(command)
788
789                # If the file does not exist, the code takes no action.
790                # cmd_exit_code is ignored for this scenario.
791                if response:
792                    scp_result = self.ssh_remoteclient.scp_file_from_remote(
793                        response.split("\n"), self.ffdc_dir_path
794                    )
795                    if scp_result:
796                        self.logger.info(
797                            "\t\tSuccessfully copied from "
798                            + self.hostname
799                            + ":"
800                            + command
801                        )
802                else:
803                    self.logger.info("\t\t%s has no result" % command)
804
805        else:
806            self.logger.info(
807                "\n\n\tSkip copying files from remote system %s.\n"
808                % self.hostname
809            )
810
811    def scp_ffdc(
812        self,
813        targ_dir_path,
814        targ_file_prefix,
815        form_filename,
816        file_list=None,
817        quiet=None,
818    ):
819        r"""
820        SCP all files in file_list to the indicated directory on the local
821        system.
822
823        Description of argument(s):
824        targ_dir_path                   The path of the directory to receive
825                                        the files.
826        targ_file_prefix                Prefix which will be prepended to each
827                                        target file's name.
828        file_list                       A list of files to scp from the
829                                        targeted system to this system
830
831        """
832
833        progress_counter = 0
834        for filename in file_list:
835            if form_filename:
836                filename = str(filename % self.target_type)
837            source_file_path = filename
838            targ_file_path = (
839                targ_dir_path + targ_file_prefix + filename.split("/")[-1]
840            )
841
842            # If source file name contains wild card, copy filename as is.
843            if "*" in source_file_path:
844                scp_result = self.ssh_remoteclient.scp_file_from_remote(
845                    source_file_path, self.ffdc_dir_path
846                )
847            else:
848                scp_result = self.ssh_remoteclient.scp_file_from_remote(
849                    source_file_path, targ_file_path
850                )
851
852            if not quiet:
853                if scp_result:
854                    self.logger.info(
855                        "\t\tSuccessfully copied from "
856                        + self.hostname
857                        + ":"
858                        + source_file_path
859                        + ".\n"
860                    )
861                else:
862                    self.logger.info(
863                        "\t\tFailed to copy from "
864                        + self.hostname
865                        + ":"
866                        + source_file_path
867                        + ".\n"
868                    )
869            else:
870                progress_counter += 1
871                self.print_progress(progress_counter)
872
873    def set_ffdc_default_store_path(self):
874        r"""
875        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
876        Collected ffdc files will be stored in the directory
877        /self.location/hostname_timestr/.
878        Individual ffdc files will be named timestr_filename.
879
880        Description of class variables:
881        self.ffdc_dir_path  The dir path where collected ffdc data files
882                            should be put.
883
884        self.ffdc_prefix    The prefix to be given to each ffdc file name.
885
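        For example, with an illustrative timestamp of 20230131-120000 and
        hostname "bmc1", files are stored under
        <self.location>/bmc1_20230131-120000/ and each file name is prefixed
        with 20230131-120000_.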
886        """
887
888        timestr = time.strftime("%Y%m%d-%H%M%S")
889        self.ffdc_dir_path = (
890            self.location + "/" + self.hostname + "_" + timestr + "/"
891        )
892        self.ffdc_prefix = timestr + "_"
893        self.validate_local_store(self.ffdc_dir_path)
894
895    # Need to verify the local store path exists prior to instantiating this
896    # class.  This class method is used to share the same code between the CLI
897    # input parm and Robot Framework "${EXECDIR}/logs" before referencing it.
898    @classmethod
899    def validate_local_store(cls, dir_path):
900        r"""
901        Ensure path exists to store FFDC files locally.
902
903        Description of variable:
904        dir_path  The dir path where collected ffdc data files will be stored.
905
906        """
907
908        if not os.path.exists(dir_path):
909            try:
910                os.makedirs(dir_path, 0o755)
911            except (IOError, OSError) as e:
912                # PermissionError
913                if e.errno == EPERM or e.errno == EACCES:
914                    print(
915                        "\tERROR: os.makedirs %s failed with"
916                        " PermissionError.\n" % dir_path
917                    )
918                else:
919                    print(
920                        "\tERROR: os.makedirs %s failed with %s.\n"
921                        % (dir_path, e.strerror)
922                    )
923                sys.exit(-1)
924
925    def print_progress(self, progress):
926        r"""
927        Print an activity progress indicator ("+").
928
929        Description of variable:
930        progress  Progress counter.
931
932        """
933
934        sys.stdout.write("\r\t" + "+" * progress)
935        sys.stdout.flush()
936        time.sleep(0.1)
937
938    def verify_redfish(self):
939        r"""
940        Verify remote host has redfish service active
941
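        The command assembled below has the form (host and port illustrative):
            redfishtool -r <hostname>:<port_https> -S Always raw GET /redfish/v1/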
942        """
943        redfish_parm = (
944            "redfishtool -r "
945            + self.hostname
946            + ":"
947            + self.port_https
948            + " -S Always raw GET /redfish/v1/"
949        )
950        return self.run_tool_cmd(redfish_parm, True)
951
952    def verify_ipmi(self):
953        r"""
954        Verify remote host has IPMI LAN service active
955
956        """
957        if self.target_type == "OPENBMC":
958            ipmi_parm = (
959                "ipmitool -I lanplus -C 17  -U "
960                + self.username
961                + " -P "
962                + self.password
963                + " -H "
964                + self.hostname
965                + " -p "
966                + str(self.port_ipmi)
967                + " power status"
968            )
969        else:
970            ipmi_parm = (
971                "ipmitool -I lanplus  -P "
972                + self.password
973                + " -H "
974                + self.hostname
975                + " -p "
976                + str(self.port_ipmi)
977                + " power status"
978            )
979
980        return self.run_tool_cmd(ipmi_parm, True)
981
982    def run_tool_cmd(self, parms_string, quiet=False):
983        r"""
984        Run CLI standard tool or scripts.
985
986        Description of variable:
987        parms_string         The complete tool command line to run.
988        quiet                Do not print the tool error message if True.
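
        Examples (patterned after calls made elsewhere in this class):
            self.run_tool_cmd("redfishtool -V")
            self.run_tool_cmd("ipmitool -V", True)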
989        """
990
991        result = subprocess.run(
992            [parms_string],
993            stdout=subprocess.PIPE,
994            stderr=subprocess.PIPE,
995            shell=True,
996            universal_newlines=True,
997        )
998
999        if result.stderr and not quiet:
1000            self.logger.error("\n\t\tERROR with %s " % parms_string)
1001            self.logger.error("\t\t" + result.stderr)
1002
1003        return result.stdout
1004
1005    def verify_protocol(self, protocol_list):
1006        r"""
1007        Perform protocol working check.
1008
1009        Description of argument(s):
1010        protocol_list        List of protocol.
1011        """
1012
1013        tmp_list = []
1014        if self.target_is_pingable():
1015            tmp_list.append("SHELL")
1016
1017        for protocol in protocol_list:
1018            if self.remote_protocol != "ALL":
1019                if self.remote_protocol != protocol:
1020                    continue
1021
1022            # Only check SSH/SCP once for both protocols
1023            if (
1024                (protocol == "SSH"
1025                 or protocol == "SCP")
1026                and protocol not in tmp_list
1027            ):
1028                if self.ssh_to_target_system():
1029                    # Add only what user asked.
1030                    if self.remote_protocol != "ALL":
1031                        tmp_list.append(self.remote_protocol)
1032                    else:
1033                        tmp_list.append("SSH")
1034                        tmp_list.append("SCP")
1035
1036            if protocol == "TELNET":
1037                if self.telnet_to_target_system():
1038                    tmp_list.append(protocol)
1039
1040            if protocol == "REDFISH":
1041                if self.verify_redfish():
1042                    tmp_list.append(protocol)
1043                    self.logger.info(
1044                        "\n\t[Check] %s Redfish Service.\t\t [OK]"
1045                        % self.hostname
1046                    )
1047                else:
1048                    self.logger.info(
1049                        "\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]"
1050                        % self.hostname
1051                    )
1052
1053            if protocol == "IPMI":
1054                if self.verify_ipmi():
1055                    tmp_list.append(protocol)
1056                    self.logger.info(
1057                        "\n\t[Check] %s IPMI LAN Service.\t\t [OK]"
1058                        % self.hostname
1059                    )
1060                else:
1061                    self.logger.info(
1062                        "\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]"
1063                        % self.hostname
1064                    )
1065
1066        return tmp_list
1067
1068    def load_env(self):
1069        r"""
1070        Load and export environment variables for use in the YAML config.
1071
1072        """
1073        # These are the env vars a user can reference in YAML; they are
1074        # loaded at runtime.
1075        # Example YAML:
1076        # -COMMANDS:
1077        #    - my_command ${hostname}  ${username}   ${password}
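        #
        # The optional env_vars JSON string and econfig YAML file are merged
        # into the same namespace.  The econfig YAML is expected to keep its
        # variables under a top-level 'env_params' key, e.g. (names and
        # values illustrative):
        #
        #   env_params:
        #       my_custom_var: my_value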
1078        os.environ["hostname"] = self.hostname
1079        os.environ["username"] = self.username
1080        os.environ["password"] = self.password
1081        os.environ["port_ssh"] = self.port_ssh
1082        os.environ["port_https"] = self.port_https
1083        os.environ["port_ipmi"] = self.port_ipmi
1084
1085        # Append default Env.
1086        self.env_dict["hostname"] = self.hostname
1087        self.env_dict["username"] = self.username
1088        self.env_dict["password"] = self.password
1089        self.env_dict["port_ssh"] = self.port_ssh
1090        self.env_dict["port_https"] = self.port_https
1091        self.env_dict["port_ipmi"] = self.port_ipmi
1092
1093        try:
1094            tmp_env_dict = {}
1095            if self.env_vars:
1096                tmp_env_dict = json.loads(self.env_vars)
1097                # Export ENV vars default.
1098                for key, value in tmp_env_dict.items():
1099                    os.environ[key] = value
1100                    self.env_dict[key] = str(value)
1101
1102            if self.econfig:
1103                with open(self.econfig, "r") as file:
1104                    try:
1105                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
1106                    except yaml.YAMLError as e:
1107                        self.logger.error(e)
1108                        sys.exit(-1)
1109                # Export ENV vars.
1110                for key, value in tmp_env_dict["env_params"].items():
1111                    os.environ[key] = str(value)
1112                    self.env_dict[key] = str(value)
1113        except json.decoder.JSONDecodeError as e:
1114            self.logger.error("\n\tERROR: %s " % e)
1115            sys.exit(-1)
1116
1117        # This is to mask the password from being displayed on the console.
1118        mask_dict = self.env_dict.copy()
1119        for k, v in mask_dict.items():
1120            if k.lower().find("password") != -1:
1121                hidden_text = []
1122                hidden_text.append(v)
1123                password_regex = (
1124                    "(" + "|".join([re.escape(x) for x in hidden_text]) + ")"
1125                )
1126                mask_dict[k] = re.sub(password_regex, "********", v)
1127
1128        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
1129
1130    def execute_python_eval(self, eval_string):
1131        r"""
1132        Execute qualified python function string using eval.
1133
1134        Description of argument(s):
1135        eval_string        Execute the python object.
1136
1137        Example:
1138                eval(plugin.foo_func.foo_func(10))
1139        """
1140        try:
1141            self.logger.info("\tExecuting plugin func()")
1142            self.logger.debug("\tCall func: %s" % eval_string)
1143            result = eval(eval_string)
1144            self.logger.info("\treturn: %s" % str(result))
1145        except (
1146            ValueError,
1147            SyntaxError,
1148            NameError,
1149            AttributeError,
1150            TypeError,
1151        ) as e:
1152            self.logger.error("\tERROR: execute_python_eval: %s" % e)
1153            # Set the plugin error state.
1154            plugin_error_dict["exit_on_error"] = True
1155            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
1156            return "PLUGIN_EVAL_ERROR"
1157
1158        return result
1159
1160    def execute_plugin_block(self, plugin_cmd_list):
1161        r"""
1162        Pack the plugin command into a qualified python string object.
1163
1164        Description of argument(s):
1165        plugin_cmd_list       Plugin block read from YAML
1166                              [{'plugin_name': 'plugin.foo_func.my_func'},
1167                               {'plugin_args': [10]}]
1168
1169        Example:
1170            - plugin:
1171              - plugin_name: plugin.foo_func.my_func
1172              - plugin_args:
1173                - arg1
1174                - arg2
1175
1176            - plugin:
1177              - plugin_name: result = plugin.foo_func.my_func
1178              - plugin_args:
1179                - arg1
1180                - arg2
1181
1182            - plugin:
1183              - plugin_name: result1,result2 = plugin.foo_func.my_func
1184              - plugin_args:
1185                - arg1
1186                - arg2
1187        """
1188        try:
1189            idx = self.key_index_list_dict("plugin_name", plugin_cmd_list)
1190            plugin_name = plugin_cmd_list[idx]["plugin_name"]
1191            # Equal separator means plugin function returns result.
1192            if " = " in plugin_name:
1193                # Ex. ['result', 'plugin.foo_func.my_func']
1194                plugin_name_args = plugin_name.split(" = ")
1195                # plugin func return data.
1196                for arg in plugin_name_args:
1197                    if arg == plugin_name_args[-1]:
1198                        plugin_name = arg
1199                    else:
1200                        plugin_resp = arg.split(",")
1201                        # ['result1','result2']
1202                        for x in plugin_resp:
1203                            global_plugin_list.append(x)
1204                            global_plugin_dict[x] = ""
1205
1206            # Walk the plugin args ['arg1','arg2'].
1207            # Check if the YAML plugin statement 'plugin_args' is declared.
1208            if any("plugin_args" in d for d in plugin_cmd_list):
1209                idx = self.key_index_list_dict("plugin_args", plugin_cmd_list)
1210                plugin_args = plugin_cmd_list[idx]["plugin_args"]
1211                if plugin_args:
1212                    plugin_args = self.yaml_args_populate(plugin_args)
1213                else:
1214                    plugin_args = []
1215            else:
1216                plugin_args = self.yaml_args_populate([])
1217
1218            # Pack the args arg1, arg2, .... argn into
1219            # "arg1","arg2","argn"  string as params for function.
1220            parm_args_str = self.yaml_args_string(plugin_args)
1221            if parm_args_str:
1222                plugin_func = plugin_name + "(" + parm_args_str + ")"
1223            else:
1224                plugin_func = plugin_name + "()"
1225
1226            # Execute plugin function.
1227            if global_plugin_dict:
1228                resp = self.execute_python_eval(plugin_func)
1229                # Update plugin vars dict if there is any.
1230                if resp != "PLUGIN_EVAL_ERROR":
1231                    self.response_args_data(resp)
1232            else:
1233                resp = self.execute_python_eval(plugin_func)
1234        except Exception as e:
1235            # Set the plugin error state.
1236            plugin_error_dict["exit_on_error"] = True
1237            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
1238            resp = "PLUGIN_EVAL_ERROR"
1239
1240        # There is a real error executing the plugin function.
1241        if resp == "PLUGIN_EVAL_ERROR":
1242            return resp
1243
1244        # Check if plugin_expects_return (int, string, list,dict etc)
1245        if any("plugin_expects_return" in d for d in plugin_cmd_list):
1246            idx = self.key_index_list_dict(
1247                "plugin_expects_return", plugin_cmd_list
1248            )
1249            plugin_expects = plugin_cmd_list[idx]["plugin_expects_return"]
1250            if plugin_expects:
1251                if resp:
1252                    if (
1253                        self.plugin_expect_type(plugin_expects, resp)
1254                        == "INVALID"
1255                    ):
1256                        self.logger.error("\tWARN: Plugin error check skipped")
1257                    elif not self.plugin_expect_type(plugin_expects, resp):
1258                        self.logger.error(
1259                            "\tERROR: Plugin expects return data: %s"
1260                            % plugin_expects
1261                        )
1262                        plugin_error_dict["exit_on_error"] = True
1263                elif not resp:
1264                    self.logger.error(
1265                        "\tERROR: Plugin func failed to return data"
1266                    )
1267                    plugin_error_dict["exit_on_error"] = True
1268
1269        return resp
1270
1271    def response_args_data(self, plugin_resp):
1272        r"""
1273        Parse the plugin function response and update plugin return variable.
1274
1275        plugin_resp       Response data from plugin function.
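
        For example (variable names follow the YAML example in
        execute_plugin_block):
            plugin_name: result1,result2 = plugin.foo_func.my_func
        A tuple return of (val1, val2) updates global_plugin_dict['result1']
        and global_plugin_dict['result2'].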
1276        """
1277        resp_list = []
1278        resp_data = ""
1279
1280        # There is nothing to update the plugin response.
1281        if len(global_plugin_list) == 0 or plugin_resp == "None":
1282            return
1283
1284        if isinstance(plugin_resp, str):
1285            resp_data = plugin_resp.strip("\r\n\t")
1286            resp_list.append(resp_data)
1287        elif isinstance(plugin_resp, bytes):
1288            resp_data = str(plugin_resp, "UTF-8").strip("\r\n\t")
1289            resp_list.append(resp_data)
1290        elif isinstance(plugin_resp, tuple):
1291            if len(global_plugin_list) == 1:
1292                resp_list.append(plugin_resp)
1293            else:
1294                resp_list = list(plugin_resp)
1295                resp_list = [x.strip("\r\n\t") for x in resp_list]
1296        elif isinstance(plugin_resp, list):
1297            if len(global_plugin_list) == 1:
1298                resp_list.append([x.strip("\r\n\t") for x in plugin_resp])
1299            else:
1300                resp_list = [x.strip("\r\n\t") for x in plugin_resp]
1301        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
1302            resp_list.append(plugin_resp)
1303
1304        # Iterate if there is a list of plugin return vars to update.
1305        for idx, item in enumerate(resp_list, start=0):
1306            # Exit the loop once all declared return vars have been updated.
1307            if idx >= len(global_plugin_list):
1308                break
1309            # Find the index of the return func in the list and
1310            # update the global func return dictionary.
1311            try:
1312                dict_idx = global_plugin_list[idx]
1313                global_plugin_dict[dict_idx] = item
1314            except (IndexError, ValueError) as e:
1315                self.logger.warning("\tWARN: response_args_data: %s" % e)
1316                pass
1317
1318        # Done updating the plugin dict irrespective of pass or fail;
1319        # clear all list elements for the next plugin block execution.
1320        global_plugin_list.clear()
1321
1322    def yaml_args_string(self, plugin_args):
1323        r"""
1324        Pack the args into string.
1325
1326        plugin_args            arg list ['arg1','arg2','argn']
1327        """
1328        args_str = ""
1329        for args in plugin_args:
1330            if args:
1331                if isinstance(args, (int, float)):
1332                    args_str += str(args)
1333                elif args in global_plugin_type_list:
1334                    args_str += str(global_plugin_dict[args])
1335                else:
1336                    args_str += '"' + str(args.strip("\r\n\t")) + '"'
1337            # Add a comma separator after all but the last element.
1338            if args != plugin_args[-1]:
1339                args_str += ","
1340        return args_str
1341
1342    def yaml_args_populate(self, yaml_arg_list):
1343        r"""
1344        Decode env and plugin vars and populate.
1345
1346        Description of argument(s):
1347        yaml_arg_list         arg list read from YAML
1348
1349        Example:
1350          - plugin_args:
1351            - arg1
1352            - arg2
1353
1354                  yaml_arg_list:  [arg1, arg2]
1355        """
1356        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1357
1358        if isinstance(yaml_arg_list, list):
1359            tmp_list = []
1360            for arg in yaml_arg_list:
1361                if isinstance(arg, (int, float)):
1362                    tmp_list.append(arg)
1363                    continue
1364                elif isinstance(arg, str):
1365                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1366                    tmp_list.append(arg_str)
1367                else:
1368                    tmp_list.append(arg)
1369
1370            # return populated list.
1371            return tmp_list
1372
1373    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
1374        r"""
1375        Update ${MY_VAR} and plugin vars.
1376
1377        Description of argument(s):
1378        yaml_arg_str         arg string read from YAML.
1379
1380        Example:
1381            - cat ${MY_VAR}
1382            - ls -AX my_plugin_var
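
        For example, assuming load_env() exported ${hostname}, the string
        "ping -c 1 ${hostname}" is returned with ${hostname} replaced by its
        value; "my_plugin_var" is similarly replaced from global_plugin_dict
        if it was set by an earlier plugin block.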
1383        """
1384        # Parse the string for env vars ${env_vars}.
1385        try:
1386            # Example, list of matching
1387            # env vars ['username', 'password', 'hostname']
1388            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works well.
1389            var_name_regex = "\\$\\{([^\\}]+)\\}"
1390            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
1391            for var in env_var_names_list:
1392                env_var = os.environ[var]
1393                env_replace = "${" + var + "}"
1394                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
1395        except Exception as e:
1396            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
1397            pass
1398
1399        # Parse the string for plugin vars.
1400        try:
1401            # Example, list of plugin vars ['my_username', 'my_data']
1402            plugin_var_name_list = global_plugin_dict.keys()
1403            for var in plugin_var_name_list:
1404                # Skip env vars already populated by the code block above.
1405                if var in env_var_names_list:
1406                    continue
1407                # If this plugin var exists but is empty in the dict, don't
1408                # replace it. This is either a YAML plugin statement used
1409                # incorrectly or a user-added var that won't be populated.
1410                if yaml_arg_str in global_plugin_dict:
1411                    if isinstance(global_plugin_dict[var], (list, dict)):
1412                        # List data type or dict can't be replaced, use
1413                        # directly in eval function call.
1414                        global_plugin_type_list.append(var)
1415                    else:
1416                        yaml_arg_str = yaml_arg_str.replace(
1417                            str(var), str(global_plugin_dict[var])
1418                        )
1419                # Just a string like filename or command.
1420                else:
1421                    yaml_arg_str = yaml_arg_str.replace(
1422                        str(var), str(global_plugin_dict[var])
1423                    )
1424        except (IndexError, ValueError) as e:
1425            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
1426            pass
1427
1428        return yaml_arg_str
1429
1430    def plugin_error_check(self, plugin_dict):
1431        r"""
1432        Plugin error dict processing.
1433
1434        Description of argument(s):
1435        plugin_dict        Dictionary of plugin error.
1436        """
1437        if any("plugin_error" in d for d in plugin_dict):
1438            for d in plugin_dict:
1439                if "plugin_error" in d:
1440                    value = d["plugin_error"]
1441                    # Return whether the referenced error state is set.
1442                    return plugin_error_dict[value]
1443
1444    def key_index_list_dict(self, key, list_dict):
1445        r"""
1446        Iterate the list of dicts and return the index where the key is found.
1447
1448        Description of argument(s):
1449        key           Valid Key in a dict.
1450        list_dict     list of dictionary.
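
        Example (list taken from the execute_plugin_block docstring):
            key_index_list_dict("plugin_args",
                                [{"plugin_name": "plugin.foo_func.my_func"},
                                 {"plugin_args": [10]}])   # returns 1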
1451        """
1452        for i, d in enumerate(list_dict):
1453            if key in d.keys():
1454                return i
1455
1456    def plugin_expect_type(self, type, data):
1457        r"""
1458        Plugin expect directive type check.
1459        """
1460        if type == "int":
1461            return isinstance(data, int)
1462        elif type == "float":
1463            return isinstance(data, float)
1464        elif type == "str":
1465            return isinstance(data, str)
1466        elif type == "list":
1467            return isinstance(data, list)
1468        elif type == "dict":
1469            return isinstance(data, dict)
1470        elif type == "tuple":
1471            return isinstance(data, tuple)
1472        else:
1473            self.logger.info("\tInvalid data type requested: %s" % type)
1474            return "INVALID"
1475