xref: /openbmc/openbmc-test-automation/ffdc/ffdc_collector.py (revision e54be14763346d82a359e67dae7237973bd1acfa)
1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7import json
8import logging
9import os
10import platform
11import re
12import subprocess
13import sys
14import time
15from errno import EACCES, EPERM
16
17import yaml
18
19script_dir = os.path.dirname(os.path.abspath(__file__))
20sys.path.append(script_dir)
21# Walk path and append to sys.path
22for root, dirs, files in os.walk(script_dir):
23    for dir in dirs:
24        sys.path.append(os.path.join(root, dir))
25
26from ssh_utility import SSHRemoteclient  # NOQA
27from telnet_utility import TelnetRemoteclient  # NOQA
28
29r"""
30User define plugins python functions.
31
32It will imports files from directory plugins
33
34plugins
35├── file1.py
36└── file2.py
37
38Example how to define in YAML:
39 - plugin:
40   - plugin_name: plugin.foo_func.foo_func_yaml
41     - plugin_args:
42       - arg1
43       - arg2
44"""
45plugin_dir = os.path.join(script_dir, "plugins")
46sys.path.append(plugin_dir)
47try:
48    for module in os.listdir(plugin_dir):
49        if module == "__init__.py" or module[-3:] != ".py":
50            continue
51        plugin_module = "plugins." + module[:-3]
52        # To access the module plugin.<module name>.<function>
53        # Example: plugin.foo_func.foo_func_yaml()
54        try:
55            plugin = __import__(plugin_module, globals(), locals(), [], 0)
56        except Exception as e:
57            print("PLUGIN: Exception: %s" % e)
58            print("PLUGIN: Module import failed: %s" % module)
59            pass
60except FileNotFoundError as e:
61    print("PLUGIN: %s" % e)
62    pass
63
64r"""
65This is for plugin functions returning data or responses to the caller
66in YAML plugin setup.
67
68Example:
69
70    - plugin:
71      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
72      - plugin_args:
73        - ${hostname}
74        - ${username}
75        - ${password}
76        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
77     - plugin:
78        - plugin_name: plugin.print_vars.print_vars
79        - plugin_args:
80          - version
81
82where first plugin "version" var is used by another plugin in the YAML
83block or plugin
84
85"""
86global global_log_store_path
87global global_plugin_dict
88global global_plugin_list
89
90# Hold plugin return values in a dict and plugin return var names in a list.
91# The dict is used to reference and update vars during parser processing,
92# whereas the list holds the plugin-block vars that still need processing.
93global_plugin_dict = {}
94global_plugin_list = []
95
96# Hold the declared plugin return names whose returned values are a
97# list or dict.
98# Refer to this name list to look up the plugin dict when building eval()
99# args for a function call.  Example: ['version']
100global_plugin_type_list = []
101
102# Path where logs are to be stored or written.
103global_log_store_path = ""
104
105# Plugin error state defaults.
106plugin_error_dict = {
107    "exit_on_error": False,
108    "continue_on_error": False,
109}
110
111
112class ffdc_collector:
113    r"""
114    Execute commands from configuration file to collect log files.
115    Fetch and store generated files at the specified location.
116
117    """
118
119    def __init__(
120        self,
121        hostname,
122        username,
123        password,
124        port_ssh,
125        port_https,
126        port_ipmi,
127        ffdc_config,
128        location,
129        remote_type,
130        remote_protocol,
131        env_vars,
132        econfig,
133        log_level,
134    ):
135        r"""
136        Description of argument(s):
137
138        hostname            Name/ip of the targeted (remote) system
139        username            User on the targeted system with access to
140                            FFDC files
141        password            Password for user on targeted system
142        port_ssh            SSH port value. By default 22
143        port_https          HTTPS port value. By default 443
144        port_ipmi           IPMI port value. By default 623
145        ffdc_config         Configuration file listing commands and files
146                            for FFDC
147        location            Where to store collected FFDC
148        remote_type         OS type of the remote host
149        remote_protocol     Protocol to use to collect data
150        env_vars            User-defined CLI env vars '{"key" : "value"}'
151        econfig             User-defined env vars YAML file
152        log_level           Log level for this script's logging (e.g. INFO)
153        """
154
155        self.hostname = hostname
156        self.username = username
157        self.password = password
158        self.port_ssh = str(port_ssh)
159        self.port_https = str(port_https)
160        self.port_ipmi = str(port_ipmi)
161        self.ffdc_config = ffdc_config
162        self.location = location + "/" + remote_type.upper()
163        self.ssh_remoteclient = None
164        self.telnet_remoteclient = None
165        self.ffdc_dir_path = ""
166        self.ffdc_prefix = ""
167        self.target_type = remote_type.upper()
168        self.remote_protocol = remote_protocol.upper()
169        self.env_vars = env_vars
170        self.econfig = econfig
171        self.start_time = 0
172        self.elapsed_time = ""
173        self.logger = None
174
175        # Set prefix values for scp files and directory.
176        # Since the time stamp is at second granularity, these values are set
177        # here to be sure that all files for this run will have the same timestamp
178        # and they will be saved in the same directory.
179        # self.location == local system for now
180        self.set_ffdc_default_store_path()
181
182        # Logger for this run.  Must be set up after set_ffdc_default_store_path()
183        self.script_logging(getattr(logging, log_level.upper()))
184
185        # Verify top level directory exists for storage
186        self.validate_local_store(self.location)
187
188        if self.verify_script_env():
189            # Load the default or user-defined YAML configuration file.
190            with open(self.ffdc_config, "r") as file:
191                try:
192                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
193                except yaml.YAMLError as e:
194                    self.logger.error(e)
195                    sys.exit(-1)
196
197            if self.target_type not in self.ffdc_actions.keys():
198                self.logger.error(
199                    "\n\tERROR: %s is not listed in %s.\n\n"
200                    % (self.target_type, self.ffdc_config)
201                )
202                sys.exit(-1)
203        else:
204            sys.exit(-1)
205
206        # Load ENV vars from user.
207        self.logger.info("\n\tENV: User-defined input YAML variables")
208        self.env_dict = {}
209        self.load_env()
210
211    def verify_script_env(self):
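        r"""
        Log the script host environment (tool and package versions) and
        return False if the minimum version requirements are not met.
        """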
212        # Import to log version
213        import click
214        import paramiko
215
216        run_env_ok = True
217
218        try:
219            redfishtool_version = (
220                self.run_tool_cmd("redfishtool -V").split(" ")[2].strip("\n")
221            )
222        except Exception as e:
223            self.logger.error("\tEXCEPTION redfishtool: %s", e)
224            redfishtool_version = "Not Installed (optional)"
225
226        try:
227            ipmitool_version = self.run_tool_cmd("ipmitool -V").split(" ")[2]
228        except Exception as e:
229            self.logger.error("\tEXCEPTION ipmitool: %s", e)
230            ipmitool_version = "Not Installed (optional)"
231
232        self.logger.info("\n\t---- Script host environment ----")
233        self.logger.info(
234            "\t{:<10}  {:<10}".format("Script hostname", os.uname()[1])
235        )
236        self.logger.info(
237            "\t{:<10}  {:<10}".format("Script host os", platform.platform())
238        )
239        self.logger.info(
240            "\t{:<10}  {:>10}".format("Python", platform.python_version())
241        )
242        self.logger.info("\t{:<10}  {:>10}".format("PyYAML", yaml.__version__))
243        self.logger.info("\t{:<10}  {:>10}".format("click", click.__version__))
244        self.logger.info(
245            "\t{:<10}  {:>10}".format("paramiko", paramiko.__version__)
246        )
247        self.logger.info(
248            "\t{:<10}  {:>9}".format("redfishtool", redfishtool_version)
249        )
250        self.logger.info(
251            "\t{:<10}  {:>12}".format("ipmitool", ipmitool_version)
252        )
253
254        if tuple(map(int, yaml.__version__.split(".")[:3])) < (5, 3, 0):
255            self.logger.error(
256                "\n\tERROR: Python or python packages do not meet minimum"
257                " version requirement."
258            )
259            self.logger.error(
260                "\tERROR: PyYAML version 5.3.0 or higher is needed.\n"
261            )
262            run_env_ok = False
263
264        self.logger.info("\t---- End script host environment ----")
265        return run_env_ok
266
267    def script_logging(self, log_level_attr):
268        r"""
269        Create logger
270
271        """
272        self.logger = logging.getLogger()
273        self.logger.setLevel(log_level_attr)
274        log_file_handler = logging.FileHandler(
275            self.ffdc_dir_path + "collector.log"
276        )
277
278        stdout_handler = logging.StreamHandler(sys.stdout)
279        self.logger.addHandler(log_file_handler)
280        self.logger.addHandler(stdout_handler)
281
282        # Turn off paramiko INFO logging
283        logging.getLogger("paramiko").setLevel(logging.WARNING)
284
285    def target_is_pingable(self):
286        r"""
287        Check if target system is ping-able.
288
289        """
290        response = os.system("ping -c 1 %s >/dev/null 2>&1" % self.hostname)
291        if response == 0:
292            self.logger.info(
293                "\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname
294            )
295            return True
296        else:
297            self.logger.error(
298                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n"
299                % self.hostname
300            )
301            sys.exit(-1)
302
303    def collect_ffdc(self):
304        r"""
305        Initiate FFDC Collection depending on requested protocol.
306
307        """
308
309        self.logger.info(
310            "\n\t---- Start communicating with %s ----" % self.hostname
311        )
312        self.start_time = time.time()
313
314        # Find the list of protocols supported for the target type.
315        check_protocol_list = []
316        config_dict = self.ffdc_actions
317
318        for target_type in config_dict.keys():
319            if self.target_type != target_type:
320                continue
321
322            for k, v in config_dict[target_type].items():
323                if (
324                    config_dict[target_type][k]["PROTOCOL"][0]
325                    not in check_protocol_list
326                ):
327                    check_protocol_list.append(
328                        config_dict[target_type][k]["PROTOCOL"][0]
329                    )
330
331        self.logger.info(
332            "\n\t %s protocol type: %s"
333            % (self.target_type, check_protocol_list)
334        )
335
336        verified_working_protocol = self.verify_protocol(check_protocol_list)
337
338        if verified_working_protocol:
339            self.logger.info(
340                "\n\t---- Completed protocol pre-requisite check ----\n"
341            )
342
343        # Verify top level directory exists for storage
344        self.validate_local_store(self.location)
345
346        if (self.remote_protocol not in verified_working_protocol) and (
347            self.remote_protocol != "ALL"
348        ):
349            self.logger.info(
350                "\n\tWorking protocol list: %s" % verified_working_protocol
351            )
352            self.logger.error(
353                "\tERROR: Requested protocol %s is not in working protocol"
354                " list.\n" % self.remote_protocol
355            )
356            sys.exit(-1)
357        else:
358            self.generate_ffdc(verified_working_protocol)
359
360    def ssh_to_target_system(self):
361        r"""
362        Open a ssh connection to targeted system.
363
364        """
365
366        self.ssh_remoteclient = SSHRemoteclient(
367            self.hostname, self.username, self.password, self.port_ssh
368        )
369
370        if self.ssh_remoteclient.ssh_remoteclient_login():
371            self.logger.info(
372                "\n\t[Check] %s SSH connection established.\t [OK]"
373                % self.hostname
374            )
375
376            # Check scp connection.
377            # If scp connection fails,
378            # continue with FFDC generation but skip scp files to local host.
379            self.ssh_remoteclient.scp_connection()
380            return True
381        else:
382            self.logger.info(
383                "\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]"
384                % self.hostname
385            )
386            return False
387
388    def telnet_to_target_system(self):
389        r"""
390        Open a telnet connection to targeted system.
391        """
392        self.telnet_remoteclient = TelnetRemoteclient(
393            self.hostname, self.username, self.password
394        )
395        if self.telnet_remoteclient.tn_remoteclient_login():
396            self.logger.info(
397                "\n\t[Check] %s Telnet connection established.\t [OK]"
398                % self.hostname
399            )
400            return True
401        else:
402            self.logger.info(
403                "\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]"
404                % self.hostname
405            )
406            return False
407
408    def generate_ffdc(self, working_protocol_list):
409        r"""
410        Determine actions based on remote host type
411
412        Description of argument(s):
413        working_protocol_list    List of confirmed working protocols to
414                                 connect to remote host.
415        """
416
417        self.logger.info(
418            "\n\t---- Executing commands on " + self.hostname + " ----"
419        )
420        self.logger.info(
421            "\n\tWorking protocol list: %s" % working_protocol_list
422        )
423
424        config_dict = self.ffdc_actions
425        for target_type in config_dict.keys():
426            if self.target_type != target_type:
427                continue
428
429            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
430            global_plugin_dict["global_log_store_path"] = self.ffdc_dir_path
431            self.logger.info("\tSystem Type: %s" % target_type)
432            for k, v in config_dict[target_type].items():
433                if (
434                    self.remote_protocol not in working_protocol_list
435                    and self.remote_protocol != "ALL"
436                ):
437                    continue
438
439                protocol = config_dict[target_type][k]["PROTOCOL"][0]
440
441                if protocol not in working_protocol_list:
442                    continue
443
444                if protocol in working_protocol_list:
445                    if protocol == "SSH" or protocol == "SCP":
446                        self.protocol_ssh(protocol, target_type, k)
447                    elif protocol == "TELNET":
448                        self.protocol_telnet(target_type, k)
449                    elif (
450                        protocol == "REDFISH"
451                        or protocol == "IPMI"
452                        or protocol == "SHELL"
453                    ):
454                        self.protocol_execute(protocol, target_type, k)
455                else:
456                    self.logger.error(
457                        "\n\tERROR: %s is not available for %s."
458                        % (protocol, self.hostname)
459                    )
460
461        # Close network connection after collecting all files
462        self.elapsed_time = time.strftime(
463            "%H:%M:%S", time.gmtime(time.time() - self.start_time)
464        )
465        self.logger.info("\n\tTotal time taken: %s" % self.elapsed_time)
466        if self.ssh_remoteclient:
467            self.ssh_remoteclient.ssh_remoteclient_disconnect()
468        if self.telnet_remoteclient:
469            self.telnet_remoteclient.tn_remoteclient_disconnect()
470
471    def protocol_ssh(self, protocol, target_type, sub_type):
472        r"""
473        Perform actions using SSH and SCP protocols.
474
475        Description of argument(s):
476        protocol            Protocol to execute.
477        target_type         OS Type of remote host.
478        sub_type            Group type of commands.
479        """
480
481        if protocol == "SCP":
482            self.group_copy(self.ffdc_actions[target_type][sub_type])
483        else:
484            self.collect_and_copy_ffdc(
485                self.ffdc_actions[target_type][sub_type]
486            )
487
488    def protocol_telnet(self, target_type, sub_type):
489        r"""
490        Perform actions using telnet protocol.
491        Description of argument(s):
492        target_type          OS Type of remote host.
493        """
494        self.logger.info(
495            "\n\t[Run] Executing commands on %s using %s"
496            % (self.hostname, "TELNET")
497        )
498        telnet_files_saved = []
499        progress_counter = 0
500        list_of_commands = self.ffdc_actions[target_type][sub_type]["COMMANDS"]
501        for index, each_cmd in enumerate(list_of_commands, start=0):
502            command_txt, command_timeout = self.unpack_command(each_cmd)
503            result = self.telnet_remoteclient.execute_command(
504                command_txt, command_timeout
505            )
506            if result:
507                try:
508                    targ_file = self.ffdc_actions[target_type][sub_type][
509                        "FILES"
510                    ][index]
511                except IndexError:
512                    targ_file = command_txt
513                    self.logger.warning(
514                        "\n\t[WARN] Missing filename to store data from"
515                        " telnet %s." % each_cmd
516                    )
517                    self.logger.warning(
518                        "\t[WARN] Data will be stored in %s." % targ_file
519                    )
520                targ_file_with_path = (
521                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
522                )
523                # Creates a new file
524                with open(targ_file_with_path, "w") as fp:
525                    fp.write(result)
526                    fp.close()
527                    telnet_files_saved.append(targ_file)
528            progress_counter += 1
529            self.print_progress(progress_counter)
530        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
531        for file in telnet_files_saved:
532            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
533
534    def protocol_execute(self, protocol, target_type, sub_type):
535        r"""
536        Perform actions for a given protocol.
537
538        Description of argument(s):
539        protocol            Protocol to execute.
540        target_type         OS Type of remote host.
541        sub_type            Group type of commands.
542        """
543
544        self.logger.info(
545            "\n\t[Run] Executing commands to %s using %s"
546            % (self.hostname, protocol)
547        )
548        executed_files_saved = []
549        progress_counter = 0
550        list_of_cmd = self.get_command_list(
551            self.ffdc_actions[target_type][sub_type]
552        )
553        for index, each_cmd in enumerate(list_of_cmd, start=0):
554            plugin_call = False
555            if isinstance(each_cmd, dict):
556                if "plugin" in each_cmd:
557                    # If the error is set and plugin explicitly
558                    # requested to skip execution on error..
559                    if plugin_error_dict[
560                        "exit_on_error"
561                    ] and self.plugin_error_check(each_cmd["plugin"]):
562                        self.logger.info(
563                            "\n\t[PLUGIN-ERROR] exit_on_error: %s"
564                            % plugin_error_dict["exit_on_error"]
565                        )
566                        self.logger.info(
567                            "\t[PLUGIN-SKIP] %s" % each_cmd["plugin"][0]
568                        )
569                        continue
570                    plugin_call = True
571                    # call the plugin
572                    self.logger.info("\n\t[PLUGIN-START]")
573                    result = self.execute_plugin_block(each_cmd["plugin"])
574                    self.logger.info("\t[PLUGIN-END]\n")
575            else:
576                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
577
578            if not plugin_call:
579                result = self.run_tool_cmd(each_cmd)
580            if result:
581                try:
582                    file_name = self.get_file_list(
583                        self.ffdc_actions[target_type][sub_type]
584                    )[index]
585                    # If file is specified as None.
586                    if file_name == "None":
587                        continue
588                    targ_file = self.yaml_env_and_plugin_vars_populate(
589                        file_name
590                    )
591                except IndexError:
592                    targ_file = each_cmd.split("/")[-1]
593                    self.logger.warning(
594                        "\n\t[WARN] Missing filename to store data from %s."
595                        % each_cmd
596                    )
597                    self.logger.warning(
598                        "\t[WARN] Data will be stored in %s." % targ_file
599                    )
600
601                targ_file_with_path = (
602                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
603                )
604
605                # Creates a new file
606                with open(targ_file_with_path, "w") as fp:
607                    if isinstance(result, dict):
608                        fp.write(json.dumps(result))
609                    else:
610                        fp.write(result)
611                    fp.close()
612                    executed_files_saved.append(targ_file)
613
614            progress_counter += 1
615            self.print_progress(progress_counter)
616
617        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
618
619        for file in executed_files_saved:
620            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
621
622    def collect_and_copy_ffdc(
623        self, ffdc_actions_for_target_type, form_filename=False
624    ):
625        r"""
626        Send commands in ffdc_config file to targeted system.
627
628        Description of argument(s):
629        ffdc_actions_for_target_type     Commands and files for the selected
630                                         remote host type.
631        form_filename                    If true, pre-pend self.target_type to
632                                         filename
633        """
634
635        # Executing commands, if any
636        self.ssh_execute_ffdc_commands(
637            ffdc_actions_for_target_type, form_filename
638        )
639
640        # Copying files
641        if self.ssh_remoteclient.scpclient:
642            self.logger.info(
643                "\n\n\tCopying FFDC files from remote system %s.\n"
644                % self.hostname
645            )
646
647            # Retrieving files from target system
648            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
649            self.scp_ffdc(
650                self.ffdc_dir_path,
651                self.ffdc_prefix,
652                form_filename,
653                list_of_files,
654            )
655        else:
656            self.logger.info(
657                "\n\n\tSkip copying FFDC files from remote system %s.\n"
658                % self.hostname
659            )
660
661    def get_command_list(self, ffdc_actions_for_target_type):
662        r"""
663        Fetch list of commands from configuration file
664
665        Description of argument(s):
666        ffdc_actions_for_target_type    Commands and files for the selected
667                                        remote host type.
668        """
669        try:
670            list_of_commands = ffdc_actions_for_target_type["COMMANDS"]
671        except KeyError:
672            list_of_commands = []
673        return list_of_commands
674
675    def get_file_list(self, ffdc_actions_for_target_type):
676        r"""
677        Fetch list of files from configuration file
678
679        Description of argument(s):
680        ffdc_actions_for_target_type    Commands and files for the selected
681                                        remote host type.
682        """
683        try:
684            list_of_files = ffdc_actions_for_target_type["FILES"]
685        except KeyError:
686            list_of_files = []
687        return list_of_files
688
689    def unpack_command(self, command):
690        r"""
691        Unpack command from config file
692
693        Description of argument(s):
694        command    Command from config file.
695        """
696        if isinstance(command, dict):
697            command_txt = next(iter(command))
698            command_timeout = next(iter(command.values()))
699        elif isinstance(command, str):
700            command_txt = command
701            # Default command timeout 60 seconds
702            command_timeout = 60
703
704        return command_txt, command_timeout
705
706    def ssh_execute_ffdc_commands(
707        self, ffdc_actions_for_target_type, form_filename=False
708    ):
709        r"""
710        Send commands in ffdc_config file to targeted system.
711
712        Description of argument(s):
713        ffdc_actions_for_target_type    Commands and files for the selected
714                                        remote host type.
715        form_filename                   If true, pre-pend self.target_type to
716                                        filename
717        """
718        self.logger.info(
719            "\n\t[Run] Executing commands on %s using %s"
720            % (self.hostname, ffdc_actions_for_target_type["PROTOCOL"][0])
721        )
722
723        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
724        # If the command list is empty, return.
725        if not list_of_commands:
726            return
727
728        progress_counter = 0
729        for command in list_of_commands:
730            command_txt, command_timeout = self.unpack_command(command)
731
732            if form_filename:
733                command_txt = str(command_txt % self.target_type)
734
735            (
736                cmd_exit_code,
737                err,
738                response,
739            ) = self.ssh_remoteclient.execute_command(
740                command_txt, command_timeout
741            )
742
743            if cmd_exit_code:
744                self.logger.warning(
745                    "\n\t\t[WARN] %s exits with code %s."
746                    % (command_txt, str(cmd_exit_code))
747                )
748                self.logger.warning("\t\t[WARN] %s " % err)
749
750            progress_counter += 1
751            self.print_progress(progress_counter)
752
753        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
754
755    def group_copy(self, ffdc_actions_for_target_type):
756        r"""
757        SCP a group of files (wildcard) from the remote host.
758
759        Description of argument(s):
760        ffdc_actions_for_target_type    Commands and files for the selected
761                                        remote host type.
762        """
763
764        if self.ssh_remoteclient.scpclient:
765            self.logger.info(
766                "\n\tCopying files from remote system %s via SCP.\n"
767                % self.hostname
768            )
769
770            list_of_commands = self.get_command_list(
771                ffdc_actions_for_target_type
772            )
773            # If the command list is empty, return.
774            if not list_of_commands:
775                return
776
777            for command in list_of_commands:
778                try:
779                    command = self.yaml_env_and_plugin_vars_populate(command)
780                except IndexError:
781                    self.logger.error("\t\tInvalid command %s" % command)
782                    continue
783
784                (
785                    cmd_exit_code,
786                    err,
787                    response,
788                ) = self.ssh_remoteclient.execute_command(command)
789
790                # If the file does not exist, the code takes no action.
791                # cmd_exit_code is ignored for this scenario.
792                if response:
793                    scp_result = self.ssh_remoteclient.scp_file_from_remote(
794                        response.split("\n"), self.ffdc_dir_path
795                    )
796                    if scp_result:
797                        self.logger.info(
798                            "\t\tSuccessfully copied from "
799                            + self.hostname
800                            + ":"
801                            + command
802                        )
803                else:
804                    self.logger.info("\t\t%s has no result" % command)
805
806        else:
807            self.logger.info(
808                "\n\n\tSkip copying files from remote system %s.\n"
809                % self.hostname
810            )
811
812    def scp_ffdc(
813        self,
814        targ_dir_path,
815        targ_file_prefix,
816        form_filename,
817        file_list=None,
818        quiet=None,
819    ):
820        r"""
821        SCP all files in file_dict to the indicated directory on the local
822        system.
823
824        Description of argument(s):
825        targ_dir_path                   The path of the directory to receive
826                                        the files.
827        targ_file_prefix                Prefix which will be prepended to each
828                                        target file's name.
829        file_list                       A list of files to scp from the
830                                        targeted system to this system.
831
832        """
833
834        progress_counter = 0
835        for filename in file_list:
836            if form_filename:
837                filename = str(filename % self.target_type)
838            source_file_path = filename
839            targ_file_path = (
840                targ_dir_path + targ_file_prefix + filename.split("/")[-1]
841            )
842
843            # If source file name contains wild card, copy filename as is.
844            if "*" in source_file_path:
845                scp_result = self.ssh_remoteclient.scp_file_from_remote(
846                    source_file_path, self.ffdc_dir_path
847                )
848            else:
849                scp_result = self.ssh_remoteclient.scp_file_from_remote(
850                    source_file_path, targ_file_path
851                )
852
853            if not quiet:
854                if scp_result:
855                    self.logger.info(
856                        "\t\tSuccessfully copied from "
857                        + self.hostname
858                        + ":"
859                        + source_file_path
860                        + ".\n"
861                    )
862                else:
863                    self.logger.info(
864                        "\t\tFailed to copy from "
865                        + self.hostname
866                        + ":"
867                        + source_file_path
868                        + ".\n"
869                    )
870            else:
871                progress_counter += 1
872                self.print_progress(progress_counter)
873
874    def set_ffdc_default_store_path(self):
875        r"""
876        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
877        Collected ffdc files will be stored in the directory
878        self.location/hostname_timestr/.
879        Individual ffdc files will be named timestr_filename.
880
881        Description of class variables:
882        self.ffdc_dir_path  The dir path where collected ffdc data files
883                            should be put.
884
885        self.ffdc_prefix    The prefix to be given to each ffdc file name.
886
887        """
888
889        timestr = time.strftime("%Y%m%d-%H%M%S")
890        self.ffdc_dir_path = (
891            self.location + "/" + self.hostname + "_" + timestr + "/"
892        )
893        self.ffdc_prefix = timestr + "_"
894        self.validate_local_store(self.ffdc_dir_path)
895
896# Need to verify the local store path exists prior to instantiating this class.
897# This class method is used to share the same code between the CLI input parm
898# and Robot Framework "${EXECDIR}/logs" before referencing this class.
899    @classmethod
900    def validate_local_store(cls, dir_path):
901        r"""
902        Ensure path exists to store FFDC files locally.
903
904        Description of variable:
905        dir_path  The dir path where collected ffdc data files will be stored.
906
907        """
908
909        if not os.path.exists(dir_path):
910            try:
911                os.makedirs(dir_path, 0o755)
912            except (IOError, OSError) as e:
913                # PermissionError
914                if e.errno == EPERM or e.errno == EACCES:
915                    print(
916                        "\tERROR: os.makedirs %s failed with"
917                        " PermissionError.\n" % dir_path
918                    )
919                else:
920                    print(
921                        "\tERROR: os.makedirs %s failed with %s.\n"
922                        % (dir_path, e.strerror)
923                    )
924                sys.exit(-1)
925
926    def print_progress(self, progress):
927        r"""
928        Print activity progress as a growing row of '+' characters.
929
930        Description of variable:
931        progress  Progress counter.
932
933        """
934
935        sys.stdout.write("\r\t" + "+" * progress)
936        sys.stdout.flush()
937        time.sleep(0.1)
938
939    def verify_redfish(self):
940        r"""
941        Verify remote host has redfish service active
942
943        """
944        redfish_parm = (
945            "redfishtool -r "
946            + self.hostname
947            + ":"
948            + self.port_https
949            + " -S Always raw GET /redfish/v1/"
950        )
951        return self.run_tool_cmd(redfish_parm, True)
952
953    def verify_ipmi(self):
954        r"""
955        Verify remote host has IPMI LAN service active
956
957        """
958        if self.target_type == "OPENBMC":
959            ipmi_parm = (
960                "ipmitool -I lanplus -C 17  -U "
961                + self.username
962                + " -P "
963                + self.password
964                + " -H "
965                + self.hostname
966                + " -p "
967                + str(self.port_ipmi)
968                + " power status"
969            )
970        else:
971            ipmi_parm = (
972                "ipmitool -I lanplus  -P "
973                + self.password
974                + " -H "
975                + self.hostname
976                + " -p "
977                + str(self.port_ipmi)
978                + " power status"
979            )
980
981        return self.run_tool_cmd(ipmi_parm, True)
982
983    def run_tool_cmd(self, parms_string, quiet=False):
984        r"""
985        Run CLI standard tool or scripts.
986
987        Description of variable:
988        parms_string         Tool command line (with options) to run.
989        quiet                Do not log the tool's stderr output if True.
990        """
991
992        result = subprocess.run(
993            [parms_string],
994            stdout=subprocess.PIPE,
995            stderr=subprocess.PIPE,
996            shell=True,
997            universal_newlines=True,
998        )
999
1000        if result.stderr and not quiet:
1001            self.logger.error("\n\t\tERROR with %s " % parms_string)
1002            self.logger.error("\t\t" + result.stderr)
1003
1004        return result.stdout
1005
1006    def verify_protocol(self, protocol_list):
1007        r"""
1008        Perform protocol working check.
1009
1010        Description of argument(s):
1011        protocol_list        List of protocols to check.
1012        """
1013
1014        tmp_list = []
1015        if self.target_is_pingable():
1016            tmp_list.append("SHELL")
1017
1018        for protocol in protocol_list:
1019            if self.remote_protocol != "ALL":
1020                if self.remote_protocol != protocol:
1021                    continue
1022
1023            # Only check SSH/SCP once for both protocols
1024            if (
1025                protocol == "SSH" or protocol == "SCP"
1026            ) and protocol not in tmp_list:
1029                if self.ssh_to_target_system():
1030                    # Add only what user asked.
1031                    if self.remote_protocol != "ALL":
1032                        tmp_list.append(self.remote_protocol)
1033                    else:
1034                        tmp_list.append("SSH")
1035                        tmp_list.append("SCP")
1036
1037            if protocol == "TELNET":
1038                if self.telnet_to_target_system():
1039                    tmp_list.append(protocol)
1040
1041            if protocol == "REDFISH":
1042                if self.verify_redfish():
1043                    tmp_list.append(protocol)
1044                    self.logger.info(
1045                        "\n\t[Check] %s Redfish Service.\t\t [OK]"
1046                        % self.hostname
1047                    )
1048                else:
1049                    self.logger.info(
1050                        "\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]"
1051                        % self.hostname
1052                    )
1053
1054            if protocol == "IPMI":
1055                if self.verify_ipmi():
1056                    tmp_list.append(protocol)
1057                    self.logger.info(
1058                        "\n\t[Check] %s IPMI LAN Service.\t\t [OK]"
1059                        % self.hostname
1060                    )
1061                else:
1062                    self.logger.info(
1063                        "\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]"
1064                        % self.hostname
1065                    )
1066
1067        return tmp_list
1068
1069    def load_env(self):
1070        r"""
1071        Load user-defined environment variables for use in YAML commands.
1072
1073        """
1074        # This is for the env vars a user can use in YAML to load
1075        # it at runtime.
1076        # Example YAML:
1077        # -COMMANDS:
1078        #    - my_command ${hostname}  ${username}   ${password}
1079        os.environ["hostname"] = self.hostname
1080        os.environ["username"] = self.username
1081        os.environ["password"] = self.password
1082        os.environ["port_ssh"] = self.port_ssh
1083        os.environ["port_https"] = self.port_https
1084        os.environ["port_ipmi"] = self.port_ipmi
1085
1086        # Append default Env.
1087        self.env_dict["hostname"] = self.hostname
1088        self.env_dict["username"] = self.username
1089        self.env_dict["password"] = self.password
1090        self.env_dict["port_ssh"] = self.port_ssh
1091        self.env_dict["port_https"] = self.port_https
1092        self.env_dict["port_ipmi"] = self.port_ipmi
1093
1094        try:
1095            tmp_env_dict = {}
1096            if self.env_vars:
1097                tmp_env_dict = json.loads(self.env_vars)
1098                # Export ENV vars default.
1099                for key, value in tmp_env_dict.items():
1100                    os.environ[key] = value
1101                    self.env_dict[key] = str(value)
1102
1103            if self.econfig:
1104                with open(self.econfig, "r") as file:
1105                    try:
1106                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
1107                    except yaml.YAMLError as e:
1108                        self.logger.error(e)
1109                        sys.exit(-1)
1110                # Export ENV vars.
1111                for key, value in tmp_env_dict["env_params"].items():
1112                    os.environ[key] = str(value)
1113                    self.env_dict[key] = str(value)
1114        except json.decoder.JSONDecodeError as e:
1115            self.logger.error("\n\tERROR: %s " % e)
1116            sys.exit(-1)
1117
1118        # This is to mask the password from being displayed on the console.
1119        mask_dict = self.env_dict.copy()
1120        for k, v in mask_dict.items():
1121            if k.lower().find("password") != -1:
1122                hidden_text = []
1123                hidden_text.append(v)
1124                password_regex = (
1125                    "(" + "|".join([re.escape(x) for x in hidden_text]) + ")"
1126                )
1127                mask_dict[k] = re.sub(password_regex, "********", v)
1128
1129        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
1130
1131    def execute_python_eval(self, eval_string):
1132        r"""
1133        Execute qualified python function string using eval.
1134
1135        Description of argument(s):
1136        eval_string        Execute the python object.
1137
1138        Example:
1139                eval(plugin.foo_func.foo_func(10))
1140        """
1141        try:
1142            self.logger.info("\tExecuting plugin func()")
1143            self.logger.debug("\tCall func: %s" % eval_string)
1144            result = eval(eval_string)
1145            self.logger.info("\treturn: %s" % str(result))
1146        except (
1147            ValueError,
1148            SyntaxError,
1149            NameError,
1150            AttributeError,
1151            TypeError,
1152        ) as e:
1153            self.logger.error("\tERROR: execute_python_eval: %s" % e)
1154            # Set the plugin error state.
1155            plugin_error_dict["exit_on_error"] = True
1156            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
1157            return "PLUGIN_EVAL_ERROR"
1158
1159        return result
1160
1161    def execute_plugin_block(self, plugin_cmd_list):
1162        r"""
1163        Pack the plugin command into a qualified python string object.
1164
1165        Description of argument(s):
1166        plugin_cmd_list       Plugin block read from YAML
1167                              [{'plugin_name': 'plugin.foo_func.my_func'},
1168                               {'plugin_args': [10]}]
1169
1170        Example:
1171            - plugin:
1172              - plugin_name: plugin.foo_func.my_func
1173              - plugin_args:
1174                - arg1
1175                - arg2
1176
1177            - plugin:
1178              - plugin_name: result = plugin.foo_func.my_func
1179              - plugin_args:
1180                - arg1
1181                - arg2
1182
1183            - plugin:
1184              - plugin_name: result1,result2 = plugin.foo_func.my_func
1185              - plugin_args:
1186                - arg1
1187                - arg2
1188        """
1189        try:
1190            idx = self.key_index_list_dict("plugin_name", plugin_cmd_list)
1191            plugin_name = plugin_cmd_list[idx]["plugin_name"]
1192            # Equal separator means plugin function returns result.
1193            if " = " in plugin_name:
1194                # Ex. ['result', 'plugin.foo_func.my_func']
1195                plugin_name_args = plugin_name.split(" = ")
1196                # plugin func return data.
1197                for arg in plugin_name_args:
1198                    if arg == plugin_name_args[-1]:
1199                        plugin_name = arg
1200                    else:
1201                        plugin_resp = arg.split(",")
1202                        # ['result1','result2']
1203                        for x in plugin_resp:
1204                            global_plugin_list.append(x)
1205                            global_plugin_dict[x] = ""
1206
1207            # Walk the plugin args ['arg1,'arg2']
1208            # If the YAML plugin statement 'plugin_args' is not declared.
1209            if any("plugin_args" in d for d in plugin_cmd_list):
1210                idx = self.key_index_list_dict("plugin_args", plugin_cmd_list)
1211                plugin_args = plugin_cmd_list[idx]["plugin_args"]
1212                if plugin_args:
1213                    plugin_args = self.yaml_args_populate(plugin_args)
1214                else:
1215                    plugin_args = []
1216            else:
1217                plugin_args = self.yaml_args_populate([])
1218
1219            # Pack the args arg1, arg2, .... argn into
1220            # "arg1","arg2","argn"  string as params for function.
1221            parm_args_str = self.yaml_args_string(plugin_args)
1222            if parm_args_str:
1223                plugin_func = plugin_name + "(" + parm_args_str + ")"
1224            else:
1225                plugin_func = plugin_name + "()"
1226
1227            # Execute plugin function.
1228            if global_plugin_dict:
1229                resp = self.execute_python_eval(plugin_func)
1230                # Update plugin vars dict if there is any.
1231                if resp != "PLUGIN_EVAL_ERROR":
1232                    self.response_args_data(resp)
1233            else:
1234                resp = self.execute_python_eval(plugin_func)
1235        except Exception as e:
1236            # Set the plugin error state.
1237            plugin_error_dict["exit_on_error"] = True
1238            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
1239            return "PLUGIN_EVAL_ERROR"
1240
1241        # There is a real error executing the plugin function.
1242        if resp == "PLUGIN_EVAL_ERROR":
1243            return resp
1244
1245        # Check if plugin_expects_return (int, string, list,dict etc)
1246        if any("plugin_expects_return" in d for d in plugin_cmd_list):
1247            idx = self.key_index_list_dict(
1248                "plugin_expects_return", plugin_cmd_list
1249            )
1250            plugin_expects = plugin_cmd_list[idx]["plugin_expects_return"]
1251            if plugin_expects:
1252                if resp:
1253                    if (
1254                        self.plugin_expect_type(plugin_expects, resp)
1255                        == "INVALID"
1256                    ):
1257                        self.logger.error("\tWARN: Plugin error check skipped")
1258                    elif not self.plugin_expect_type(plugin_expects, resp):
1259                        self.logger.error(
1260                            "\tERROR: Plugin expects return data: %s"
1261                            % plugin_expects
1262                        )
1263                        plugin_error_dict["exit_on_error"] = True
1264                elif not resp:
1265                    self.logger.error(
1266                        "\tERROR: Plugin func failed to return data"
1267                    )
1268                    plugin_error_dict["exit_on_error"] = True
1269
1270        return resp
1271
1272    def response_args_data(self, plugin_resp):
1273        r"""
1274        Parse the plugin function response and update plugin return variable.
1275
1276        plugin_resp       Response data from plugin function.
1277        """
1278        resp_list = []
1279        resp_data = ""
1280
1281        # There is nothing to update the plugin response.
1282        if len(global_plugin_list) == 0 or plugin_resp == "None":
1283            return
1284
1285        if isinstance(plugin_resp, str):
1286            resp_data = plugin_resp.strip("\r\n\t")
1287            resp_list.append(resp_data)
1288        elif isinstance(plugin_resp, bytes):
1289            resp_data = str(plugin_resp, "UTF-8").strip("\r\n\t")
1290            resp_list.append(resp_data)
1291        elif isinstance(plugin_resp, tuple):
1292            if len(global_plugin_list) == 1:
1293                resp_list.append(plugin_resp)
1294            else:
1295                resp_list = list(plugin_resp)
1296                resp_list = [x.strip("\r\n\t") for x in resp_list]
1297        elif isinstance(plugin_resp, list):
1298            if len(global_plugin_list) == 1:
1299                resp_list.append([x.strip("\r\n\t") for x in plugin_resp])
1300            else:
1301                resp_list = [x.strip("\r\n\t") for x in plugin_resp]
1302        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
1303            resp_list.append(plugin_resp)
1304
1305        # Iterate if there is a list of plugin return vars to update.
1306        for idx, item in enumerate(resp_list, start=0):
1307            # Exit the loop once all required vars have been updated.
1308            if idx >= len(global_plugin_list):
1309                break
1310            # Find the index of the return func in the list and
1311            # update the global func return dictionary.
1312            try:
1313                dict_idx = global_plugin_list[idx]
1314                global_plugin_dict[dict_idx] = item
1315            except (IndexError, ValueError) as e:
1316                self.logger.warning("\tWARN: response_args_data: %s" % e)
1317                pass
1318
1319        # Done updating plugin dict irrespective of pass or failed,
1320        # clear all the list element for next plugin block execute.
1321        global_plugin_list.clear()
1322
1323    def yaml_args_string(self, plugin_args):
1324        r"""
1325        Pack the args into string.
1326
1327        plugin_args            arg list ['arg1', 'arg2', 'argn']
1328        """
1329        args_str = ""
1330        for args in plugin_args:
1331            if args:
1332                if isinstance(args, (int, float)):
1333                    args_str += str(args)
1334                elif args in global_plugin_type_list:
1335                    args_str += str(global_plugin_dict[args])
1336                else:
1337                    args_str += '"' + str(args.strip("\r\n\t")) + '"'
1338            # Skip last list element.
1339            if args != plugin_args[-1]:
1340                args_str += ","
1341        return args_str
1342
1343    def yaml_args_populate(self, yaml_arg_list):
1344        r"""
1345        Decode env and plugin vars and populate.
1346
1347        Description of argument(s):
1348        yaml_arg_list         arg list read from YAML
1349
1350        Example:
1351          - plugin_args:
1352            - arg1
1353            - arg2
1354
1355                  yaml_arg_list:  [arg1, arg2]
1356        """
1357        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1358
1359        if isinstance(yaml_arg_list, list):
1360            tmp_list = []
1361            for arg in yaml_arg_list:
1362                if isinstance(arg, (int, float)):
1363                    tmp_list.append(arg)
1364                    continue
1365                elif isinstance(arg, str):
1366                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1367                    tmp_list.append(arg_str)
1368                else:
1369                    tmp_list.append(arg)
1370
1371            # return populated list.
1372            return tmp_list
1373
1374    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
1375        r"""
1376        Update ${MY_VAR} and plugin vars.
1377
1378        Description of argument(s):
1379        yaml_arg_str         arg string read from YAML.
1380
1381        Example:
1382            - cat ${MY_VAR}
1383            - ls -AX my_plugin_var
1384        """
1385        # Parse the string for env vars ${env_vars}.
1386        try:
1387            # Example, list of matching
1388            # env vars ['username', 'password', 'hostname']
1389            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works good.
1390            var_name_regex = "\\$\\{([^\\}]+)\\}"
1391            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
1392            for var in env_var_names_list:
1393                env_var = os.environ[var]
1394                env_replace = "${" + var + "}"
1395                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
1396        except Exception as e:
1397            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
1398            pass
1399
1400        # Parse the string for plugin vars.
1401        try:
1402            # Example, list of plugin vars ['my_username', 'my_data']
1403            plugin_var_name_list = global_plugin_dict.keys()
1404            for var in plugin_var_name_list:
1405                # Skip env vars already populated by the code block above.
1406                if var in env_var_names_list:
1407                    continue
1408                # If this plugin var exists but is empty in the dict, don't
1409                # replace it. This is either an incorrectly used YAML plugin
1410                # statement or a user-added plugin var that is never populated.
1411                if yaml_arg_str in global_plugin_dict:
1412                    if isinstance(global_plugin_dict[var], (list, dict)):
1413                        # List data type or dict can't be replaced, use
1414                        # directly in eval function call.
1415                        global_plugin_type_list.append(var)
1416                    else:
1417                        yaml_arg_str = yaml_arg_str.replace(
1418                            str(var), str(global_plugin_dict[var])
1419                        )
1420                # Just a string like filename or command.
1421                else:
1422                    yaml_arg_str = yaml_arg_str.replace(
1423                        str(var), str(global_plugin_dict[var])
1424                    )
1425        except (IndexError, ValueError) as e:
1426            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
1427            pass
1428
1429        return yaml_arg_str
1430
1431    def plugin_error_check(self, plugin_dict):
1432        r"""
1433        Plugin error dict processing.
1434
1435        Description of argument(s):
1436        plugin_dict        Plugin block (list of dicts) to check for plugin_error.
1437        """
1438        if any("plugin_error" in d for d in plugin_dict):
1439            for d in plugin_dict:
1440                if "plugin_error" in d:
1441                    value = d["plugin_error"]
1442                    # Reference if the error is set or not by plugin.
1443                    return plugin_error_dict[value]
1444
1445    def key_index_list_dict(self, key, list_dict):
1446        r"""
1447        Iterate over a list of dictionaries and return the index where the key matches.
1448
1449        Description of argument(s):
1450        key           Valid Key in a dict.
1451        list_dict     list of dictionary.
1452        """
1453        for i, d in enumerate(list_dict):
1454            if key in d.keys():
1455                return i
1456
1457    def plugin_expect_type(self, type, data):
1458        r"""
1459        Plugin expect directive type check.
1460        """
1461        if type == "int":
1462            return isinstance(data, int)
1463        elif type == "float":
1464            return isinstance(data, float)
1465        elif type == "str":
1466            return isinstance(data, str)
1467        elif type == "list":
1468            return isinstance(data, list)
1469        elif type == "dict":
1470            return isinstance(data, dict)
1471        elif type == "tuple":
1472            return isinstance(data, tuple)
1473        else:
1474            self.logger.info("\tInvalid data type requested: %s" % type)
1475            return "INVALID"
1476