xref: /openbmc/openbmc-test-automation/ffdc/ffdc_collector.py (revision 49c1b53931191184251a4d1d9ba9ab127ee9f9fb)
1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7import json
8import logging
9import os
10import platform
11import re
12import subprocess
13import sys
14import time
15from errno import EACCES, EPERM
16
17import yaml
18
# Make this script's directory — and every directory beneath it — importable
# so sibling utilities (ssh_utility, telnet_utility) resolve without packaging.
script_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(script_dir)
# Walk path and append to sys.path
# (loop variable renamed from "dir", which shadowed the builtin)
for root, sub_dirs, _files in os.walk(script_dir):
    for sub_dir in sub_dirs:
        sys.path.append(os.path.join(root, sub_dir))
25
26from ssh_utility import SSHRemoteclient  # NOQA
27from telnet_utility import TelnetRemoteclient  # NOQA
28
r"""
User-defined plugin Python functions.

Plugin modules are imported from the "plugins" directory, e.g.:

plugins
├── file1.py
└── file2.py

Example of how to declare a plugin in YAML:
 - plugin:
   - plugin_name: plugin.foo_func.foo_func_yaml
     - plugin_args:
       - arg1
       - arg2
"""
plugin_dir = os.path.join(os.path.dirname(__file__), "plugins")
sys.path.append(plugin_dir)

# Import every plugin module found in the plugins directory.  A missing
# directory is tolerated (robustness fix: os.listdir used to raise
# FileNotFoundError and abort the whole collector when plugins/ is absent).
if os.path.isdir(plugin_dir):
    for module in os.listdir(plugin_dir):
        if module == "__init__.py" or not module.endswith(".py"):
            continue

        plugin_module = f"plugins.{module[:-3]}"
        try:
            # __import__ returns the top-level "plugins" package; each
            # submodule is reachable as an attribute, which is how the
            # YAML-driven eval() callers address plugin.<mod>.<func>.
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            print(f"PLUGIN: Exception: {e}")
            print(f"PLUGIN: Module import failed: {module}")
            continue
59
r"""
This is for plugin functions returning data or responses to the caller
in the YAML plugin setup.

Example:

    - plugin:
      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
      - plugin_args:
        - ${hostname}
        - ${username}
        - ${password}
        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
     - plugin:
        - plugin_name: plugin.print_vars.print_vars
        - plugin_args:
          - version

Here the "version" variable produced by the first plugin is consumed by
another plugin later in the YAML block.
"""
# NOTE: the previous "global" statements here were removed — "global" at
# module scope is a no-op; plain assignments already define module globals.

# Hold the plugin return values in dict and plugin return vars in list.
# Dict is to reference and update vars processing in parser where as
# list is for current vars from the plugin block which needs processing.
global_plugin_dict = {}
global_plugin_list = []

# Hold the plugin return named declared if function returned values are
# list,dict.
# Refer this name list to look up the plugin dict for eval() args function
# Example ['version']
global_plugin_type_list = []

# Path where logs are to be stored or written.
global_log_store_path = ""

# Plugin error state defaults.
plugin_error_dict = {
    "exit_on_error": False,
    "continue_on_error": False,
}
106
107
108class ffdc_collector:
109    r"""
110    Execute commands from configuration file to collect log files.
111    Fetch and store generated files at the specified location.
112
113    """
114
    def __init__(
        self,
        hostname,
        username,
        password,
        port_ssh,
        port_https,
        port_ipmi,
        ffdc_config,
        location,
        remote_type,
        remote_protocol,
        env_vars,
        econfig,
        log_level,
    ):
        r"""
        Set up the collector: record connection parameters, create the
        timestamped local store directory, start logging, verify the
        script host environment and load the FFDC YAML configuration.

        Description of argument(s):

        hostname            Name/ip of the targeted (remote) system
        username            User on the targeted system with access to
                            FFDC files
        password            Password for user on targeted system
        port_ssh            SSH port value. By default 22
        port_https          HTTPS port value. By default 443
        port_ipmi           IPMI port value. By default 623
        ffdc_config         Configuration file listing commands and files
                            for FFDC
        location            Where to store collected FFDC
        remote_type         OS type of the remote host
        remote_protocol     Protocol to use to collect data
        env_vars            User define CLI env vars '{"key : "value"}'
        econfig             User define env vars YAML file
        log_level           Logging level name (e.g. "INFO"); resolved
                            against the logging module via getattr.

        Exits the process with -1 when the YAML config cannot be parsed,
        when the target type has no section in the config, or when the
        script environment check fails.
        """

        self.hostname = hostname
        self.username = username
        self.password = password
        # Ports are stored as strings for command/URL interpolation.
        self.port_ssh = str(port_ssh)
        self.port_https = str(port_https)
        self.port_ipmi = str(port_ipmi)
        self.ffdc_config = ffdc_config
        # Collected data is grouped under a per-target-type subdirectory.
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ""
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set
        # here to be sure that all files for this run will have same timestamps
        # and they will be saved in the same directory.
        # self.location == local system for now
        self.set_ffdc_default_store_path()

        # Logger for this run.  Need to be after set_ffdc_default_store_path()
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify top level directory exists for storage
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load default or user define YAML configuration file.
            with open(self.ffdc_config, "r") as file:
                try:
                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
                except yaml.YAMLError as e:
                    self.logger.error(e)
                    sys.exit(-1)

            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n"
                    % (self.target_type, self.ffdc_config)
                )
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from user.
        self.logger.info("\n\tENV: User define input YAML variables")
        self.env_dict = {}
        self.load_env()
206
207    def verify_script_env(self):
208        # Import to log version
209        import click
210        import paramiko
211
212        run_env_ok = True
213
214        try:
215            redfishtool_version = (
216                self.run_tool_cmd("redfishtool -V").split(" ")[2].strip("\n")
217            )
218        except Exception as e:
219            self.logger.error("\tEXCEPTION redfishtool: %s", e)
220            redfishtool_version = "Not Installed (optional)"
221
222        try:
223            ipmitool_version = self.run_tool_cmd("ipmitool -V").split(" ")[2]
224        except Exception as e:
225            self.logger.error("\tEXCEPTION ipmitool: %s", e)
226            ipmitool_version = "Not Installed (optional)"
227
228        self.logger.info("\n\t---- Script host environment ----")
229        self.logger.info(
230            "\t{:<10}  {:<10}".format("Script hostname", os.uname()[1])
231        )
232        self.logger.info(
233            "\t{:<10}  {:<10}".format("Script host os", platform.platform())
234        )
235        self.logger.info(
236            "\t{:<10}  {:>10}".format("Python", platform.python_version())
237        )
238        self.logger.info("\t{:<10}  {:>10}".format("PyYAML", yaml.__version__))
239        self.logger.info("\t{:<10}  {:>10}".format("click", click.__version__))
240        self.logger.info(
241            "\t{:<10}  {:>10}".format("paramiko", paramiko.__version__)
242        )
243        self.logger.info(
244            "\t{:<10}  {:>9}".format("redfishtool", redfishtool_version)
245        )
246        self.logger.info(
247            "\t{:<10}  {:>12}".format("ipmitool", ipmitool_version)
248        )
249
250        if eval(yaml.__version__.replace(".", ",")) < (5, 3, 0):
251            self.logger.error(
252                "\n\tERROR: Python or python packages do not meet minimum"
253                " version requirement."
254            )
255            self.logger.error(
256                "\tERROR: PyYAML version 5.3.0 or higher is needed.\n"
257            )
258            run_env_ok = False
259
260        self.logger.info("\t---- End script host environment ----")
261        return run_env_ok
262
263    def script_logging(self, log_level_attr):
264        r"""
265        Create logger
266
267        """
268        self.logger = logging.getLogger()
269        self.logger.setLevel(log_level_attr)
270        log_file_handler = logging.FileHandler(
271            self.ffdc_dir_path + "collector.log"
272        )
273
274        stdout_handler = logging.StreamHandler(sys.stdout)
275        self.logger.addHandler(log_file_handler)
276        self.logger.addHandler(stdout_handler)
277
278        # Turn off paramiko INFO logging
279        logging.getLogger("paramiko").setLevel(logging.WARNING)
280
281    def target_is_pingable(self):
282        r"""
283        Check if target system is ping-able.
284
285        """
286        response = os.system("ping -c 1 %s  2>&1 >/dev/null" % self.hostname)
287        if response == 0:
288            self.logger.info(
289                "\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname
290            )
291            return True
292        else:
293            self.logger.error(
294                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n"
295                % self.hostname
296            )
297            sys.exit(-1)
298
299    def collect_ffdc(self):
300        r"""
301        Initiate FFDC Collection depending on requested protocol.
302
303        """
304
305        self.logger.info(
306            "\n\t---- Start communicating with %s ----" % self.hostname
307        )
308        self.start_time = time.time()
309
310        # Find the list of target and protocol supported.
311        check_protocol_list = []
312        config_dict = self.ffdc_actions
313
314        for target_type in config_dict.keys():
315            if self.target_type != target_type:
316                continue
317
318            for k, v in config_dict[target_type].items():
319                if (
320                    config_dict[target_type][k]["PROTOCOL"][0]
321                    not in check_protocol_list
322                ):
323                    check_protocol_list.append(
324                        config_dict[target_type][k]["PROTOCOL"][0]
325                    )
326
327        self.logger.info(
328            "\n\t %s protocol type: %s"
329            % (self.target_type, check_protocol_list)
330        )
331
332        verified_working_protocol = self.verify_protocol(check_protocol_list)
333
334        if verified_working_protocol:
335            self.logger.info(
336                "\n\t---- Completed protocol pre-requisite check ----\n"
337            )
338
339        # Verify top level directory exists for storage
340        self.validate_local_store(self.location)
341
342        if (self.remote_protocol not in verified_working_protocol) and (
343            self.remote_protocol != "ALL"
344        ):
345            self.logger.info(
346                "\n\tWorking protocol list: %s" % verified_working_protocol
347            )
348            self.logger.error(
349                "\tERROR: Requested protocol %s is not in working protocol"
350                " list.\n" % self.remote_protocol
351            )
352            sys.exit(-1)
353        else:
354            self.generate_ffdc(verified_working_protocol)
355
356    def ssh_to_target_system(self):
357        r"""
358        Open a ssh connection to targeted system.
359
360        """
361
362        self.ssh_remoteclient = SSHRemoteclient(
363            self.hostname, self.username, self.password, self.port_ssh
364        )
365
366        if self.ssh_remoteclient.ssh_remoteclient_login():
367            self.logger.info(
368                "\n\t[Check] %s SSH connection established.\t [OK]"
369                % self.hostname
370            )
371
372            # Check scp connection.
373            # If scp connection fails,
374            # continue with FFDC generation but skip scp files to local host.
375            self.ssh_remoteclient.scp_connection()
376            return True
377        else:
378            self.logger.info(
379                "\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]"
380                % self.hostname
381            )
382            return False
383
384    def telnet_to_target_system(self):
385        r"""
386        Open a telnet connection to targeted system.
387        """
388        self.telnet_remoteclient = TelnetRemoteclient(
389            self.hostname, self.username, self.password
390        )
391        if self.telnet_remoteclient.tn_remoteclient_login():
392            self.logger.info(
393                "\n\t[Check] %s Telnet connection established.\t [OK]"
394                % self.hostname
395            )
396            return True
397        else:
398            self.logger.info(
399                "\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]"
400                % self.hostname
401            )
402            return False
403
404    def generate_ffdc(self, working_protocol_list):
405        r"""
406        Determine actions based on remote host type
407
408        Description of argument(s):
409        working_protocol_list    List of confirmed working protocols to
410                                 connect to remote host.
411        """
412
413        self.logger.info(
414            "\n\t---- Executing commands on " + self.hostname + " ----"
415        )
416        self.logger.info(
417            "\n\tWorking protocol list: %s" % working_protocol_list
418        )
419
420        config_dict = self.ffdc_actions
421        for target_type in config_dict.keys():
422            if self.target_type != target_type:
423                continue
424
425            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
426            global_plugin_dict["global_log_store_path"] = self.ffdc_dir_path
427            self.logger.info("\tSystem Type: %s" % target_type)
428            for k, v in config_dict[target_type].items():
429                if (
430                    self.remote_protocol not in working_protocol_list
431                    and self.remote_protocol != "ALL"
432                ):
433                    continue
434
435                protocol = config_dict[target_type][k]["PROTOCOL"][0]
436
437                if protocol not in working_protocol_list:
438                    continue
439
440                if protocol in working_protocol_list:
441                    if protocol == "SSH" or protocol == "SCP":
442                        self.protocol_ssh(protocol, target_type, k)
443                    elif protocol == "TELNET":
444                        self.protocol_telnet(target_type, k)
445                    elif (
446                        protocol == "REDFISH"
447                        or protocol == "IPMI"
448                        or protocol == "SHELL"
449                    ):
450                        self.protocol_execute(protocol, target_type, k)
451                else:
452                    self.logger.error(
453                        "\n\tERROR: %s is not available for %s."
454                        % (protocol, self.hostname)
455                    )
456
457        # Close network connection after collecting all files
458        self.elapsed_time = time.strftime(
459            "%H:%M:%S", time.gmtime(time.time() - self.start_time)
460        )
461        self.logger.info("\n\tTotal time taken: %s" % self.elapsed_time)
462        if self.ssh_remoteclient:
463            self.ssh_remoteclient.ssh_remoteclient_disconnect()
464        if self.telnet_remoteclient:
465            self.telnet_remoteclient.tn_remoteclient_disconnect()
466
467    def protocol_ssh(self, protocol, target_type, sub_type):
468        r"""
469        Perform actions using SSH and SCP protocols.
470
471        Description of argument(s):
472        protocol            Protocol to execute.
473        target_type         OS Type of remote host.
474        sub_type            Group type of commands.
475        """
476
477        if protocol == "SCP":
478            self.group_copy(self.ffdc_actions[target_type][sub_type])
479        else:
480            self.collect_and_copy_ffdc(
481                self.ffdc_actions[target_type][sub_type]
482            )
483
484    def protocol_telnet(self, target_type, sub_type):
485        r"""
486        Perform actions using telnet protocol.
487        Description of argument(s):
488        target_type          OS Type of remote host.
489        """
490        self.logger.info(
491            "\n\t[Run] Executing commands on %s using %s"
492            % (self.hostname, "TELNET")
493        )
494        telnet_files_saved = []
495        progress_counter = 0
496        list_of_commands = self.ffdc_actions[target_type][sub_type]["COMMANDS"]
497        for index, each_cmd in enumerate(list_of_commands, start=0):
498            command_txt, command_timeout = self.unpack_command(each_cmd)
499            result = self.telnet_remoteclient.execute_command(
500                command_txt, command_timeout
501            )
502            if result:
503                try:
504                    targ_file = self.ffdc_actions[target_type][sub_type][
505                        "FILES"
506                    ][index]
507                except IndexError:
508                    targ_file = command_txt
509                    self.logger.warning(
510                        "\n\t[WARN] Missing filename to store data from"
511                        " telnet %s." % each_cmd
512                    )
513                    self.logger.warning(
514                        "\t[WARN] Data will be stored in %s." % targ_file
515                    )
516                targ_file_with_path = (
517                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
518                )
519                # Creates a new file
520                with open(targ_file_with_path, "w") as fp:
521                    fp.write(result)
522                    fp.close
523                    telnet_files_saved.append(targ_file)
524            progress_counter += 1
525            self.print_progress(progress_counter)
526        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
527        for file in telnet_files_saved:
528            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
529
530    def protocol_execute(self, protocol, target_type, sub_type):
531        r"""
532        Perform actions for a given protocol.
533
534        Description of argument(s):
535        protocol            Protocol to execute.
536        target_type         OS Type of remote host.
537        sub_type            Group type of commands.
538        """
539
540        self.logger.info(
541            "\n\t[Run] Executing commands to %s using %s"
542            % (self.hostname, protocol)
543        )
544        executed_files_saved = []
545        progress_counter = 0
546        list_of_cmd = self.get_command_list(
547            self.ffdc_actions[target_type][sub_type]
548        )
549        for index, each_cmd in enumerate(list_of_cmd, start=0):
550            plugin_call = False
551            if isinstance(each_cmd, dict):
552                if "plugin" in each_cmd:
553                    # If the error is set and plugin explicitly
554                    # requested to skip execution on error..
555                    if plugin_error_dict[
556                        "exit_on_error"
557                    ] and self.plugin_error_check(each_cmd["plugin"]):
558                        self.logger.info(
559                            "\n\t[PLUGIN-ERROR] exit_on_error: %s"
560                            % plugin_error_dict["exit_on_error"]
561                        )
562                        self.logger.info(
563                            "\t[PLUGIN-SKIP] %s" % each_cmd["plugin"][0]
564                        )
565                        continue
566                    plugin_call = True
567                    # call the plugin
568                    self.logger.info("\n\t[PLUGIN-START]")
569                    result = self.execute_plugin_block(each_cmd["plugin"])
570                    self.logger.info("\t[PLUGIN-END]\n")
571            else:
572                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
573
574            if not plugin_call:
575                result = self.run_tool_cmd(each_cmd)
576            if result:
577                try:
578                    file_name = self.get_file_list(
579                        self.ffdc_actions[target_type][sub_type]
580                    )[index]
581                    # If file is specified as None.
582                    if file_name == "None":
583                        continue
584                    targ_file = self.yaml_env_and_plugin_vars_populate(
585                        file_name
586                    )
587                except IndexError:
588                    targ_file = each_cmd.split("/")[-1]
589                    self.logger.warning(
590                        "\n\t[WARN] Missing filename to store data from %s."
591                        % each_cmd
592                    )
593                    self.logger.warning(
594                        "\t[WARN] Data will be stored in %s." % targ_file
595                    )
596
597                targ_file_with_path = (
598                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
599                )
600
601                # Creates a new file
602                with open(targ_file_with_path, "w") as fp:
603                    if isinstance(result, dict):
604                        fp.write(json.dumps(result))
605                    else:
606                        fp.write(result)
607                    fp.close
608                    executed_files_saved.append(targ_file)
609
610            progress_counter += 1
611            self.print_progress(progress_counter)
612
613        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
614
615        for file in executed_files_saved:
616            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
617
618    def collect_and_copy_ffdc(
619        self, ffdc_actions_for_target_type, form_filename=False
620    ):
621        r"""
622        Send commands in ffdc_config file to targeted system.
623
624        Description of argument(s):
625        ffdc_actions_for_target_type     Commands and files for the selected
626                                         remote host type.
627        form_filename                    If true, pre-pend self.target_type to
628                                         filename
629        """
630
631        # Executing commands, if any
632        self.ssh_execute_ffdc_commands(
633            ffdc_actions_for_target_type, form_filename
634        )
635
636        # Copying files
637        if self.ssh_remoteclient.scpclient:
638            self.logger.info(
639                "\n\n\tCopying FFDC files from remote system %s.\n"
640                % self.hostname
641            )
642
643            # Retrieving files from target system
644            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
645            self.scp_ffdc(
646                self.ffdc_dir_path,
647                self.ffdc_prefix,
648                form_filename,
649                list_of_files,
650            )
651        else:
652            self.logger.info(
653                "\n\n\tSkip copying FFDC files from remote system %s.\n"
654                % self.hostname
655            )
656
657    def get_command_list(self, ffdc_actions_for_target_type):
658        r"""
659        Fetch list of commands from configuration file
660
661        Description of argument(s):
662        ffdc_actions_for_target_type    Commands and files for the selected
663                                        remote host type.
664        """
665        try:
666            list_of_commands = ffdc_actions_for_target_type["COMMANDS"]
667        except KeyError:
668            list_of_commands = []
669        return list_of_commands
670
671    def get_file_list(self, ffdc_actions_for_target_type):
672        r"""
673        Fetch list of commands from configuration file
674
675        Description of argument(s):
676        ffdc_actions_for_target_type    Commands and files for the selected
677                                        remote host type.
678        """
679        try:
680            list_of_files = ffdc_actions_for_target_type["FILES"]
681        except KeyError:
682            list_of_files = []
683        return list_of_files
684
685    def unpack_command(self, command):
686        r"""
687        Unpack command from config file
688
689        Description of argument(s):
690        command    Command from config file.
691        """
692        if isinstance(command, dict):
693            command_txt = next(iter(command))
694            command_timeout = next(iter(command.values()))
695        elif isinstance(command, str):
696            command_txt = command
697            # Default command timeout 60 seconds
698            command_timeout = 60
699
700        return command_txt, command_timeout
701
702    def ssh_execute_ffdc_commands(
703        self, ffdc_actions_for_target_type, form_filename=False
704    ):
705        r"""
706        Send commands in ffdc_config file to targeted system.
707
708        Description of argument(s):
709        ffdc_actions_for_target_type    Commands and files for the selected
710                                        remote host type.
711        form_filename                   If true, pre-pend self.target_type to
712                                        filename
713        """
714        self.logger.info(
715            "\n\t[Run] Executing commands on %s using %s"
716            % (self.hostname, ffdc_actions_for_target_type["PROTOCOL"][0])
717        )
718
719        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
720        # If command list is empty, returns
721        if not list_of_commands:
722            return
723
724        progress_counter = 0
725        for command in list_of_commands:
726            command_txt, command_timeout = self.unpack_command(command)
727
728            if form_filename:
729                command_txt = str(command_txt % self.target_type)
730
731            (
732                cmd_exit_code,
733                err,
734                response,
735            ) = self.ssh_remoteclient.execute_command(
736                command_txt, command_timeout
737            )
738
739            if cmd_exit_code:
740                self.logger.warning(
741                    "\n\t\t[WARN] %s exits with code %s."
742                    % (command_txt, str(cmd_exit_code))
743                )
744                self.logger.warning("\t\t[WARN] %s " % err)
745
746            progress_counter += 1
747            self.print_progress(progress_counter)
748
749        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
750
    def group_copy(self, ffdc_actions_for_target_type):
        r"""
        scp group of files (wild card) from remote host.

        Each configured COMMAND is executed on the remote host and is
        expected to print matching file path(s); those paths are then
        copied here via SCP into self.ffdc_dir_path.

        Description of argument(s):
        ffdc_actions_for_target_type    Commands and files for the selected
                                        remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info(
                "\n\tCopying files from remote system %s via SCP.\n"
                % self.hostname
            )

            list_of_commands = self.get_command_list(
                ffdc_actions_for_target_type
            )
            # If command list is empty, returns
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    # Expand ${...} env/plugin variables in the command.
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                (
                    cmd_exit_code,
                    err,
                    response,
                ) = self.ssh_remoteclient.execute_command(command)

                # If file does not exist, code take no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    # One remote file path per line of command output.
                    scp_result = self.ssh_remoteclient.scp_file_from_remote(
                        response.split("\n"), self.ffdc_dir_path
                    )
                    if scp_result:
                        self.logger.info(
                            "\t\tSuccessfully copied from "
                            + self.hostname
                            + ":"
                            + command
                        )
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            self.logger.info(
                "\n\n\tSkip copying files from remote system %s.\n"
                % self.hostname
            )
807
808    def scp_ffdc(
809        self,
810        targ_dir_path,
811        targ_file_prefix,
812        form_filename,
813        file_list=None,
814        quiet=None,
815    ):
816        r"""
817        SCP all files in file_dict to the indicated directory on the local
818        system.
819
820        Description of argument(s):
821        targ_dir_path                   The path of the directory to receive
822                                        the files.
823        targ_file_prefix                Prefix which will be prepended to each
824                                        target file's name.
825        file_dict                       A dictionary of files to scp from
826                                        targeted system to this system
827
828        """
829
830        progress_counter = 0
831        for filename in file_list:
832            if form_filename:
833                filename = str(filename % self.target_type)
834            source_file_path = filename
835            targ_file_path = (
836                targ_dir_path + targ_file_prefix + filename.split("/")[-1]
837            )
838
839            # If source file name contains wild card, copy filename as is.
840            if "*" in source_file_path:
841                scp_result = self.ssh_remoteclient.scp_file_from_remote(
842                    source_file_path, self.ffdc_dir_path
843                )
844            else:
845                scp_result = self.ssh_remoteclient.scp_file_from_remote(
846                    source_file_path, targ_file_path
847                )
848
849            if not quiet:
850                if scp_result:
851                    self.logger.info(
852                        "\t\tSuccessfully copied from "
853                        + self.hostname
854                        + ":"
855                        + source_file_path
856                        + ".\n"
857                    )
858                else:
859                    self.logger.info(
860                        "\t\tFail to copy from "
861                        + self.hostname
862                        + ":"
863                        + source_file_path
864                        + ".\n"
865                    )
866            else:
867                progress_counter += 1
868                self.print_progress(progress_counter)
869
870    def set_ffdc_default_store_path(self):
871        r"""
872        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
873        Collected ffdc file will be stored in dir
874        /self.location/hostname_timestr/.
875        Individual ffdc file will have timestr_filename.
876
877        Description of class variables:
878        self.ffdc_dir_path  The dir path where collected ffdc data files
879                            should be put.
880
881        self.ffdc_prefix    The prefix to be given to each ffdc file name.
882
883        """
884
885        timestr = time.strftime("%Y%m%d-%H%M%S")
886        self.ffdc_dir_path = (
887            self.location + "/" + self.hostname + "_" + timestr + "/"
888        )
889        self.ffdc_prefix = timestr + "_"
890        self.validate_local_store(self.ffdc_dir_path)
891
892    # Need to verify local store path exists prior to instantiate this class.
893    # This class method is used to share the same code between CLI input parm
894    # and Robot Framework "${EXECDIR}/logs" before referencing this class.
895    @classmethod
896    def validate_local_store(cls, dir_path):
897        r"""
898        Ensure path exists to store FFDC files locally.
899
900        Description of variable:
901        dir_path  The dir path where collected ffdc data files will be stored.
902
903        """
904
905        if not os.path.exists(dir_path):
906            try:
907                os.makedirs(dir_path, 0o755)
908            except (IOError, OSError) as e:
909                # PermissionError
910                if e.errno == EPERM or e.errno == EACCES:
911                    print(
912                        "\tERROR: os.makedirs %s failed with"
913                        " PermissionError.\n" % dir_path
914                    )
915                else:
916                    print(
917                        "\tERROR: os.makedirs %s failed with %s.\n"
918                        % (dir_path, e.strerror)
919                    )
920                sys.exit(-1)
921
922    def print_progress(self, progress):
923        r"""
924        Print activity progress +
925
926        Description of variable:
927        progress  Progress counter.
928
929        """
930
931        sys.stdout.write("\r\t" + "+" * progress)
932        sys.stdout.flush()
933        time.sleep(0.1)
934
935    def verify_redfish(self):
936        r"""
937        Verify remote host has redfish service active
938
939        """
940        redfish_parm = (
941            "redfishtool -r "
942            + self.hostname
943            + ":"
944            + self.port_https
945            + " -S Always raw GET /redfish/v1/"
946        )
947        return self.run_tool_cmd(redfish_parm, True)
948
949    def verify_ipmi(self):
950        r"""
951        Verify remote host has IPMI LAN service active
952
953        """
954        if self.target_type == "OPENBMC":
955            ipmi_parm = (
956                "ipmitool -I lanplus -C 17  -U "
957                + self.username
958                + " -P "
959                + self.password
960                + " -H "
961                + self.hostname
962                + " -p "
963                + str(self.port_ipmi)
964                + " power status"
965            )
966        else:
967            ipmi_parm = (
968                "ipmitool -I lanplus  -P "
969                + self.password
970                + " -H "
971                + self.hostname
972                + " -p "
973                + str(self.port_ipmi)
974                + " power status"
975            )
976
977        return self.run_tool_cmd(ipmi_parm, True)
978
979    def run_tool_cmd(self, parms_string, quiet=False):
980        r"""
981        Run CLI standard tool or scripts.
982
983        Description of variable:
984        parms_string         tool command options.
985        quiet                do not print tool error message if True
986        """
987
988        result = subprocess.run(
989            [parms_string],
990            stdout=subprocess.PIPE,
991            stderr=subprocess.PIPE,
992            shell=True,
993            universal_newlines=True,
994        )
995
996        if result.stderr and not quiet:
997            if self.password in parms_string:
998                parms_string = parms_string.replace(self.password, "********")
999            self.logger.error("\n\t\tERROR with %s " % parms_string)
1000            self.logger.error("\t\t" + result.stderr)
1001
1002        return result.stdout
1003
1004    def verify_protocol(self, protocol_list):
1005        r"""
1006        Perform protocol working check.
1007
1008        Description of argument(s):
1009        protocol_list        List of protocol.
1010        """
1011
1012        tmp_list = []
1013        if self.target_is_pingable():
1014            tmp_list.append("SHELL")
1015
1016        for protocol in protocol_list:
1017            if self.remote_protocol != "ALL":
1018                if self.remote_protocol != protocol:
1019                    continue
1020
1021            # Only check SSH/SCP once for both protocols
1022            if (
1023                protocol == "SSH"
1024                or protocol == "SCP"
1025                and protocol not in tmp_list
1026            ):
1027                if self.ssh_to_target_system():
1028                    # Add only what user asked.
1029                    if self.remote_protocol != "ALL":
1030                        tmp_list.append(self.remote_protocol)
1031                    else:
1032                        tmp_list.append("SSH")
1033                        tmp_list.append("SCP")
1034
1035            if protocol == "TELNET":
1036                if self.telnet_to_target_system():
1037                    tmp_list.append(protocol)
1038
1039            if protocol == "REDFISH":
1040                if self.verify_redfish():
1041                    tmp_list.append(protocol)
1042                    self.logger.info(
1043                        "\n\t[Check] %s Redfish Service.\t\t [OK]"
1044                        % self.hostname
1045                    )
1046                else:
1047                    self.logger.info(
1048                        "\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]"
1049                        % self.hostname
1050                    )
1051
1052            if protocol == "IPMI":
1053                if self.verify_ipmi():
1054                    tmp_list.append(protocol)
1055                    self.logger.info(
1056                        "\n\t[Check] %s IPMI LAN Service.\t\t [OK]"
1057                        % self.hostname
1058                    )
1059                else:
1060                    self.logger.info(
1061                        "\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]"
1062                        % self.hostname
1063                    )
1064
1065        return tmp_list
1066
    def load_env(self):
        r"""
        Load the user environment variables from a YAML file.

        This method reads the environment variables from a YAML file specified
        in the ENV_FILE environment variable. If the file is not found or
        there is an error reading the file, an exception is raised.

        The YAML file should have the following format:

        .. code-block:: yaml

            VAR_NAME: VAR_VALUE

        Where VAR_NAME is the name of the environment variable, and
        VAR_VALUE is its value.

        After loading the environment variables, they are stored in the
        self.env attribute for later use.
        """

        # Export the connection parameters into the process environment so
        # YAML ${var} references and plugin commands can resolve them.
        os.environ["hostname"] = self.hostname
        os.environ["username"] = self.username
        os.environ["password"] = self.password
        os.environ["port_ssh"] = self.port_ssh
        os.environ["port_https"] = self.port_https
        os.environ["port_ipmi"] = self.port_ipmi

        # Append default Env.
        self.env_dict["hostname"] = self.hostname
        self.env_dict["username"] = self.username
        self.env_dict["password"] = self.password
        self.env_dict["port_ssh"] = self.port_ssh
        self.env_dict["port_https"] = self.port_https
        self.env_dict["port_ipmi"] = self.port_ipmi

        try:
            tmp_env_dict = {}
            # self.env_vars is a JSON string of extra user-supplied vars.
            if self.env_vars:
                tmp_env_dict = json.loads(self.env_vars)
                # Export ENV vars default.
                for key, value in tmp_env_dict.items():
                    os.environ[key] = value
                    self.env_dict[key] = str(value)

            # Load user specified ENV config YAML.
            if self.econfig:
                with open(self.econfig, "r") as file:
                    try:
                        # SafeLoader: the config file must not be able to
                        # instantiate arbitrary python objects.
                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
                    except yaml.YAMLError as e:
                        self.logger.error(e)
                        sys.exit(-1)
                # Export ENV vars.
                # NOTE(review): assumes a top-level 'env_params' mapping in
                # the YAML; a missing key raises and is not caught below --
                # confirm against the expected config schema.
                for key, value in tmp_env_dict["env_params"].items():
                    os.environ[key] = str(value)
                    self.env_dict[key] = str(value)
        except json.decoder.JSONDecodeError as e:
            self.logger.error("\n\tERROR: %s " % e)
            sys.exit(-1)
        except FileNotFoundError as e:
            self.logger.error("\n\tERROR: %s " % e)
            sys.exit(-1)

        # This to mask the password from displaying on the console.
        # Any env key containing "password" (case-insensitive) is masked.
        mask_dict = self.env_dict.copy()
        for k, v in mask_dict.items():
            if k.lower().find("password") != -1:
                hidden_text = []
                hidden_text.append(v)
                password_regex = (
                    "(" + "|".join([re.escape(x) for x in hidden_text]) + ")"
                )
                mask_dict[k] = re.sub(password_regex, "********", v)

        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
1143
    def execute_python_eval(self, eval_string):
        r"""
        Execute qualified python function string using eval.

        Description of argument(s):
        eval_string        The python call expression to execute, e.g.
                           'plugin.foo_func.foo_func(10)'.

        Returns:
            The plugin function's return value, or the sentinel string
            "PLUGIN_EVAL_ERROR" when evaluation fails.

        Example:
                eval(plugin.foo_func.foo_func(10))
        """
        # SECURITY NOTE: eval() runs whatever the YAML config names; the
        # config is treated as trusted operator input here. The expression
        # resolves names (e.g. the imported 'plugins' modules) in this
        # frame's scope.
        try:
            self.logger.info("\tExecuting plugin func()")
            self.logger.debug("\tCall func: %s" % eval_string)
            result = eval(eval_string)
            self.logger.info("\treturn: %s" % str(result))
        except (
            ValueError,
            SyntaxError,
            NameError,
            AttributeError,
            TypeError,
        ) as e:
            self.logger.error("\tERROR: execute_python_eval: %s" % e)
            # Set the plugin error state so the caller can abort the block.
            plugin_error_dict["exit_on_error"] = True
            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
            return "PLUGIN_EVAL_ERROR"

        return result
1173
1174    def execute_plugin_block(self, plugin_cmd_list):
1175        r"""
1176        Pack the plugin command to qualifed python string object.
1177
1178        Description of argument(s):
1179        plugin_list_dict      Plugin block read from YAML
1180                              [{'plugin_name': 'plugin.foo_func.my_func'},
1181                               {'plugin_args': [10]}]
1182
1183        Example:
1184            - plugin:
1185              - plugin_name: plugin.foo_func.my_func
1186              - plugin_args:
1187                - arg1
1188                - arg2
1189
1190            - plugin:
1191              - plugin_name: result = plugin.foo_func.my_func
1192              - plugin_args:
1193                - arg1
1194                - arg2
1195
1196            - plugin:
1197              - plugin_name: result1,result2 = plugin.foo_func.my_func
1198              - plugin_args:
1199                - arg1
1200                - arg2
1201        """
1202        try:
1203            idx = self.key_index_list_dict("plugin_name", plugin_cmd_list)
1204            plugin_name = plugin_cmd_list[idx]["plugin_name"]
1205            # Equal separator means plugin function returns result.
1206            if " = " in plugin_name:
1207                # Ex. ['result', 'plugin.foo_func.my_func']
1208                plugin_name_args = plugin_name.split(" = ")
1209                # plugin func return data.
1210                for arg in plugin_name_args:
1211                    if arg == plugin_name_args[-1]:
1212                        plugin_name = arg
1213                    else:
1214                        plugin_resp = arg.split(",")
1215                        # ['result1','result2']
1216                        for x in plugin_resp:
1217                            global_plugin_list.append(x)
1218                            global_plugin_dict[x] = ""
1219
1220            # Walk the plugin args ['arg1,'arg2']
1221            # If the YAML plugin statement 'plugin_args' is not declared.
1222            if any("plugin_args" in d for d in plugin_cmd_list):
1223                idx = self.key_index_list_dict("plugin_args", plugin_cmd_list)
1224                plugin_args = plugin_cmd_list[idx]["plugin_args"]
1225                if plugin_args:
1226                    plugin_args = self.yaml_args_populate(plugin_args)
1227                else:
1228                    plugin_args = []
1229            else:
1230                plugin_args = self.yaml_args_populate([])
1231
1232            # Pack the args arg1, arg2, .... argn into
1233            # "arg1","arg2","argn"  string as params for function.
1234            parm_args_str = self.yaml_args_string(plugin_args)
1235            if parm_args_str:
1236                plugin_func = plugin_name + "(" + parm_args_str + ")"
1237            else:
1238                plugin_func = plugin_name + "()"
1239
1240            # Execute plugin function.
1241            if global_plugin_dict:
1242                resp = self.execute_python_eval(plugin_func)
1243                # Update plugin vars dict if there is any.
1244                if resp != "PLUGIN_EVAL_ERROR":
1245                    self.response_args_data(resp)
1246            else:
1247                resp = self.execute_python_eval(plugin_func)
1248        except Exception as e:
1249            # Set the plugin error state.
1250            plugin_error_dict["exit_on_error"] = True
1251            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
1252            pass
1253
1254        # There is a real error executing the plugin function.
1255        if resp == "PLUGIN_EVAL_ERROR":
1256            return resp
1257
1258        # Check if plugin_expects_return (int, string, list,dict etc)
1259        if any("plugin_expects_return" in d for d in plugin_cmd_list):
1260            idx = self.key_index_list_dict(
1261                "plugin_expects_return", plugin_cmd_list
1262            )
1263            plugin_expects = plugin_cmd_list[idx]["plugin_expects_return"]
1264            if plugin_expects:
1265                if resp:
1266                    if (
1267                        self.plugin_expect_type(plugin_expects, resp)
1268                        == "INVALID"
1269                    ):
1270                        self.logger.error("\tWARN: Plugin error check skipped")
1271                    elif not self.plugin_expect_type(plugin_expects, resp):
1272                        self.logger.error(
1273                            "\tERROR: Plugin expects return data: %s"
1274                            % plugin_expects
1275                        )
1276                        plugin_error_dict["exit_on_error"] = True
1277                elif not resp:
1278                    self.logger.error(
1279                        "\tERROR: Plugin func failed to return data"
1280                    )
1281                    plugin_error_dict["exit_on_error"] = True
1282
1283        return resp
1284
1285    def response_args_data(self, plugin_resp):
1286        r"""
1287        Parse the plugin function response and update plugin return variable.
1288
1289        plugin_resp       Response data from plugin function.
1290        """
1291        resp_list = []
1292        resp_data = ""
1293
1294        # There is nothing to update the plugin response.
1295        if len(global_plugin_list) == 0 or plugin_resp == "None":
1296            return
1297
1298        if isinstance(plugin_resp, str):
1299            resp_data = plugin_resp.strip("\r\n\t")
1300            resp_list.append(resp_data)
1301        elif isinstance(plugin_resp, bytes):
1302            resp_data = str(plugin_resp, "UTF-8").strip("\r\n\t")
1303            resp_list.append(resp_data)
1304        elif isinstance(plugin_resp, tuple):
1305            if len(global_plugin_list) == 1:
1306                resp_list.append(plugin_resp)
1307            else:
1308                resp_list = list(plugin_resp)
1309                resp_list = [x.strip("\r\n\t") for x in resp_list]
1310        elif isinstance(plugin_resp, list):
1311            if len(global_plugin_list) == 1:
1312                resp_list.append([x.strip("\r\n\t") for x in plugin_resp])
1313            else:
1314                resp_list = [x.strip("\r\n\t") for x in plugin_resp]
1315        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
1316            resp_list.append(plugin_resp)
1317
1318        # Iterate if there is a list of plugin return vars to update.
1319        for idx, item in enumerate(resp_list, start=0):
1320            # Exit loop, done required loop.
1321            if idx >= len(global_plugin_list):
1322                break
1323            # Find the index of the return func in the list and
1324            # update the global func return dictionary.
1325            try:
1326                dict_idx = global_plugin_list[idx]
1327                global_plugin_dict[dict_idx] = item
1328            except (IndexError, ValueError) as e:
1329                self.logger.warn("\tWARN: response_args_data: %s" % e)
1330                pass
1331
1332        # Done updating plugin dict irrespective of pass or failed,
1333        # clear all the list element for next plugin block execute.
1334        global_plugin_list.clear()
1335
1336    def yaml_args_string(self, plugin_args):
1337        r"""
1338        Pack the args into string.
1339
1340        plugin_args            arg list ['arg1','arg2,'argn']
1341        """
1342        args_str = ""
1343        for args in plugin_args:
1344            if args:
1345                if isinstance(args, (int, float)):
1346                    args_str += str(args)
1347                elif args in global_plugin_type_list:
1348                    args_str += str(global_plugin_dict[args])
1349                else:
1350                    args_str += '"' + str(args.strip("\r\n\t")) + '"'
1351            # Skip last list element.
1352            if args != plugin_args[-1]:
1353                args_str += ","
1354        return args_str
1355
1356    def yaml_args_populate(self, yaml_arg_list):
1357        r"""
1358        Decode env and plugin vars and populate.
1359
1360        Description of argument(s):
1361        yaml_arg_list         arg list read from YAML
1362
1363        Example:
1364          - plugin_args:
1365            - arg1
1366            - arg2
1367
1368                  yaml_arg_list:  [arg2, arg2]
1369        """
1370        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1371
1372        if isinstance(yaml_arg_list, list):
1373            tmp_list = []
1374            for arg in yaml_arg_list:
1375                if isinstance(arg, (int, float)):
1376                    tmp_list.append(arg)
1377                    continue
1378                elif isinstance(arg, str):
1379                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1380                    tmp_list.append(arg_str)
1381                else:
1382                    tmp_list.append(arg)
1383
1384            # return populated list.
1385            return tmp_list
1386
1387    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
1388        r"""
1389        Update ${MY_VAR} and plugin vars.
1390
1391        Description of argument(s):
1392        yaml_arg_str         arg string read from YAML.
1393
1394        Example:
1395            - cat ${MY_VAR}
1396            - ls -AX my_plugin_var
1397        """
1398        # Parse the string for env vars ${env_vars}.
1399        try:
1400            # Example, list of matching
1401            # env vars ['username', 'password', 'hostname']
1402            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works good.
1403            var_name_regex = "\\$\\{([^\\}]+)\\}"
1404            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
1405            for var in env_var_names_list:
1406                env_var = os.environ[var]
1407                env_replace = "${" + var + "}"
1408                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
1409        except Exception as e:
1410            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
1411            pass
1412
1413        # Parse the string for plugin vars.
1414        try:
1415            # Example, list of plugin vars ['my_username', 'my_data']
1416            plugin_var_name_list = global_plugin_dict.keys()
1417            for var in plugin_var_name_list:
1418                # skip env var list already populated above code block list.
1419                if var in env_var_names_list:
1420                    continue
1421                # If this plugin var exist but empty in dict, don't replace.
1422                # This is either a YAML plugin statement incorrectly used or
1423                # user added a plugin var which is not going to be populated.
1424                if yaml_arg_str in global_plugin_dict:
1425                    if isinstance(global_plugin_dict[var], (list, dict)):
1426                        # List data type or dict can't be replaced, use
1427                        # directly in eval function call.
1428                        global_plugin_type_list.append(var)
1429                    else:
1430                        yaml_arg_str = yaml_arg_str.replace(
1431                            str(var), str(global_plugin_dict[var])
1432                        )
1433                # Just a string like filename or command.
1434                else:
1435                    yaml_arg_str = yaml_arg_str.replace(
1436                        str(var), str(global_plugin_dict[var])
1437                    )
1438        except (IndexError, ValueError) as e:
1439            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
1440            pass
1441
1442        return yaml_arg_str
1443
1444    def plugin_error_check(self, plugin_dict):
1445        r"""
1446        Process plugin error dictionary and return the corresponding error
1447        message.
1448
1449        This method checks if any dictionary in the plugin_dict list contains
1450        a "plugin_error" key. If such a dictionary is found, it retrieves the
1451        value associated with the "plugin_error" key and returns the
1452        corresponding error message from the plugin_error_dict attribute.
1453
1454        Parameters:
1455            plugin_dict (list of dict): A list of dictionaries containing
1456                                        plugin error information.
1457
1458        Returns:
1459           str: The error message corresponding to the "plugin_error" value,
1460                or None if no error is found.
1461        """
1462        if any("plugin_error" in d for d in plugin_dict):
1463            for d in plugin_dict:
1464                if "plugin_error" in d:
1465                    value = d["plugin_error"]
1466                    return self.plugin_error_dict.get(value, None)
1467        return None
1468
1469    def key_index_list_dict(self, key, list_dict):
1470        r"""
1471        Find the index of the first dictionary in the list that contains
1472        the specified key.
1473
1474        Parameters:
1475            key (str):                 The key to search for in the
1476                                       dictionaries.
1477            list_dict (list of dict):  A list of dictionaries to search
1478                                       through.
1479
1480        Returns:
1481            int: The index of the first dictionary containing the key, or -1
1482            if no match is found.
1483        """
1484        for i, d in enumerate(list_dict):
1485            if key in d:
1486                return i
1487        return -1
1488
1489    def plugin_expect_type(self, type, data):
1490        r"""
1491        Check if the provided data matches the expected type.
1492
1493        This method checks if the data argument matches the specified type.
1494        It supports the following types: "int", "float", "str", "list", "dict",
1495        and "tuple".
1496
1497        If the type is not recognized, it logs an info message and returns
1498        "INVALID".
1499
1500        Parameters:
1501            type (str): The expected data type.
1502            data:       The data to check against the expected type.
1503
1504        Returns:
1505            bool or str: True if the data matches the expected type, False if
1506                         not, or "INVALID" if the type is not recognized.
1507        """
1508        if type == "int":
1509            return isinstance(data, int)
1510        elif type == "float":
1511            return isinstance(data, float)
1512        elif type == "str":
1513            return isinstance(data, str)
1514        elif type == "list":
1515            return isinstance(data, list)
1516        elif type == "dict":
1517            return isinstance(data, dict)
1518        elif type == "tuple":
1519            return isinstance(data, tuple)
1520        else:
1521            self.logger.info("\tInvalid data type requested: %s" % type)
1522            return "INVALID"
1523