xref: /openbmc/openbmc-test-automation/ffdc/ffdc_collector.py (revision d805bc02c4df2c1558dcb5521c996f46049b9d18)
1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7import json
8import logging
9import os
10import platform
11import re
12import subprocess
13import sys
14import time
15from errno import EACCES, EPERM
16
17import yaml
18
# Make this script's directory and every directory beneath it importable,
# so sibling utility modules resolve regardless of the caller's cwd.
script_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(script_dir)
# Walk path and append to sys.path
for root, dirs, files in os.walk(script_dir):
    # "dir_name" instead of "dir": avoid shadowing the builtin dir().
    for dir_name in dirs:
        sys.path.append(os.path.join(root, dir_name))
25
26from ssh_utility import SSHRemoteclient  # NOQA
27from telnet_utility import TelnetRemoteclient  # NOQA
28
r"""
User-defined plugin Python functions.

Modules are imported from the plugins directory:

plugins
├── file1.py
└── file2.py

Example of how to define a plugin in YAML:
 - plugin:
   - plugin_name: plugin.foo_func.foo_func_yaml
     - plugin_args:
       - arg1
       - arg2
"""
# Directory holding user-defined plugin modules.  Computed with os.path
# instead of the original fragile string-split on __file__, which produced
# a doubled "//" separator and broke on unusual paths.
plugin_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "plugins")
sys.path.append(plugin_dir)
try:
    for module in os.listdir(plugin_dir):
        # Skip the package marker and anything that is not a python module.
        if module == "__init__.py" or not module.endswith(".py"):
            continue
        plugin_module = "plugins." + module[:-3]
        # To access the module plugin.<module name>.<function>
        # Example: plugin.foo_func.foo_func_yaml()
        try:
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception:
            # A broken plugin must not stop the collector from loading.
            print("PLUGIN: Module import failed: %s" % module)
except FileNotFoundError as e:
    # No plugins directory at all is a supported configuration.
    print("PLUGIN: %s" % e)
62
63r"""
64This is for plugin functions returning data or responses to the caller
65in YAML plugin setup.
66
67Example:
68
69    - plugin:
70      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
71      - plugin_args:
72        - ${hostname}
73        - ${username}
74        - ${password}
75        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
76     - plugin:
77        - plugin_name: plugin.print_vars.print_vars
78        - plugin_args:
79          - version
80
where the first plugin's "version" variable is used by another plugin in
the same YAML block or a subsequent plugin block
83
84"""
# NOTE: "global" at module scope is a no-op; these statements are kept as
# documentation of the names that plugin machinery reads and writes.
global global_log_store_path
global global_plugin_dict
global global_plugin_list

# Hold plugin return values in a dict and plugin return vars in a list.
# The dict is used to reference and update vars during parser processing,
# whereas the list holds the current vars from the plugin block which
# still need processing.
global_plugin_dict = {}
global_plugin_list = []

# Holds the declared plugin return names whose returned values are
# list/dict.  This name list is consulted when looking up the plugin dict
# for eval() args handling.  Example: ['version']
global_plugin_type_list = []

# Path where logs are to be stored or written.
global_log_store_path = ""

# Plugin error state defaults.
plugin_error_dict = {
    # When True, plugins that opt in are skipped after a prior plugin error.
    "exit_on_error": False,
    # When True, execution continues past a plugin error.
    "continue_on_error": False,
}
108
109
110class ffdc_collector:
111    r"""
112    Execute commands from configuration file to collect log files.
113    Fetch and store generated files at the specified location.
114
115    """
116
    def __init__(
        self,
        hostname,
        username,
        password,
        port_ssh,
        port_https,
        port_ipmi,
        ffdc_config,
        location,
        remote_type,
        remote_protocol,
        env_vars,
        econfig,
        log_level,
    ):
        r"""
        Initialize the collector: set up storage paths and logging, verify
        the script environment, and load the YAML configuration.

        Description of argument(s):

        hostname            name/ip of the targeted (remote) system
        username            user on the targeted system with access to FFDC files
        password            password for user on targeted system
        port_ssh            SSH port value. By default 22
        port_https          HTTPS port value. By default 443
        port_ipmi           IPMI port value. By default 623
        ffdc_config         configuration file listing commands and files for FFDC
        location            where to store collected FFDC
        remote_type         os type of the remote host
        remote_protocol     protocol to use to collect data
        env_vars            user-defined CLI env vars '{"key : "value"}'
        econfig             user-defined env vars YAML file
        log_level           logging level name, e.g. "INFO" or "DEBUG"

        Exits the process with -1 if the environment check fails, the YAML
        cannot be parsed, or the target type is not in the configuration.
        """

        self.hostname = hostname
        self.username = username
        self.password = password
        # Ports are stored as strings; they get interpolated into commands.
        self.port_ssh = str(port_ssh)
        self.port_https = str(port_https)
        self.port_ipmi = str(port_ipmi)
        self.ffdc_config = ffdc_config
        # Collected FFDC is grouped under a per-OS-type subdirectory.
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ""
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set here
        # to be sure that all files for this run will have same timestamps
        # and they will be saved in the same directory.
        # self.location == local system for now
        self.set_ffdc_default_store_path()

        # Logger for this run.  Needs to be after set_ffdc_default_store_path()
        # because the log file lives inside the run's FFDC directory.
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify top level directory exists for storage.
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load default or user-defined YAML configuration file.
            with open(self.ffdc_config, "r") as file:
                try:
                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
                except yaml.YAMLError as e:
                    self.logger.error(e)
                    sys.exit(-1)

            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n"
                    % (self.target_type, self.ffdc_config)
                )
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from user.
        self.logger.info("\n\tENV: User define input YAML variables")
        self.env_dict = {}
        self.load_env()
206
207    def verify_script_env(self):
208        # Import to log version
209        import click
210        import paramiko
211
212        run_env_ok = True
213
214        try:
215            redfishtool_version = (
216                self.run_tool_cmd("redfishtool -V").split(" ")[2].strip("\n")
217            )
218        except Exception as e:
219            self.logger.error("\tEXCEPTION redfishtool: %s", e)
220            redfishtool_version = "Not Installed (optional)"
221
222        try:
223            ipmitool_version = self.run_tool_cmd("ipmitool -V").split(" ")[2]
224        except Exception as e:
225            self.logger.error("\tEXCEPTION ipmitool: %s", e)
226            ipmitool_version = "Not Installed (optional)"
227
228        self.logger.info("\n\t---- Script host environment ----")
229        self.logger.info(
230            "\t{:<10}  {:<10}".format("Script hostname", os.uname()[1])
231        )
232        self.logger.info(
233            "\t{:<10}  {:<10}".format("Script host os", platform.platform())
234        )
235        self.logger.info(
236            "\t{:<10}  {:>10}".format("Python", platform.python_version())
237        )
238        self.logger.info("\t{:<10}  {:>10}".format("PyYAML", yaml.__version__))
239        self.logger.info("\t{:<10}  {:>10}".format("click", click.__version__))
240        self.logger.info(
241            "\t{:<10}  {:>10}".format("paramiko", paramiko.__version__)
242        )
243        self.logger.info(
244            "\t{:<10}  {:>9}".format("redfishtool", redfishtool_version)
245        )
246        self.logger.info(
247            "\t{:<10}  {:>12}".format("ipmitool", ipmitool_version)
248        )
249
250        if eval(yaml.__version__.replace(".", ",")) < (5, 3, 0):
251            self.logger.error(
252                "\n\tERROR: Python or python packages do not meet minimum"
253                " version requirement."
254            )
255            self.logger.error(
256                "\tERROR: PyYAML version 5.3.0 or higher is needed.\n"
257            )
258            run_env_ok = False
259
260        self.logger.info("\t---- End script host environment ----")
261        return run_env_ok
262
263    def script_logging(self, log_level_attr):
264        r"""
265        Create logger
266
267        """
268        self.logger = logging.getLogger()
269        self.logger.setLevel(log_level_attr)
270        log_file_handler = logging.FileHandler(
271            self.ffdc_dir_path + "collector.log"
272        )
273
274        stdout_handler = logging.StreamHandler(sys.stdout)
275        self.logger.addHandler(log_file_handler)
276        self.logger.addHandler(stdout_handler)
277
278        # Turn off paramiko INFO logging
279        logging.getLogger("paramiko").setLevel(logging.WARNING)
280
281    def target_is_pingable(self):
282        r"""
283        Check if target system is ping-able.
284
285        """
286        response = os.system("ping -c 1 %s  2>&1 >/dev/null" % self.hostname)
287        if response == 0:
288            self.logger.info(
289                "\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname
290            )
291            return True
292        else:
293            self.logger.error(
294                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n"
295                % self.hostname
296            )
297            sys.exit(-1)
298
    def collect_ffdc(self):
        r"""
        Initiate FFDC collection depending on the requested protocol.

        Builds the list of protocols declared for this target type, checks
        which of them actually work against the host, and either starts
        generation or exits with -1 if the requested protocol is unusable.
        """

        self.logger.info(
            "\n\t---- Start communicating with %s ----" % self.hostname
        )
        # Start time is used later to compute self.elapsed_time.
        self.start_time = time.time()

        # Find the list of target and protocol supported.
        check_protocol_list = []
        config_dict = self.ffdc_actions

        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            # Collect each sub-type's declared protocol, de-duplicated.
            for k, v in config_dict[target_type].items():
                if (
                    config_dict[target_type][k]["PROTOCOL"][0]
                    not in check_protocol_list
                ):
                    check_protocol_list.append(
                        config_dict[target_type][k]["PROTOCOL"][0]
                    )

        self.logger.info(
            "\n\t %s protocol type: %s"
            % (self.target_type, check_protocol_list)
        )

        # Probe connectivity for each declared protocol.
        verified_working_protocol = self.verify_protocol(check_protocol_list)

        if verified_working_protocol:
            self.logger.info(
                "\n\t---- Completed protocol pre-requisite check ----\n"
            )

        # Verify top level directory exists for storage.
        self.validate_local_store(self.location)

        # "ALL" means: use every working protocol rather than a single one.
        if (self.remote_protocol not in verified_working_protocol) and (
            self.remote_protocol != "ALL"
        ):
            self.logger.info(
                "\n\tWorking protocol list: %s" % verified_working_protocol
            )
            self.logger.error(
                "\tERROR: Requested protocol %s is not in working protocol"
                " list.\n" % self.remote_protocol
            )
            sys.exit(-1)
        else:
            self.generate_ffdc(verified_working_protocol)
355
356    def ssh_to_target_system(self):
357        r"""
358        Open a ssh connection to targeted system.
359
360        """
361
362        self.ssh_remoteclient = SSHRemoteclient(
363            self.hostname, self.username, self.password, self.port_ssh
364        )
365
366        if self.ssh_remoteclient.ssh_remoteclient_login():
367            self.logger.info(
368                "\n\t[Check] %s SSH connection established.\t [OK]"
369                % self.hostname
370            )
371
372            # Check scp connection.
373            # If scp connection fails,
374            # continue with FFDC generation but skip scp files to local host.
375            self.ssh_remoteclient.scp_connection()
376            return True
377        else:
378            self.logger.info(
379                "\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]"
380                % self.hostname
381            )
382            return False
383
384    def telnet_to_target_system(self):
385        r"""
386        Open a telnet connection to targeted system.
387        """
388        self.telnet_remoteclient = TelnetRemoteclient(
389            self.hostname, self.username, self.password
390        )
391        if self.telnet_remoteclient.tn_remoteclient_login():
392            self.logger.info(
393                "\n\t[Check] %s Telnet connection established.\t [OK]"
394                % self.hostname
395            )
396            return True
397        else:
398            self.logger.info(
399                "\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]"
400                % self.hostname
401            )
402            return False
403
404    def generate_ffdc(self, working_protocol_list):
405        r"""
406        Determine actions based on remote host type
407
408        Description of argument(s):
409        working_protocol_list    list of confirmed working protocols to connect to remote host.
410        """
411
412        self.logger.info(
413            "\n\t---- Executing commands on " + self.hostname + " ----"
414        )
415        self.logger.info(
416            "\n\tWorking protocol list: %s" % working_protocol_list
417        )
418
419        config_dict = self.ffdc_actions
420        for target_type in config_dict.keys():
421            if self.target_type != target_type:
422                continue
423
424            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
425            global_plugin_dict["global_log_store_path"] = self.ffdc_dir_path
426            self.logger.info("\tSystem Type: %s" % target_type)
427            for k, v in config_dict[target_type].items():
428                if (
429                    self.remote_protocol not in working_protocol_list
430                    and self.remote_protocol != "ALL"
431                ):
432                    continue
433
434                protocol = config_dict[target_type][k]["PROTOCOL"][0]
435
436                if protocol not in working_protocol_list:
437                    continue
438
439                if protocol in working_protocol_list:
440                    if protocol == "SSH" or protocol == "SCP":
441                        self.protocol_ssh(protocol, target_type, k)
442                    elif protocol == "TELNET":
443                        self.protocol_telnet(target_type, k)
444                    elif (
445                        protocol == "REDFISH"
446                        or protocol == "IPMI"
447                        or protocol == "SHELL"
448                    ):
449                        self.protocol_execute(protocol, target_type, k)
450                else:
451                    self.logger.error(
452                        "\n\tERROR: %s is not available for %s."
453                        % (protocol, self.hostname)
454                    )
455
456        # Close network connection after collecting all files
457        self.elapsed_time = time.strftime(
458            "%H:%M:%S", time.gmtime(time.time() - self.start_time)
459        )
460        if self.ssh_remoteclient:
461            self.ssh_remoteclient.ssh_remoteclient_disconnect()
462        if self.telnet_remoteclient:
463            self.telnet_remoteclient.tn_remoteclient_disconnect()
464
465    def protocol_ssh(self, protocol, target_type, sub_type):
466        r"""
467        Perform actions using SSH and SCP protocols.
468
469        Description of argument(s):
470        protocol            Protocol to execute.
471        target_type         OS Type of remote host.
472        sub_type            Group type of commands.
473        """
474
475        if protocol == "SCP":
476            self.group_copy(self.ffdc_actions[target_type][sub_type])
477        else:
478            self.collect_and_copy_ffdc(
479                self.ffdc_actions[target_type][sub_type]
480            )
481
482    def protocol_telnet(self, target_type, sub_type):
483        r"""
484        Perform actions using telnet protocol.
485        Description of argument(s):
486        target_type          OS Type of remote host.
487        """
488        self.logger.info(
489            "\n\t[Run] Executing commands on %s using %s"
490            % (self.hostname, "TELNET")
491        )
492        telnet_files_saved = []
493        progress_counter = 0
494        list_of_commands = self.ffdc_actions[target_type][sub_type]["COMMANDS"]
495        for index, each_cmd in enumerate(list_of_commands, start=0):
496            command_txt, command_timeout = self.unpack_command(each_cmd)
497            result = self.telnet_remoteclient.execute_command(
498                command_txt, command_timeout
499            )
500            if result:
501                try:
502                    targ_file = self.ffdc_actions[target_type][sub_type][
503                        "FILES"
504                    ][index]
505                except IndexError:
506                    targ_file = command_txt
507                    self.logger.warning(
508                        "\n\t[WARN] Missing filename to store data from"
509                        " telnet %s." % each_cmd
510                    )
511                    self.logger.warning(
512                        "\t[WARN] Data will be stored in %s." % targ_file
513                    )
514                targ_file_with_path = (
515                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
516                )
517                # Creates a new file
518                with open(targ_file_with_path, "w") as fp:
519                    fp.write(result)
520                    fp.close
521                    telnet_files_saved.append(targ_file)
522            progress_counter += 1
523            self.print_progress(progress_counter)
524        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
525        for file in telnet_files_saved:
526            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
527
528    def protocol_execute(self, protocol, target_type, sub_type):
529        r"""
530        Perform actions for a given protocol.
531
532        Description of argument(s):
533        protocol            Protocol to execute.
534        target_type         OS Type of remote host.
535        sub_type            Group type of commands.
536        """
537
538        self.logger.info(
539            "\n\t[Run] Executing commands to %s using %s"
540            % (self.hostname, protocol)
541        )
542        executed_files_saved = []
543        progress_counter = 0
544        list_of_cmd = self.get_command_list(
545            self.ffdc_actions[target_type][sub_type]
546        )
547        for index, each_cmd in enumerate(list_of_cmd, start=0):
548            plugin_call = False
549            if isinstance(each_cmd, dict):
550                if "plugin" in each_cmd:
551                    # If the error is set and plugin explicitly
552                    # requested to skip execution on error..
553                    if plugin_error_dict[
554                        "exit_on_error"
555                    ] and self.plugin_error_check(each_cmd["plugin"]):
556                        self.logger.info(
557                            "\n\t[PLUGIN-ERROR] exit_on_error: %s"
558                            % plugin_error_dict["exit_on_error"]
559                        )
560                        self.logger.info(
561                            "\t[PLUGIN-SKIP] %s" % each_cmd["plugin"][0]
562                        )
563                        continue
564                    plugin_call = True
565                    # call the plugin
566                    self.logger.info("\n\t[PLUGIN-START]")
567                    result = self.execute_plugin_block(each_cmd["plugin"])
568                    self.logger.info("\t[PLUGIN-END]\n")
569            else:
570                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
571
572            if not plugin_call:
573                result = self.run_tool_cmd(each_cmd)
574            if result:
575                try:
576                    file_name = self.get_file_list(
577                        self.ffdc_actions[target_type][sub_type]
578                    )[index]
579                    # If file is specified as None.
580                    if file_name == "None":
581                        continue
582                    targ_file = self.yaml_env_and_plugin_vars_populate(
583                        file_name
584                    )
585                except IndexError:
586                    targ_file = each_cmd.split("/")[-1]
587                    self.logger.warning(
588                        "\n\t[WARN] Missing filename to store data from %s."
589                        % each_cmd
590                    )
591                    self.logger.warning(
592                        "\t[WARN] Data will be stored in %s." % targ_file
593                    )
594
595                targ_file_with_path = (
596                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
597                )
598
599                # Creates a new file
600                with open(targ_file_with_path, "w") as fp:
601                    if isinstance(result, dict):
602                        fp.write(json.dumps(result))
603                    else:
604                        fp.write(result)
605                    fp.close
606                    executed_files_saved.append(targ_file)
607
608            progress_counter += 1
609            self.print_progress(progress_counter)
610
611        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
612
613        for file in executed_files_saved:
614            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
615
616    def collect_and_copy_ffdc(
617        self, ffdc_actions_for_target_type, form_filename=False
618    ):
619        r"""
620        Send commands in ffdc_config file to targeted system.
621
622        Description of argument(s):
623        ffdc_actions_for_target_type     commands and files for the selected remote host type.
624        form_filename                    if true, pre-pend self.target_type to filename
625        """
626
627        # Executing commands, if any
628        self.ssh_execute_ffdc_commands(
629            ffdc_actions_for_target_type, form_filename
630        )
631
632        # Copying files
633        if self.ssh_remoteclient.scpclient:
634            self.logger.info(
635                "\n\n\tCopying FFDC files from remote system %s.\n"
636                % self.hostname
637            )
638
639            # Retrieving files from target system
640            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
641            self.scp_ffdc(
642                self.ffdc_dir_path,
643                self.ffdc_prefix,
644                form_filename,
645                list_of_files,
646            )
647        else:
648            self.logger.info(
649                "\n\n\tSkip copying FFDC files from remote system %s.\n"
650                % self.hostname
651            )
652
653    def get_command_list(self, ffdc_actions_for_target_type):
654        r"""
655        Fetch list of commands from configuration file
656
657        Description of argument(s):
658        ffdc_actions_for_target_type    commands and files for the selected remote host type.
659        """
660        try:
661            list_of_commands = ffdc_actions_for_target_type["COMMANDS"]
662        except KeyError:
663            list_of_commands = []
664        return list_of_commands
665
666    def get_file_list(self, ffdc_actions_for_target_type):
667        r"""
668        Fetch list of commands from configuration file
669
670        Description of argument(s):
671        ffdc_actions_for_target_type    commands and files for the selected remote host type.
672        """
673        try:
674            list_of_files = ffdc_actions_for_target_type["FILES"]
675        except KeyError:
676            list_of_files = []
677        return list_of_files
678
679    def unpack_command(self, command):
680        r"""
681        Unpack command from config file
682
683        Description of argument(s):
684        command    Command from config file.
685        """
686        if isinstance(command, dict):
687            command_txt = next(iter(command))
688            command_timeout = next(iter(command.values()))
689        elif isinstance(command, str):
690            command_txt = command
691            # Default command timeout 60 seconds
692            command_timeout = 60
693
694        return command_txt, command_timeout
695
    def ssh_execute_ffdc_commands(
        self, ffdc_actions_for_target_type, form_filename=False
    ):
        r"""
        Execute the configured COMMANDS on the target over the already
        established SSH connection, logging a warning for each command
        that exits non-zero.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        form_filename                   if true, pre-pend self.target_type to filename
        """
        self.logger.info(
            "\n\t[Run] Executing commands on %s using %s"
            % (self.hostname, ffdc_actions_for_target_type["PROTOCOL"][0])
        )

        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
        # If command list is empty, returns.
        if not list_of_commands:
            return

        progress_counter = 0
        for command in list_of_commands:
            # Each entry may be a plain string or a {command: timeout} dict.
            command_txt, command_timeout = self.unpack_command(command)

            if form_filename:
                # Command text contains a %s placeholder for the OS type.
                command_txt = str(command_txt % self.target_type)

            (
                cmd_exit_code,
                err,
                response,
            ) = self.ssh_remoteclient.execute_command(
                command_txt, command_timeout
            )

            # A failing command is logged but does not abort the run.
            if cmd_exit_code:
                self.logger.warning(
                    "\n\t\t[WARN] %s exits with code %s."
                    % (command_txt, str(cmd_exit_code))
                )
                self.logger.warning("\t\t[WARN] %s " % err)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
742
    def group_copy(self, ffdc_actions_for_target_type):
        r"""
        scp a group of files (wild card) from the remote host.

        Each configured COMMAND is executed remotely to expand wildcards
        into a file list; the resulting files are then copied via SCP.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info(
                "\n\tCopying files from remote system %s via SCP.\n"
                % self.hostname
            )

            list_of_commands = self.get_command_list(
                ffdc_actions_for_target_type
            )
            # If command list is empty, returns.
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    # Expand env/plugin variables inside the command text.
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                (
                    cmd_exit_code,
                    err,
                    response,
                ) = self.ssh_remoteclient.execute_command(command)

                # If file does not exist, code takes no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    # The remote command output is a newline-separated file
                    # list; copy each listed file into the FFDC directory.
                    scp_result = self.ssh_remoteclient.scp_file_from_remote(
                        response.split("\n"), self.ffdc_dir_path
                    )
                    if scp_result:
                        self.logger.info(
                            "\t\tSuccessfully copied from "
                            + self.hostname
                            + ":"
                            + command
                        )
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            self.logger.info(
                "\n\n\tSkip copying files from remote system %s.\n"
                % self.hostname
            )
798
    def scp_ffdc(
        self,
        targ_dir_path,
        targ_file_prefix,
        form_filename,
        file_list=None,
        quiet=None,
    ):
        r"""
        SCP all files in file_list to the indicated directory on the local
        system.

        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be prepended to each
                                        target file's name.
        form_filename                   If true, each filename contains a %s
                                        placeholder filled with self.target_type.
        file_list                       A list of files to scp from the targeted
                                        system to this system.
        quiet                           When set, print progress instead of
                                        per-file success/failure messages.
        """

        progress_counter = 0
        for filename in file_list:
            if form_filename:
                filename = str(filename % self.target_type)
            source_file_path = filename
            # Local destination: dir + time prefix + basename of the remote path.
            targ_file_path = (
                targ_dir_path + targ_file_prefix + filename.split("/")[-1]
            )

            # If source file name contains wild card, copy filename as is.
            if "*" in source_file_path:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(
                    source_file_path, self.ffdc_dir_path
                )
            else:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(
                    source_file_path, targ_file_path
                )

            if not quiet:
                if scp_result:
                    self.logger.info(
                        "\t\tSuccessfully copied from "
                        + self.hostname
                        + ":"
                        + source_file_path
                        + ".\n"
                    )
                else:
                    self.logger.info(
                        "\t\tFail to copy from "
                        + self.hostname
                        + ":"
                        + source_file_path
                        + ".\n"
                    )
            else:
                progress_counter += 1
                self.print_progress(progress_counter)
857
858    def set_ffdc_default_store_path(self):
859        r"""
860        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
861        Collected ffdc file will be stored in dir /self.location/hostname_timestr/.
862        Individual ffdc file will have timestr_filename.
863
864        Description of class variables:
865        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.
866
867        self.ffdc_prefix    The prefix to be given to each ffdc file name.
868
869        """
870
871        timestr = time.strftime("%Y%m%d-%H%M%S")
872        self.ffdc_dir_path = (
873            self.location + "/" + self.hostname + "_" + timestr + "/"
874        )
875        self.ffdc_prefix = timestr + "_"
876        self.validate_local_store(self.ffdc_dir_path)
877
878    # Need to verify local store path exists prior to instantiate this class.
879    # This class method is used to share the same code between CLI input parm
880    # and Robot Framework "${EXECDIR}/logs" before referencing this class.
881    @classmethod
882    def validate_local_store(cls, dir_path):
883        r"""
884        Ensure path exists to store FFDC files locally.
885
886        Description of variable:
887        dir_path  The dir path where collected ffdc data files will be stored.
888
889        """
890
891        if not os.path.exists(dir_path):
892            try:
893                os.makedirs(dir_path, 0o755)
894            except (IOError, OSError) as e:
895                # PermissionError
896                if e.errno == EPERM or e.errno == EACCES:
897                    self.logger.error(
898                        "\tERROR: os.makedirs %s failed with"
899                        " PermissionError.\n" % dir_path
900                    )
901                else:
902                    self.logger.error(
903                        "\tERROR: os.makedirs %s failed with %s.\n"
904                        % (dir_path, e.strerror)
905                    )
906                sys.exit(-1)
907
908    def print_progress(self, progress):
909        r"""
910        Print activity progress +
911
912        Description of variable:
913        progress  Progress counter.
914
915        """
916
917        sys.stdout.write("\r\t" + "+" * progress)
918        sys.stdout.flush()
919        time.sleep(0.1)
920
921    def verify_redfish(self):
922        r"""
923        Verify remote host has redfish service active
924
925        """
926        redfish_parm = (
927            "redfishtool -r "
928            + self.hostname
929            + ":"
930            + self.port_https
931            + " -S Always raw GET /redfish/v1/"
932        )
933        return self.run_tool_cmd(redfish_parm, True)
934
935    def verify_ipmi(self):
936        r"""
937        Verify remote host has IPMI LAN service active
938
939        """
940        if self.target_type == "OPENBMC":
941            ipmi_parm = (
942                "ipmitool -I lanplus -C 17  -U "
943                + self.username
944                + " -P "
945                + self.password
946                + " -H "
947                + self.hostname
948                + " -p "
949                + str(self.port_ipmi)
950                + " power status"
951            )
952        else:
953            ipmi_parm = (
954                "ipmitool -I lanplus  -P "
955                + self.password
956                + " -H "
957                + self.hostname
958                + " -p "
959                + str(self.port_ipmi)
960                + " power status"
961            )
962
963        return self.run_tool_cmd(ipmi_parm, True)
964
965    def run_tool_cmd(self, parms_string, quiet=False):
966        r"""
967        Run CLI standard tool or scripts.
968
969        Description of variable:
970        parms_string         tool command options.
971        quiet                do not print tool error message if True
972        """
973
974        result = subprocess.run(
975            [parms_string],
976            stdout=subprocess.PIPE,
977            stderr=subprocess.PIPE,
978            shell=True,
979            universal_newlines=True,
980        )
981
982        if result.stderr and not quiet:
983            self.logger.error("\n\t\tERROR with %s " % parms_string)
984            self.logger.error("\t\t" + result.stderr)
985
986        return result.stdout
987
988    def verify_protocol(self, protocol_list):
989        r"""
990        Perform protocol working check.
991
992        Description of argument(s):
993        protocol_list        List of protocol.
994        """
995
996        tmp_list = []
997        if self.target_is_pingable():
998            tmp_list.append("SHELL")
999
1000        for protocol in protocol_list:
1001            if self.remote_protocol != "ALL":
1002                if self.remote_protocol != protocol:
1003                    continue
1004
1005            # Only check SSH/SCP once for both protocols
1006            if (
1007                protocol == "SSH"
1008                or protocol == "SCP"
1009                and protocol not in tmp_list
1010            ):
1011                if self.ssh_to_target_system():
1012                    # Add only what user asked.
1013                    if self.remote_protocol != "ALL":
1014                        tmp_list.append(self.remote_protocol)
1015                    else:
1016                        tmp_list.append("SSH")
1017                        tmp_list.append("SCP")
1018
1019            if protocol == "TELNET":
1020                if self.telnet_to_target_system():
1021                    tmp_list.append(protocol)
1022
1023            if protocol == "REDFISH":
1024                if self.verify_redfish():
1025                    tmp_list.append(protocol)
1026                    self.logger.info(
1027                        "\n\t[Check] %s Redfish Service.\t\t [OK]"
1028                        % self.hostname
1029                    )
1030                else:
1031                    self.logger.info(
1032                        "\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]"
1033                        % self.hostname
1034                    )
1035
1036            if protocol == "IPMI":
1037                if self.verify_ipmi():
1038                    tmp_list.append(protocol)
1039                    self.logger.info(
1040                        "\n\t[Check] %s IPMI LAN Service.\t\t [OK]"
1041                        % self.hostname
1042                    )
1043                else:
1044                    self.logger.info(
1045                        "\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]"
1046                        % self.hostname
1047                    )
1048
1049        return tmp_list
1050
    def load_env(self):
        r"""
        Export connection parameters and user-supplied variables to the
        process environment and to self.env_dict so YAML commands can
        reference them as ${var} at runtime.

        Sources, in order:
        - Built-in connection attributes (hostname, username, password, ports).
        - self.env_vars: a JSON string of extra key/value pairs.
        - self.econfig:  a YAML file with an 'env_params' mapping.

        Exits the process with -1 on malformed JSON or YAML input.
        """
        # This is for the env vars a user can use in YAML to load it at runtime.
        # Example YAML:
        # -COMMANDS:
        #    - my_command ${hostname}  ${username}   ${password}
        os.environ["hostname"] = self.hostname
        os.environ["username"] = self.username
        os.environ["password"] = self.password
        os.environ["port_ssh"] = self.port_ssh
        os.environ["port_https"] = self.port_https
        os.environ["port_ipmi"] = self.port_ipmi

        # Append default Env.
        self.env_dict["hostname"] = self.hostname
        self.env_dict["username"] = self.username
        self.env_dict["password"] = self.password
        self.env_dict["port_ssh"] = self.port_ssh
        self.env_dict["port_https"] = self.port_https
        self.env_dict["port_ipmi"] = self.port_ipmi

        try:
            tmp_env_dict = {}
            if self.env_vars:
                # User-supplied JSON string, e.g. '{"MY_VAR": "value"}'.
                tmp_env_dict = json.loads(self.env_vars)
                # Export ENV vars default.
                for key, value in tmp_env_dict.items():
                    os.environ[key] = value
                    self.env_dict[key] = str(value)

            if self.econfig:
                with open(self.econfig, "r") as file:
                    try:
                        # SafeLoader refuses arbitrary object construction.
                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
                    except yaml.YAMLError as e:
                        self.logger.error(e)
                        sys.exit(-1)
                # Export ENV vars.
                for key, value in tmp_env_dict["env_params"].items():
                    os.environ[key] = str(value)
                    self.env_dict[key] = str(value)
        except json.decoder.JSONDecodeError as e:
            self.logger.error("\n\tERROR: %s " % e)
            sys.exit(-1)

        # This to mask the password from displaying on the console.
        mask_dict = self.env_dict.copy()
        for k, v in mask_dict.items():
            if k.lower().find("password") != -1:
                hidden_text = []
                hidden_text.append(v)
                # Build an alternation of escaped secret values to blank out.
                password_regex = (
                    "(" + "|".join([re.escape(x) for x in hidden_text]) + ")"
                )
                mask_dict[k] = re.sub(password_regex, "********", v)

        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
1111
1112    def execute_python_eval(self, eval_string):
1113        r"""
1114        Execute qualified python function string using eval.
1115
1116        Description of argument(s):
1117        eval_string        Execute the python object.
1118
1119        Example:
1120                eval(plugin.foo_func.foo_func(10))
1121        """
1122        try:
1123            self.logger.info("\tExecuting plugin func()")
1124            self.logger.debug("\tCall func: %s" % eval_string)
1125            result = eval(eval_string)
1126            self.logger.info("\treturn: %s" % str(result))
1127        except (
1128            ValueError,
1129            SyntaxError,
1130            NameError,
1131            AttributeError,
1132            TypeError,
1133        ) as e:
1134            self.logger.error("\tERROR: execute_python_eval: %s" % e)
1135            # Set the plugin error state.
1136            plugin_error_dict["exit_on_error"] = True
1137            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
1138            return "PLUGIN_EVAL_ERROR"
1139
1140        return result
1141
1142    def execute_plugin_block(self, plugin_cmd_list):
1143        r"""
1144        Pack the plugin command to qualifed python string object.
1145
1146        Description of argument(s):
1147        plugin_list_dict      Plugin block read from YAML
1148                              [{'plugin_name': 'plugin.foo_func.my_func'},
1149                               {'plugin_args': [10]}]
1150
1151        Example:
1152            - plugin:
1153              - plugin_name: plugin.foo_func.my_func
1154              - plugin_args:
1155                - arg1
1156                - arg2
1157
1158            - plugin:
1159              - plugin_name: result = plugin.foo_func.my_func
1160              - plugin_args:
1161                - arg1
1162                - arg2
1163
1164            - plugin:
1165              - plugin_name: result1,result2 = plugin.foo_func.my_func
1166              - plugin_args:
1167                - arg1
1168                - arg2
1169        """
1170        try:
1171            idx = self.key_index_list_dict("plugin_name", plugin_cmd_list)
1172            plugin_name = plugin_cmd_list[idx]["plugin_name"]
1173            # Equal separator means plugin function returns result.
1174            if " = " in plugin_name:
1175                # Ex. ['result', 'plugin.foo_func.my_func']
1176                plugin_name_args = plugin_name.split(" = ")
1177                # plugin func return data.
1178                for arg in plugin_name_args:
1179                    if arg == plugin_name_args[-1]:
1180                        plugin_name = arg
1181                    else:
1182                        plugin_resp = arg.split(",")
1183                        # ['result1','result2']
1184                        for x in plugin_resp:
1185                            global_plugin_list.append(x)
1186                            global_plugin_dict[x] = ""
1187
1188            # Walk the plugin args ['arg1,'arg2']
1189            # If the YAML plugin statement 'plugin_args' is not declared.
1190            if any("plugin_args" in d for d in plugin_cmd_list):
1191                idx = self.key_index_list_dict("plugin_args", plugin_cmd_list)
1192                plugin_args = plugin_cmd_list[idx]["plugin_args"]
1193                if plugin_args:
1194                    plugin_args = self.yaml_args_populate(plugin_args)
1195                else:
1196                    plugin_args = []
1197            else:
1198                plugin_args = self.yaml_args_populate([])
1199
1200            # Pack the args arg1, arg2, .... argn into
1201            # "arg1","arg2","argn"  string as params for function.
1202            parm_args_str = self.yaml_args_string(plugin_args)
1203            if parm_args_str:
1204                plugin_func = plugin_name + "(" + parm_args_str + ")"
1205            else:
1206                plugin_func = plugin_name + "()"
1207
1208            # Execute plugin function.
1209            if global_plugin_dict:
1210                resp = self.execute_python_eval(plugin_func)
1211                # Update plugin vars dict if there is any.
1212                if resp != "PLUGIN_EVAL_ERROR":
1213                    self.response_args_data(resp)
1214            else:
1215                resp = self.execute_python_eval(plugin_func)
1216        except Exception as e:
1217            # Set the plugin error state.
1218            plugin_error_dict["exit_on_error"] = True
1219            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
1220            pass
1221
1222        # There is a real error executing the plugin function.
1223        if resp == "PLUGIN_EVAL_ERROR":
1224            return resp
1225
1226        # Check if plugin_expects_return (int, string, list,dict etc)
1227        if any("plugin_expects_return" in d for d in plugin_cmd_list):
1228            idx = self.key_index_list_dict(
1229                "plugin_expects_return", plugin_cmd_list
1230            )
1231            plugin_expects = plugin_cmd_list[idx]["plugin_expects_return"]
1232            if plugin_expects:
1233                if resp:
1234                    if (
1235                        self.plugin_expect_type(plugin_expects, resp)
1236                        == "INVALID"
1237                    ):
1238                        self.logger.error("\tWARN: Plugin error check skipped")
1239                    elif not self.plugin_expect_type(plugin_expects, resp):
1240                        self.logger.error(
1241                            "\tERROR: Plugin expects return data: %s"
1242                            % plugin_expects
1243                        )
1244                        plugin_error_dict["exit_on_error"] = True
1245                elif not resp:
1246                    self.logger.error(
1247                        "\tERROR: Plugin func failed to return data"
1248                    )
1249                    plugin_error_dict["exit_on_error"] = True
1250
1251        return resp
1252
    def response_args_data(self, plugin_resp):
        r"""
        Parse the plugin function response and update plugin return variable.

        Normalizes plugin_resp (str, bytes, tuple, list, int, float) into a
        list and assigns each element, in order, to the names queued in
        global_plugin_list, storing them in global_plugin_dict. The queue
        is always cleared afterwards.

        plugin_resp       Response data from plugin function.
        """
        resp_list = []
        resp_data = ""

        # There is nothing to update the plugin response.
        # (Note: compares against the literal string "None", i.e. a plugin
        # that printed/returned the text "None", not the None object.)
        if len(global_plugin_list) == 0 or plugin_resp == "None":
            return

        if isinstance(plugin_resp, str):
            resp_data = plugin_resp.strip("\r\n\t")
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, bytes):
            resp_data = str(plugin_resp, "UTF-8").strip("\r\n\t")
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, tuple):
            # A single declared return var receives the whole tuple;
            # otherwise tuple elements are distributed one per var.
            # NOTE(review): the strip() below assumes all tuple elements
            # are strings — non-str elements would raise; verify callers.
            if len(global_plugin_list) == 1:
                resp_list.append(plugin_resp)
            else:
                resp_list = list(plugin_resp)
                resp_list = [x.strip("\r\n\t") for x in resp_list]
        elif isinstance(plugin_resp, list):
            # Same single-vs-many distribution as the tuple case.
            if len(global_plugin_list) == 1:
                resp_list.append([x.strip("\r\n\t") for x in plugin_resp])
            else:
                resp_list = [x.strip("\r\n\t") for x in plugin_resp]
        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
            resp_list.append(plugin_resp)

        # Iterate if there is a list of plugin return vars to update.
        for idx, item in enumerate(resp_list, start=0):
            # Exit loop, done required loop.
            if idx >= len(global_plugin_list):
                break
            # Find the index of the return func in the list and
            # update the global func return dictionary.
            try:
                dict_idx = global_plugin_list[idx]
                global_plugin_dict[dict_idx] = item
            except (IndexError, ValueError) as e:
                # NOTE(review): logger.warn is deprecated in favor of
                # logger.warning; left as-is in this doc-only pass.
                self.logger.warn("\tWARN: response_args_data: %s" % e)
                pass

        # Done updating plugin dict irrespective of pass or failed,
        # clear all the list element for next plugin block execute.
        global_plugin_list.clear()
1303
1304    def yaml_args_string(self, plugin_args):
1305        r"""
1306        Pack the args into string.
1307
1308        plugin_args            arg list ['arg1','arg2,'argn']
1309        """
1310        args_str = ""
1311        for args in plugin_args:
1312            if args:
1313                if isinstance(args, (int, float)):
1314                    args_str += str(args)
1315                elif args in global_plugin_type_list:
1316                    args_str += str(global_plugin_dict[args])
1317                else:
1318                    args_str += '"' + str(args.strip("\r\n\t")) + '"'
1319            # Skip last list element.
1320            if args != plugin_args[-1]:
1321                args_str += ","
1322        return args_str
1323
1324    def yaml_args_populate(self, yaml_arg_list):
1325        r"""
1326        Decode env and plugin vars and populate.
1327
1328        Description of argument(s):
1329        yaml_arg_list         arg list read from YAML
1330
1331        Example:
1332          - plugin_args:
1333            - arg1
1334            - arg2
1335
1336                  yaml_arg_list:  [arg2, arg2]
1337        """
1338        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1339        env_vars_list = list(self.env_dict)
1340
1341        if isinstance(yaml_arg_list, list):
1342            tmp_list = []
1343            for arg in yaml_arg_list:
1344                if isinstance(arg, (int, float)):
1345                    tmp_list.append(arg)
1346                    continue
1347                elif isinstance(arg, str):
1348                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1349                    tmp_list.append(arg_str)
1350                else:
1351                    tmp_list.append(arg)
1352
1353            # return populated list.
1354            return tmp_list
1355
    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
        r"""
        Update ${MY_VAR} and plugin vars.

        First substitutes ${name} tokens from os.environ, then replaces any
        occurrence of a known plugin var name with its stored value from
        global_plugin_dict.

        Description of argument(s):
        yaml_arg_str         arg string read from YAML.

        Example:
            - cat ${MY_VAR}
            - ls -AX my_plugin_var
        """
        # Parse the string for env vars ${env_vars}.
        try:
            # Example, list of matching env vars ['username', 'password', 'hostname']
            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works good.
            var_name_regex = "\\$\\{([^\\}]+)\\}"
            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
            for var in env_var_names_list:
                # KeyError here (unknown ${var}) aborts substitution for the
                # remaining vars in this string — caught below, best-effort.
                env_var = os.environ[var]
                env_replace = "${" + var + "}"
                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
        except Exception as e:
            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
            pass

        # Parse the string for plugin vars.
        try:
            # Example, list of plugin vars ['my_username', 'my_data']
            plugin_var_name_list = global_plugin_dict.keys()
            for var in plugin_var_name_list:
                # skip env var list already populated above code block list.
                if var in env_var_names_list:
                    continue
                # If this plugin var exist but empty in dict, don't replace.
                # This is either a YAML plugin statement incorrectly used or
                # user added a plugin var which is not going to be populated.
                # NOTE(review): this membership test uses the whole
                # yaml_arg_str, not 'var' — it only differs from the else
                # branch when the arg is exactly one plugin var name holding
                # a list/dict; confirm this is the intended behavior.
                if yaml_arg_str in global_plugin_dict:
                    if isinstance(global_plugin_dict[var], (list, dict)):
                        # List data type or dict can't be replaced, use directly
                        # in eval function call.
                        global_plugin_type_list.append(var)
                    else:
                        yaml_arg_str = yaml_arg_str.replace(
                            str(var), str(global_plugin_dict[var])
                        )
                # Just a string like filename or command.
                else:
                    yaml_arg_str = yaml_arg_str.replace(
                        str(var), str(global_plugin_dict[var])
                    )
        except (IndexError, ValueError) as e:
            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
            pass

        return yaml_arg_str
1411
1412    def plugin_error_check(self, plugin_dict):
1413        r"""
1414        Plugin error dict processing.
1415
1416        Description of argument(s):
1417        plugin_dict        Dictionary of plugin error.
1418        """
1419        if any("plugin_error" in d for d in plugin_dict):
1420            for d in plugin_dict:
1421                if "plugin_error" in d:
1422                    value = d["plugin_error"]
1423                    # Reference if the error is set or not by plugin.
1424                    return plugin_error_dict[value]
1425
1426    def key_index_list_dict(self, key, list_dict):
1427        r"""
1428        Iterate list of dictionary and return index if the key match is found.
1429
1430        Description of argument(s):
1431        key           Valid Key in a dict.
1432        list_dict     list of dictionary.
1433        """
1434        for i, d in enumerate(list_dict):
1435            if key in d.keys():
1436                return i
1437
1438    def plugin_expect_type(self, type, data):
1439        r"""
1440        Plugin expect directive type check.
1441        """
1442        if type == "int":
1443            return isinstance(data, int)
1444        elif type == "float":
1445            return isinstance(data, float)
1446        elif type == "str":
1447            return isinstance(data, str)
1448        elif type == "list":
1449            return isinstance(data, list)
1450        elif type == "dict":
1451            return isinstance(data, dict)
1452        elif type == "tuple":
1453            return isinstance(data, tuple)
1454        else:
1455            self.logger.info("\tInvalid data type requested: %s" % type)
1456            return "INVALID"
1457