1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7import json
8import logging
9import os
10import platform
11import re
12import subprocess
13import sys
14import time
15from errno import EACCES, EPERM
16
17import yaml
18
19script_dir = os.path.dirname(os.path.abspath(__file__))
20sys.path.append(script_dir)
21# Walk path and append to sys.path
for root, dirs, files in os.walk(script_dir):
    for dir_name in dirs:
        sys.path.append(os.path.join(root, dir_name))
25
26from ssh_utility import SSHRemoteclient  # NOQA
27from telnet_utility import TelnetRemoteclient  # NOQA
28
29r"""
30User define plugins python functions.
31
32It will imports files from directory plugins
33
34plugins
35├── file1.py
36└── file2.py
37
38Example how to define in YAML:
39 - plugin:
40   - plugin_name: plugin.foo_func.foo_func_yaml
41     - plugin_args:
42       - arg1
43       - arg2
44"""
plugin_dir = os.path.join(script_dir, "plugins")
46sys.path.append(plugin_dir)
47try:
48    for module in os.listdir(plugin_dir):
49        if module == "__init__.py" or module[-3:] != ".py":
50            continue
51        plugin_module = "plugins." + module[:-3]
52        # To access the module plugin.<module name>.<function>
53        # Example: plugin.foo_func.foo_func_yaml()
54        try:
55            plugin = __import__(plugin_module, globals(), locals(), [], 0)
56        except Exception as e:
57            print("PLUGIN: Module import failed: %s" % module)
58            pass
59except FileNotFoundError as e:
60    print("PLUGIN: %s" % e)
61    pass
62
63r"""
64This is for plugin functions returning data or responses to the caller
65in YAML plugin setup.
66
67Example:
68
69    - plugin:
70      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
71      - plugin_args:
72        - ${hostname}
73        - ${username}
74        - ${password}
75        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
76     - plugin:
77        - plugin_name: plugin.print_vars.print_vars
78        - plugin_args:
79          - version
80
81where first plugin "version" var is used by another plugin in the YAML
82block or plugin
83
84"""
85global global_log_store_path
86global global_plugin_dict
87global global_plugin_list
88
# Hold the plugin return values in a dict and the plugin return vars in a list.
# The dict is used to reference and update vars during parser processing,
# whereas the list holds the vars from the current plugin block that still
# need processing.
92global_plugin_dict = {}
93global_plugin_list = []
94
# Hold the declared plugin return variable names whose returned values are a
# list or dict. This name list is used to look up the plugin dict when
# building args for the eval() function call.
# Example: ['version']
98global_plugin_type_list = []
99
100# Path where logs are to be stored or written.
101global_log_store_path = ""
102
103# Plugin error state defaults.
104plugin_error_dict = {
105    "exit_on_error": False,
106    "continue_on_error": False,
107}
108
109
110class ffdc_collector:
111    r"""
112    Execute commands from configuration file to collect log files.
113    Fetch and store generated files at the specified location.
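
    Example usage (a sketch; argument values are placeholders):

        collector = ffdc_collector("hostname", "username", "password",
                                   "ffdc_config.yaml", "/tmp/ffdc",
                                   "OPENBMC", "ALL", None, None, "INFO")
        collector.collect_ffdc()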
114
115    """
116
117    def __init__(
118        self,
119        hostname,
120        username,
121        password,
122        ffdc_config,
123        location,
124        remote_type,
125        remote_protocol,
126        env_vars,
127        econfig,
128        log_level,
129    ):
130        r"""
131        Description of argument(s):
132
133        hostname            name/ip of the targeted (remote) system
134        username            user on the targeted system with access to FFDC files
135        password            password for user on targeted system
136        ffdc_config         configuration file listing commands and files for FFDC
137        location            where to store collected FFDC
138        remote_type         os type of the remote host
139        remote_protocol     Protocol to use to collect data
        env_vars            user-defined CLI env vars, e.g. '{"key": "value"}'
        econfig             user-defined env vars YAML file
        log_level           logging level for this run (e.g. "DEBUG", "INFO")
142
143        """
144
145        self.hostname = hostname
146        self.username = username
147        self.password = password
148        self.ffdc_config = ffdc_config
149        self.location = location + "/" + remote_type.upper()
150        self.ssh_remoteclient = None
151        self.telnet_remoteclient = None
152        self.ffdc_dir_path = ""
153        self.ffdc_prefix = ""
154        self.target_type = remote_type.upper()
155        self.remote_protocol = remote_protocol.upper()
156        self.env_vars = env_vars
157        self.econfig = econfig
158        self.start_time = 0
159        self.elapsed_time = ""
160        self.logger = None
161
        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set
        # here so that all files for this run have the same timestamp and are
        # saved in the same directory.
        # self.location == local system for now
167        self.set_ffdc_default_store_path()
168
        # Logger for this run.
        # Must be created after set_ffdc_default_store_path().
170        self.script_logging(getattr(logging, log_level.upper()))
171
172        # Verify top level directory exists for storage
173        self.validate_local_store(self.location)
174
175        if self.verify_script_env():
            # Load the default or user-defined YAML configuration file.
177            with open(self.ffdc_config, "r") as file:
178                try:
179                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
180                except yaml.YAMLError as e:
181                    self.logger.error(e)
182                    sys.exit(-1)
183
184            if self.target_type not in self.ffdc_actions.keys():
185                self.logger.error(
186                    "\n\tERROR: %s is not listed in %s.\n\n"
187                    % (self.target_type, self.ffdc_config)
188                )
189                sys.exit(-1)
190        else:
191            sys.exit(-1)
192
193        # Load ENV vars from user.
194        self.logger.info("\n\tENV: User define input YAML variables")
195        self.env_dict = {}
196        self.load_env()
197
198    def verify_script_env(self):
        # Imported here only so their versions can be logged below.
200        import click
201        import paramiko
202
203        run_env_ok = True
204
205        redfishtool_version = (
206            self.run_tool_cmd("redfishtool -V").split(" ")[2].strip("\n")
207        )
208        ipmitool_version = self.run_tool_cmd("ipmitool -V").split(" ")[2]
209
210        self.logger.info("\n\t---- Script host environment ----")
211        self.logger.info(
212            "\t{:<10}  {:<10}".format("Script hostname", os.uname()[1])
213        )
214        self.logger.info(
215            "\t{:<10}  {:<10}".format("Script host os", platform.platform())
216        )
217        self.logger.info(
218            "\t{:<10}  {:>10}".format("Python", platform.python_version())
219        )
220        self.logger.info("\t{:<10}  {:>10}".format("PyYAML", yaml.__version__))
221        self.logger.info("\t{:<10}  {:>10}".format("click", click.__version__))
222        self.logger.info(
223            "\t{:<10}  {:>10}".format("paramiko", paramiko.__version__)
224        )
225        self.logger.info(
226            "\t{:<10}  {:>9}".format("redfishtool", redfishtool_version)
227        )
228        self.logger.info(
229            "\t{:<10}  {:>12}".format("ipmitool", ipmitool_version)
230        )
231
        # Compare the PyYAML version as a tuple of ints (avoids eval()).
        if tuple(map(int, yaml.__version__.split("."))) < (5, 3, 0):
233            self.logger.error(
234                "\n\tERROR: Python or python packages do not meet minimum"
235                " version requirement."
236            )
237            self.logger.error(
238                "\tERROR: PyYAML version 5.3.0 or higher is needed.\n"
239            )
240            run_env_ok = False
241
242        self.logger.info("\t---- End script host environment ----")
243        return run_env_ok
244
245    def script_logging(self, log_level_attr):
246        r"""
247        Create logger
248
249        """
250        self.logger = logging.getLogger()
251        self.logger.setLevel(log_level_attr)
252        log_file_handler = logging.FileHandler(
253            self.ffdc_dir_path + "collector.log"
254        )
255
256        stdout_handler = logging.StreamHandler(sys.stdout)
257        self.logger.addHandler(log_file_handler)
258        self.logger.addHandler(stdout_handler)
259
260        # Turn off paramiko INFO logging
261        logging.getLogger("paramiko").setLevel(logging.WARNING)
262
263    def target_is_pingable(self):
264        r"""
265        Check if target system is ping-able.
266
267        """
        response = os.system("ping -c 1 %s >/dev/null 2>&1" % self.hostname)
269        if response == 0:
270            self.logger.info(
271                "\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname
272            )
273            return True
274        else:
275            self.logger.error(
276                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n"
277                % self.hostname
278            )
279            sys.exit(-1)
280
281    def collect_ffdc(self):
282        r"""
283        Initiate FFDC Collection depending on requested protocol.
284
285        """
286
287        self.logger.info(
288            "\n\t---- Start communicating with %s ----" % self.hostname
289        )
290        self.start_time = time.time()
291
        # Find the list of protocols supported for this target type.
293        check_protocol_list = []
294        config_dict = self.ffdc_actions
295
296        for target_type in config_dict.keys():
297            if self.target_type != target_type:
298                continue
299
300            for k, v in config_dict[target_type].items():
301                if (
302                    config_dict[target_type][k]["PROTOCOL"][0]
303                    not in check_protocol_list
304                ):
305                    check_protocol_list.append(
306                        config_dict[target_type][k]["PROTOCOL"][0]
307                    )
308
309        self.logger.info(
310            "\n\t %s protocol type: %s"
311            % (self.target_type, check_protocol_list)
312        )
313
314        verified_working_protocol = self.verify_protocol(check_protocol_list)
315
316        if verified_working_protocol:
317            self.logger.info(
318                "\n\t---- Completed protocol pre-requisite check ----\n"
319            )
320
321        # Verify top level directory exists for storage
322        self.validate_local_store(self.location)
323
324        if (self.remote_protocol not in verified_working_protocol) and (
325            self.remote_protocol != "ALL"
326        ):
327            self.logger.info(
328                "\n\tWorking protocol list: %s" % verified_working_protocol
329            )
330            self.logger.error(
331                "\tERROR: Requested protocol %s is not in working protocol"
332                " list.\n"
333                % self.remote_protocol
334            )
335            sys.exit(-1)
336        else:
337            self.generate_ffdc(verified_working_protocol)
338
339    def ssh_to_target_system(self):
340        r"""
341        Open a ssh connection to targeted system.
342
343        """
344
345        self.ssh_remoteclient = SSHRemoteclient(
346            self.hostname, self.username, self.password
347        )
348
349        if self.ssh_remoteclient.ssh_remoteclient_login():
350            self.logger.info(
351                "\n\t[Check] %s SSH connection established.\t [OK]"
352                % self.hostname
353            )
354
355            # Check scp connection.
356            # If scp connection fails,
357            # continue with FFDC generation but skip scp files to local host.
358            self.ssh_remoteclient.scp_connection()
359            return True
360        else:
361            self.logger.info(
362                "\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]"
363                % self.hostname
364            )
365            return False
366
367    def telnet_to_target_system(self):
368        r"""
369        Open a telnet connection to targeted system.
370        """
371        self.telnet_remoteclient = TelnetRemoteclient(
372            self.hostname, self.username, self.password
373        )
374        if self.telnet_remoteclient.tn_remoteclient_login():
375            self.logger.info(
376                "\n\t[Check] %s Telnet connection established.\t [OK]"
377                % self.hostname
378            )
379            return True
380        else:
381            self.logger.info(
382                "\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]"
383                % self.hostname
384            )
385            return False
386
387    def generate_ffdc(self, working_protocol_list):
388        r"""
389        Determine actions based on remote host type
390
391        Description of argument(s):
392        working_protocol_list    list of confirmed working protocols to connect to remote host.
393        """
394
395        self.logger.info(
396            "\n\t---- Executing commands on " + self.hostname + " ----"
397        )
398        self.logger.info(
399            "\n\tWorking protocol list: %s" % working_protocol_list
400        )
401
402        config_dict = self.ffdc_actions
403        for target_type in config_dict.keys():
404            if self.target_type != target_type:
405                continue
406
407            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
408            global_plugin_dict["global_log_store_path"] = self.ffdc_dir_path
409            self.logger.info("\tSystem Type: %s" % target_type)
410            for k, v in config_dict[target_type].items():
411                if (
412                    self.remote_protocol not in working_protocol_list
413                    and self.remote_protocol != "ALL"
414                ):
415                    continue
416
417                protocol = config_dict[target_type][k]["PROTOCOL"][0]
418
                if protocol in working_protocol_list:
423                    if protocol == "SSH" or protocol == "SCP":
424                        self.protocol_ssh(protocol, target_type, k)
425                    elif protocol == "TELNET":
426                        self.protocol_telnet(target_type, k)
427                    elif (
428                        protocol == "REDFISH"
429                        or protocol == "IPMI"
430                        or protocol == "SHELL"
431                    ):
432                        self.protocol_execute(protocol, target_type, k)
433                else:
434                    self.logger.error(
435                        "\n\tERROR: %s is not available for %s."
436                        % (protocol, self.hostname)
437                    )
438
439        # Close network connection after collecting all files
440        self.elapsed_time = time.strftime(
441            "%H:%M:%S", time.gmtime(time.time() - self.start_time)
442        )
443        if self.ssh_remoteclient:
444            self.ssh_remoteclient.ssh_remoteclient_disconnect()
445        if self.telnet_remoteclient:
446            self.telnet_remoteclient.tn_remoteclient_disconnect()
447
448    def protocol_ssh(self, protocol, target_type, sub_type):
449        r"""
450        Perform actions using SSH and SCP protocols.
451
452        Description of argument(s):
453        protocol            Protocol to execute.
454        target_type         OS Type of remote host.
455        sub_type            Group type of commands.
456        """
457
458        if protocol == "SCP":
459            self.group_copy(self.ffdc_actions[target_type][sub_type])
460        else:
461            self.collect_and_copy_ffdc(
462                self.ffdc_actions[target_type][sub_type]
463            )
464
465    def protocol_telnet(self, target_type, sub_type):
466        r"""
        Perform actions using telnet protocol.

        Description of argument(s):
        target_type          OS Type of remote host.
        sub_type             Group type of commands.
        """
471        self.logger.info(
472            "\n\t[Run] Executing commands on %s using %s"
473            % (self.hostname, "TELNET")
474        )
475        telnet_files_saved = []
476        progress_counter = 0
477        list_of_commands = self.ffdc_actions[target_type][sub_type]["COMMANDS"]
478        for index, each_cmd in enumerate(list_of_commands, start=0):
479            command_txt, command_timeout = self.unpack_command(each_cmd)
480            result = self.telnet_remoteclient.execute_command(
481                command_txt, command_timeout
482            )
483            if result:
484                try:
485                    targ_file = self.ffdc_actions[target_type][sub_type][
486                        "FILES"
487                    ][index]
488                except IndexError:
489                    targ_file = command_txt
490                    self.logger.warning(
491                        "\n\t[WARN] Missing filename to store data from"
492                        " telnet %s." % each_cmd
493                    )
494                    self.logger.warning(
495                        "\t[WARN] Data will be stored in %s." % targ_file
496                    )
497                targ_file_with_path = (
498                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
499                )
                # Create a new file
                with open(targ_file_with_path, "w") as fp:
                    fp.write(result)
                telnet_files_saved.append(targ_file)
505            progress_counter += 1
506            self.print_progress(progress_counter)
507        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
508        for file in telnet_files_saved:
509            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
510
511    def protocol_execute(self, protocol, target_type, sub_type):
512        r"""
513        Perform actions for a given protocol.
514
515        Description of argument(s):
516        protocol            Protocol to execute.
517        target_type         OS Type of remote host.
518        sub_type            Group type of commands.
519        """
520
521        self.logger.info(
522            "\n\t[Run] Executing commands to %s using %s"
523            % (self.hostname, protocol)
524        )
525        executed_files_saved = []
526        progress_counter = 0
527        list_of_cmd = self.get_command_list(
528            self.ffdc_actions[target_type][sub_type]
529        )
530        for index, each_cmd in enumerate(list_of_cmd, start=0):
531            plugin_call = False
532            if isinstance(each_cmd, dict):
533                if "plugin" in each_cmd:
                    # If the error state is set and the plugin explicitly
                    # requested to skip execution on error.
536                    if plugin_error_dict[
537                        "exit_on_error"
538                    ] and self.plugin_error_check(each_cmd["plugin"]):
539                        self.logger.info(
540                            "\n\t[PLUGIN-ERROR] exit_on_error: %s"
541                            % plugin_error_dict["exit_on_error"]
542                        )
543                        self.logger.info(
544                            "\t[PLUGIN-SKIP] %s" % each_cmd["plugin"][0]
545                        )
546                        continue
547                    plugin_call = True
548                    # call the plugin
549                    self.logger.info("\n\t[PLUGIN-START]")
550                    result = self.execute_plugin_block(each_cmd["plugin"])
551                    self.logger.info("\t[PLUGIN-END]\n")
552            else:
553                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
554
555            if not plugin_call:
556                result = self.run_tool_cmd(each_cmd)
557            if result:
558                try:
559                    file_name = self.get_file_list(
560                        self.ffdc_actions[target_type][sub_type]
561                    )[index]
562                    # If file is specified as None.
563                    if file_name == "None":
564                        continue
565                    targ_file = self.yaml_env_and_plugin_vars_populate(
566                        file_name
567                    )
568                except IndexError:
569                    targ_file = each_cmd.split("/")[-1]
570                    self.logger.warning(
571                        "\n\t[WARN] Missing filename to store data from %s."
572                        % each_cmd
573                    )
574                    self.logger.warning(
575                        "\t[WARN] Data will be stored in %s." % targ_file
576                    )
577
578                targ_file_with_path = (
579                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
580                )
581
                # Create a new file
                with open(targ_file_with_path, "w") as fp:
                    if isinstance(result, dict):
                        fp.write(json.dumps(result))
                    else:
                        fp.write(result)
                executed_files_saved.append(targ_file)
590
591            progress_counter += 1
592            self.print_progress(progress_counter)
593
594        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
595
596        for file in executed_files_saved:
597            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
598
599    def collect_and_copy_ffdc(
600        self, ffdc_actions_for_target_type, form_filename=False
601    ):
602        r"""
603        Send commands in ffdc_config file to targeted system.
604
605        Description of argument(s):
606        ffdc_actions_for_target_type     commands and files for the selected remote host type.
        form_filename                    if true, prepend self.target_type to filename
608        """
609
610        # Executing commands, if any
611        self.ssh_execute_ffdc_commands(
612            ffdc_actions_for_target_type, form_filename
613        )
614
615        # Copying files
616        if self.ssh_remoteclient.scpclient:
617            self.logger.info(
618                "\n\n\tCopying FFDC files from remote system %s.\n"
619                % self.hostname
620            )
621
622            # Retrieving files from target system
623            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
624            self.scp_ffdc(
625                self.ffdc_dir_path,
626                self.ffdc_prefix,
627                form_filename,
628                list_of_files,
629            )
630        else:
631            self.logger.info(
632                "\n\n\tSkip copying FFDC files from remote system %s.\n"
633                % self.hostname
634            )
635
636    def get_command_list(self, ffdc_actions_for_target_type):
637        r"""
638        Fetch list of commands from configuration file
639
640        Description of argument(s):
641        ffdc_actions_for_target_type    commands and files for the selected remote host type.
642        """
643        try:
644            list_of_commands = ffdc_actions_for_target_type["COMMANDS"]
645        except KeyError:
646            list_of_commands = []
647        return list_of_commands
648
649    def get_file_list(self, ffdc_actions_for_target_type):
650        r"""
        Fetch list of files from configuration file
652
653        Description of argument(s):
654        ffdc_actions_for_target_type    commands and files for the selected remote host type.
655        """
656        try:
657            list_of_files = ffdc_actions_for_target_type["FILES"]
658        except KeyError:
659            list_of_files = []
660        return list_of_files
661
662    def unpack_command(self, command):
663        r"""
664        Unpack command from config file
665
666        Description of argument(s):
667        command    Command from config file.
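
        Example (illustrative):
            "cat /etc/os-release"       # str  -> default 60 second timeout
            {"obmcutil state": 120}     # dict -> 120 second timeout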
668        """
669        if isinstance(command, dict):
670            command_txt = next(iter(command))
671            command_timeout = next(iter(command.values()))
672        elif isinstance(command, str):
673            command_txt = command
674            # Default command timeout 60 seconds
675            command_timeout = 60
676
677        return command_txt, command_timeout
678
679    def ssh_execute_ffdc_commands(
680        self, ffdc_actions_for_target_type, form_filename=False
681    ):
682        r"""
683        Send commands in ffdc_config file to targeted system.
684
685        Description of argument(s):
686        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        form_filename                    if true, prepend self.target_type to filename
688        """
689        self.logger.info(
690            "\n\t[Run] Executing commands on %s using %s"
691            % (self.hostname, ffdc_actions_for_target_type["PROTOCOL"][0])
692        )
693
694        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
695        # If command list is empty, returns
696        if not list_of_commands:
697            return
698
699        progress_counter = 0
700        for command in list_of_commands:
701            command_txt, command_timeout = self.unpack_command(command)
702
703            if form_filename:
704                command_txt = str(command_txt % self.target_type)
705
706            (
707                cmd_exit_code,
708                err,
709                response,
710            ) = self.ssh_remoteclient.execute_command(
711                command_txt, command_timeout
712            )
713
714            if cmd_exit_code:
715                self.logger.warning(
716                    "\n\t\t[WARN] %s exits with code %s."
717                    % (command_txt, str(cmd_exit_code))
718                )
719                self.logger.warning("\t\t[WARN] %s " % err)
720
721            progress_counter += 1
722            self.print_progress(progress_counter)
723
724        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
725
726    def group_copy(self, ffdc_actions_for_target_type):
727        r"""
728        scp group of files (wild card) from remote host.
729
730        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
732        """
733
734        if self.ssh_remoteclient.scpclient:
735            self.logger.info(
736                "\n\tCopying files from remote system %s via SCP.\n"
737                % self.hostname
738            )
739
740            list_of_commands = self.get_command_list(
741                ffdc_actions_for_target_type
742            )
743            # If command list is empty, returns
744            if not list_of_commands:
745                return
746
747            for command in list_of_commands:
748                try:
749                    command = self.yaml_env_and_plugin_vars_populate(command)
750                except IndexError:
751                    self.logger.error("\t\tInvalid command %s" % command)
752                    continue
753
754                (
755                    cmd_exit_code,
756                    err,
757                    response,
758                ) = self.ssh_remoteclient.execute_command(command)
759
                # If the file does not exist, take no action;
                # cmd_exit_code is ignored for this scenario.
762                if response:
763                    scp_result = self.ssh_remoteclient.scp_file_from_remote(
764                        response.split("\n"), self.ffdc_dir_path
765                    )
766                    if scp_result:
767                        self.logger.info(
768                            "\t\tSuccessfully copied from "
769                            + self.hostname
770                            + ":"
771                            + command
772                        )
773                else:
774                    self.logger.info("\t\t%s has no result" % command)
775
776        else:
777            self.logger.info(
778                "\n\n\tSkip copying files from remote system %s.\n"
779                % self.hostname
780            )
781
782    def scp_ffdc(
783        self,
784        targ_dir_path,
785        targ_file_prefix,
786        form_filename,
787        file_list=None,
788        quiet=None,
789    ):
790        r"""
791        SCP all files in file_dict to the indicated directory on the local system.
792
        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be prepended to each
                                        target file's name.
        form_filename                   If true, prepend self.target_type to each filename.
        file_list                       A list of files to scp from the targeted system to this system.
        quiet                           If true, print only a progress indicator instead of per-file results.
798
799        """
800
801        progress_counter = 0
802        for filename in file_list:
803            if form_filename:
804                filename = str(filename % self.target_type)
805            source_file_path = filename
806            targ_file_path = (
807                targ_dir_path + targ_file_prefix + filename.split("/")[-1]
808            )
809
810            # If source file name contains wild card, copy filename as is.
811            if "*" in source_file_path:
812                scp_result = self.ssh_remoteclient.scp_file_from_remote(
813                    source_file_path, self.ffdc_dir_path
814                )
815            else:
816                scp_result = self.ssh_remoteclient.scp_file_from_remote(
817                    source_file_path, targ_file_path
818                )
819
820            if not quiet:
821                if scp_result:
822                    self.logger.info(
823                        "\t\tSuccessfully copied from "
824                        + self.hostname
825                        + ":"
826                        + source_file_path
827                        + ".\n"
828                    )
829                else:
830                    self.logger.info(
831                        "\t\tFail to copy from "
832                        + self.hostname
833                        + ":"
834                        + source_file_path
835                        + ".\n"
836                    )
837            else:
838                progress_counter += 1
839                self.print_progress(progress_counter)
840
841    def set_ffdc_default_store_path(self):
842        r"""
843        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
        Collected ffdc files will be stored in dir /self.location/hostname_timestr/.
        Individual ffdc file names will be prefixed with timestr_.
846
847        Description of class variables:
848        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.
849
850        self.ffdc_prefix    The prefix to be given to each ffdc file name.
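
        Example (illustrative):
            self.ffdc_dir_path = "<location>/<hostname>_20240101-093000/"
            self.ffdc_prefix   = "20240101-093000_"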
851
852        """
853
854        timestr = time.strftime("%Y%m%d-%H%M%S")
855        self.ffdc_dir_path = (
856            self.location + "/" + self.hostname + "_" + timestr + "/"
857        )
858        self.ffdc_prefix = timestr + "_"
859        self.validate_local_store(self.ffdc_dir_path)
860
    # The local store path must be verified to exist prior to instantiating
    # this class.  This classmethod lets the CLI input parm and Robot
    # Framework "${EXECDIR}/logs" callers share the same code before the
    # class is referenced.
864    @classmethod
865    def validate_local_store(cls, dir_path):
866        r"""
867        Ensure path exists to store FFDC files locally.
868
869        Description of variable:
870        dir_path  The dir path where collected ffdc data files will be stored.
871
872        """
873
874        if not os.path.exists(dir_path):
875            try:
876                os.makedirs(dir_path, 0o755)
877            except (IOError, OSError) as e:
878                # PermissionError
879                if e.errno == EPERM or e.errno == EACCES:
                    # self.logger is an instance attribute and is not
                    # available in this classmethod, so report via print().
                    print(
                        "\tERROR: os.makedirs %s failed with"
                        " PermissionError.\n" % dir_path
                    )
                else:
                    print(
                        "\tERROR: os.makedirs %s failed with %s.\n"
                        % (dir_path, e.strerror)
                    )
889                sys.exit(-1)
890
891    def print_progress(self, progress):
892        r"""
        Print activity progress: one '+' per completed step.
894
895        Description of variable:
896        progress  Progress counter.
897
898        """
899
900        sys.stdout.write("\r\t" + "+" * progress)
901        sys.stdout.flush()
902        time.sleep(0.1)
903
904    def verify_redfish(self):
905        r"""
906        Verify remote host has redfish service active
907
908        """
909        redfish_parm = (
910            "redfishtool -r "
911            + self.hostname
912            + " -S Always raw GET /redfish/v1/"
913        )
914        return self.run_tool_cmd(redfish_parm, True)
915
916    def verify_ipmi(self):
917        r"""
918        Verify remote host has IPMI LAN service active
919
920        """
921        if self.target_type == "OPENBMC":
922            ipmi_parm = (
923                "ipmitool -I lanplus -C 17  -U "
924                + self.username
925                + " -P "
926                + self.password
927                + " -H "
928                + self.hostname
929                + " power status"
930            )
931        else:
932            ipmi_parm = (
933                "ipmitool -I lanplus  -P "
934                + self.password
935                + " -H "
936                + self.hostname
937                + " power status"
938            )
939
940        return self.run_tool_cmd(ipmi_parm, True)
941
942    def run_tool_cmd(self, parms_string, quiet=False):
943        r"""
944        Run CLI standard tool or scripts.
945
946        Description of variable:
947        parms_string         tool command options.
948        quiet                do not print tool error message if True
949        """
950
951        result = subprocess.run(
952            [parms_string],
953            stdout=subprocess.PIPE,
954            stderr=subprocess.PIPE,
955            shell=True,
956            universal_newlines=True,
957        )
958
959        if result.stderr and not quiet:
960            self.logger.error("\n\t\tERROR with %s " % parms_string)
961            self.logger.error("\t\t" + result.stderr)
962
963        return result.stdout
964
965    def verify_protocol(self, protocol_list):
966        r"""
967        Perform protocol working check.
968
969        Description of argument(s):
        protocol_list        List of protocols to check.
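
        Example (illustrative):
            ['SSH', 'SCP', 'REDFISH', 'IPMI']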
971        """
972
973        tmp_list = []
974        if self.target_is_pingable():
975            tmp_list.append("SHELL")
976
977        for protocol in protocol_list:
978            if self.remote_protocol != "ALL":
979                if self.remote_protocol != protocol:
980                    continue
981
982            # Only check SSH/SCP once for both protocols
            if (
                protocol == "SSH" or protocol == "SCP"
            ) and protocol not in tmp_list:
988                if self.ssh_to_target_system():
989                    # Add only what user asked.
990                    if self.remote_protocol != "ALL":
991                        tmp_list.append(self.remote_protocol)
992                    else:
993                        tmp_list.append("SSH")
994                        tmp_list.append("SCP")
995
996            if protocol == "TELNET":
997                if self.telnet_to_target_system():
998                    tmp_list.append(protocol)
999
1000            if protocol == "REDFISH":
1001                if self.verify_redfish():
1002                    tmp_list.append(protocol)
1003                    self.logger.info(
1004                        "\n\t[Check] %s Redfish Service.\t\t [OK]"
1005                        % self.hostname
1006                    )
1007                else:
1008                    self.logger.info(
1009                        "\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]"
1010                        % self.hostname
1011                    )
1012
1013            if protocol == "IPMI":
1014                if self.verify_ipmi():
1015                    tmp_list.append(protocol)
1016                    self.logger.info(
1017                        "\n\t[Check] %s IPMI LAN Service.\t\t [OK]"
1018                        % self.hostname
1019                    )
1020                else:
1021                    self.logger.info(
1022                        "\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]"
1023                        % self.hostname
1024                    )
1025
1026        return tmp_list
1027
1028    def load_env(self):
1029        r"""
        Load and export user-defined environment variables from the CLI JSON
        string and/or the env YAML config file for use in YAML commands.
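
        Example econfig YAML (a sketch; keys under env_params are
        user-defined placeholders):

            env_params:
                MY_TOKEN: abcd1234
                TARGET_PORT: 443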
1031
1032        """
        # These env vars can be referenced in YAML and are resolved at runtime.
1034        # Example YAML:
1035        # -COMMANDS:
1036        #    - my_command ${hostname}  ${username}   ${password}
1037        os.environ["hostname"] = self.hostname
1038        os.environ["username"] = self.username
1039        os.environ["password"] = self.password
1040
1041        # Append default Env.
1042        self.env_dict["hostname"] = self.hostname
1043        self.env_dict["username"] = self.username
1044        self.env_dict["password"] = self.password
1045
1046        try:
1047            tmp_env_dict = {}
1048            if self.env_vars:
1049                tmp_env_dict = json.loads(self.env_vars)
1050                # Export ENV vars default.
1051                for key, value in tmp_env_dict.items():
1052                    os.environ[key] = value
1053                    self.env_dict[key] = str(value)
1054
1055            if self.econfig:
1056                with open(self.econfig, "r") as file:
1057                    try:
1058                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
1059                    except yaml.YAMLError as e:
1060                        self.logger.error(e)
1061                        sys.exit(-1)
1062                # Export ENV vars.
1063                for key, value in tmp_env_dict["env_params"].items():
1064                    os.environ[key] = str(value)
1065                    self.env_dict[key] = str(value)
1066        except json.decoder.JSONDecodeError as e:
1067            self.logger.error("\n\tERROR: %s " % e)
1068            sys.exit(-1)
1069
        # This is to mask passwords from being displayed on the console.
1071        mask_dict = self.env_dict.copy()
1072        for k, v in mask_dict.items():
1073            if k.lower().find("password") != -1:
1074                hidden_text = []
1075                hidden_text.append(v)
1076                password_regex = (
1077                    "(" + "|".join([re.escape(x) for x in hidden_text]) + ")"
1078                )
1079                mask_dict[k] = re.sub(password_regex, "********", v)
1080
1081        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
1082
1083    def execute_python_eval(self, eval_string):
1084        r"""
1085        Execute qualified python function string using eval.
1086
1087        Description of argument(s):
1088        eval_string        Execute the python object.
1089
1090        Example:
1091                eval(plugin.foo_func.foo_func(10))
1092        """
1093        try:
1094            self.logger.info("\tExecuting plugin func()")
1095            self.logger.debug("\tCall func: %s" % eval_string)
1096            result = eval(eval_string)
1097            self.logger.info("\treturn: %s" % str(result))
1098        except (
1099            ValueError,
1100            SyntaxError,
1101            NameError,
1102            AttributeError,
1103            TypeError,
1104        ) as e:
1105            self.logger.error("\tERROR: execute_python_eval: %s" % e)
1106            # Set the plugin error state.
1107            plugin_error_dict["exit_on_error"] = True
1108            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
1109            return "PLUGIN_EVAL_ERROR"
1110
1111        return result
1112
1113    def execute_plugin_block(self, plugin_cmd_list):
1114        r"""
        Pack the plugin command into a qualified Python string object.

        Description of argument(s):
        plugin_cmd_list       Plugin block read from YAML
                              [{'plugin_name': 'plugin.foo_func.my_func'},
                               {'plugin_args': [10]}]
1121
1122        Example:
1123            - plugin:
1124              - plugin_name: plugin.foo_func.my_func
1125              - plugin_args:
1126                - arg1
1127                - arg2
1128
1129            - plugin:
1130              - plugin_name: result = plugin.foo_func.my_func
1131              - plugin_args:
1132                - arg1
1133                - arg2
1134
1135            - plugin:
1136              - plugin_name: result1,result2 = plugin.foo_func.my_func
1137              - plugin_args:
1138                - arg1
1139                - arg2
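
        A plugin can also declare the expected return type, checked via
        plugin_expect_type() (a sketch):

            - plugin:
              - plugin_name: result = plugin.foo_func.my_func
              - plugin_args:
                - arg1
              - plugin_expects_return: dict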
1140        """
1141        try:
1142            idx = self.key_index_list_dict("plugin_name", plugin_cmd_list)
1143            plugin_name = plugin_cmd_list[idx]["plugin_name"]
1144            # Equal separator means plugin function returns result.
1145            if " = " in plugin_name:
1146                # Ex. ['result', 'plugin.foo_func.my_func']
1147                plugin_name_args = plugin_name.split(" = ")
1148                # plugin func return data.
1149                for arg in plugin_name_args:
1150                    if arg == plugin_name_args[-1]:
1151                        plugin_name = arg
1152                    else:
1153                        plugin_resp = arg.split(",")
1154                        # ['result1','result2']
1155                        for x in plugin_resp:
1156                            global_plugin_list.append(x)
1157                            global_plugin_dict[x] = ""
1158
1159            # Walk the plugin args ['arg1,'arg2']
1160            # If the YAML plugin statement 'plugin_args' is not declared.
1161            if any("plugin_args" in d for d in plugin_cmd_list):
1162                idx = self.key_index_list_dict("plugin_args", plugin_cmd_list)
1163                plugin_args = plugin_cmd_list[idx]["plugin_args"]
1164                if plugin_args:
1165                    plugin_args = self.yaml_args_populate(plugin_args)
1166                else:
1167                    plugin_args = []
1168            else:
1169                plugin_args = self.yaml_args_populate([])
1170
1171            # Pack the args arg1, arg2, .... argn into
1172            # "arg1","arg2","argn"  string as params for function.
1173            parm_args_str = self.yaml_args_string(plugin_args)
1174            if parm_args_str:
1175                plugin_func = plugin_name + "(" + parm_args_str + ")"
1176            else:
1177                plugin_func = plugin_name + "()"
1178
1179            # Execute plugin function.
1180            if global_plugin_dict:
1181                resp = self.execute_python_eval(plugin_func)
1182                # Update plugin vars dict if there is any.
1183                if resp != "PLUGIN_EVAL_ERROR":
1184                    self.response_args_data(resp)
1185            else:
1186                resp = self.execute_python_eval(plugin_func)
        except Exception as e:
            # Set the plugin error state and flag the response as an error so
            # the caller does not reference an unbound result.
            plugin_error_dict["exit_on_error"] = True
            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
            resp = "PLUGIN_EVAL_ERROR"
1192
1193        # There is a real error executing the plugin function.
1194        if resp == "PLUGIN_EVAL_ERROR":
1195            return resp
1196
1197        # Check if plugin_expects_return (int, string, list,dict etc)
1198        if any("plugin_expects_return" in d for d in plugin_cmd_list):
1199            idx = self.key_index_list_dict(
1200                "plugin_expects_return", plugin_cmd_list
1201            )
1202            plugin_expects = plugin_cmd_list[idx]["plugin_expects_return"]
1203            if plugin_expects:
1204                if resp:
1205                    if (
1206                        self.plugin_expect_type(plugin_expects, resp)
1207                        == "INVALID"
1208                    ):
1209                        self.logger.error("\tWARN: Plugin error check skipped")
1210                    elif not self.plugin_expect_type(plugin_expects, resp):
1211                        self.logger.error(
1212                            "\tERROR: Plugin expects return data: %s"
1213                            % plugin_expects
1214                        )
1215                        plugin_error_dict["exit_on_error"] = True
1216                elif not resp:
1217                    self.logger.error(
1218                        "\tERROR: Plugin func failed to return data"
1219                    )
1220                    plugin_error_dict["exit_on_error"] = True
1221
1222        return resp
1223
1224    def response_args_data(self, plugin_resp):
1225        r"""
1226        Parse the plugin function response and update plugin return variable.
1227
1228        plugin_resp       Response data from plugin function.
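
        Example (illustrative):
            With global_plugin_list = ['result1', 'result2'] and a tuple
            response ('up', '1.0'), global_plugin_dict is updated to
            {'result1': 'up', 'result2': '1.0'}.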
1229        """
1230        resp_list = []
1231        resp_data = ""
1232
1233        # There is nothing to update the plugin response.
1234        if len(global_plugin_list) == 0 or plugin_resp == "None":
1235            return
1236
1237        if isinstance(plugin_resp, str):
1238            resp_data = plugin_resp.strip("\r\n\t")
1239            resp_list.append(resp_data)
1240        elif isinstance(plugin_resp, bytes):
1241            resp_data = str(plugin_resp, "UTF-8").strip("\r\n\t")
1242            resp_list.append(resp_data)
1243        elif isinstance(plugin_resp, tuple):
1244            if len(global_plugin_list) == 1:
1245                resp_list.append(plugin_resp)
1246            else:
1247                resp_list = list(plugin_resp)
1248                resp_list = [x.strip("\r\n\t") for x in resp_list]
1249        elif isinstance(plugin_resp, list):
1250            if len(global_plugin_list) == 1:
1251                resp_list.append([x.strip("\r\n\t") for x in plugin_resp])
1252            else:
1253                resp_list = [x.strip("\r\n\t") for x in plugin_resp]
1254        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
1255            resp_list.append(plugin_resp)
1256
1257        # Iterate if there is a list of plugin return vars to update.
1258        for idx, item in enumerate(resp_list, start=0):
            # Exit the loop once all expected return vars are updated.
1260            if idx >= len(global_plugin_list):
1261                break
1262            # Find the index of the return func in the list and
1263            # update the global func return dictionary.
1264            try:
1265                dict_idx = global_plugin_list[idx]
1266                global_plugin_dict[dict_idx] = item
1267            except (IndexError, ValueError) as e:
                self.logger.warning("\tWARN: response_args_data: %s" % e)
1270
1271        # Done updating plugin dict irrespective of pass or failed,
1272        # clear all the list element for next plugin block execute.
1273        global_plugin_list.clear()
1274
1275    def yaml_args_string(self, plugin_args):
1276        r"""
1277        Pack the args into string.
1278
        plugin_args            arg list, e.g. ['arg1', 'arg2', ..., 'argn']
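
        Example (illustrative):
            ['arg1', 10]  ->  '"arg1",10'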
1280        """
1281        args_str = ""
1282        for args in plugin_args:
1283            if args:
1284                if isinstance(args, (int, float)):
1285                    args_str += str(args)
1286                elif args in global_plugin_type_list:
1287                    args_str += str(global_plugin_dict[args])
1288                else:
1289                    args_str += '"' + str(args.strip("\r\n\t")) + '"'
            # Add a separator after all but the last element.
1291            if args != plugin_args[-1]:
1292                args_str += ","
1293        return args_str
1294
1295    def yaml_args_populate(self, yaml_arg_list):
1296        r"""
1297        Decode env and plugin vars and populate.
1298
1299        Description of argument(s):
1300        yaml_arg_list         arg list read from YAML
1301
1302        Example:
1303          - plugin_args:
1304            - arg1
1305            - arg2
1306
                  yaml_arg_list:  [arg1, arg2]
1308        """
1309        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1310        env_vars_list = list(self.env_dict)
1311
1312        if isinstance(yaml_arg_list, list):
1313            tmp_list = []
1314            for arg in yaml_arg_list:
1315                if isinstance(arg, (int, float)):
1316                    tmp_list.append(arg)
1317                    continue
1318                elif isinstance(arg, str):
1319                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1320                    tmp_list.append(arg_str)
1321                else:
1322                    tmp_list.append(arg)
1323
1324            # return populated list.
1325            return tmp_list
1326
1327    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
1328        r"""
1329        Update ${MY_VAR} and plugin vars.
1330
1331        Description of argument(s):
1332        yaml_arg_str         arg string read from YAML.
1333
1334        Example:
1335            - cat ${MY_VAR}
1336            - ls -AX my_plugin_var
1337        """
1338        # Parse the string for env vars ${env_vars}.
1339        try:
1340            # Example, list of matching env vars ['username', 'password', 'hostname']
            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works well.
1342            var_name_regex = "\\$\\{([^\\}]+)\\}"
1343            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
1344            for var in env_var_names_list:
1345                env_var = os.environ[var]
1346                env_replace = "${" + var + "}"
1347                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
1348        except Exception as e:
1349            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
1350            pass
1351
1352        # Parse the string for plugin vars.
1353        try:
1354            # Example, list of plugin vars ['my_username', 'my_data']
1355            plugin_var_name_list = global_plugin_dict.keys()
1356            for var in plugin_var_name_list:
                # Skip env vars already populated by the code block above.
1358                if var in env_var_names_list:
1359                    continue
                # If this plugin var exists but is empty in the dict, don't
                # replace it. This is either a YAML plugin statement used
                # incorrectly or a user-added plugin var that will not be
                # populated.
1363                if yaml_arg_str in global_plugin_dict:
1364                    if isinstance(global_plugin_dict[var], (list, dict)):
1365                        # List data type or dict can't be replaced, use directly
1366                        # in eval function call.
1367                        global_plugin_type_list.append(var)
1368                    else:
1369                        yaml_arg_str = yaml_arg_str.replace(
1370                            str(var), str(global_plugin_dict[var])
1371                        )
1372                # Just a string like filename or command.
1373                else:
1374                    yaml_arg_str = yaml_arg_str.replace(
1375                        str(var), str(global_plugin_dict[var])
1376                    )
1377        except (IndexError, ValueError) as e:
1378            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
1379            pass
1380
1381        return yaml_arg_str
1382
1383    def plugin_error_check(self, plugin_dict):
1384        r"""
1385        Plugin error dict processing.
1386
1387        Description of argument(s):
1388        plugin_dict        Dictionary of plugin error.
1389        """
1390        if any("plugin_error" in d for d in plugin_dict):
1391            for d in plugin_dict:
1392                if "plugin_error" in d:
1393                    value = d["plugin_error"]
                    # Return whether the plugin has set this error state.
1395                    return plugin_error_dict[value]
1396
1397    def key_index_list_dict(self, key, list_dict):
1398        r"""
1399        Iterate list of dictionary and return index if the key match is found.
1400
1401        Description of argument(s):
1402        key           Valid Key in a dict.
1403        list_dict     list of dictionary.
1404        """
1405        for i, d in enumerate(list_dict):
1406            if key in d.keys():
1407                return i
1408
1409    def plugin_expect_type(self, type, data):
1410        r"""
1411        Plugin expect directive type check.
1412        """
1413        if type == "int":
1414            return isinstance(data, int)
1415        elif type == "float":
1416            return isinstance(data, float)
1417        elif type == "str":
1418            return isinstance(data, str)
1419        elif type == "list":
1420            return isinstance(data, list)
1421        elif type == "dict":
1422            return isinstance(data, dict)
1423        elif type == "tuple":
1424            return isinstance(data, tuple)
1425        else:
1426            self.logger.info("\tInvalid data type requested: %s" % type)
1427            return "INVALID"
1428