1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7import json
8import logging
9import os
10import platform
11import re
12import subprocess
13import sys
14import time
15from errno import EACCES, EPERM
16
17import yaml
18
19script_dir = os.path.dirname(os.path.abspath(__file__))
20sys.path.append(script_dir)
# Walk the script directory tree and append sub-directories to sys.path
# so that utility modules can be imported without package qualification.
for root, dirs, files in os.walk(script_dir):
    for dir_name in dirs:
        sys.path.append(os.path.join(root, dir_name))
25
26from ssh_utility import SSHRemoteclient  # NOQA
27from telnet_utility import TelnetRemoteclient  # NOQA
28
29r"""
30User define plugins python functions.
31
32It will imports files from directory plugins
33
34plugins
35├── file1.py
36└── file2.py
37
38Example how to define in YAML:
39 - plugin:
40   - plugin_name: plugin.foo_func.foo_func_yaml
41     - plugin_args:
42       - arg1
43       - arg2
44"""
plugin_dir = os.path.join(script_dir, "plugins")
46sys.path.append(plugin_dir)
47try:
48    for module in os.listdir(plugin_dir):
49        if module == "__init__.py" or module[-3:] != ".py":
50            continue
51        plugin_module = "plugins." + module[:-3]
52        # To access the module plugin.<module name>.<function>
53        # Example: plugin.foo_func.foo_func_yaml()
54        try:
55            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            print("PLUGIN: Module import failed: %s (%s)" % (module, e))
59except FileNotFoundError as e:
60    print("PLUGIN: %s" % e)
61    pass
62
63r"""
64This is for plugin functions returning data or responses to the caller
65in YAML plugin setup.
66
67Example:
68
69    - plugin:
70      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
71      - plugin_args:
72        - ${hostname}
73        - ${username}
74        - ${password}
75        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
76     - plugin:
77        - plugin_name: plugin.print_vars.print_vars
78        - plugin_args:
79          - version
80
81where first plugin "version" var is used by another plugin in the YAML
82block or plugin
83
84"""
85global global_log_store_path
86global global_plugin_dict
87global global_plugin_list
88
# Hold the plugin return values in a dict and plugin return vars in a list.
# The dict is used to reference and update vars while the parser processes
# them, whereas the list holds the current vars from the plugin block that
# still need processing.
92global_plugin_dict = {}
93global_plugin_list = []
94
# Hold the declared plugin return variable names whose returned values are
# a list or dict.  This name list is used to look up the plugin dict when
# building eval() function args.
# Example: ['version']
98global_plugin_type_list = []
99
100# Path where logs are to be stored or written.
101global_log_store_path = ""
102
103# Plugin error state defaults.
104plugin_error_dict = {
105    "exit_on_error": False,
106    "continue_on_error": False,
107}
108
109
110class ffdc_collector:
111    r"""
112    Execute commands from configuration file to collect log files.
113    Fetch and store generated files at the specified location.
114
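    Example usage (an illustrative sketch; the actual CLI wrapper may differ):

        collector = ffdc_collector(hostname, username, password,
                                   ffdc_config, location, remote_type,
                                   remote_protocol, env_vars, econfig,
                                   log_level)
        collector.collect_ffdc()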
115    """
116
117    def __init__(
118        self,
119        hostname,
120        username,
121        password,
122        ffdc_config,
123        location,
124        remote_type,
125        remote_protocol,
126        env_vars,
127        econfig,
128        log_level,
129    ):
        r"""
        Description of argument(s):

        hostname            Name/IP of the targeted (remote) system.
        username            User on the targeted system with access to FFDC files.
        password            Password for the user on the targeted system.
        ffdc_config         Configuration file listing commands and files for FFDC.
        location            Where to store collected FFDC.
        remote_type         OS type of the remote host.
        remote_protocol     Protocol to use to collect data.
        env_vars            User-defined CLI env vars '{"key": "value"}'.
        econfig             User-defined env vars YAML file.
        log_level           Log level for the script logger (e.g. INFO, DEBUG).

        """
144
145        self.hostname = hostname
146        self.username = username
147        self.password = password
148        self.ffdc_config = ffdc_config
149        self.location = location + "/" + remote_type.upper()
150        self.ssh_remoteclient = None
151        self.telnet_remoteclient = None
152        self.ffdc_dir_path = ""
153        self.ffdc_prefix = ""
154        self.target_type = remote_type.upper()
155        self.remote_protocol = remote_protocol.upper()
156        self.env_vars = env_vars
157        self.econfig = econfig
158        self.start_time = 0
159        self.elapsed_time = ""
160        self.logger = None
161
        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set
        # here once so that all files from this run carry the same timestamp
        # and are saved in the same directory.
        # self.location == local system for now.
167        self.set_ffdc_default_store_path()
168
        # Logger for this run.  Must be created after set_ffdc_default_store_path().
170        self.script_logging(getattr(logging, log_level.upper()))
171
172        # Verify top level directory exists for storage
173        self.validate_local_store(self.location)
174
175        if self.verify_script_env():
            # Load the default or user-defined YAML configuration file.
177            with open(self.ffdc_config, "r") as file:
178                try:
179                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
180                except yaml.YAMLError as e:
181                    self.logger.error(e)
182                    sys.exit(-1)
183
184            if self.target_type not in self.ffdc_actions.keys():
185                self.logger.error(
186                    "\n\tERROR: %s is not listed in %s.\n\n"
187                    % (self.target_type, self.ffdc_config)
188                )
189                sys.exit(-1)
190        else:
191            sys.exit(-1)
192
193        # Load ENV vars from user.
        self.logger.info("\n\tENV: User-defined input YAML variables")
195        self.env_dict = {}
196        self.load_env()
197
    def verify_script_env(self):
        r"""
        Verify the script host environment and log tool and package versions.

        Returns True if the environment meets the minimum requirements.
        """
        # Imported here only to log the installed versions.
200        import click
201        import paramiko
202
203        run_env_ok = True
204
205        redfishtool_version = (
206            self.run_tool_cmd("redfishtool -V").split(" ")[2].strip("\n")
207        )
208        ipmitool_version = self.run_tool_cmd("ipmitool -V").split(" ")[2]
209
210        self.logger.info("\n\t---- Script host environment ----")
211        self.logger.info(
212            "\t{:<10}  {:<10}".format("Script hostname", os.uname()[1])
213        )
214        self.logger.info(
215            "\t{:<10}  {:<10}".format("Script host os", platform.platform())
216        )
217        self.logger.info(
218            "\t{:<10}  {:>10}".format("Python", platform.python_version())
219        )
220        self.logger.info("\t{:<10}  {:>10}".format("PyYAML", yaml.__version__))
221        self.logger.info("\t{:<10}  {:>10}".format("click", click.__version__))
222        self.logger.info(
223            "\t{:<10}  {:>10}".format("paramiko", paramiko.__version__)
224        )
225        self.logger.info(
226            "\t{:<10}  {:>9}".format("redfishtool", redfishtool_version)
227        )
228        self.logger.info(
229            "\t{:<10}  {:>12}".format("ipmitool", ipmitool_version)
230        )
231
        if tuple(map(int, yaml.__version__.split("."))) < (5, 3, 0):
233            self.logger.error(
234                "\n\tERROR: Python or python packages do not meet minimum"
235                " version requirement."
236            )
237            self.logger.error(
238                "\tERROR: PyYAML version 5.3.0 or higher is needed.\n"
239            )
240            run_env_ok = False
241
242        self.logger.info("\t---- End script host environment ----")
243        return run_env_ok
244
    def script_logging(self, log_level_attr):
        r"""
        Create the logger for this run.

        Description of argument(s):
        log_level_attr    Logging level (e.g. logging.INFO, logging.DEBUG).

        """
250        self.logger = logging.getLogger()
251        self.logger.setLevel(log_level_attr)
252        log_file_handler = logging.FileHandler(
253            self.ffdc_dir_path + "collector.log"
254        )
255
256        stdout_handler = logging.StreamHandler(sys.stdout)
257        self.logger.addHandler(log_file_handler)
258        self.logger.addHandler(stdout_handler)
259
260        # Turn off paramiko INFO logging
261        logging.getLogger("paramiko").setLevel(logging.WARNING)
262
263    def target_is_pingable(self):
264        r"""
265        Check if target system is ping-able.
266
267        """
        response = os.system("ping -c 1 %s >/dev/null 2>&1" % self.hostname)
269        if response == 0:
270            self.logger.info(
271                "\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname
272            )
273            return True
274        else:
275            self.logger.error(
276                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n"
277                % self.hostname
278            )
279            sys.exit(-1)
280
281    def collect_ffdc(self):
282        r"""
283        Initiate FFDC Collection depending on requested protocol.
284
285        """
286
287        self.logger.info(
288            "\n\t---- Start communicating with %s ----" % self.hostname
289        )
290        self.start_time = time.time()
291
        # Find the list of protocols supported for this target type.
293        check_protocol_list = []
294        config_dict = self.ffdc_actions
295
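        # Illustrative YAML layout assumed by the loop below (section names
        # are examples only):
        #
        #   OPENBMC:
        #       GENERAL:
        #           COMMANDS: [...]
        #           FILES: [...]
        #           PROTOCOL: ['SSH']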
296        for target_type in config_dict.keys():
297            if self.target_type != target_type:
298                continue
299
300            for k, v in config_dict[target_type].items():
301                if (
302                    config_dict[target_type][k]["PROTOCOL"][0]
303                    not in check_protocol_list
304                ):
305                    check_protocol_list.append(
306                        config_dict[target_type][k]["PROTOCOL"][0]
307                    )
308
309        self.logger.info(
310            "\n\t %s protocol type: %s"
311            % (self.target_type, check_protocol_list)
312        )
313
314        verified_working_protocol = self.verify_protocol(check_protocol_list)
315
316        if verified_working_protocol:
317            self.logger.info(
318                "\n\t---- Completed protocol pre-requisite check ----\n"
319            )
320
321        # Verify top level directory exists for storage
322        self.validate_local_store(self.location)
323
324        if (self.remote_protocol not in verified_working_protocol) and (
325            self.remote_protocol != "ALL"
326        ):
327            self.logger.info(
328                "\n\tWorking protocol list: %s" % verified_working_protocol
329            )
330            self.logger.error(
331                "\tERROR: Requested protocol %s is not in working protocol"
332                " list.\n" % self.remote_protocol
333            )
334            sys.exit(-1)
335        else:
336            self.generate_ffdc(verified_working_protocol)
337
338    def ssh_to_target_system(self):
339        r"""
        Open an SSH connection to the targeted system.
341
342        """
343
344        self.ssh_remoteclient = SSHRemoteclient(
345            self.hostname, self.username, self.password
346        )
347
348        if self.ssh_remoteclient.ssh_remoteclient_login():
349            self.logger.info(
350                "\n\t[Check] %s SSH connection established.\t [OK]"
351                % self.hostname
352            )
353
354            # Check scp connection.
355            # If scp connection fails,
356            # continue with FFDC generation but skip scp files to local host.
357            self.ssh_remoteclient.scp_connection()
358            return True
359        else:
360            self.logger.info(
361                "\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]"
362                % self.hostname
363            )
364            return False
365
366    def telnet_to_target_system(self):
367        r"""
368        Open a telnet connection to targeted system.
369        """
370        self.telnet_remoteclient = TelnetRemoteclient(
371            self.hostname, self.username, self.password
372        )
373        if self.telnet_remoteclient.tn_remoteclient_login():
374            self.logger.info(
375                "\n\t[Check] %s Telnet connection established.\t [OK]"
376                % self.hostname
377            )
378            return True
379        else:
380            self.logger.info(
381                "\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]"
382                % self.hostname
383            )
384            return False
385
386    def generate_ffdc(self, working_protocol_list):
387        r"""
388        Determine actions based on remote host type
389
390        Description of argument(s):
391        working_protocol_list    list of confirmed working protocols to connect to remote host.
392        """
393
394        self.logger.info(
395            "\n\t---- Executing commands on " + self.hostname + " ----"
396        )
397        self.logger.info(
398            "\n\tWorking protocol list: %s" % working_protocol_list
399        )
400
401        config_dict = self.ffdc_actions
402        for target_type in config_dict.keys():
403            if self.target_type != target_type:
404                continue
405
406            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
407            global_plugin_dict["global_log_store_path"] = self.ffdc_dir_path
408            self.logger.info("\tSystem Type: %s" % target_type)
409            for k, v in config_dict[target_type].items():
410                if (
411                    self.remote_protocol not in working_protocol_list
412                    and self.remote_protocol != "ALL"
413                ):
414                    continue
415
416                protocol = config_dict[target_type][k]["PROTOCOL"][0]
417
                if protocol in working_protocol_list:
422                    if protocol == "SSH" or protocol == "SCP":
423                        self.protocol_ssh(protocol, target_type, k)
424                    elif protocol == "TELNET":
425                        self.protocol_telnet(target_type, k)
426                    elif (
427                        protocol == "REDFISH"
428                        or protocol == "IPMI"
429                        or protocol == "SHELL"
430                    ):
431                        self.protocol_execute(protocol, target_type, k)
432                else:
433                    self.logger.error(
434                        "\n\tERROR: %s is not available for %s."
435                        % (protocol, self.hostname)
436                    )
437
438        # Close network connection after collecting all files
439        self.elapsed_time = time.strftime(
440            "%H:%M:%S", time.gmtime(time.time() - self.start_time)
441        )
442        if self.ssh_remoteclient:
443            self.ssh_remoteclient.ssh_remoteclient_disconnect()
444        if self.telnet_remoteclient:
445            self.telnet_remoteclient.tn_remoteclient_disconnect()
446
447    def protocol_ssh(self, protocol, target_type, sub_type):
448        r"""
449        Perform actions using SSH and SCP protocols.
450
451        Description of argument(s):
452        protocol            Protocol to execute.
453        target_type         OS Type of remote host.
454        sub_type            Group type of commands.
455        """
456
457        if protocol == "SCP":
458            self.group_copy(self.ffdc_actions[target_type][sub_type])
459        else:
460            self.collect_and_copy_ffdc(
461                self.ffdc_actions[target_type][sub_type]
462            )
463
    def protocol_telnet(self, target_type, sub_type):
        r"""
        Perform actions using the Telnet protocol.

        Description of argument(s):
        target_type         OS Type of remote host.
        sub_type            Group type of commands.
        """
470        self.logger.info(
471            "\n\t[Run] Executing commands on %s using %s"
472            % (self.hostname, "TELNET")
473        )
474        telnet_files_saved = []
475        progress_counter = 0
476        list_of_commands = self.ffdc_actions[target_type][sub_type]["COMMANDS"]
477        for index, each_cmd in enumerate(list_of_commands, start=0):
478            command_txt, command_timeout = self.unpack_command(each_cmd)
479            result = self.telnet_remoteclient.execute_command(
480                command_txt, command_timeout
481            )
482            if result:
483                try:
484                    targ_file = self.ffdc_actions[target_type][sub_type][
485                        "FILES"
486                    ][index]
487                except IndexError:
488                    targ_file = command_txt
489                    self.logger.warning(
490                        "\n\t[WARN] Missing filename to store data from"
491                        " telnet %s." % each_cmd
492                    )
493                    self.logger.warning(
494                        "\t[WARN] Data will be stored in %s." % targ_file
495                    )
496                targ_file_with_path = (
497                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
498                )
                # Create a new file and write the command output.
                with open(targ_file_with_path, "w") as fp:
                    fp.write(result)
                    telnet_files_saved.append(targ_file)
504            progress_counter += 1
505            self.print_progress(progress_counter)
506        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
507        for file in telnet_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
509
510    def protocol_execute(self, protocol, target_type, sub_type):
511        r"""
512        Perform actions for a given protocol.
513
514        Description of argument(s):
515        protocol            Protocol to execute.
516        target_type         OS Type of remote host.
517        sub_type            Group type of commands.
518        """
519
520        self.logger.info(
521            "\n\t[Run] Executing commands to %s using %s"
522            % (self.hostname, protocol)
523        )
524        executed_files_saved = []
525        progress_counter = 0
526        list_of_cmd = self.get_command_list(
527            self.ffdc_actions[target_type][sub_type]
528        )
529        for index, each_cmd in enumerate(list_of_cmd, start=0):
530            plugin_call = False
531            if isinstance(each_cmd, dict):
532                if "plugin" in each_cmd:
533                    # If the error is set and plugin explicitly
534                    # requested to skip execution on error..
535                    if plugin_error_dict[
536                        "exit_on_error"
537                    ] and self.plugin_error_check(each_cmd["plugin"]):
538                        self.logger.info(
539                            "\n\t[PLUGIN-ERROR] exit_on_error: %s"
540                            % plugin_error_dict["exit_on_error"]
541                        )
542                        self.logger.info(
543                            "\t[PLUGIN-SKIP] %s" % each_cmd["plugin"][0]
544                        )
545                        continue
546                    plugin_call = True
547                    # call the plugin
548                    self.logger.info("\n\t[PLUGIN-START]")
549                    result = self.execute_plugin_block(each_cmd["plugin"])
550                    self.logger.info("\t[PLUGIN-END]\n")
551            else:
552                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
553
554            if not plugin_call:
555                result = self.run_tool_cmd(each_cmd)
556            if result:
557                try:
558                    file_name = self.get_file_list(
559                        self.ffdc_actions[target_type][sub_type]
560                    )[index]
                    # If the file is specified as None, skip saving the output.
                    if file_name == "None":
563                        continue
564                    targ_file = self.yaml_env_and_plugin_vars_populate(
565                        file_name
566                    )
567                except IndexError:
568                    targ_file = each_cmd.split("/")[-1]
569                    self.logger.warning(
570                        "\n\t[WARN] Missing filename to store data from %s."
571                        % each_cmd
572                    )
573                    self.logger.warning(
574                        "\t[WARN] Data will be stored in %s." % targ_file
575                    )
576
577                targ_file_with_path = (
578                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
579                )
580
                # Create a new file and write the command output.
                with open(targ_file_with_path, "w") as fp:
                    if isinstance(result, dict):
                        fp.write(json.dumps(result))
                    else:
                        fp.write(result)
                    executed_files_saved.append(targ_file)
589
590            progress_counter += 1
591            self.print_progress(progress_counter)
592
593        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
594
595        for file in executed_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
597
598    def collect_and_copy_ffdc(
599        self, ffdc_actions_for_target_type, form_filename=False
600    ):
601        r"""
602        Send commands in ffdc_config file to targeted system.
603
604        Description of argument(s):
605        ffdc_actions_for_target_type     commands and files for the selected remote host type.
606        form_filename                    if true, pre-pend self.target_type to filename
607        """
608
609        # Executing commands, if any
610        self.ssh_execute_ffdc_commands(
611            ffdc_actions_for_target_type, form_filename
612        )
613
614        # Copying files
615        if self.ssh_remoteclient.scpclient:
616            self.logger.info(
617                "\n\n\tCopying FFDC files from remote system %s.\n"
618                % self.hostname
619            )
620
621            # Retrieving files from target system
622            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
623            self.scp_ffdc(
624                self.ffdc_dir_path,
625                self.ffdc_prefix,
626                form_filename,
627                list_of_files,
628            )
629        else:
630            self.logger.info(
631                "\n\n\tSkip copying FFDC files from remote system %s.\n"
632                % self.hostname
633            )
634
635    def get_command_list(self, ffdc_actions_for_target_type):
636        r"""
        Fetch the list of commands from the configuration file.
638
639        Description of argument(s):
640        ffdc_actions_for_target_type    commands and files for the selected remote host type.
641        """
642        try:
643            list_of_commands = ffdc_actions_for_target_type["COMMANDS"]
644        except KeyError:
645            list_of_commands = []
646        return list_of_commands
647
    def get_file_list(self, ffdc_actions_for_target_type):
        r"""
        Fetch the list of files from the configuration file.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
655        try:
656            list_of_files = ffdc_actions_for_target_type["FILES"]
657        except KeyError:
658            list_of_files = []
659        return list_of_files
660
661    def unpack_command(self, command):
662        r"""
663        Unpack command from config file
664
665        Description of argument(s):
666        command    Command from config file.
667        """
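        # For example (illustrative):
        #   "cat /etc/os-release"   ->  ("cat /etc/os-release", 60)
        #   {"dmesg": 120}          ->  ("dmesg", 120)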
668        if isinstance(command, dict):
669            command_txt = next(iter(command))
670            command_timeout = next(iter(command.values()))
671        elif isinstance(command, str):
672            command_txt = command
673            # Default command timeout 60 seconds
674            command_timeout = 60
675
676        return command_txt, command_timeout
677
678    def ssh_execute_ffdc_commands(
679        self, ffdc_actions_for_target_type, form_filename=False
680    ):
681        r"""
682        Send commands in ffdc_config file to targeted system.
683
684        Description of argument(s):
685        ffdc_actions_for_target_type    commands and files for the selected remote host type.
686        form_filename                    if true, pre-pend self.target_type to filename
687        """
688        self.logger.info(
689            "\n\t[Run] Executing commands on %s using %s"
690            % (self.hostname, ffdc_actions_for_target_type["PROTOCOL"][0])
691        )
692
693        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
694        # If command list is empty, returns
695        if not list_of_commands:
696            return
697
698        progress_counter = 0
699        for command in list_of_commands:
700            command_txt, command_timeout = self.unpack_command(command)
701
702            if form_filename:
703                command_txt = str(command_txt % self.target_type)
704
705            (
706                cmd_exit_code,
707                err,
708                response,
709            ) = self.ssh_remoteclient.execute_command(
710                command_txt, command_timeout
711            )
712
713            if cmd_exit_code:
714                self.logger.warning(
715                    "\n\t\t[WARN] %s exits with code %s."
716                    % (command_txt, str(cmd_exit_code))
717                )
718                self.logger.warning("\t\t[WARN] %s " % err)
719
720            progress_counter += 1
721            self.print_progress(progress_counter)
722
723        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
724
    def group_copy(self, ffdc_actions_for_target_type):
        r"""
        SCP a group of files (wild card) from the remote host.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
732
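        # For example (illustrative), a YAML COMMANDS entry such as
        #   - ls -AX /tmp/ffdc_dumps/*
        # (a hypothetical path) lists the matching files on the remote host,
        # and each listed file is then scp'd into self.ffdc_dir_path.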
733        if self.ssh_remoteclient.scpclient:
734            self.logger.info(
735                "\n\tCopying files from remote system %s via SCP.\n"
736                % self.hostname
737            )
738
739            list_of_commands = self.get_command_list(
740                ffdc_actions_for_target_type
741            )
742            # If command list is empty, returns
743            if not list_of_commands:
744                return
745
746            for command in list_of_commands:
747                try:
748                    command = self.yaml_env_and_plugin_vars_populate(command)
749                except IndexError:
750                    self.logger.error("\t\tInvalid command %s" % command)
751                    continue
752
753                (
754                    cmd_exit_code,
755                    err,
756                    response,
757                ) = self.ssh_remoteclient.execute_command(command)
758
                # If the file does not exist, the code takes no action.
                # cmd_exit_code is ignored for this scenario.
761                if response:
762                    scp_result = self.ssh_remoteclient.scp_file_from_remote(
763                        response.split("\n"), self.ffdc_dir_path
764                    )
765                    if scp_result:
766                        self.logger.info(
767                            "\t\tSuccessfully copied from "
768                            + self.hostname
769                            + ":"
770                            + command
771                        )
772                else:
773                    self.logger.info("\t\t%s has no result" % command)
774
775        else:
776            self.logger.info(
777                "\n\n\tSkip copying files from remote system %s.\n"
778                % self.hostname
779            )
780
781    def scp_ffdc(
782        self,
783        targ_dir_path,
784        targ_file_prefix,
785        form_filename,
786        file_list=None,
787        quiet=None,
788    ):
        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path       The path of the directory to receive the files.
        targ_file_prefix    Prefix which will be prepended to each target file's name.
        form_filename       If True, prepend self.target_type to the filename.
        file_list           A list of files to scp from the targeted system to this system.
        quiet               If True, print a progress counter instead of per-file messages.

        """
799
800        progress_counter = 0
801        for filename in file_list:
802            if form_filename:
803                filename = str(filename % self.target_type)
804            source_file_path = filename
805            targ_file_path = (
806                targ_dir_path + targ_file_prefix + filename.split("/")[-1]
807            )
808
809            # If source file name contains wild card, copy filename as is.
810            if "*" in source_file_path:
811                scp_result = self.ssh_remoteclient.scp_file_from_remote(
812                    source_file_path, self.ffdc_dir_path
813                )
814            else:
815                scp_result = self.ssh_remoteclient.scp_file_from_remote(
816                    source_file_path, targ_file_path
817                )
818
819            if not quiet:
820                if scp_result:
821                    self.logger.info(
822                        "\t\tSuccessfully copied from "
823                        + self.hostname
824                        + ":"
825                        + source_file_path
826                        + ".\n"
827                    )
828                else:
829                    self.logger.info(
                        "\t\tFailed to copy from "
831                        + self.hostname
832                        + ":"
833                        + source_file_path
834                        + ".\n"
835                    )
836            else:
837                progress_counter += 1
838                self.print_progress(progress_counter)
839
    def set_ffdc_default_store_path(self):
        r"""
        Set default values for self.ffdc_dir_path and self.ffdc_prefix.
        Collected FFDC files will be stored in the directory
        /self.location/hostname_timestr/.
        Individual FFDC files will be prefixed with timestr_.

        Description of class variables:
        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.

        self.ffdc_prefix    The prefix to be given to each ffdc file name.

        """
852
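        # For example (illustrative): with self.location "/tmp/logs/OPENBMC",
        # hostname "1.2.3.4" and timestr "20240101-093000":
        #   self.ffdc_dir_path = "/tmp/logs/OPENBMC/1.2.3.4_20240101-093000/"
        #   self.ffdc_prefix   = "20240101-093000_"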
853        timestr = time.strftime("%Y%m%d-%H%M%S")
854        self.ffdc_dir_path = (
855            self.location + "/" + self.hostname + "_" + timestr + "/"
856        )
857        self.ffdc_prefix = timestr + "_"
858        self.validate_local_store(self.ffdc_dir_path)
859
    # Need to verify that the local store path exists prior to instantiating
    # this class.  This class method is used to share the same code between
    # the CLI input parm and Robot Framework "${EXECDIR}/logs" before
    # referencing this class.
863    @classmethod
864    def validate_local_store(cls, dir_path):
865        r"""
866        Ensure path exists to store FFDC files locally.
867
868        Description of variable:
869        dir_path  The dir path where collected ffdc data files will be stored.
870
871        """
872
        if not os.path.exists(dir_path):
            try:
                os.makedirs(dir_path, 0o755)
            except (IOError, OSError) as e:
                # This is a classmethod: self.logger is not available here,
                # so report the failure to stderr instead.
                if e.errno == EPERM or e.errno == EACCES:
                    print(
                        "\tERROR: os.makedirs %s failed with"
                        " PermissionError.\n" % dir_path,
                        file=sys.stderr,
                    )
                else:
                    print(
                        "\tERROR: os.makedirs %s failed with %s.\n"
                        % (dir_path, e.strerror),
                        file=sys.stderr,
                    )
                sys.exit(-1)
889
    def print_progress(self, progress):
        r"""
        Print a '+' for each unit of activity progress.

        Description of variable:
        progress  Progress counter.

        """
898
899        sys.stdout.write("\r\t" + "+" * progress)
900        sys.stdout.flush()
901        time.sleep(0.1)
902
903    def verify_redfish(self):
904        r"""
        Verify that the remote host has the Redfish service active.
906
907        """
908        redfish_parm = (
909            "redfishtool -r "
910            + self.hostname
911            + " -S Always raw GET /redfish/v1/"
912        )
913        return self.run_tool_cmd(redfish_parm, True)
914
915    def verify_ipmi(self):
916        r"""
        Verify that the remote host has the IPMI LAN service active.
918
919        """
920        if self.target_type == "OPENBMC":
921            ipmi_parm = (
922                "ipmitool -I lanplus -C 17  -U "
923                + self.username
924                + " -P "
925                + self.password
926                + " -H "
927                + self.hostname
928                + " power status"
929            )
930        else:
931            ipmi_parm = (
932                "ipmitool -I lanplus  -P "
933                + self.password
934                + " -H "
935                + self.hostname
936                + " power status"
937            )
938
939        return self.run_tool_cmd(ipmi_parm, True)
940
941    def run_tool_cmd(self, parms_string, quiet=False):
942        r"""
943        Run CLI standard tool or scripts.
944
945        Description of variable:
946        parms_string         tool command options.
947        quiet                do not print tool error message if True
948        """
949
        result = subprocess.run(
            parms_string,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=True,
            universal_newlines=True,
        )
957
958        if result.stderr and not quiet:
959            self.logger.error("\n\t\tERROR with %s " % parms_string)
960            self.logger.error("\t\t" + result.stderr)
961
962        return result.stdout
963
964    def verify_protocol(self, protocol_list):
965        r"""
966        Perform protocol working check.
967
968        Description of argument(s):
        protocol_list        List of protocols to verify.
970        """
971
972        tmp_list = []
973        if self.target_is_pingable():
974            tmp_list.append("SHELL")
975
976        for protocol in protocol_list:
977            if self.remote_protocol != "ALL":
978                if self.remote_protocol != protocol:
979                    continue
980
981            # Only check SSH/SCP once for both protocols
            if (
                protocol == "SSH" or protocol == "SCP"
            ) and protocol not in tmp_list:
987                if self.ssh_to_target_system():
988                    # Add only what user asked.
989                    if self.remote_protocol != "ALL":
990                        tmp_list.append(self.remote_protocol)
991                    else:
992                        tmp_list.append("SSH")
993                        tmp_list.append("SCP")
994
995            if protocol == "TELNET":
996                if self.telnet_to_target_system():
997                    tmp_list.append(protocol)
998
999            if protocol == "REDFISH":
1000                if self.verify_redfish():
1001                    tmp_list.append(protocol)
1002                    self.logger.info(
1003                        "\n\t[Check] %s Redfish Service.\t\t [OK]"
1004                        % self.hostname
1005                    )
1006                else:
1007                    self.logger.info(
1008                        "\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]"
1009                        % self.hostname
1010                    )
1011
1012            if protocol == "IPMI":
1013                if self.verify_ipmi():
1014                    tmp_list.append(protocol)
1015                    self.logger.info(
1016                        "\n\t[Check] %s IPMI LAN Service.\t\t [OK]"
1017                        % self.hostname
1018                    )
1019                else:
1020                    self.logger.info(
1021                        "\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]"
1022                        % self.hostname
1023                    )
1024
1025        return tmp_list
1026
    def load_env(self):
        r"""
        Load user-defined environment variables from the CLI input and the
        env config YAML file, then export them for YAML substitution.

        """
        # These env vars can be referenced in YAML and are resolved at runtime.
        # Example YAML:
        # -COMMANDS:
        #    - my_command ${hostname}  ${username}   ${password}
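        # Illustrative inputs assumed here:
        #   env_vars (CLI JSON string): '{"MY_TOKEN": "abc123"}'
        #   econfig (YAML file):
        #       env_params:
        #           MY_TOKEN: abc123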
1036        os.environ["hostname"] = self.hostname
1037        os.environ["username"] = self.username
1038        os.environ["password"] = self.password
1039
1040        # Append default Env.
1041        self.env_dict["hostname"] = self.hostname
1042        self.env_dict["username"] = self.username
1043        self.env_dict["password"] = self.password
1044
1045        try:
1046            tmp_env_dict = {}
1047            if self.env_vars:
1048                tmp_env_dict = json.loads(self.env_vars)
1049                # Export ENV vars default.
1050                for key, value in tmp_env_dict.items():
1051                    os.environ[key] = value
1052                    self.env_dict[key] = str(value)
1053
1054            if self.econfig:
1055                with open(self.econfig, "r") as file:
1056                    try:
1057                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
1058                    except yaml.YAMLError as e:
1059                        self.logger.error(e)
1060                        sys.exit(-1)
1061                # Export ENV vars.
1062                for key, value in tmp_env_dict["env_params"].items():
1063                    os.environ[key] = str(value)
1064                    self.env_dict[key] = str(value)
1065        except json.decoder.JSONDecodeError as e:
1066            self.logger.error("\n\tERROR: %s " % e)
1067            sys.exit(-1)
1068
        # This is to mask the password from displaying on the console.
1070        mask_dict = self.env_dict.copy()
1071        for k, v in mask_dict.items():
1072            if k.lower().find("password") != -1:
1073                hidden_text = []
1074                hidden_text.append(v)
1075                password_regex = (
1076                    "(" + "|".join([re.escape(x) for x in hidden_text]) + ")"
1077                )
1078                mask_dict[k] = re.sub(password_regex, "********", v)
1079
1080        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
1081
1082    def execute_python_eval(self, eval_string):
1083        r"""
1084        Execute qualified python function string using eval.
1085
1086        Description of argument(s):
1087        eval_string        Execute the python object.
1088
1089        Example:
1090                eval(plugin.foo_func.foo_func(10))
1091        """
1092        try:
1093            self.logger.info("\tExecuting plugin func()")
1094            self.logger.debug("\tCall func: %s" % eval_string)
1095            result = eval(eval_string)
1096            self.logger.info("\treturn: %s" % str(result))
1097        except (
1098            ValueError,
1099            SyntaxError,
1100            NameError,
1101            AttributeError,
1102            TypeError,
1103        ) as e:
1104            self.logger.error("\tERROR: execute_python_eval: %s" % e)
1105            # Set the plugin error state.
1106            plugin_error_dict["exit_on_error"] = True
1107            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
1108            return "PLUGIN_EVAL_ERROR"
1109
1110        return result
1111
    def execute_plugin_block(self, plugin_cmd_list):
        r"""
        Pack the plugin command into a qualified python string object.

        Description of argument(s):
        plugin_cmd_list       Plugin block read from YAML
                              [{'plugin_name': 'plugin.foo_func.my_func'},
                               {'plugin_args': [10]}]
1120
1121        Example:
1122            - plugin:
1123              - plugin_name: plugin.foo_func.my_func
1124              - plugin_args:
1125                - arg1
1126                - arg2
1127
1128            - plugin:
1129              - plugin_name: result = plugin.foo_func.my_func
1130              - plugin_args:
1131                - arg1
1132                - arg2
1133
1134            - plugin:
1135              - plugin_name: result1,result2 = plugin.foo_func.my_func
1136              - plugin_args:
1137                - arg1
1138                - arg2
1139        """
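        # For example (illustrative), the first YAML block above is packed
        # into the string plugin.foo_func.my_func("arg1","arg2") and handed
        # to execute_python_eval().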
1140        try:
1141            idx = self.key_index_list_dict("plugin_name", plugin_cmd_list)
1142            plugin_name = plugin_cmd_list[idx]["plugin_name"]
1143            # Equal separator means plugin function returns result.
1144            if " = " in plugin_name:
1145                # Ex. ['result', 'plugin.foo_func.my_func']
1146                plugin_name_args = plugin_name.split(" = ")
1147                # plugin func return data.
1148                for arg in plugin_name_args:
1149                    if arg == plugin_name_args[-1]:
1150                        plugin_name = arg
1151                    else:
1152                        plugin_resp = arg.split(",")
1153                        # ['result1','result2']
1154                        for x in plugin_resp:
1155                            global_plugin_list.append(x)
1156                            global_plugin_dict[x] = ""
1157
            # Walk the plugin args ['arg1', 'arg2'].
            # Note: the YAML 'plugin_args' statement may not be declared.
1160            if any("plugin_args" in d for d in plugin_cmd_list):
1161                idx = self.key_index_list_dict("plugin_args", plugin_cmd_list)
1162                plugin_args = plugin_cmd_list[idx]["plugin_args"]
1163                if plugin_args:
1164                    plugin_args = self.yaml_args_populate(plugin_args)
1165                else:
1166                    plugin_args = []
1167            else:
1168                plugin_args = self.yaml_args_populate([])
1169
1170            # Pack the args arg1, arg2, .... argn into
1171            # "arg1","arg2","argn"  string as params for function.
1172            parm_args_str = self.yaml_args_string(plugin_args)
1173            if parm_args_str:
1174                plugin_func = plugin_name + "(" + parm_args_str + ")"
1175            else:
1176                plugin_func = plugin_name + "()"
1177
1178            # Execute plugin function.
1179            if global_plugin_dict:
1180                resp = self.execute_python_eval(plugin_func)
1181                # Update plugin vars dict if there is any.
1182                if resp != "PLUGIN_EVAL_ERROR":
1183                    self.response_args_data(resp)
1184            else:
1185                resp = self.execute_python_eval(plugin_func)
        except Exception as e:
            # Set the plugin error state.
            plugin_error_dict["exit_on_error"] = True
            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
            resp = "PLUGIN_EVAL_ERROR"
1191
1192        # There is a real error executing the plugin function.
1193        if resp == "PLUGIN_EVAL_ERROR":
1194            return resp
1195
        # Check if plugin_expects_return is declared (int, str, list, dict, etc.).
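        # Illustrative YAML directive assumed here:
        #   - plugin_expects_return: dict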
1197        if any("plugin_expects_return" in d for d in plugin_cmd_list):
1198            idx = self.key_index_list_dict(
1199                "plugin_expects_return", plugin_cmd_list
1200            )
1201            plugin_expects = plugin_cmd_list[idx]["plugin_expects_return"]
1202            if plugin_expects:
1203                if resp:
1204                    if (
1205                        self.plugin_expect_type(plugin_expects, resp)
1206                        == "INVALID"
1207                    ):
1208                        self.logger.error("\tWARN: Plugin error check skipped")
1209                    elif not self.plugin_expect_type(plugin_expects, resp):
1210                        self.logger.error(
1211                            "\tERROR: Plugin expects return data: %s"
1212                            % plugin_expects
1213                        )
1214                        plugin_error_dict["exit_on_error"] = True
1215                elif not resp:
1216                    self.logger.error(
1217                        "\tERROR: Plugin func failed to return data"
1218                    )
1219                    plugin_error_dict["exit_on_error"] = True
1220
1221        return resp
1222
    def response_args_data(self, plugin_resp):
        r"""
        Parse the plugin function response and update the plugin return
        variables.

        Description of argument(s):
        plugin_resp       Response data from the plugin function.
        """
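        # For example (illustrative): with global_plugin_list == ['r1', 'r2']
        # and plugin_resp == ('a\n', 'b\n'), global_plugin_dict is updated to
        # {'r1': 'a', 'r2': 'b'}.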
1229        resp_list = []
1230        resp_data = ""
1231
1232        # There is nothing to update the plugin response.
1233        if len(global_plugin_list) == 0 or plugin_resp == "None":
1234            return
1235
1236        if isinstance(plugin_resp, str):
1237            resp_data = plugin_resp.strip("\r\n\t")
1238            resp_list.append(resp_data)
1239        elif isinstance(plugin_resp, bytes):
1240            resp_data = str(plugin_resp, "UTF-8").strip("\r\n\t")
1241            resp_list.append(resp_data)
1242        elif isinstance(plugin_resp, tuple):
1243            if len(global_plugin_list) == 1:
1244                resp_list.append(plugin_resp)
1245            else:
1246                resp_list = list(plugin_resp)
1247                resp_list = [x.strip("\r\n\t") for x in resp_list]
1248        elif isinstance(plugin_resp, list):
1249            if len(global_plugin_list) == 1:
1250                resp_list.append([x.strip("\r\n\t") for x in plugin_resp])
1251            else:
1252                resp_list = [x.strip("\r\n\t") for x in plugin_resp]
1253        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
1254            resp_list.append(plugin_resp)
1255
1256        # Iterate if there is a list of plugin return vars to update.
1257        for idx, item in enumerate(resp_list, start=0):
            # Exit the loop once all declared plugin return vars are updated.
1259            if idx >= len(global_plugin_list):
1260                break
1261            # Find the index of the return func in the list and
1262            # update the global func return dictionary.
1263            try:
1264                dict_idx = global_plugin_list[idx]
1265                global_plugin_dict[dict_idx] = item
            except (IndexError, ValueError) as e:
                self.logger.warning("\tWARN: response_args_data: %s" % e)
1269
1270        # Done updating plugin dict irrespective of pass or failed,
1271        # clear all the list element for next plugin block execute.
1272        global_plugin_list.clear()
1273
1274    def yaml_args_string(self, plugin_args):
1275        r"""
1276        Pack the args into string.
1277
        plugin_args            arg list ['arg1', 'arg2', ..., 'argn']
1279        """
1280        args_str = ""
1281        for args in plugin_args:
1282            if args:
1283                if isinstance(args, (int, float)):
1284                    args_str += str(args)
1285                elif args in global_plugin_type_list:
1286                    args_str += str(global_plugin_dict[args])
1287                else:
1288                    args_str += '"' + str(args.strip("\r\n\t")) + '"'
            # Add a comma separator after every element except the last.
            if args != plugin_args[-1]:
1291                args_str += ","
1292        return args_str
1293
1294    def yaml_args_populate(self, yaml_arg_list):
1295        r"""
1296        Decode env and plugin vars and populate.
1297
1298        Description of argument(s):
1299        yaml_arg_list         arg list read from YAML
1300
1301        Example:
1302          - plugin_args:
1303            - arg1
1304            - arg2
1305
                  yaml_arg_list:  [arg1, arg2]
1307        """
1308        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1309        env_vars_list = list(self.env_dict)
1310
1311        if isinstance(yaml_arg_list, list):
1312            tmp_list = []
1313            for arg in yaml_arg_list:
1314                if isinstance(arg, (int, float)):
1315                    tmp_list.append(arg)
1316                    continue
1317                elif isinstance(arg, str):
1318                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1319                    tmp_list.append(arg_str)
1320                else:
1321                    tmp_list.append(arg)
1322
1323            # return populated list.
1324            return tmp_list
1325
1326    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
1327        r"""
1328        Update ${MY_VAR} and plugin vars.
1329
1330        Description of argument(s):
1331        yaml_arg_str         arg string read from YAML.
1332
1333        Example:
1334            - cat ${MY_VAR}
1335            - ls -AX my_plugin_var
1336        """
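        # For example (illustrative): with os.environ['hostname'] == '1.2.3.4',
        # the string "ping -c 1 ${hostname}" becomes "ping -c 1 1.2.3.4".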
        # Parse the string for env vars ${env_vars}.
        env_var_names_list = []
        try:
            # Example, list of matching env vars ['username', 'password', 'hostname']
            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works well.
            var_name_regex = "\\$\\{([^\\}]+)\\}"
1342            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
1343            for var in env_var_names_list:
1344                env_var = os.environ[var]
1345                env_replace = "${" + var + "}"
1346                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
1347        except Exception as e:
1348            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
1349            pass
1350
1351        # Parse the string for plugin vars.
1352        try:
1353            # Example, list of plugin vars ['my_username', 'my_data']
1354            plugin_var_name_list = global_plugin_dict.keys()
1355            for var in plugin_var_name_list:
                # Skip any var already populated by the env var block above.
                if var in env_var_names_list:
                    continue
                # If this plugin var exists but is empty in the dict, don't
                # replace it.  This is either a YAML plugin statement used
                # incorrectly or a user-added plugin var that will never be
                # populated.
1362                if yaml_arg_str in global_plugin_dict:
1363                    if isinstance(global_plugin_dict[var], (list, dict)):
1364                        # List data type or dict can't be replaced, use directly
1365                        # in eval function call.
1366                        global_plugin_type_list.append(var)
1367                    else:
1368                        yaml_arg_str = yaml_arg_str.replace(
1369                            str(var), str(global_plugin_dict[var])
1370                        )
1371                # Just a string like filename or command.
1372                else:
1373                    yaml_arg_str = yaml_arg_str.replace(
1374                        str(var), str(global_plugin_dict[var])
1375                    )
1376        except (IndexError, ValueError) as e:
1377            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
1378            pass
1379
1380        return yaml_arg_str
1381
    def plugin_error_check(self, plugin_dict):
        r"""
        Process the plugin error directive from the plugin block.

        Description of argument(s):
        plugin_dict        Plugin block (list of dicts) read from YAML.
        """
1389        if any("plugin_error" in d for d in plugin_dict):
1390            for d in plugin_dict:
1391                if "plugin_error" in d:
1392                    value = d["plugin_error"]
1393                    # Reference if the error is set or not by plugin.
1394                    return plugin_error_dict[value]
1395
1396    def key_index_list_dict(self, key, list_dict):
1397        r"""
1398        Iterate list of dictionary and return index if the key match is found.
1399
1400        Description of argument(s):
1401        key           Valid Key in a dict.
1402        list_dict     list of dictionary.
1403        """
1404        for i, d in enumerate(list_dict):
1405            if key in d.keys():
1406                return i
1407
    def plugin_expect_type(self, type, data):
        r"""
        Plugin expect directive type check.

        Description of argument(s):
        type     Expected data type name (e.g. 'int', 'str', 'list').
        data     Data returned by the plugin function.
        """
1412        if type == "int":
1413            return isinstance(data, int)
1414        elif type == "float":
1415            return isinstance(data, float)
1416        elif type == "str":
1417            return isinstance(data, str)
1418        elif type == "list":
1419            return isinstance(data, list)
1420        elif type == "dict":
1421            return isinstance(data, dict)
1422        elif type == "tuple":
1423            return isinstance(data, tuple)
1424        else:
1425            self.logger.info("\tInvalid data type requested: %s" % type)
1426            return "INVALID"
1427