1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7import json
8import logging
9import os
10import platform
11import re
12import subprocess
13import sys
14import time
15from errno import EACCES, EPERM
16
17import yaml
18
# Make this script's directory — and every subdirectory under it —
# importable, so sibling utility modules can be found.
script_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(script_dir)
# Walk path and append to sys.path
for root, dirs, files in os.walk(script_dir):
    # "dir_name" instead of "dir": do not shadow the builtin dir().
    for dir_name in dirs:
        sys.path.append(os.path.join(root, dir_name))
25
26from ssh_utility import SSHRemoteclient  # NOQA
27from telnet_utility import TelnetRemoteclient  # NOQA
28
29r"""
30User define plugins python functions.
31
32It will imports files from directory plugins
33
34plugins
35├── file1.py
36└── file2.py
37
38Example how to define in YAML:
39 - plugin:
40   - plugin_name: plugin.foo_func.foo_func_yaml
41     - plugin_args:
42       - arg1
43       - arg2
44"""
# Directory holding user-defined plugin modules; computed with os.path
# instead of the previous fragile string-splitting on "/" (which produced
# a doubled slash and breaks on non-POSIX separators).
plugin_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "plugins")
sys.path.append(plugin_dir)
try:
    for module in os.listdir(plugin_dir):
        # Skip the package marker and anything that is not a .py source.
        if module == "__init__.py" or module[-3:] != ".py":
            continue
        plugin_module = "plugins." + module[:-3]
        # To access the module plugin.<module name>.<function>
        # Example: plugin.foo_func.foo_func_yaml()
        try:
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            # A broken plugin must not abort the collector; report and
            # continue with the remaining plugins.
            print("PLUGIN: Module import failed: %s" % module)
except FileNotFoundError as e:
    # No plugins directory at all is a supported configuration.
    print("PLUGIN: %s" % e)
62
63r"""
64This is for plugin functions returning data or responses to the caller
65in YAML plugin setup.
66
67Example:
68
69    - plugin:
70      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
71      - plugin_args:
72        - ${hostname}
73        - ${username}
74        - ${password}
75        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
76     - plugin:
77        - plugin_name: plugin.print_vars.print_vars
78        - plugin_args:
79          - version
80
81where first plugin "version" var is used by another plugin in the YAML
82block or plugin
83
84"""
# NOTE: the previous module-level "global" statements here were no-ops;
# assignment at module scope already creates module-level names.

# Hold the plugin return values in dict and plugin return vars in list.
# Dict is to reference and update vars processing in parser where as
# list is for current vars from the plugin block which needs processing.
global_plugin_dict = {}
global_plugin_list = []

# Hold the plugin return named declared if function returned values are
# list or dict.  Refer to this name list to look up the plugin dict for
# eval() args function.  Example: ['version']
global_plugin_type_list = []

# Path where logs are to be stored or written.
global_log_store_path = ""

# Plugin error state defaults.
plugin_error_dict = {
    "exit_on_error": False,
    "continue_on_error": False,
}
108
109
110class ffdc_collector:
111    r"""
112    Execute commands from configuration file to collect log files.
113    Fetch and store generated files at the specified location.
114
115    """
116
    def __init__(
        self,
        hostname,
        username,
        password,
        port_https,
        port_ipmi,
        ffdc_config,
        location,
        remote_type,
        remote_protocol,
        env_vars,
        econfig,
        log_level,
    ):
        r"""
        Initialize the collector: create the local FFDC store, set up
        logging, verify the script host environment, and load the YAML
        configuration for the target type.

        Description of argument(s):

        hostname            name/ip of the targeted (remote) system
        username            user on the targeted system with access to FFDC files
        password            password for user on targeted system
        port_https          HTTPS port value. By default 443
        port_ipmi           IPMI port value. By default 623
        ffdc_config         configuration file listing commands and files for FFDC
        location            where to store collected FFDC
        remote_type         os type of the remote host
        remote_protocol     Protocol to use to collect data
        env_vars            User define CLI env vars '{"key : "value"}'
        econfig             User define env vars YAML file
        log_level           logging level name, e.g. "INFO" or "DEBUG"

        Exits the process (-1) when the environment check fails, the YAML
        config cannot be parsed, or the target type is not in the config.
        """

        self.hostname = hostname
        self.username = username
        self.password = password
        self.port_https = str(port_https)
        self.port_ipmi = str(port_ipmi)
        self.ffdc_config = ffdc_config
        # Collected data is stored under <location>/<REMOTE_TYPE>.
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ""
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set here
        # to be sure that all files for this run will have same timestamps
        # and they will be saved in the same directory.
        # self.location == local system for now
        self.set_ffdc_default_store_path()

        # Logger for this run.  Need to be after set_ffdc_default_store_path()
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify top level directory exists for storage
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load default or user define YAML configuration file.
            with open(self.ffdc_config, "r") as file:
                try:
                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
                except yaml.YAMLError as e:
                    self.logger.error(e)
                    sys.exit(-1)

            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n"
                    % (self.target_type, self.ffdc_config)
                )
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from user.
        self.logger.info("\n\tENV: User define input YAML variables")
        self.env_dict = {}
        self.load_env()
203
204    def verify_script_env(self):
205        # Import to log version
206        import click
207        import paramiko
208
209        run_env_ok = True
210
211        redfishtool_version = (
212            self.run_tool_cmd("redfishtool -V").split(" ")[2].strip("\n")
213        )
214        ipmitool_version = self.run_tool_cmd("ipmitool -V").split(" ")[2]
215
216        self.logger.info("\n\t---- Script host environment ----")
217        self.logger.info(
218            "\t{:<10}  {:<10}".format("Script hostname", os.uname()[1])
219        )
220        self.logger.info(
221            "\t{:<10}  {:<10}".format("Script host os", platform.platform())
222        )
223        self.logger.info(
224            "\t{:<10}  {:>10}".format("Python", platform.python_version())
225        )
226        self.logger.info("\t{:<10}  {:>10}".format("PyYAML", yaml.__version__))
227        self.logger.info("\t{:<10}  {:>10}".format("click", click.__version__))
228        self.logger.info(
229            "\t{:<10}  {:>10}".format("paramiko", paramiko.__version__)
230        )
231        self.logger.info(
232            "\t{:<10}  {:>9}".format("redfishtool", redfishtool_version)
233        )
234        self.logger.info(
235            "\t{:<10}  {:>12}".format("ipmitool", ipmitool_version)
236        )
237
238        if eval(yaml.__version__.replace(".", ",")) < (5, 3, 0):
239            self.logger.error(
240                "\n\tERROR: Python or python packages do not meet minimum"
241                " version requirement."
242            )
243            self.logger.error(
244                "\tERROR: PyYAML version 5.3.0 or higher is needed.\n"
245            )
246            run_env_ok = False
247
248        self.logger.info("\t---- End script host environment ----")
249        return run_env_ok
250
251    def script_logging(self, log_level_attr):
252        r"""
253        Create logger
254
255        """
256        self.logger = logging.getLogger()
257        self.logger.setLevel(log_level_attr)
258        log_file_handler = logging.FileHandler(
259            self.ffdc_dir_path + "collector.log"
260        )
261
262        stdout_handler = logging.StreamHandler(sys.stdout)
263        self.logger.addHandler(log_file_handler)
264        self.logger.addHandler(stdout_handler)
265
266        # Turn off paramiko INFO logging
267        logging.getLogger("paramiko").setLevel(logging.WARNING)
268
269    def target_is_pingable(self):
270        r"""
271        Check if target system is ping-able.
272
273        """
274        response = os.system("ping -c 1 %s  2>&1 >/dev/null" % self.hostname)
275        if response == 0:
276            self.logger.info(
277                "\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname
278            )
279            return True
280        else:
281            self.logger.error(
282                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n"
283                % self.hostname
284            )
285            sys.exit(-1)
286
287    def collect_ffdc(self):
288        r"""
289        Initiate FFDC Collection depending on requested protocol.
290
291        """
292
293        self.logger.info(
294            "\n\t---- Start communicating with %s ----" % self.hostname
295        )
296        self.start_time = time.time()
297
298        # Find the list of target and protocol supported.
299        check_protocol_list = []
300        config_dict = self.ffdc_actions
301
302        for target_type in config_dict.keys():
303            if self.target_type != target_type:
304                continue
305
306            for k, v in config_dict[target_type].items():
307                if (
308                    config_dict[target_type][k]["PROTOCOL"][0]
309                    not in check_protocol_list
310                ):
311                    check_protocol_list.append(
312                        config_dict[target_type][k]["PROTOCOL"][0]
313                    )
314
315        self.logger.info(
316            "\n\t %s protocol type: %s"
317            % (self.target_type, check_protocol_list)
318        )
319
320        verified_working_protocol = self.verify_protocol(check_protocol_list)
321
322        if verified_working_protocol:
323            self.logger.info(
324                "\n\t---- Completed protocol pre-requisite check ----\n"
325            )
326
327        # Verify top level directory exists for storage
328        self.validate_local_store(self.location)
329
330        if (self.remote_protocol not in verified_working_protocol) and (
331            self.remote_protocol != "ALL"
332        ):
333            self.logger.info(
334                "\n\tWorking protocol list: %s" % verified_working_protocol
335            )
336            self.logger.error(
337                "\tERROR: Requested protocol %s is not in working protocol"
338                " list.\n" % self.remote_protocol
339            )
340            sys.exit(-1)
341        else:
342            self.generate_ffdc(verified_working_protocol)
343
344    def ssh_to_target_system(self):
345        r"""
346        Open a ssh connection to targeted system.
347
348        """
349
350        self.ssh_remoteclient = SSHRemoteclient(
351            self.hostname, self.username, self.password
352        )
353
354        if self.ssh_remoteclient.ssh_remoteclient_login():
355            self.logger.info(
356                "\n\t[Check] %s SSH connection established.\t [OK]"
357                % self.hostname
358            )
359
360            # Check scp connection.
361            # If scp connection fails,
362            # continue with FFDC generation but skip scp files to local host.
363            self.ssh_remoteclient.scp_connection()
364            return True
365        else:
366            self.logger.info(
367                "\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]"
368                % self.hostname
369            )
370            return False
371
372    def telnet_to_target_system(self):
373        r"""
374        Open a telnet connection to targeted system.
375        """
376        self.telnet_remoteclient = TelnetRemoteclient(
377            self.hostname, self.username, self.password
378        )
379        if self.telnet_remoteclient.tn_remoteclient_login():
380            self.logger.info(
381                "\n\t[Check] %s Telnet connection established.\t [OK]"
382                % self.hostname
383            )
384            return True
385        else:
386            self.logger.info(
387                "\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]"
388                % self.hostname
389            )
390            return False
391
392    def generate_ffdc(self, working_protocol_list):
393        r"""
394        Determine actions based on remote host type
395
396        Description of argument(s):
397        working_protocol_list    list of confirmed working protocols to connect to remote host.
398        """
399
400        self.logger.info(
401            "\n\t---- Executing commands on " + self.hostname + " ----"
402        )
403        self.logger.info(
404            "\n\tWorking protocol list: %s" % working_protocol_list
405        )
406
407        config_dict = self.ffdc_actions
408        for target_type in config_dict.keys():
409            if self.target_type != target_type:
410                continue
411
412            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
413            global_plugin_dict["global_log_store_path"] = self.ffdc_dir_path
414            self.logger.info("\tSystem Type: %s" % target_type)
415            for k, v in config_dict[target_type].items():
416                if (
417                    self.remote_protocol not in working_protocol_list
418                    and self.remote_protocol != "ALL"
419                ):
420                    continue
421
422                protocol = config_dict[target_type][k]["PROTOCOL"][0]
423
424                if protocol not in working_protocol_list:
425                    continue
426
427                if protocol in working_protocol_list:
428                    if protocol == "SSH" or protocol == "SCP":
429                        self.protocol_ssh(protocol, target_type, k)
430                    elif protocol == "TELNET":
431                        self.protocol_telnet(target_type, k)
432                    elif (
433                        protocol == "REDFISH"
434                        or protocol == "IPMI"
435                        or protocol == "SHELL"
436                    ):
437                        self.protocol_execute(protocol, target_type, k)
438                else:
439                    self.logger.error(
440                        "\n\tERROR: %s is not available for %s."
441                        % (protocol, self.hostname)
442                    )
443
444        # Close network connection after collecting all files
445        self.elapsed_time = time.strftime(
446            "%H:%M:%S", time.gmtime(time.time() - self.start_time)
447        )
448        if self.ssh_remoteclient:
449            self.ssh_remoteclient.ssh_remoteclient_disconnect()
450        if self.telnet_remoteclient:
451            self.telnet_remoteclient.tn_remoteclient_disconnect()
452
453    def protocol_ssh(self, protocol, target_type, sub_type):
454        r"""
455        Perform actions using SSH and SCP protocols.
456
457        Description of argument(s):
458        protocol            Protocol to execute.
459        target_type         OS Type of remote host.
460        sub_type            Group type of commands.
461        """
462
463        if protocol == "SCP":
464            self.group_copy(self.ffdc_actions[target_type][sub_type])
465        else:
466            self.collect_and_copy_ffdc(
467                self.ffdc_actions[target_type][sub_type]
468            )
469
470    def protocol_telnet(self, target_type, sub_type):
471        r"""
472        Perform actions using telnet protocol.
473        Description of argument(s):
474        target_type          OS Type of remote host.
475        """
476        self.logger.info(
477            "\n\t[Run] Executing commands on %s using %s"
478            % (self.hostname, "TELNET")
479        )
480        telnet_files_saved = []
481        progress_counter = 0
482        list_of_commands = self.ffdc_actions[target_type][sub_type]["COMMANDS"]
483        for index, each_cmd in enumerate(list_of_commands, start=0):
484            command_txt, command_timeout = self.unpack_command(each_cmd)
485            result = self.telnet_remoteclient.execute_command(
486                command_txt, command_timeout
487            )
488            if result:
489                try:
490                    targ_file = self.ffdc_actions[target_type][sub_type][
491                        "FILES"
492                    ][index]
493                except IndexError:
494                    targ_file = command_txt
495                    self.logger.warning(
496                        "\n\t[WARN] Missing filename to store data from"
497                        " telnet %s." % each_cmd
498                    )
499                    self.logger.warning(
500                        "\t[WARN] Data will be stored in %s." % targ_file
501                    )
502                targ_file_with_path = (
503                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
504                )
505                # Creates a new file
506                with open(targ_file_with_path, "w") as fp:
507                    fp.write(result)
508                    fp.close
509                    telnet_files_saved.append(targ_file)
510            progress_counter += 1
511            self.print_progress(progress_counter)
512        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
513        for file in telnet_files_saved:
514            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
515
516    def protocol_execute(self, protocol, target_type, sub_type):
517        r"""
518        Perform actions for a given protocol.
519
520        Description of argument(s):
521        protocol            Protocol to execute.
522        target_type         OS Type of remote host.
523        sub_type            Group type of commands.
524        """
525
526        self.logger.info(
527            "\n\t[Run] Executing commands to %s using %s"
528            % (self.hostname, protocol)
529        )
530        executed_files_saved = []
531        progress_counter = 0
532        list_of_cmd = self.get_command_list(
533            self.ffdc_actions[target_type][sub_type]
534        )
535        for index, each_cmd in enumerate(list_of_cmd, start=0):
536            plugin_call = False
537            if isinstance(each_cmd, dict):
538                if "plugin" in each_cmd:
539                    # If the error is set and plugin explicitly
540                    # requested to skip execution on error..
541                    if plugin_error_dict[
542                        "exit_on_error"
543                    ] and self.plugin_error_check(each_cmd["plugin"]):
544                        self.logger.info(
545                            "\n\t[PLUGIN-ERROR] exit_on_error: %s"
546                            % plugin_error_dict["exit_on_error"]
547                        )
548                        self.logger.info(
549                            "\t[PLUGIN-SKIP] %s" % each_cmd["plugin"][0]
550                        )
551                        continue
552                    plugin_call = True
553                    # call the plugin
554                    self.logger.info("\n\t[PLUGIN-START]")
555                    result = self.execute_plugin_block(each_cmd["plugin"])
556                    self.logger.info("\t[PLUGIN-END]\n")
557            else:
558                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
559
560            if not plugin_call:
561                result = self.run_tool_cmd(each_cmd)
562            if result:
563                try:
564                    file_name = self.get_file_list(
565                        self.ffdc_actions[target_type][sub_type]
566                    )[index]
567                    # If file is specified as None.
568                    if file_name == "None":
569                        continue
570                    targ_file = self.yaml_env_and_plugin_vars_populate(
571                        file_name
572                    )
573                except IndexError:
574                    targ_file = each_cmd.split("/")[-1]
575                    self.logger.warning(
576                        "\n\t[WARN] Missing filename to store data from %s."
577                        % each_cmd
578                    )
579                    self.logger.warning(
580                        "\t[WARN] Data will be stored in %s." % targ_file
581                    )
582
583                targ_file_with_path = (
584                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
585                )
586
587                # Creates a new file
588                with open(targ_file_with_path, "w") as fp:
589                    if isinstance(result, dict):
590                        fp.write(json.dumps(result))
591                    else:
592                        fp.write(result)
593                    fp.close
594                    executed_files_saved.append(targ_file)
595
596            progress_counter += 1
597            self.print_progress(progress_counter)
598
599        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
600
601        for file in executed_files_saved:
602            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
603
604    def collect_and_copy_ffdc(
605        self, ffdc_actions_for_target_type, form_filename=False
606    ):
607        r"""
608        Send commands in ffdc_config file to targeted system.
609
610        Description of argument(s):
611        ffdc_actions_for_target_type     commands and files for the selected remote host type.
612        form_filename                    if true, pre-pend self.target_type to filename
613        """
614
615        # Executing commands, if any
616        self.ssh_execute_ffdc_commands(
617            ffdc_actions_for_target_type, form_filename
618        )
619
620        # Copying files
621        if self.ssh_remoteclient.scpclient:
622            self.logger.info(
623                "\n\n\tCopying FFDC files from remote system %s.\n"
624                % self.hostname
625            )
626
627            # Retrieving files from target system
628            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
629            self.scp_ffdc(
630                self.ffdc_dir_path,
631                self.ffdc_prefix,
632                form_filename,
633                list_of_files,
634            )
635        else:
636            self.logger.info(
637                "\n\n\tSkip copying FFDC files from remote system %s.\n"
638                % self.hostname
639            )
640
641    def get_command_list(self, ffdc_actions_for_target_type):
642        r"""
643        Fetch list of commands from configuration file
644
645        Description of argument(s):
646        ffdc_actions_for_target_type    commands and files for the selected remote host type.
647        """
648        try:
649            list_of_commands = ffdc_actions_for_target_type["COMMANDS"]
650        except KeyError:
651            list_of_commands = []
652        return list_of_commands
653
654    def get_file_list(self, ffdc_actions_for_target_type):
655        r"""
656        Fetch list of commands from configuration file
657
658        Description of argument(s):
659        ffdc_actions_for_target_type    commands and files for the selected remote host type.
660        """
661        try:
662            list_of_files = ffdc_actions_for_target_type["FILES"]
663        except KeyError:
664            list_of_files = []
665        return list_of_files
666
667    def unpack_command(self, command):
668        r"""
669        Unpack command from config file
670
671        Description of argument(s):
672        command    Command from config file.
673        """
674        if isinstance(command, dict):
675            command_txt = next(iter(command))
676            command_timeout = next(iter(command.values()))
677        elif isinstance(command, str):
678            command_txt = command
679            # Default command timeout 60 seconds
680            command_timeout = 60
681
682        return command_txt, command_timeout
683
684    def ssh_execute_ffdc_commands(
685        self, ffdc_actions_for_target_type, form_filename=False
686    ):
687        r"""
688        Send commands in ffdc_config file to targeted system.
689
690        Description of argument(s):
691        ffdc_actions_for_target_type    commands and files for the selected remote host type.
692        form_filename                    if true, pre-pend self.target_type to filename
693        """
694        self.logger.info(
695            "\n\t[Run] Executing commands on %s using %s"
696            % (self.hostname, ffdc_actions_for_target_type["PROTOCOL"][0])
697        )
698
699        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
700        # If command list is empty, returns
701        if not list_of_commands:
702            return
703
704        progress_counter = 0
705        for command in list_of_commands:
706            command_txt, command_timeout = self.unpack_command(command)
707
708            if form_filename:
709                command_txt = str(command_txt % self.target_type)
710
711            (
712                cmd_exit_code,
713                err,
714                response,
715            ) = self.ssh_remoteclient.execute_command(
716                command_txt, command_timeout
717            )
718
719            if cmd_exit_code:
720                self.logger.warning(
721                    "\n\t\t[WARN] %s exits with code %s."
722                    % (command_txt, str(cmd_exit_code))
723                )
724                self.logger.warning("\t\t[WARN] %s " % err)
725
726            progress_counter += 1
727            self.print_progress(progress_counter)
728
729        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
730
    def group_copy(self, ffdc_actions_for_target_type):
        r"""
        scp group of files (wild card) from remote host.

        Each configured command is executed on the remote host and is
        expected to print remote file names (one per line) on stdout;
        those files are then copied via SCP into self.ffdc_dir_path.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info(
                "\n\tCopying files from remote system %s via SCP.\n"
                % self.hostname
            )

            list_of_commands = self.get_command_list(
                ffdc_actions_for_target_type
            )
            # If command list is empty, returns
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    # Expand ${var} / plugin references in the command text.
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                (
                    cmd_exit_code,
                    err,
                    response,
                ) = self.ssh_remoteclient.execute_command(command)

                # If file does not exist, code take no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    scp_result = self.ssh_remoteclient.scp_file_from_remote(
                        response.split("\n"), self.ffdc_dir_path
                    )
                    if scp_result:
                        self.logger.info(
                            "\t\tSuccessfully copied from "
                            + self.hostname
                            + ":"
                            + command
                        )
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            # No SCP session available (see ssh_to_target_system).
            self.logger.info(
                "\n\n\tSkip copying files from remote system %s.\n"
                % self.hostname
            )
786
    def scp_ffdc(
        self,
        targ_dir_path,
        targ_file_prefix,
        form_filename,
        file_list=None,
        quiet=None,
    ):
        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be prepended to each
                                        target file's name.
        form_filename                   If true, each filename is first formatted
                                        with self.target_type ("%s" substitution).
        file_list                       A list of files to scp from targeted system to this system.
        quiet                           If true, show a progress bar instead of
                                        per-file success/failure messages.
        """

        progress_counter = 0
        for filename in file_list:
            if form_filename:
                filename = str(filename % self.target_type)
            source_file_path = filename
            targ_file_path = (
                targ_dir_path + targ_file_prefix + filename.split("/")[-1]
            )

            # If source file name contains wild card, copy filename as is.
            if "*" in source_file_path:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(
                    source_file_path, self.ffdc_dir_path
                )
            else:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(
                    source_file_path, targ_file_path
                )

            if not quiet:
                if scp_result:
                    self.logger.info(
                        "\t\tSuccessfully copied from "
                        + self.hostname
                        + ":"
                        + source_file_path
                        + ".\n"
                    )
                else:
                    self.logger.info(
                        "\t\tFail to copy from "
                        + self.hostname
                        + ":"
                        + source_file_path
                        + ".\n"
                    )
            else:
                # The progress counter only advances in quiet mode; in
                # verbose mode the per-file messages serve as progress.
                progress_counter += 1
                self.print_progress(progress_counter)
845
846    def set_ffdc_default_store_path(self):
847        r"""
848        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
849        Collected ffdc file will be stored in dir /self.location/hostname_timestr/.
850        Individual ffdc file will have timestr_filename.
851
852        Description of class variables:
853        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.
854
855        self.ffdc_prefix    The prefix to be given to each ffdc file name.
856
857        """
858
859        timestr = time.strftime("%Y%m%d-%H%M%S")
860        self.ffdc_dir_path = (
861            self.location + "/" + self.hostname + "_" + timestr + "/"
862        )
863        self.ffdc_prefix = timestr + "_"
864        self.validate_local_store(self.ffdc_dir_path)
865
866    # Need to verify local store path exists prior to instantiate this class.
867    # This class method is used to share the same code between CLI input parm
868    # and Robot Framework "${EXECDIR}/logs" before referencing this class.
869    @classmethod
870    def validate_local_store(cls, dir_path):
871        r"""
872        Ensure path exists to store FFDC files locally.
873
874        Description of variable:
875        dir_path  The dir path where collected ffdc data files will be stored.
876
877        """
878
879        if not os.path.exists(dir_path):
880            try:
881                os.makedirs(dir_path, 0o755)
882            except (IOError, OSError) as e:
883                # PermissionError
884                if e.errno == EPERM or e.errno == EACCES:
885                    self.logger.error(
886                        "\tERROR: os.makedirs %s failed with"
887                        " PermissionError.\n" % dir_path
888                    )
889                else:
890                    self.logger.error(
891                        "\tERROR: os.makedirs %s failed with %s.\n"
892                        % (dir_path, e.strerror)
893                    )
894                sys.exit(-1)
895
896    def print_progress(self, progress):
897        r"""
898        Print activity progress +
899
900        Description of variable:
901        progress  Progress counter.
902
903        """
904
905        sys.stdout.write("\r\t" + "+" * progress)
906        sys.stdout.flush()
907        time.sleep(0.1)
908
909    def verify_redfish(self):
910        r"""
911        Verify remote host has redfish service active
912
913        """
914        redfish_parm = (
915            "redfishtool -r "
916            + self.hostname
917            + " -S Always raw GET /redfish/v1/"
918        )
919        return self.run_tool_cmd(redfish_parm, True)
920
921    def verify_ipmi(self):
922        r"""
923        Verify remote host has IPMI LAN service active
924
925        """
926        if self.target_type == "OPENBMC":
927            ipmi_parm = (
928                "ipmitool -I lanplus -C 17  -U "
929                + self.username
930                + " -P "
931                + self.password
932                + " -H "
933                + self.hostname
934                + " -p "
935                + str(self.port_ipmi)
936                + " power status"
937            )
938        else:
939            ipmi_parm = (
940                "ipmitool -I lanplus  -P "
941                + self.password
942                + " -H "
943                + self.hostname
944                + " -p "
945                + str(self.port_ipmi)
946                + " power status"
947            )
948
949        return self.run_tool_cmd(ipmi_parm, True)
950
951    def run_tool_cmd(self, parms_string, quiet=False):
952        r"""
953        Run CLI standard tool or scripts.
954
955        Description of variable:
956        parms_string         tool command options.
957        quiet                do not print tool error message if True
958        """
959
960        result = subprocess.run(
961            [parms_string],
962            stdout=subprocess.PIPE,
963            stderr=subprocess.PIPE,
964            shell=True,
965            universal_newlines=True,
966        )
967
968        if result.stderr and not quiet:
969            self.logger.error("\n\t\tERROR with %s " % parms_string)
970            self.logger.error("\t\t" + result.stderr)
971
972        return result.stdout
973
974    def verify_protocol(self, protocol_list):
975        r"""
976        Perform protocol working check.
977
978        Description of argument(s):
979        protocol_list        List of protocol.
980        """
981
982        tmp_list = []
983        if self.target_is_pingable():
984            tmp_list.append("SHELL")
985
986        for protocol in protocol_list:
987            if self.remote_protocol != "ALL":
988                if self.remote_protocol != protocol:
989                    continue
990
991            # Only check SSH/SCP once for both protocols
992            if (
993                protocol == "SSH"
994                or protocol == "SCP"
995                and protocol not in tmp_list
996            ):
997                if self.ssh_to_target_system():
998                    # Add only what user asked.
999                    if self.remote_protocol != "ALL":
1000                        tmp_list.append(self.remote_protocol)
1001                    else:
1002                        tmp_list.append("SSH")
1003                        tmp_list.append("SCP")
1004
1005            if protocol == "TELNET":
1006                if self.telnet_to_target_system():
1007                    tmp_list.append(protocol)
1008
1009            if protocol == "REDFISH":
1010                if self.verify_redfish():
1011                    tmp_list.append(protocol)
1012                    self.logger.info(
1013                        "\n\t[Check] %s Redfish Service.\t\t [OK]"
1014                        % self.hostname
1015                    )
1016                else:
1017                    self.logger.info(
1018                        "\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]"
1019                        % self.hostname
1020                    )
1021
1022            if protocol == "IPMI":
1023                if self.verify_ipmi():
1024                    tmp_list.append(protocol)
1025                    self.logger.info(
1026                        "\n\t[Check] %s IPMI LAN Service.\t\t [OK]"
1027                        % self.hostname
1028                    )
1029                else:
1030                    self.logger.info(
1031                        "\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]"
1032                        % self.hostname
1033                    )
1034
1035        return tmp_list
1036
1037    def load_env(self):
1038        r"""
1039        Perform protocol working check.
1040
1041        """
1042        # This is for the env vars a user can use in YAML to load it at runtime.
1043        # Example YAML:
1044        # -COMMANDS:
1045        #    - my_command ${hostname}  ${username}   ${password}
1046        os.environ["hostname"] = self.hostname
1047        os.environ["username"] = self.username
1048        os.environ["password"] = self.password
1049        os.environ["port_https"] = self.port_https
1050        os.environ["port_ipmi"] = self.port_ipmi
1051
1052        # Append default Env.
1053        self.env_dict["hostname"] = self.hostname
1054        self.env_dict["username"] = self.username
1055        self.env_dict["password"] = self.password
1056        self.env_dict["port_https"] = self.port_https
1057        self.env_dict["port_ipmi"] = self.port_ipmi
1058
1059        try:
1060            tmp_env_dict = {}
1061            if self.env_vars:
1062                tmp_env_dict = json.loads(self.env_vars)
1063                # Export ENV vars default.
1064                for key, value in tmp_env_dict.items():
1065                    os.environ[key] = value
1066                    self.env_dict[key] = str(value)
1067
1068            if self.econfig:
1069                with open(self.econfig, "r") as file:
1070                    try:
1071                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
1072                    except yaml.YAMLError as e:
1073                        self.logger.error(e)
1074                        sys.exit(-1)
1075                # Export ENV vars.
1076                for key, value in tmp_env_dict["env_params"].items():
1077                    os.environ[key] = str(value)
1078                    self.env_dict[key] = str(value)
1079        except json.decoder.JSONDecodeError as e:
1080            self.logger.error("\n\tERROR: %s " % e)
1081            sys.exit(-1)
1082
1083        # This to mask the password from displaying on the console.
1084        mask_dict = self.env_dict.copy()
1085        for k, v in mask_dict.items():
1086            if k.lower().find("password") != -1:
1087                hidden_text = []
1088                hidden_text.append(v)
1089                password_regex = (
1090                    "(" + "|".join([re.escape(x) for x in hidden_text]) + ")"
1091                )
1092                mask_dict[k] = re.sub(password_regex, "********", v)
1093
1094        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
1095
1096    def execute_python_eval(self, eval_string):
1097        r"""
1098        Execute qualified python function string using eval.
1099
1100        Description of argument(s):
1101        eval_string        Execute the python object.
1102
1103        Example:
1104                eval(plugin.foo_func.foo_func(10))
1105        """
1106        try:
1107            self.logger.info("\tExecuting plugin func()")
1108            self.logger.debug("\tCall func: %s" % eval_string)
1109            result = eval(eval_string)
1110            self.logger.info("\treturn: %s" % str(result))
1111        except (
1112            ValueError,
1113            SyntaxError,
1114            NameError,
1115            AttributeError,
1116            TypeError,
1117        ) as e:
1118            self.logger.error("\tERROR: execute_python_eval: %s" % e)
1119            # Set the plugin error state.
1120            plugin_error_dict["exit_on_error"] = True
1121            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
1122            return "PLUGIN_EVAL_ERROR"
1123
1124        return result
1125
1126    def execute_plugin_block(self, plugin_cmd_list):
1127        r"""
1128        Pack the plugin command to qualifed python string object.
1129
1130        Description of argument(s):
1131        plugin_list_dict      Plugin block read from YAML
1132                              [{'plugin_name': 'plugin.foo_func.my_func'},
1133                               {'plugin_args': [10]}]
1134
1135        Example:
1136            - plugin:
1137              - plugin_name: plugin.foo_func.my_func
1138              - plugin_args:
1139                - arg1
1140                - arg2
1141
1142            - plugin:
1143              - plugin_name: result = plugin.foo_func.my_func
1144              - plugin_args:
1145                - arg1
1146                - arg2
1147
1148            - plugin:
1149              - plugin_name: result1,result2 = plugin.foo_func.my_func
1150              - plugin_args:
1151                - arg1
1152                - arg2
1153        """
1154        try:
1155            idx = self.key_index_list_dict("plugin_name", plugin_cmd_list)
1156            plugin_name = plugin_cmd_list[idx]["plugin_name"]
1157            # Equal separator means plugin function returns result.
1158            if " = " in plugin_name:
1159                # Ex. ['result', 'plugin.foo_func.my_func']
1160                plugin_name_args = plugin_name.split(" = ")
1161                # plugin func return data.
1162                for arg in plugin_name_args:
1163                    if arg == plugin_name_args[-1]:
1164                        plugin_name = arg
1165                    else:
1166                        plugin_resp = arg.split(",")
1167                        # ['result1','result2']
1168                        for x in plugin_resp:
1169                            global_plugin_list.append(x)
1170                            global_plugin_dict[x] = ""
1171
1172            # Walk the plugin args ['arg1,'arg2']
1173            # If the YAML plugin statement 'plugin_args' is not declared.
1174            if any("plugin_args" in d for d in plugin_cmd_list):
1175                idx = self.key_index_list_dict("plugin_args", plugin_cmd_list)
1176                plugin_args = plugin_cmd_list[idx]["plugin_args"]
1177                if plugin_args:
1178                    plugin_args = self.yaml_args_populate(plugin_args)
1179                else:
1180                    plugin_args = []
1181            else:
1182                plugin_args = self.yaml_args_populate([])
1183
1184            # Pack the args arg1, arg2, .... argn into
1185            # "arg1","arg2","argn"  string as params for function.
1186            parm_args_str = self.yaml_args_string(plugin_args)
1187            if parm_args_str:
1188                plugin_func = plugin_name + "(" + parm_args_str + ")"
1189            else:
1190                plugin_func = plugin_name + "()"
1191
1192            # Execute plugin function.
1193            if global_plugin_dict:
1194                resp = self.execute_python_eval(plugin_func)
1195                # Update plugin vars dict if there is any.
1196                if resp != "PLUGIN_EVAL_ERROR":
1197                    self.response_args_data(resp)
1198            else:
1199                resp = self.execute_python_eval(plugin_func)
1200        except Exception as e:
1201            # Set the plugin error state.
1202            plugin_error_dict["exit_on_error"] = True
1203            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
1204            pass
1205
1206        # There is a real error executing the plugin function.
1207        if resp == "PLUGIN_EVAL_ERROR":
1208            return resp
1209
1210        # Check if plugin_expects_return (int, string, list,dict etc)
1211        if any("plugin_expects_return" in d for d in plugin_cmd_list):
1212            idx = self.key_index_list_dict(
1213                "plugin_expects_return", plugin_cmd_list
1214            )
1215            plugin_expects = plugin_cmd_list[idx]["plugin_expects_return"]
1216            if plugin_expects:
1217                if resp:
1218                    if (
1219                        self.plugin_expect_type(plugin_expects, resp)
1220                        == "INVALID"
1221                    ):
1222                        self.logger.error("\tWARN: Plugin error check skipped")
1223                    elif not self.plugin_expect_type(plugin_expects, resp):
1224                        self.logger.error(
1225                            "\tERROR: Plugin expects return data: %s"
1226                            % plugin_expects
1227                        )
1228                        plugin_error_dict["exit_on_error"] = True
1229                elif not resp:
1230                    self.logger.error(
1231                        "\tERROR: Plugin func failed to return data"
1232                    )
1233                    plugin_error_dict["exit_on_error"] = True
1234
1235        return resp
1236
1237    def response_args_data(self, plugin_resp):
1238        r"""
1239        Parse the plugin function response and update plugin return variable.
1240
1241        plugin_resp       Response data from plugin function.
1242        """
1243        resp_list = []
1244        resp_data = ""
1245
1246        # There is nothing to update the plugin response.
1247        if len(global_plugin_list) == 0 or plugin_resp == "None":
1248            return
1249
1250        if isinstance(plugin_resp, str):
1251            resp_data = plugin_resp.strip("\r\n\t")
1252            resp_list.append(resp_data)
1253        elif isinstance(plugin_resp, bytes):
1254            resp_data = str(plugin_resp, "UTF-8").strip("\r\n\t")
1255            resp_list.append(resp_data)
1256        elif isinstance(plugin_resp, tuple):
1257            if len(global_plugin_list) == 1:
1258                resp_list.append(plugin_resp)
1259            else:
1260                resp_list = list(plugin_resp)
1261                resp_list = [x.strip("\r\n\t") for x in resp_list]
1262        elif isinstance(plugin_resp, list):
1263            if len(global_plugin_list) == 1:
1264                resp_list.append([x.strip("\r\n\t") for x in plugin_resp])
1265            else:
1266                resp_list = [x.strip("\r\n\t") for x in plugin_resp]
1267        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
1268            resp_list.append(plugin_resp)
1269
1270        # Iterate if there is a list of plugin return vars to update.
1271        for idx, item in enumerate(resp_list, start=0):
1272            # Exit loop, done required loop.
1273            if idx >= len(global_plugin_list):
1274                break
1275            # Find the index of the return func in the list and
1276            # update the global func return dictionary.
1277            try:
1278                dict_idx = global_plugin_list[idx]
1279                global_plugin_dict[dict_idx] = item
1280            except (IndexError, ValueError) as e:
1281                self.logger.warn("\tWARN: response_args_data: %s" % e)
1282                pass
1283
1284        # Done updating plugin dict irrespective of pass or failed,
1285        # clear all the list element for next plugin block execute.
1286        global_plugin_list.clear()
1287
1288    def yaml_args_string(self, plugin_args):
1289        r"""
1290        Pack the args into string.
1291
1292        plugin_args            arg list ['arg1','arg2,'argn']
1293        """
1294        args_str = ""
1295        for args in plugin_args:
1296            if args:
1297                if isinstance(args, (int, float)):
1298                    args_str += str(args)
1299                elif args in global_plugin_type_list:
1300                    args_str += str(global_plugin_dict[args])
1301                else:
1302                    args_str += '"' + str(args.strip("\r\n\t")) + '"'
1303            # Skip last list element.
1304            if args != plugin_args[-1]:
1305                args_str += ","
1306        return args_str
1307
1308    def yaml_args_populate(self, yaml_arg_list):
1309        r"""
1310        Decode env and plugin vars and populate.
1311
1312        Description of argument(s):
1313        yaml_arg_list         arg list read from YAML
1314
1315        Example:
1316          - plugin_args:
1317            - arg1
1318            - arg2
1319
1320                  yaml_arg_list:  [arg2, arg2]
1321        """
1322        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1323        env_vars_list = list(self.env_dict)
1324
1325        if isinstance(yaml_arg_list, list):
1326            tmp_list = []
1327            for arg in yaml_arg_list:
1328                if isinstance(arg, (int, float)):
1329                    tmp_list.append(arg)
1330                    continue
1331                elif isinstance(arg, str):
1332                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1333                    tmp_list.append(arg_str)
1334                else:
1335                    tmp_list.append(arg)
1336
1337            # return populated list.
1338            return tmp_list
1339
    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
        r"""
        Update ${MY_VAR} and plugin vars.

        First expands ${...} references from os.environ, then substitutes
        plugin var names found in global_plugin_dict into the string.

        Description of argument(s):
        yaml_arg_str         arg string read from YAML.

        Example:
            - cat ${MY_VAR}
            - ls -AX my_plugin_var
        """
        # Parse the string for env vars ${env_vars}.
        try:
            # Example, list of matching env vars ['username', 'password', 'hostname']
            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works good.
            var_name_regex = "\\$\\{([^\\}]+)\\}"
            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
            for var in env_var_names_list:
                # KeyError for an unknown env var is caught and logged below.
                env_var = os.environ[var]
                env_replace = "${" + var + "}"
                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
        except Exception as e:
            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
            # NOTE(review): if the exception fires before re.findall
            # completes, env_var_names_list is left unbound and the loop
            # below raises an uncaught NameError — confirm intended.
            pass

        # Parse the string for plugin vars.
        try:
            # Example, list of plugin vars ['my_username', 'my_data']
            plugin_var_name_list = global_plugin_dict.keys()
            for var in plugin_var_name_list:
                # skip env var list already populated above code block list.
                if var in env_var_names_list:
                    continue
                # If this plugin var exist but empty in dict, don't replace.
                # This is either a YAML plugin statement incorrectly used or
                # user added a plugin var which is not going to be populated.
                # NOTE(review): this tests the whole arg string as a dict
                # key, not the emptiness of global_plugin_dict[var] the
                # comment above describes — verify against callers.
                if yaml_arg_str in global_plugin_dict:
                    if isinstance(global_plugin_dict[var], (list, dict)):
                        # List data type or dict can't be replaced, use directly
                        # in eval function call.
                        global_plugin_type_list.append(var)
                    else:
                        yaml_arg_str = yaml_arg_str.replace(
                            str(var), str(global_plugin_dict[var])
                        )
                # Just a string like filename or command.
                else:
                    yaml_arg_str = yaml_arg_str.replace(
                        str(var), str(global_plugin_dict[var])
                    )
        except (IndexError, ValueError) as e:
            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
            pass

        return yaml_arg_str
1395
1396    def plugin_error_check(self, plugin_dict):
1397        r"""
1398        Plugin error dict processing.
1399
1400        Description of argument(s):
1401        plugin_dict        Dictionary of plugin error.
1402        """
1403        if any("plugin_error" in d for d in plugin_dict):
1404            for d in plugin_dict:
1405                if "plugin_error" in d:
1406                    value = d["plugin_error"]
1407                    # Reference if the error is set or not by plugin.
1408                    return plugin_error_dict[value]
1409
1410    def key_index_list_dict(self, key, list_dict):
1411        r"""
1412        Iterate list of dictionary and return index if the key match is found.
1413
1414        Description of argument(s):
1415        key           Valid Key in a dict.
1416        list_dict     list of dictionary.
1417        """
1418        for i, d in enumerate(list_dict):
1419            if key in d.keys():
1420                return i
1421
1422    def plugin_expect_type(self, type, data):
1423        r"""
1424        Plugin expect directive type check.
1425        """
1426        if type == "int":
1427            return isinstance(data, int)
1428        elif type == "float":
1429            return isinstance(data, float)
1430        elif type == "str":
1431            return isinstance(data, str)
1432        elif type == "list":
1433            return isinstance(data, list)
1434        elif type == "dict":
1435            return isinstance(data, dict)
1436        elif type == "tuple":
1437            return isinstance(data, tuple)
1438        else:
1439            self.logger.info("\tInvalid data type requested: %s" % type)
1440            return "INVALID"
1441