#!/usr/bin/env python3

r"""
See the class prologue below for details.
"""

import json
import logging
import os
import platform
import re
import subprocess
import sys
import time
from errno import EACCES, EPERM

import yaml

script_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(script_dir)
# Walk the script directory and append its subdirectories to sys.path.
for root, dirs, files in os.walk(script_dir):
    for dir in dirs:
        sys.path.append(os.path.join(root, dir))

from ssh_utility import SSHRemoteclient  # NOQA
from telnet_utility import TelnetRemoteclient  # NOQA

r"""
User-defined plugin Python functions.

Modules are imported from the plugins directory:

plugins
├── file1.py
└── file2.py

Example of how to define a plugin in YAML:
 - plugin:
   - plugin_name: plugin.foo_func.foo_func_yaml
   - plugin_args:
     - arg1
     - arg2
"""
plugin_dir = os.path.join(script_dir, "plugins")
sys.path.append(plugin_dir)
try:
    for module in os.listdir(plugin_dir):
        if module == "__init__.py" or module[-3:] != ".py":
            continue
        plugin_module = "plugins." + module[:-3]
        # To access the module plugin.<module name>.<function>
        # Example: plugin.foo_func.foo_func_yaml()
        try:
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            print("PLUGIN: Module import failed: %s (%s)" % (module, e))
except FileNotFoundError as e:
    print("PLUGIN: %s" % e)

r"""
This supports plugin functions that return data or responses to the caller
in the YAML plugin setup.

Example:

    - plugin:
      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
      - plugin_args:
        - ${hostname}
        - ${username}
        - ${password}
        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
    - plugin:
      - plugin_name: plugin.print_vars.print_vars
      - plugin_args:
        - version

where the "version" var returned by the first plugin is consumed by the next
plugin block in the YAML.

"""
global global_log_store_path
global global_plugin_dict
global global_plugin_list

# Hold plugin return values in a dict and plugin return variable names in a list.
# The dict is used to reference and update variables while parsing, whereas the
# list holds the variables from the current plugin block that still need processing.
global_plugin_dict = {}
global_plugin_list = []

# Hold the plugin return variable names declared when a function returns a list or dict.
# Refer to this name list to look up the plugin dict for eval() function args.
# Example: ['version']
global_plugin_type_list = []

# Path where logs are to be stored or written.
global_log_store_path = ""

# Plugin error state defaults.
plugin_error_dict = {
    "exit_on_error": False,
    "continue_on_error": False,
}
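# Illustrative YAML usage (an assumption based on plugin_error_check() below):
# a plugin block may reference one of the keys above to control error handling.
#
#     - plugin:
#       - plugin_name: plugin.foo_func.my_func
#       - plugin_error: exit_on_error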


class ffdc_collector:
    r"""
    Execute commands from configuration file to collect log files.
    Fetch and store generated files at the specified location.

    """
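    # Illustrative instantiation (hypothetical values; these arguments are
    # normally supplied by a CLI wrapper or test framework):
    #
    #     collector = ffdc_collector(
    #         "1.2.3.4", "root", "mypassword", 22, 443, 623,
    #         "ffdc_config.yaml", "/tmp/logs", "OPENBMC", "ALL",
    #         None, None, "INFO")
    #     collector.collect_ffdc()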

    def __init__(
        self,
        hostname,
        username,
        password,
        port_ssh,
        port_https,
        port_ipmi,
        ffdc_config,
        location,
        remote_type,
        remote_protocol,
        env_vars,
        econfig,
        log_level,
    ):
        r"""
        Description of argument(s):

        hostname            name/IP of the targeted (remote) system
        username            user on the targeted system with access to FFDC files
        password            password for the user on the targeted system
        port_ssh            SSH port value. By default 22
        port_https          HTTPS port value. By default 443
        port_ipmi           IPMI port value. By default 623
        ffdc_config         configuration file listing commands and files for FFDC
        location            where to store collected FFDC
        remote_type         OS type of the remote host
        remote_protocol     protocol to use to collect data
        env_vars            user-defined CLI env vars, e.g. '{"key" : "value"}'
        econfig             user-defined env vars YAML file
        log_level           logging level for this run (e.g. DEBUG, INFO)

        """

        self.hostname = hostname
        self.username = username
        self.password = password
        self.port_ssh = str(port_ssh)
        self.port_https = str(port_https)
        self.port_ipmi = str(port_ipmi)
        self.ffdc_config = ffdc_config
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ""
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the timestamp is at second granularity, these values are set
        # here to ensure that all files for this run have the same timestamp
        # and are saved in the same directory.
        # self.location == local system for now
        self.set_ffdc_default_store_path()

        # Logger for this run.  Must be created after set_ffdc_default_store_path().
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify that the top level directory exists for storage.
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load the default or user-defined YAML configuration file.
            with open(self.ffdc_config, "r") as file:
                try:
                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
                except yaml.YAMLError as e:
                    self.logger.error(e)
                    sys.exit(-1)

            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n"
                    % (self.target_type, self.ffdc_config)
                )
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from the user.
        self.logger.info("\n\tENV: User-defined input YAML variables")
        self.env_dict = {}
        self.load_env()

    def verify_script_env(self):
        # Imported here only to log the package versions.
        import click
        import paramiko

        run_env_ok = True

        redfishtool_version = (
            self.run_tool_cmd("redfishtool -V").split(" ")[2].strip("\n")
        )
        ipmitool_version = self.run_tool_cmd("ipmitool -V").split(" ")[2]

        self.logger.info("\n\t---- Script host environment ----")
        self.logger.info(
            "\t{:<10}  {:<10}".format("Script hostname", os.uname()[1])
        )
        self.logger.info(
            "\t{:<10}  {:<10}".format("Script host os", platform.platform())
        )
        self.logger.info(
            "\t{:<10}  {:>10}".format("Python", platform.python_version())
        )
        self.logger.info("\t{:<10}  {:>10}".format("PyYAML", yaml.__version__))
        self.logger.info("\t{:<10}  {:>10}".format("click", click.__version__))
        self.logger.info(
            "\t{:<10}  {:>10}".format("paramiko", paramiko.__version__)
        )
        self.logger.info(
            "\t{:<10}  {:>9}".format("redfishtool", redfishtool_version)
        )
        self.logger.info(
            "\t{:<10}  {:>12}".format("ipmitool", ipmitool_version)
        )

        # Compare the PyYAML (major, minor) version without resorting to eval().
        if tuple(int(x) for x in yaml.__version__.split(".")[:2]) < (5, 3):
            self.logger.error(
                "\n\tERROR: Python or python packages do not meet minimum"
                " version requirement."
            )
            self.logger.error(
                "\tERROR: PyYAML version 5.3.0 or higher is needed.\n"
            )
            run_env_ok = False

        self.logger.info("\t---- End script host environment ----")
        return run_env_ok

    def script_logging(self, log_level_attr):
        r"""
        Create the logger for this collection run.

        """
        self.logger = logging.getLogger()
        self.logger.setLevel(log_level_attr)
        log_file_handler = logging.FileHandler(
            self.ffdc_dir_path + "collector.log"
        )

        stdout_handler = logging.StreamHandler(sys.stdout)
        self.logger.addHandler(log_file_handler)
        self.logger.addHandler(stdout_handler)

        # Turn off paramiko INFO logging.
        logging.getLogger("paramiko").setLevel(logging.WARNING)

    def target_is_pingable(self):
        r"""
        Check if the target system is ping-able.

        """
        response = os.system("ping -c 1 %s >/dev/null 2>&1" % self.hostname)
        if response == 0:
            self.logger.info(
                "\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname
            )
            return True
        else:
            self.logger.error(
                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n"
                % self.hostname
            )
            sys.exit(-1)

    def collect_ffdc(self):
        r"""
        Initiate FFDC collection depending on the requested protocol.

        """

        self.logger.info(
            "\n\t---- Start communicating with %s ----" % self.hostname
        )
        self.start_time = time.time()

        # Find the list of protocols supported for the target.
        check_protocol_list = []
        config_dict = self.ffdc_actions

        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            for k, v in config_dict[target_type].items():
                if (
                    config_dict[target_type][k]["PROTOCOL"][0]
                    not in check_protocol_list
                ):
                    check_protocol_list.append(
                        config_dict[target_type][k]["PROTOCOL"][0]
                    )

        self.logger.info(
            "\n\t %s protocol type: %s"
            % (self.target_type, check_protocol_list)
        )

        verified_working_protocol = self.verify_protocol(check_protocol_list)

        if verified_working_protocol:
            self.logger.info(
                "\n\t---- Completed protocol pre-requisite check ----\n"
            )

        # Verify that the top level directory exists for storage.
        self.validate_local_store(self.location)

        if (self.remote_protocol not in verified_working_protocol) and (
            self.remote_protocol != "ALL"
        ):
            self.logger.info(
                "\n\tWorking protocol list: %s" % verified_working_protocol
            )
            self.logger.error(
                "\tERROR: Requested protocol %s is not in working protocol"
                " list.\n" % self.remote_protocol
            )
            sys.exit(-1)
        else:
            self.generate_ffdc(verified_working_protocol)

    def ssh_to_target_system(self):
        r"""
        Open an SSH connection to the targeted system.

        """

        self.ssh_remoteclient = SSHRemoteclient(
            self.hostname, self.username, self.password, self.port_ssh
        )

        if self.ssh_remoteclient.ssh_remoteclient_login():
            self.logger.info(
                "\n\t[Check] %s SSH connection established.\t [OK]"
                % self.hostname
            )

            # Check the SCP connection.
            # If the SCP connection fails, continue with FFDC generation
            # but skip copying files to the local host.
            self.ssh_remoteclient.scp_connection()
            return True
        else:
            self.logger.info(
                "\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]"
                % self.hostname
            )
            return False

    def telnet_to_target_system(self):
        r"""
        Open a telnet connection to the targeted system.
        """
        self.telnet_remoteclient = TelnetRemoteclient(
            self.hostname, self.username, self.password
        )
        if self.telnet_remoteclient.tn_remoteclient_login():
            self.logger.info(
                "\n\t[Check] %s Telnet connection established.\t [OK]"
                % self.hostname
            )
            return True
        else:
            self.logger.info(
                "\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]"
                % self.hostname
            )
            return False

    def generate_ffdc(self, working_protocol_list):
        r"""
        Determine actions based on the remote host type.

        Description of argument(s):
        working_protocol_list    list of confirmed working protocols to connect to remote host.
        """

        self.logger.info(
            "\n\t---- Executing commands on " + self.hostname + " ----"
        )
        self.logger.info(
            "\n\tWorking protocol list: %s" % working_protocol_list
        )

        config_dict = self.ffdc_actions
        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
            global_plugin_dict["global_log_store_path"] = self.ffdc_dir_path
            self.logger.info("\tSystem Type: %s" % target_type)
            for k, v in config_dict[target_type].items():
                if (
                    self.remote_protocol not in working_protocol_list
                    and self.remote_protocol != "ALL"
                ):
                    continue

                protocol = config_dict[target_type][k]["PROTOCOL"][0]

                if protocol in working_protocol_list:
                    if protocol == "SSH" or protocol == "SCP":
                        self.protocol_ssh(protocol, target_type, k)
                    elif protocol == "TELNET":
                        self.protocol_telnet(target_type, k)
                    elif (
                        protocol == "REDFISH"
                        or protocol == "IPMI"
                        or protocol == "SHELL"
                    ):
                        self.protocol_execute(protocol, target_type, k)
                else:
                    self.logger.error(
                        "\n\tERROR: %s is not available for %s."
                        % (protocol, self.hostname)
                    )

        # Close network connections after collecting all files.
        self.elapsed_time = time.strftime(
            "%H:%M:%S", time.gmtime(time.time() - self.start_time)
        )
        if self.ssh_remoteclient:
            self.ssh_remoteclient.ssh_remoteclient_disconnect()
        if self.telnet_remoteclient:
            self.telnet_remoteclient.tn_remoteclient_disconnect()

    def protocol_ssh(self, protocol, target_type, sub_type):
        r"""
        Perform actions using the SSH and SCP protocols.

        Description of argument(s):
        protocol            Protocol to execute.
        target_type         OS type of the remote host.
        sub_type            Group type of commands.
        """

        if protocol == "SCP":
            self.group_copy(self.ffdc_actions[target_type][sub_type])
        else:
            self.collect_and_copy_ffdc(
                self.ffdc_actions[target_type][sub_type]
            )

    def protocol_telnet(self, target_type, sub_type):
        r"""
        Perform actions using the telnet protocol.

        Description of argument(s):
        target_type          OS type of the remote host.
        sub_type             Group type of commands.
        """
        self.logger.info(
            "\n\t[Run] Executing commands on %s using %s"
            % (self.hostname, "TELNET")
        )
        telnet_files_saved = []
        progress_counter = 0
        list_of_commands = self.ffdc_actions[target_type][sub_type]["COMMANDS"]
        for index, each_cmd in enumerate(list_of_commands, start=0):
            command_txt, command_timeout = self.unpack_command(each_cmd)
            result = self.telnet_remoteclient.execute_command(
                command_txt, command_timeout
            )
            if result:
                try:
                    targ_file = self.ffdc_actions[target_type][sub_type][
                        "FILES"
                    ][index]
                except IndexError:
                    targ_file = command_txt
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from"
                        " telnet %s." % each_cmd
                    )
                    self.logger.warning(
                        "\t[WARN] Data will be stored in %s." % targ_file
                    )
                targ_file_with_path = (
                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
                )
                # Create a new file; the with statement closes it on exit.
                with open(targ_file_with_path, "w") as fp:
                    fp.write(result)
                    telnet_files_saved.append(targ_file)
            progress_counter += 1
            self.print_progress(progress_counter)
        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
        for file in telnet_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")

    def protocol_execute(self, protocol, target_type, sub_type):
        r"""
        Perform actions for a given protocol.

        Description of argument(s):
        protocol            Protocol to execute.
        target_type         OS type of the remote host.
        sub_type            Group type of commands.
        """

        self.logger.info(
            "\n\t[Run] Executing commands to %s using %s"
            % (self.hostname, protocol)
        )
        executed_files_saved = []
        progress_counter = 0
        list_of_cmd = self.get_command_list(
            self.ffdc_actions[target_type][sub_type]
        )
        for index, each_cmd in enumerate(list_of_cmd, start=0):
            plugin_call = False
            if isinstance(each_cmd, dict):
                if "plugin" in each_cmd:
                    # If the error state is set and the plugin explicitly
                    # requested to skip execution on error, skip it.
                    if plugin_error_dict[
                        "exit_on_error"
                    ] and self.plugin_error_check(each_cmd["plugin"]):
                        self.logger.info(
                            "\n\t[PLUGIN-ERROR] exit_on_error: %s"
                            % plugin_error_dict["exit_on_error"]
                        )
                        self.logger.info(
                            "\t[PLUGIN-SKIP] %s" % each_cmd["plugin"][0]
                        )
                        continue
                    plugin_call = True
                    # Call the plugin.
                    self.logger.info("\n\t[PLUGIN-START]")
                    result = self.execute_plugin_block(each_cmd["plugin"])
                    self.logger.info("\t[PLUGIN-END]\n")
            else:
                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)

            if not plugin_call:
                result = self.run_tool_cmd(each_cmd)
            if result:
                try:
                    file_name = self.get_file_list(
                        self.ffdc_actions[target_type][sub_type]
                    )[index]
                    # If the file is specified as None, skip storing the output.
                    if file_name == "None":
                        continue
                    targ_file = self.yaml_env_and_plugin_vars_populate(
                        file_name
                    )
                except IndexError:
                    targ_file = each_cmd.split("/")[-1]
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from %s."
                        % each_cmd
                    )
                    self.logger.warning(
                        "\t[WARN] Data will be stored in %s." % targ_file
                    )

                targ_file_with_path = (
                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
                )

                # Create a new file; the with statement closes it on exit.
                with open(targ_file_with_path, "w") as fp:
                    if isinstance(result, dict):
                        fp.write(json.dumps(result))
                    else:
                        fp.write(result)
                    executed_files_saved.append(targ_file)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

        for file in executed_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")

    def collect_and_copy_ffdc(
        self, ffdc_actions_for_target_type, form_filename=False
    ):
        r"""
        Send commands in the ffdc_config file to the targeted system.

        Description of argument(s):
        ffdc_actions_for_target_type     commands and files for the selected remote host type.
        form_filename                    if true, prepend self.target_type to the filename
        """

        # Executing commands, if any.
        self.ssh_execute_ffdc_commands(
            ffdc_actions_for_target_type, form_filename
        )

        # Copying files.
        if self.ssh_remoteclient.scpclient:
            self.logger.info(
                "\n\n\tCopying FFDC files from remote system %s.\n"
                % self.hostname
            )

            # Retrieving files from the target system.
            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
            self.scp_ffdc(
                self.ffdc_dir_path,
                self.ffdc_prefix,
                form_filename,
                list_of_files,
            )
        else:
            self.logger.info(
                "\n\n\tSkip copying FFDC files from remote system %s.\n"
                % self.hostname
            )

    def get_command_list(self, ffdc_actions_for_target_type):
        r"""
        Fetch the list of commands from the configuration file.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
        try:
            list_of_commands = ffdc_actions_for_target_type["COMMANDS"]
        except KeyError:
            list_of_commands = []
        return list_of_commands

    def get_file_list(self, ffdc_actions_for_target_type):
        r"""
        Fetch the list of files from the configuration file.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """
        try:
            list_of_files = ffdc_actions_for_target_type["FILES"]
        except KeyError:
            list_of_files = []
        return list_of_files

    def unpack_command(self, command):
        r"""
        Unpack a command entry from the config file.

        Description of argument(s):
        command    Command from the config file.
        """
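        # Illustrative examples of the two YAML command forms handled here
        # (hypothetical commands):
        #   "cat /etc/os-release"     -> ("cat /etc/os-release", 60)
        #   {"journalctl -b": 120}    -> ("journalctl -b", 120)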
        if isinstance(command, dict):
            command_txt = next(iter(command))
            command_timeout = next(iter(command.values()))
        elif isinstance(command, str):
            command_txt = command
            # Default command timeout of 60 seconds.
            command_timeout = 60

        return command_txt, command_timeout

    def ssh_execute_ffdc_commands(
        self, ffdc_actions_for_target_type, form_filename=False
    ):
        r"""
        Send commands in the ffdc_config file to the targeted system.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        form_filename                   if true, prepend self.target_type to the filename
        """
        self.logger.info(
            "\n\t[Run] Executing commands on %s using %s"
            % (self.hostname, ffdc_actions_for_target_type["PROTOCOL"][0])
        )

        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
        # If the command list is empty, return.
        if not list_of_commands:
            return

        progress_counter = 0
        for command in list_of_commands:
            command_txt, command_timeout = self.unpack_command(command)

            if form_filename:
                command_txt = str(command_txt % self.target_type)

            (
                cmd_exit_code,
                err,
                response,
            ) = self.ssh_remoteclient.execute_command(
                command_txt, command_timeout
            )

            if cmd_exit_code:
                self.logger.warning(
                    "\n\t\t[WARN] %s exits with code %s."
                    % (command_txt, str(cmd_exit_code))
                )
                self.logger.warning("\t\t[WARN] %s " % err)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

    def group_copy(self, ffdc_actions_for_target_type):
        r"""
        SCP a group of files (wild card) from the remote host.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info(
                "\n\tCopying files from remote system %s via SCP.\n"
                % self.hostname
            )

            list_of_commands = self.get_command_list(
                ffdc_actions_for_target_type
            )
            # If the command list is empty, return.
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                (
                    cmd_exit_code,
                    err,
                    response,
                ) = self.ssh_remoteclient.execute_command(command)

                # If the file does not exist, take no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    scp_result = self.ssh_remoteclient.scp_file_from_remote(
                        response.split("\n"), self.ffdc_dir_path
                    )
                    if scp_result:
                        self.logger.info(
                            "\t\tSuccessfully copied from "
                            + self.hostname
                            + ":"
                            + command
                        )
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            self.logger.info(
                "\n\n\tSkip copying files from remote system %s.\n"
                % self.hostname
            )

    def scp_ffdc(
        self,
        targ_dir_path,
        targ_file_prefix,
        form_filename,
        file_list=None,
        quiet=None,
    ):
        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be prepended to each
                                        target file's name.
        form_filename                   If true, prepend self.target_type to the filename.
        file_list                       A list of files to scp from the targeted system to this system.
        quiet                           If true, print a progress counter instead of per-file messages.

        """

        progress_counter = 0
        for filename in file_list:
            if form_filename:
                filename = str(filename % self.target_type)
            source_file_path = filename
            targ_file_path = (
                targ_dir_path + targ_file_prefix + filename.split("/")[-1]
            )

            # If the source file name contains a wild card, copy the filename as is.
            if "*" in source_file_path:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(
                    source_file_path, self.ffdc_dir_path
                )
            else:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(
                    source_file_path, targ_file_path
                )

            if not quiet:
                if scp_result:
                    self.logger.info(
                        "\t\tSuccessfully copied from "
                        + self.hostname
                        + ":"
                        + source_file_path
                        + ".\n"
                    )
                else:
                    self.logger.info(
                        "\t\tFailed to copy from "
                        + self.hostname
                        + ":"
                        + source_file_path
                        + ".\n"
                    )
            else:
                progress_counter += 1
                self.print_progress(progress_counter)

    def set_ffdc_default_store_path(self):
        r"""
        Set default values for self.ffdc_dir_path and self.ffdc_prefix.
        Collected ffdc files will be stored in the dir self.location/hostname_timestr/.
        Individual ffdc files will be named timestr_filename.

        Description of class variables:
        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.

        self.ffdc_prefix    The prefix to be given to each ffdc file name.

        """
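        # Example layout (illustrative values):
        #   self.ffdc_dir_path = "<location>/<hostname>_20240101-120000/"
        #   self.ffdc_prefix   = "20240101-120000_"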

        timestr = time.strftime("%Y%m%d-%H%M%S")
        self.ffdc_dir_path = (
            self.location + "/" + self.hostname + "_" + timestr + "/"
        )
        self.ffdc_prefix = timestr + "_"
        self.validate_local_store(self.ffdc_dir_path)

    # Need to verify that the local store path exists prior to instantiating this class.
    # This class method is used to share the same code between the CLI input parm
    # and Robot Framework "${EXECDIR}/logs" before referencing this class.
    @classmethod
    def validate_local_store(cls, dir_path):
        r"""
        Ensure the path exists to store FFDC files locally.

        Description of variable:
        dir_path  The dir path where collected ffdc data files will be stored.

        """

        if not os.path.exists(dir_path):
            try:
                os.makedirs(dir_path, 0o755)
            except (IOError, OSError) as e:
                # PermissionError.  This is a classmethod that may run before
                # the instance logger exists, so report errors with print().
                if e.errno == EPERM or e.errno == EACCES:
                    print(
                        "\tERROR: os.makedirs %s failed with"
                        " PermissionError.\n" % dir_path
                    )
                else:
                    print(
                        "\tERROR: os.makedirs %s failed with %s.\n"
                        % (dir_path, e.strerror)
                    )
                sys.exit(-1)

    def print_progress(self, progress):
        r"""
        Print an activity progress '+' marker.

        Description of variable:
        progress  Progress counter.

        """

        sys.stdout.write("\r\t" + "+" * progress)
        sys.stdout.flush()
        time.sleep(0.1)

    def verify_redfish(self):
        r"""
        Verify that the remote host has the Redfish service active.

        """
        redfish_parm = (
            "redfishtool -r "
            + self.hostname
            + ":"
            + self.port_https
            + " -S Always raw GET /redfish/v1/"
        )
        return self.run_tool_cmd(redfish_parm, True)

    def verify_ipmi(self):
        r"""
        Verify that the remote host has the IPMI LAN service active.

        """
        if self.target_type == "OPENBMC":
            ipmi_parm = (
                "ipmitool -I lanplus -C 17  -U "
                + self.username
                + " -P "
                + self.password
                + " -H "
                + self.hostname
                + " -p "
                + str(self.port_ipmi)
                + " power status"
            )
        else:
            ipmi_parm = (
                "ipmitool -I lanplus  -P "
                + self.password
                + " -H "
                + self.hostname
                + " -p "
                + str(self.port_ipmi)
                + " power status"
            )

        return self.run_tool_cmd(ipmi_parm, True)

    def run_tool_cmd(self, parms_string, quiet=False):
        r"""
        Run a standard CLI tool or script.

        Description of variable:
        parms_string         tool command options.
        quiet                do not print the tool error message if True.
        """
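        # Example (as used elsewhere in this class):
        #   self.run_tool_cmd("redfishtool -V")   # returns the tool's stdout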

        result = subprocess.run(
            [parms_string],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=True,
            universal_newlines=True,
        )

        if result.stderr and not quiet:
            self.logger.error("\n\t\tERROR with %s " % parms_string)
            self.logger.error("\t\t" + result.stderr)

        return result.stdout

    def verify_protocol(self, protocol_list):
        r"""
        Perform a working check for each protocol.

        Description of argument(s):
        protocol_list        List of protocols.
        """

        tmp_list = []
        if self.target_is_pingable():
            tmp_list.append("SHELL")

        for protocol in protocol_list:
            if self.remote_protocol != "ALL":
                if self.remote_protocol != protocol:
                    continue

            # Only check SSH/SCP once for both protocols.
            if (
                protocol == "SSH" or protocol == "SCP"
            ) and protocol not in tmp_list:
                if self.ssh_to_target_system():
                    # Add only what the user asked for.
                    if self.remote_protocol != "ALL":
                        tmp_list.append(self.remote_protocol)
                    else:
                        tmp_list.append("SSH")
                        tmp_list.append("SCP")

            if protocol == "TELNET":
                if self.telnet_to_target_system():
                    tmp_list.append(protocol)

            if protocol == "REDFISH":
                if self.verify_redfish():
                    tmp_list.append(protocol)
                    self.logger.info(
                        "\n\t[Check] %s Redfish Service.\t\t [OK]"
                        % self.hostname
                    )
                else:
                    self.logger.info(
                        "\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]"
                        % self.hostname
                    )

            if protocol == "IPMI":
                if self.verify_ipmi():
                    tmp_list.append(protocol)
                    self.logger.info(
                        "\n\t[Check] %s IPMI LAN Service.\t\t [OK]"
                        % self.hostname
                    )
                else:
                    self.logger.info(
                        "\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]"
                        % self.hostname
                    )

        return tmp_list

    def load_env(self):
        r"""
        Load user-supplied environment variables for use in the YAML config.

        """
        # These are the env vars a user can reference in YAML; they are
        # loaded into the environment at runtime.
        # Example YAML:
        # -COMMANDS:
        #    - my_command ${hostname}  ${username}   ${password}
        os.environ["hostname"] = self.hostname
        os.environ["username"] = self.username
        os.environ["password"] = self.password
        os.environ["port_ssh"] = self.port_ssh
        os.environ["port_https"] = self.port_https
        os.environ["port_ipmi"] = self.port_ipmi

        # Append the default env vars.
        self.env_dict["hostname"] = self.hostname
        self.env_dict["username"] = self.username
        self.env_dict["password"] = self.password
        self.env_dict["port_ssh"] = self.port_ssh
        self.env_dict["port_https"] = self.port_https
        self.env_dict["port_ipmi"] = self.port_ipmi

        try:
            tmp_env_dict = {}
            if self.env_vars:
                tmp_env_dict = json.loads(self.env_vars)
                # Export the user-supplied CLI ENV vars.
                for key, value in tmp_env_dict.items():
                    os.environ[key] = str(value)
                    self.env_dict[key] = str(value)

            if self.econfig:
                with open(self.econfig, "r") as file:
                    try:
                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
                    except yaml.YAMLError as e:
                        self.logger.error(e)
                        sys.exit(-1)
                # Export the ENV vars from the YAML file.
                for key, value in tmp_env_dict["env_params"].items():
                    os.environ[key] = str(value)
                    self.env_dict[key] = str(value)
        except json.decoder.JSONDecodeError as e:
            self.logger.error("\n\tERROR: %s " % e)
            sys.exit(-1)

        # Mask the password so it is not displayed on the console.
        mask_dict = self.env_dict.copy()
        for k, v in mask_dict.items():
            if k.lower().find("password") != -1:
                hidden_text = []
                hidden_text.append(v)
                password_regex = (
                    "(" + "|".join([re.escape(x) for x in hidden_text]) + ")"
                )
                mask_dict[k] = re.sub(password_regex, "********", v)

        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))

    def execute_python_eval(self, eval_string):
        r"""
        Execute a qualified python function string using eval().

        Description of argument(s):
        eval_string        The python function call string to execute.

        Example:
                eval(plugin.foo_func.foo_func(10))
        """
        try:
            self.logger.info("\tExecuting plugin func()")
            self.logger.debug("\tCall func: %s" % eval_string)
            result = eval(eval_string)
            self.logger.info("\treturn: %s" % str(result))
        except (
            ValueError,
            SyntaxError,
            NameError,
            AttributeError,
            TypeError,
        ) as e:
            self.logger.error("\tERROR: execute_python_eval: %s" % e)
            # Set the plugin error state.
            plugin_error_dict["exit_on_error"] = True
            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
            return "PLUGIN_EVAL_ERROR"

        return result

    def execute_plugin_block(self, plugin_cmd_list):
        r"""
        Pack the plugin command into a qualified python string object.

        Description of argument(s):
        plugin_cmd_list       Plugin block read from YAML
                              [{'plugin_name': 'plugin.foo_func.my_func'},
                               {'plugin_args': [10]}]

        Example:
            - plugin:
              - plugin_name: plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2

            - plugin:
              - plugin_name: result = plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2

            - plugin:
              - plugin_name: result1,result2 = plugin.foo_func.my_func
              - plugin_args:
                - arg1
                - arg2
        """
        # Default to the error sentinel so a failure before assignment below
        # is still reported as a plugin error.
        resp = "PLUGIN_EVAL_ERROR"
        try:
            idx = self.key_index_list_dict("plugin_name", plugin_cmd_list)
            plugin_name = plugin_cmd_list[idx]["plugin_name"]
            # An equals separator means the plugin function returns a result.
            if " = " in plugin_name:
                # Ex. ['result', 'plugin.foo_func.my_func']
                plugin_name_args = plugin_name.split(" = ")
                # The plugin func returns data.
                for arg in plugin_name_args:
                    if arg == plugin_name_args[-1]:
                        plugin_name = arg
                    else:
                        plugin_resp = arg.split(",")
                        # ['result1','result2']
                        for x in plugin_resp:
                            global_plugin_list.append(x)
                            global_plugin_dict[x] = ""

            # Walk the plugin args ['arg1','arg2'].
            # Use an empty list if the YAML 'plugin_args' statement is not declared.
            if any("plugin_args" in d for d in plugin_cmd_list):
                idx = self.key_index_list_dict("plugin_args", plugin_cmd_list)
                plugin_args = plugin_cmd_list[idx]["plugin_args"]
                if plugin_args:
                    plugin_args = self.yaml_args_populate(plugin_args)
                else:
                    plugin_args = []
            else:
                plugin_args = self.yaml_args_populate([])

            # Pack the args arg1, arg2, ..., argn into an
            # "arg1","arg2","argn" string as params for the function.
            parm_args_str = self.yaml_args_string(plugin_args)
            if parm_args_str:
                plugin_func = plugin_name + "(" + parm_args_str + ")"
            else:
                plugin_func = plugin_name + "()"

            # Execute the plugin function.
            if global_plugin_dict:
                resp = self.execute_python_eval(plugin_func)
                # Update the plugin vars dict if there is anything to update.
                if resp != "PLUGIN_EVAL_ERROR":
                    self.response_args_data(resp)
            else:
                resp = self.execute_python_eval(plugin_func)
        except Exception as e:
            # Set the plugin error state.
            plugin_error_dict["exit_on_error"] = True
            self.logger.error("\tERROR: execute_plugin_block: %s" % e)

        # There was a real error executing the plugin function.
        if resp == "PLUGIN_EVAL_ERROR":
            return resp

        # Check plugin_expects_return (int, string, list, dict, etc.).
        if any("plugin_expects_return" in d for d in plugin_cmd_list):
            idx = self.key_index_list_dict(
                "plugin_expects_return", plugin_cmd_list
            )
            plugin_expects = plugin_cmd_list[idx]["plugin_expects_return"]
            if plugin_expects:
                if resp:
                    if (
                        self.plugin_expect_type(plugin_expects, resp)
                        == "INVALID"
                    ):
                        self.logger.error("\tWARN: Plugin error check skipped")
                    elif not self.plugin_expect_type(plugin_expects, resp):
                        self.logger.error(
                            "\tERROR: Plugin expects return data: %s"
                            % plugin_expects
                        )
                        plugin_error_dict["exit_on_error"] = True
                elif not resp:
                    self.logger.error(
                        "\tERROR: Plugin func failed to return data"
                    )
                    plugin_error_dict["exit_on_error"] = True

        return resp

    def response_args_data(self, plugin_resp):
        r"""
        Parse the plugin function response and update the plugin return variables.

        Description of argument(s):
        plugin_resp       Response data from the plugin function.
        """
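        # Illustrative example: with global_plugin_list == ['result1', 'result2']
        # and plugin_resp == ('foo\n', 'bar\n'), global_plugin_dict ends up as
        # {'result1': 'foo', 'result2': 'bar'}.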
        resp_list = []
        resp_data = ""

        # There is nothing to update from the plugin response.
        if len(global_plugin_list) == 0 or plugin_resp == "None":
            return

        if isinstance(plugin_resp, str):
            resp_data = plugin_resp.strip("\r\n\t")
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, bytes):
            resp_data = str(plugin_resp, "UTF-8").strip("\r\n\t")
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, tuple):
            if len(global_plugin_list) == 1:
                resp_list.append(plugin_resp)
            else:
                resp_list = list(plugin_resp)
                resp_list = [x.strip("\r\n\t") for x in resp_list]
        elif isinstance(plugin_resp, list):
            if len(global_plugin_list) == 1:
                resp_list.append([x.strip("\r\n\t") for x in plugin_resp])
            else:
                resp_list = [x.strip("\r\n\t") for x in plugin_resp]
        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
            resp_list.append(plugin_resp)

        # Iterate if there is a list of plugin return vars to update.
        for idx, item in enumerate(resp_list, start=0):
            # Exit the loop once all declared return vars are updated.
            if idx >= len(global_plugin_list):
                break
            # Find the return var name at this index and update the
            # global plugin return dictionary.
            try:
                dict_idx = global_plugin_list[idx]
                global_plugin_dict[dict_idx] = item
            except (IndexError, ValueError) as e:
                self.logger.warning("\tWARN: response_args_data: %s" % e)

        # Done updating the plugin dict, whether it passed or failed;
        # clear the list elements for the next plugin block to execute.
        global_plugin_list.clear()

    def yaml_args_string(self, plugin_args):
        r"""
        Pack the args into a string.

        plugin_args            arg list ['arg1','arg2','argn']
        """
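        # Illustrative example: ['arg1', 2, 'arg3'] -> '"arg1",2,"arg3"'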
        args_str = ""
        for args in plugin_args:
            if args:
                if isinstance(args, (int, float)):
                    args_str += str(args)
                elif args in global_plugin_type_list:
                    args_str += str(global_plugin_dict[args])
                else:
                    args_str += '"' + str(args.strip("\r\n\t")) + '"'
            # Add a comma after every element except the last one.
            if args != plugin_args[-1]:
                args_str += ","
        return args_str

    def yaml_args_populate(self, yaml_arg_list):
        r"""
        Decode env and plugin vars and populate the arg list.

        Description of argument(s):
        yaml_arg_list         arg list read from YAML

        Example:
          - plugin_args:
            - arg1
            - arg2

                  yaml_arg_list:  [arg1, arg2]
        """
        # Get the env loaded keys as a list ['hostname', 'username', 'password'].
        env_vars_list = list(self.env_dict)

        if isinstance(yaml_arg_list, list):
            tmp_list = []
            for arg in yaml_arg_list:
                if isinstance(arg, (int, float)):
                    tmp_list.append(arg)
                    continue
                elif isinstance(arg, str):
                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
                    tmp_list.append(arg_str)
                else:
                    tmp_list.append(arg)

            # Return the populated list.
            return tmp_list

    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
        r"""
        Update ${MY_VAR} and plugin vars.

        Description of argument(s):
        yaml_arg_str         arg string read from YAML.

        Example:
            - cat ${MY_VAR}
            - ls -AX my_plugin_var
        """
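        # Illustrative example: with os.environ["hostname"] == "1.2.3.4",
        # "ping -c 1 ${hostname}" is returned as "ping -c 1 1.2.3.4".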
        # Parse the string for env vars ${env_vars}.
        env_var_names_list = []
        try:
            # Example: list of matching env vars ['username', 'password', 'hostname'].
            # Extra escapes are needed for the special symbols: r'\$\{([^\}]+)\}' works well.
            var_name_regex = r"\$\{([^\}]+)\}"
            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
            for var in env_var_names_list:
                env_var = os.environ[var]
                env_replace = "${" + var + "}"
                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
        except Exception as e:
            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)

        # Parse the string for plugin vars.
        try:
            # Example: list of plugin vars ['my_username', 'my_data'].
            plugin_var_name_list = global_plugin_dict.keys()
            for var in plugin_var_name_list:
                # Skip env vars already populated by the code block above.
                if var in env_var_names_list:
                    continue
                # If this plugin var exists but is empty in the dict, don't replace it.
                # This is either a YAML plugin statement incorrectly used or
                # a user-added plugin var which is not going to be populated.
                if yaml_arg_str in global_plugin_dict:
                    if isinstance(global_plugin_dict[var], (list, dict)):
                        # List or dict data types can't be replaced inline; they are
                        # used directly in the eval() function call.
                        global_plugin_type_list.append(var)
                    else:
                        yaml_arg_str = yaml_arg_str.replace(
                            str(var), str(global_plugin_dict[var])
                        )
                # Just a string like a filename or command.
                else:
                    yaml_arg_str = yaml_arg_str.replace(
                        str(var), str(global_plugin_dict[var])
                    )
        except (IndexError, ValueError) as e:
            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)

        return yaml_arg_str

    def plugin_error_check(self, plugin_dict):
        r"""
        Process the plugin error directive.

        Description of argument(s):
        plugin_dict        Plugin block (list of dicts) read from YAML.
        """
        if any("plugin_error" in d for d in plugin_dict):
            for d in plugin_dict:
                if "plugin_error" in d:
                    value = d["plugin_error"]
                    # Report whether the named error state has been set by a plugin.
                    return plugin_error_dict[value]

    def key_index_list_dict(self, key, list_dict):
        r"""
        Iterate over a list of dictionaries and return the index where the key is found.

        Description of argument(s):
        key           Valid key in a dict.
        list_dict     List of dictionaries.
        """
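        # Illustrative example:
        #   key_index_list_dict("plugin_args",
        #       [{"plugin_name": "plugin.foo_func.my_func"},
        #        {"plugin_args": ["arg1"]}])   -> returns 1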
        for i, d in enumerate(list_dict):
            if key in d:
                return i

    def plugin_expect_type(self, type, data):
        r"""
        Plugin expect directive type check.
        """
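        # Illustrative examples:
        #   plugin_expect_type("list", ["a", "b"])  -> True
        #   plugin_expect_type("int", "10")         -> False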
        if type == "int":
            return isinstance(data, int)
        elif type == "float":
            return isinstance(data, float)
        elif type == "str":
            return isinstance(data, str)
        elif type == "list":
            return isinstance(data, list)
        elif type == "dict":
            return isinstance(data, dict)
        elif type == "tuple":
            return isinstance(data, tuple)
        else:
            self.logger.info("\tInvalid data type requested: %s" % type)
            return "INVALID"