#!/usr/bin/env python3
2
3r"""
See class prologue below for details.
5"""
6
7import os
8import re
9import sys
10import yaml
11import json
12import time
13import logging
14import platform
15from errno import EACCES, EPERM
16import subprocess
17from ssh_utility import SSHRemoteclient
18from telnet_utility import TelnetRemoteclient
19
20r"""
21User define plugins python functions.
22
23It will imports files from directory plugins
24
25plugins
26├── file1.py
27└── file2.py
28
29Example how to define in YAML:
30 - plugin:
31   - plugin_name: plugin.foo_func.foo_func_yaml
32     - plugin_args:
33       - arg1
34       - arg2
35"""
36plugin_dir = 'plugins'
37try:
38    for module in os.listdir(plugin_dir):
39        if module == '__init__.py' or module[-3:] != '.py':
40            continue
41        plugin_module = "plugins." + module[:-3]
42        # To access the module plugin.<module name>.<function>
43        # Example: plugin.foo_func.foo_func_yaml()
44        try:
45            plugin = __import__(plugin_module, globals(), locals(), [], 0)
46        except Exception as e:
47            print("PLUGIN: Module import failed: %s" % module)
48            pass
49except FileNotFoundError as e:
50    print("PLUGIN: %s" % e)
51    pass
52
53r"""
54This is for plugin functions returning data or responses to the caller
55in YAML plugin setup.
56
57Example:
58
59    - plugin:
60      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
61      - plugin_args:
62        - ${hostname}
63        - ${username}
64        - ${password}
65        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
66     - plugin:
67        - plugin_name: plugin.print_vars.print_vars
68        - plugin_args:
69          - version
70
71where first plugin "version" var is used by another plugin in the YAML
72block or plugin
73
74"""
75global global_log_store_path
76global global_plugin_dict
77global global_plugin_list
78global_plugin_dict = {}
79global_plugin_list = []
80global_log_store_path = ''
81
82
83class FFDCCollector:
84
85    r"""
    Sends commands from the configuration file to the targeted system to collect log files,
    then fetches and stores the generated files at the specified location.
88
89    """
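    # Typical usage sketch (all argument values below are hypothetical):
    #
    #     collector = FFDCCollector('1.2.3.4', 'admin', 'password',
    #                               'ffdc_config.yaml', '/tmp/ffdc',
    #                               'OPENBMC', 'ALL', None, None, 'INFO')
    #     collector.collect_ffdc()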
90
91    def __init__(self,
92                 hostname,
93                 username,
94                 password,
95                 ffdc_config,
96                 location,
97                 remote_type,
98                 remote_protocol,
99                 env_vars,
100                 econfig,
101                 log_level):
102        r"""
103        Description of argument(s):
104
105        hostname            name/ip of the targeted (remote) system
106        username            user on the targeted system with access to FFDC files
107        password            password for user on targeted system
108        ffdc_config         configuration file listing commands and files for FFDC
109        location            where to store collected FFDC
110        remote_type         os type of the remote host
111        remote_protocol     Protocol to use to collect data
        env_vars            User-defined CLI env vars '{"key" : "value"}'
        econfig             User-defined env vars YAML file
        log_level           Script logging level (e.g. INFO, DEBUG)
114
115        """
116
117        self.hostname = hostname
118        self.username = username
119        self.password = password
120        self.ffdc_config = ffdc_config
121        self.location = location + "/" + remote_type.upper()
122        self.ssh_remoteclient = None
123        self.telnet_remoteclient = None
124        self.ffdc_dir_path = ""
125        self.ffdc_prefix = ""
126        self.target_type = remote_type.upper()
127        self.remote_protocol = remote_protocol.upper()
128        self.env_vars = env_vars
129        self.econfig = econfig
130        self.start_time = 0
131        self.elapsed_time = ''
132        self.logger = None
133
        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set
        # here to ensure that all files for this run have the same timestamp
        # and are saved in the same directory.
138        # self.location == local system for now
139        self.set_ffdc_defaults()
140
        # Logger for this run.  Must be created after set_ffdc_defaults().
142        self.script_logging(getattr(logging, log_level.upper()))
143
144        # Verify top level directory exists for storage
145        self.validate_local_store(self.location)
146
147        if self.verify_script_env():
            # Load default or user-defined YAML configuration file.
149            with open(self.ffdc_config, 'r') as file:
150                self.ffdc_actions = yaml.load(file, Loader=yaml.FullLoader)
151
152            if self.target_type not in self.ffdc_actions.keys():
153                self.logger.error(
154                    "\n\tERROR: %s is not listed in %s.\n\n" % (self.target_type, self.ffdc_config))
155                sys.exit(-1)
156        else:
157            sys.exit(-1)
158
        # Load ENV vars from user.
        self.logger.info("\n\tENV: User-defined input YAML variables")
        self.env_dict = {}
        self.load_env()
163
164    def verify_script_env(self):
165
        # Imported here only to log their versions.
167        import click
168        import paramiko
169
170        run_env_ok = True
171
172        redfishtool_version = self.run_tool_cmd('redfishtool -V').split(' ')[2].strip('\n')
173        ipmitool_version = self.run_tool_cmd('ipmitool -V').split(' ')[2]
174
175        self.logger.info("\n\t---- Script host environment ----")
176        self.logger.info("\t{:<10}  {:<10}".format('Script hostname', os.uname()[1]))
177        self.logger.info("\t{:<10}  {:<10}".format('Script host os', platform.platform()))
178        self.logger.info("\t{:<10}  {:>10}".format('Python', platform.python_version()))
179        self.logger.info("\t{:<10}  {:>10}".format('PyYAML', yaml.__version__))
180        self.logger.info("\t{:<10}  {:>10}".format('click', click.__version__))
181        self.logger.info("\t{:<10}  {:>10}".format('paramiko', paramiko.__version__))
182        self.logger.info("\t{:<10}  {:>9}".format('redfishtool', redfishtool_version))
183        self.logger.info("\t{:<10}  {:>12}".format('ipmitool', ipmitool_version))
184
        # Compare the installed PyYAML version (e.g. '5.4.1' -> (5, 4, 1)) against the minimum.
        if tuple(int(i) for i in yaml.__version__.split('.')) < (5, 4, 1):
186            self.logger.error("\n\tERROR: Python or python packages do not meet minimum version requirement.")
187            self.logger.error("\tERROR: PyYAML version 5.4.1 or higher is needed.\n")
188            run_env_ok = False
189
190        self.logger.info("\t---- End script host environment ----")
191        return run_env_ok
192
193    def script_logging(self,
194                       log_level_attr):
195        r"""
196        Create logger
197
198        """
199        self.logger = logging.getLogger()
200        self.logger.setLevel(log_level_attr)
201        log_file_handler = logging.FileHandler(self.ffdc_dir_path + "collector.log")
202
203        stdout_handler = logging.StreamHandler(sys.stdout)
204        self.logger.addHandler(log_file_handler)
205        self.logger.addHandler(stdout_handler)
206
207        # Turn off paramiko INFO logging
208        logging.getLogger("paramiko").setLevel(logging.WARNING)
209
210    def target_is_pingable(self):
211        r"""
212        Check if target system is ping-able.
213
214        """
        response = os.system("ping -c 1 %s >/dev/null 2>&1" % self.hostname)
216        if response == 0:
217            self.logger.info("\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname)
218            return True
219        else:
220            self.logger.error(
221                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n" % self.hostname)
222            sys.exit(-1)
223
224    def collect_ffdc(self):
225        r"""
226        Initiate FFDC Collection depending on requested protocol.
227
228        """
229
230        self.logger.info("\n\t---- Start communicating with %s ----" % self.hostname)
231        self.start_time = time.time()
232
233        # Find the list of target and protocol supported.
234        check_protocol_list = []
235        config_dict = self.ffdc_actions
236
237        for target_type in config_dict.keys():
238            if self.target_type != target_type:
239                continue
240
241            for k, v in config_dict[target_type].items():
242                if config_dict[target_type][k]['PROTOCOL'][0] not in check_protocol_list:
243                    check_protocol_list.append(config_dict[target_type][k]['PROTOCOL'][0])
244
245        self.logger.info("\n\t %s protocol type: %s" % (self.target_type, check_protocol_list))
246
247        verified_working_protocol = self.verify_protocol(check_protocol_list)
248
249        if verified_working_protocol:
250            self.logger.info("\n\t---- Completed protocol pre-requisite check ----\n")
251
252        # Verify top level directory exists for storage
253        self.validate_local_store(self.location)
254
255        if ((self.remote_protocol not in verified_working_protocol) and (self.remote_protocol != 'ALL')):
256            self.logger.info("\n\tWorking protocol list: %s" % verified_working_protocol)
257            self.logger.error(
258                '\tERROR: Requested protocol %s is not in working protocol list.\n'
259                % self.remote_protocol)
260            sys.exit(-1)
261        else:
262            self.generate_ffdc(verified_working_protocol)
263
264    def ssh_to_target_system(self):
265        r"""
266        Open a ssh connection to targeted system.
267
268        """
269
270        self.ssh_remoteclient = SSHRemoteclient(self.hostname,
271                                                self.username,
272                                                self.password)
273
274        if self.ssh_remoteclient.ssh_remoteclient_login():
275            self.logger.info("\n\t[Check] %s SSH connection established.\t [OK]" % self.hostname)
276
277            # Check scp connection.
278            # If scp connection fails,
279            # continue with FFDC generation but skip scp files to local host.
280            self.ssh_remoteclient.scp_connection()
281            return True
282        else:
283            self.logger.info("\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]" % self.hostname)
284            return False
285
286    def telnet_to_target_system(self):
287        r"""
288        Open a telnet connection to targeted system.
289        """
290        self.telnet_remoteclient = TelnetRemoteclient(self.hostname,
291                                                      self.username,
292                                                      self.password)
293        if self.telnet_remoteclient.tn_remoteclient_login():
294            self.logger.info("\n\t[Check] %s Telnet connection established.\t [OK]" % self.hostname)
295            return True
296        else:
297            self.logger.info("\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]" % self.hostname)
298            return False
299
300    def generate_ffdc(self, working_protocol_list):
301        r"""
302        Determine actions based on remote host type
303
304        Description of argument(s):
305        working_protocol_list    list of confirmed working protocols to connect to remote host.
306        """
307
308        self.logger.info("\n\t---- Executing commands on " + self.hostname + " ----")
309        self.logger.info("\n\tWorking protocol list: %s" % working_protocol_list)
310
311        config_dict = self.ffdc_actions
312        for target_type in config_dict.keys():
313            if self.target_type != target_type:
314                continue
315
316            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
317            global_plugin_dict['global_log_store_path'] = self.ffdc_dir_path
318            self.logger.info("\tSystem Type: %s" % target_type)
319            for k, v in config_dict[target_type].items():
320
321                if self.remote_protocol not in working_protocol_list \
322                        and self.remote_protocol != 'ALL':
323                    continue
324
325                protocol = config_dict[target_type][k]['PROTOCOL'][0]
326
                if protocol in working_protocol_list:
331                    if protocol == 'SSH' or protocol == 'SCP':
332                        self.protocol_ssh(protocol, target_type, k)
333                    elif protocol == 'TELNET':
334                        self.protocol_telnet(target_type, k)
335                    elif protocol == 'REDFISH' or protocol == 'IPMI' or protocol == 'SHELL':
336                        self.protocol_execute(protocol, target_type, k)
337                else:
338                    self.logger.error("\n\tERROR: %s is not available for %s." % (protocol, self.hostname))
339
340        # Close network connection after collecting all files
341        self.elapsed_time = time.strftime("%H:%M:%S", time.gmtime(time.time() - self.start_time))
342        if self.ssh_remoteclient:
343            self.ssh_remoteclient.ssh_remoteclient_disconnect()
344        if self.telnet_remoteclient:
345            self.telnet_remoteclient.tn_remoteclient_disconnect()
346
347    def protocol_ssh(self,
348                     protocol,
349                     target_type,
350                     sub_type):
351        r"""
352        Perform actions using SSH and SCP protocols.
353
354        Description of argument(s):
355        protocol            Protocol to execute.
356        target_type         OS Type of remote host.
357        sub_type            Group type of commands.
358        """
359
360        if protocol == 'SCP':
361            self.group_copy(self.ffdc_actions[target_type][sub_type])
362        else:
363            self.collect_and_copy_ffdc(self.ffdc_actions[target_type][sub_type])
364
365    def protocol_telnet(self,
366                        target_type,
367                        sub_type):
368        r"""
        Perform actions using the telnet protocol.

        Description of argument(s):
        target_type          OS Type of remote host.
        sub_type             Group type of commands.
        """
373        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, 'TELNET'))
374        telnet_files_saved = []
375        progress_counter = 0
376        list_of_commands = self.ffdc_actions[target_type][sub_type]['COMMANDS']
377        for index, each_cmd in enumerate(list_of_commands, start=0):
378            command_txt, command_timeout = self.unpack_command(each_cmd)
379            result = self.telnet_remoteclient.execute_command(command_txt, command_timeout)
380            if result:
381                try:
382                    targ_file = self.ffdc_actions[target_type][sub_type]['FILES'][index]
383                except IndexError:
384                    targ_file = command_txt
385                    self.logger.warning(
386                        "\n\t[WARN] Missing filename to store data from telnet %s." % each_cmd)
387                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
388                targ_file_with_path = (self.ffdc_dir_path
389                                       + self.ffdc_prefix
390                                       + targ_file)
                # Write the command output to a new file.
                with open(targ_file_with_path, 'w') as fp:
                    fp.write(result)
                telnet_files_saved.append(targ_file)
396            progress_counter += 1
397            self.print_progress(progress_counter)
398        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
399        for file in telnet_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
401
402    def protocol_execute(self,
403                         protocol,
404                         target_type,
405                         sub_type):
406        r"""
407        Perform actions for a given protocol.
408
409        Description of argument(s):
410        protocol            Protocol to execute.
411        target_type         OS Type of remote host.
412        sub_type            Group type of commands.
413        """
414
        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, protocol))
416        executed_files_saved = []
417        progress_counter = 0
418        list_of_cmd = self.get_command_list(self.ffdc_actions[target_type][sub_type])
419        for index, each_cmd in enumerate(list_of_cmd, start=0):
420            plugin_call = False
421            if isinstance(each_cmd, dict):
422                if 'plugin' in each_cmd:
423                    plugin_call = True
424                    # call the plugin
425                    self.logger.info("\n\t[PLUGIN-START]")
426                    result = self.execute_plugin_block(each_cmd['plugin'])
427                    self.logger.info("\t[PLUGIN-END]\n")
428            else:
429                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
430
431            if not plugin_call:
432                result = self.run_tool_cmd(each_cmd)
433            if result:
434                try:
435                    file_name = self.get_file_list(self.ffdc_actions[target_type][sub_type])[index]
436                    targ_file = self.yaml_env_and_plugin_vars_populate(file_name)
437                    # If file is specified as None.
438                    if targ_file == "None":
439                        continue
440                except IndexError:
441                    targ_file = each_cmd.split('/')[-1]
442                    self.logger.warning(
443                        "\n\t[WARN] Missing filename to store data from %s." % each_cmd)
444                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
445
446                targ_file_with_path = (self.ffdc_dir_path
447                                       + self.ffdc_prefix
448                                       + targ_file)
449
                # Write the command output to a new file.
                with open(targ_file_with_path, 'w') as fp:
                    fp.write(result)
                executed_files_saved.append(targ_file)
455
456            progress_counter += 1
457            self.print_progress(progress_counter)
458
459        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
460
461        for file in executed_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
463
464    def collect_and_copy_ffdc(self,
465                              ffdc_actions_for_target_type,
466                              form_filename=False):
467        r"""
468        Send commands in ffdc_config file to targeted system.
469
470        Description of argument(s):
471        ffdc_actions_for_target_type     commands and files for the selected remote host type.
472        form_filename                    if true, pre-pend self.target_type to filename
473        """
474
475        # Executing commands, if any
476        self.ssh_execute_ffdc_commands(ffdc_actions_for_target_type,
477                                       form_filename)
478
479        # Copying files
480        if self.ssh_remoteclient.scpclient:
481            self.logger.info("\n\n\tCopying FFDC files from remote system %s.\n" % self.hostname)
482
483            # Retrieving files from target system
484            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
485            self.scp_ffdc(self.ffdc_dir_path, self.ffdc_prefix, form_filename, list_of_files)
486        else:
487            self.logger.info("\n\n\tSkip copying FFDC files from remote system %s.\n" % self.hostname)
488
489    def get_command_list(self,
490                         ffdc_actions_for_target_type):
491        r"""
492        Fetch list of commands from configuration file
493
494        Description of argument(s):
495        ffdc_actions_for_target_type    commands and files for the selected remote host type.
496        """
497        try:
498            list_of_commands = ffdc_actions_for_target_type['COMMANDS']
499        except KeyError:
500            list_of_commands = []
501        return list_of_commands
502
503    def get_file_list(self,
504                      ffdc_actions_for_target_type):
505        r"""
        Fetch list of files from configuration file
507
508        Description of argument(s):
509        ffdc_actions_for_target_type    commands and files for the selected remote host type.
510        """
511        try:
512            list_of_files = ffdc_actions_for_target_type['FILES']
513        except KeyError:
514            list_of_files = []
515        return list_of_files
516
517    def unpack_command(self,
518                       command):
519        r"""
520        Unpack command from config file
521
522        Description of argument(s):
523        command    Command from config file.
524        """
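        # Example command forms accepted from the YAML config (command names
        # and timeout values are illustrative):
        #   - COMMANDS:
        #       - some_command arg1             # plain string, default 60 second timeout
        #       - some_long_command arg1: 120   # dict form, explicit 120 second timeout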
525        if isinstance(command, dict):
526            command_txt = next(iter(command))
527            command_timeout = next(iter(command.values()))
528        elif isinstance(command, str):
529            command_txt = command
530            # Default command timeout 60 seconds
531            command_timeout = 60
532
533        return command_txt, command_timeout
534
535    def ssh_execute_ffdc_commands(self,
536                                  ffdc_actions_for_target_type,
537                                  form_filename=False):
538        r"""
539        Send commands in ffdc_config file to targeted system.
540
541        Description of argument(s):
542        ffdc_actions_for_target_type    commands and files for the selected remote host type.
543        form_filename                    if true, pre-pend self.target_type to filename
544        """
545        self.logger.info("\n\t[Run] Executing commands on %s using %s"
546                         % (self.hostname, ffdc_actions_for_target_type['PROTOCOL'][0]))
547
548        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
        # If the command list is empty, return.
550        if not list_of_commands:
551            return
552
553        progress_counter = 0
554        for command in list_of_commands:
555            command_txt, command_timeout = self.unpack_command(command)
556
557            if form_filename:
558                command_txt = str(command_txt % self.target_type)
559
560            cmd_exit_code, err, response = \
561                self.ssh_remoteclient.execute_command(command_txt, command_timeout)
562
563            if cmd_exit_code:
564                self.logger.warning(
565                    "\n\t\t[WARN] %s exits with code %s." % (command_txt, str(cmd_exit_code)))
566                self.logger.warning("\t\t[WARN] %s " % err)
567
568            progress_counter += 1
569            self.print_progress(progress_counter)
570
571        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
572
573    def group_copy(self,
574                   ffdc_actions_for_target_type):
575        r"""
576        scp group of files (wild card) from remote host.
577
578        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
580        """
581
582        if self.ssh_remoteclient.scpclient:
583            self.logger.info("\n\tCopying files from remote system %s via SCP.\n" % self.hostname)
584
585            list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
            # If the command list is empty, return.
587            if not list_of_commands:
588                return
589
590            for command in list_of_commands:
591                try:
592                    command = self.yaml_env_and_plugin_vars_populate(command)
593                    filename = command.split('ls -AX')[1]
594                    filename = self.yaml_env_and_plugin_vars_populate(filename)
595                except IndexError:
596                    self.logger.error("\t\tInvalid command %s" % command)
597                    continue
598
599                cmd_exit_code, err, response = \
600                    self.ssh_remoteclient.execute_command(command)
601
                # If the file does not exist, the code takes no action;
                # cmd_exit_code is ignored for this scenario.
604                if response:
605                    scp_result = self.ssh_remoteclient.scp_file_from_remote(filename, self.ffdc_dir_path)
606                    if scp_result:
607                        self.logger.info("\t\tSuccessfully copied from " + self.hostname + ':' + filename)
608                else:
609                    self.logger.info("\t\tThere is no " + filename)
610
611        else:
612            self.logger.info("\n\n\tSkip copying files from remote system %s.\n" % self.hostname)
613
614    def scp_ffdc(self,
615                 targ_dir_path,
616                 targ_file_prefix,
617                 form_filename,
618                 file_list=None,
619                 quiet=None):
620        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be prepended to each
                                        target file's name.
        form_filename                   If True, pre-pend self.target_type to each filename.
        file_list                       A list of files to scp from the targeted system to this system.
        quiet                           If True, print a progress counter instead of per-file messages.
628
629        """
630
631        progress_counter = 0
632        for filename in file_list:
633            if form_filename:
634                filename = str(filename % self.target_type)
635            source_file_path = filename
636            targ_file_path = targ_dir_path + targ_file_prefix + filename.split('/')[-1]
637
638            # If source file name contains wild card, copy filename as is.
639            if '*' in source_file_path:
640                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, self.ffdc_dir_path)
641            else:
642                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, targ_file_path)
643
644            if not quiet:
645                if scp_result:
646                    self.logger.info(
647                        "\t\tSuccessfully copied from " + self.hostname + ':' + source_file_path + ".\n")
648                else:
649                    self.logger.info(
650                        "\t\tFail to copy from " + self.hostname + ':' + source_file_path + ".\n")
651            else:
652                progress_counter += 1
653                self.print_progress(progress_counter)
654
655    def set_ffdc_defaults(self):
656        r"""
        Set default values for self.ffdc_dir_path and self.ffdc_prefix.
        Collected ffdc files will be stored in the directory self.location/hostname_timestr/.
        Individual ffdc files will be named timestr_filename.
660
661        Description of class variables:
662        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.
663
664        self.ffdc_prefix    The prefix to be given to each ffdc file name.
665
666        """
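        # Example (hypothetical values): with self.location '/tmp/ffdc/OPENBMC'
        # and hostname 'bmc1', a run started at 2024-01-01 12:00:00 would set
        #   self.ffdc_dir_path = '/tmp/ffdc/OPENBMC/bmc1_20240101-120000/'
        #   self.ffdc_prefix   = '20240101-120000_'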
667
668        timestr = time.strftime("%Y%m%d-%H%M%S")
669        self.ffdc_dir_path = self.location + "/" + self.hostname + "_" + timestr + "/"
670        self.ffdc_prefix = timestr + "_"
671        self.validate_local_store(self.ffdc_dir_path)
672
673    def validate_local_store(self, dir_path):
674        r"""
675        Ensure path exists to store FFDC files locally.
676
677        Description of variable:
678        dir_path  The dir path where collected ffdc data files will be stored.
679
680        """
681
682        if not os.path.exists(dir_path):
683            try:
684                os.makedirs(dir_path, 0o755)
685            except (IOError, OSError) as e:
686                # PermissionError
687                if e.errno == EPERM or e.errno == EACCES:
688                    self.logger.error(
689                        '\tERROR: os.makedirs %s failed with PermissionError.\n' % dir_path)
690                else:
691                    self.logger.error(
692                        '\tERROR: os.makedirs %s failed with %s.\n' % (dir_path, e.strerror))
693                sys.exit(-1)
694
695    def print_progress(self, progress):
696        r"""
        Print activity progress as a growing row of '+' characters.
698
699        Description of variable:
700        progress  Progress counter.
701
702        """
703
704        sys.stdout.write("\r\t" + "+" * progress)
705        sys.stdout.flush()
706        time.sleep(.1)
707
708    def verify_redfish(self):
709        r"""
710        Verify remote host has redfish service active
711
712        """
713        redfish_parm = 'redfishtool -r ' \
714                       + self.hostname + ' -S Always raw GET /redfish/v1/'
        return self.run_tool_cmd(redfish_parm, True)
716
717    def verify_ipmi(self):
718        r"""
719        Verify remote host has IPMI LAN service active
720
721        """
722        if self.target_type == 'OPENBMC':
723            ipmi_parm = 'ipmitool -I lanplus -C 17  -U ' + self.username + ' -P ' \
724                + self.password + ' -H ' + self.hostname + ' power status'
725        else:
726            ipmi_parm = 'ipmitool -I lanplus  -P ' \
727                + self.password + ' -H ' + self.hostname + ' power status'
728
        return self.run_tool_cmd(ipmi_parm, True)
730
731    def run_tool_cmd(self,
732                     parms_string,
733                     quiet=False):
734        r"""
735        Run CLI standard tool or scripts.
736
737        Description of variable:
738        parms_string         tool command options.
739        quiet                do not print tool error message if True
740        """
741
        result = subprocess.run(parms_string,
743                                stdout=subprocess.PIPE,
744                                stderr=subprocess.PIPE,
745                                shell=True,
746                                universal_newlines=True)
747
748        if result.stderr and not quiet:
749            self.logger.error('\n\t\tERROR with %s ' % parms_string)
750            self.logger.error('\t\t' + result.stderr)
751
752        return result.stdout
753
754    def verify_protocol(self, protocol_list):
755        r"""
756        Perform protocol working check.
757
758        Description of argument(s):
        protocol_list        List of protocols to check.
760        """
761
762        tmp_list = []
763        if self.target_is_pingable():
764            tmp_list.append("SHELL")
765
766        for protocol in protocol_list:
767            if self.remote_protocol != 'ALL':
768                if self.remote_protocol != protocol:
769                    continue
770
            # Only check SSH/SCP once for both protocols.
            if (protocol == 'SSH' or protocol == 'SCP') and protocol not in tmp_list:
773                if self.ssh_to_target_system():
774                    # Add only what user asked.
775                    if self.remote_protocol != 'ALL':
776                        tmp_list.append(self.remote_protocol)
777                    else:
778                        tmp_list.append('SSH')
779                        tmp_list.append('SCP')
780
781            if protocol == 'TELNET':
782                if self.telnet_to_target_system():
783                    tmp_list.append(protocol)
784
785            if protocol == 'REDFISH':
786                if self.verify_redfish():
787                    tmp_list.append(protocol)
788                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [OK]" % self.hostname)
789                else:
790                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]" % self.hostname)
791
792            if protocol == 'IPMI':
793                if self.verify_ipmi():
794                    tmp_list.append(protocol)
795                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [OK]" % self.hostname)
796                else:
797                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]" % self.hostname)
798
799        return tmp_list
800
801    def load_env(self):
802        r"""
        Load and export environment variables for use in the YAML configuration.
804
805        """
        # These env vars can be referenced in the YAML and are resolved at runtime.
807        # Example YAML:
808        # -COMMANDS:
809        #    - my_command ${hostname}  ${username}   ${password}
810        os.environ['hostname'] = self.hostname
811        os.environ['username'] = self.username
812        os.environ['password'] = self.password
813
814        # Append default Env.
815        self.env_dict['hostname'] = self.hostname
816        self.env_dict['username'] = self.username
817        self.env_dict['password'] = self.password
818
819        try:
820            tmp_env_dict = {}
821            if self.env_vars:
822                tmp_env_dict = json.loads(self.env_vars)
                # Export user-supplied CLI env vars.
824                for key, value in tmp_env_dict.items():
825                    os.environ[key] = value
826                    self.env_dict[key] = str(value)
827
828            if self.econfig:
829                with open(self.econfig, 'r') as file:
830                    tmp_env_dict = yaml.load(file, Loader=yaml.FullLoader)
831                # Export ENV vars.
832                for key, value in tmp_env_dict['env_params'].items():
833                    os.environ[key] = str(value)
834                    self.env_dict[key] = str(value)
835        except json.decoder.JSONDecodeError as e:
836            self.logger.error("\n\tERROR: %s " % e)
837            sys.exit(-1)
838
        # Mask passwords so they are not displayed on the console.
840        mask_dict = self.env_dict.copy()
841        for k, v in mask_dict.items():
842            if k.lower().find("password") != -1:
843                hidden_text = []
844                hidden_text.append(v)
845                password_regex = '(' +\
846                    '|'.join([re.escape(x) for x in hidden_text]) + ')'
847                mask_dict[k] = re.sub(password_regex, "********", v)
848
849        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
850
851    def execute_python_eval(self, eval_string):
852        r"""
853        Execute qualified python function using eval.
854
855        Description of argument(s):
        eval_string        Qualified python function call string to be evaluated.
857
858        Example:
859                eval(plugin.foo_func.foo_func(10))
860        """
        try:
            self.logger.info("\tCall func: %s" % eval_string)
            result = eval(eval_string)
            self.logger.info("\treturn: %s" % str(result))
        except (ValueError, SyntaxError, NameError) as e:
            self.logger.error("execute_python_eval: %s" % e)
            result = None

        return result
870
871    def execute_plugin_block(self, plugin_cmd_list):
872        r"""
        Pack the plugin command into a qualified python string object.

        Description of argument(s):
        plugin_cmd_list       Plugin block read from YAML
877                              [{'plugin_name': 'plugin.foo_func.my_func'},
878                               {'plugin_args': [10]}]
879
880        Example:
881            - plugin:
882              - plugin_name: plugin.foo_func.my_func
883              - plugin_args:
884                - arg1
885                - arg2
886
887            - plugin:
888              - plugin_name: result = plugin.foo_func.my_func
889              - plugin_args:
890                - arg1
891                - arg2
892
893            - plugin:
894              - plugin_name: result1,result2 = plugin.foo_func.my_func
895              - plugin_args:
896                - arg1
897                - arg2
898        """
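        # For the first example above, the packed call string handed to
        # execute_python_eval() would look like (argument values illustrative):
        #   plugin.foo_func.my_func("arg1","arg2")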
899        try:
900            plugin_name = plugin_cmd_list[0]['plugin_name']
901            # Equal separator means plugin function returns result.
902            if ' = ' in plugin_name:
903                # Ex. ['result', 'plugin.foo_func.my_func']
904                plugin_name_args = plugin_name.split(' = ')
905                # plugin func return data.
906                for arg in plugin_name_args:
907                    if arg == plugin_name_args[-1]:
908                        plugin_name = arg
909                    else:
910                        plugin_resp = arg.split(',')
911                        # ['result1','result2']
912                        for x in plugin_resp:
913                            global_plugin_list.append(x)
914                            global_plugin_dict[x] = ""
915
            # Walk the plugin args ['arg1', 'arg2'].
            # If 'plugin_args' is not declared in the YAML plugin statement,
            # default to an empty arg list.
918            if any('plugin_args' in d for d in plugin_cmd_list):
919                plugin_args = plugin_cmd_list[1]['plugin_args']
920                if plugin_args:
921                    plugin_args = self.yaml_args_populate(plugin_args)
922                else:
923                    plugin_args = []
924            else:
925                plugin_args = self.yaml_args_populate([])
926
927            # Pack the args arg1, arg2, .... argn into
928            # "arg1","arg2","argn"  string as params for function.
929            parm_args_str = self.yaml_args_string(plugin_args)
930            if parm_args_str:
931                plugin_func = plugin_name + '(' + parm_args_str + ')'
932            else:
933                plugin_func = plugin_name + '()'
934
935            # Execute plugin function.
936            if global_plugin_dict:
937                resp = self.execute_python_eval(plugin_func)
938                self.response_args_data(resp)
939            else:
940                resp = self.execute_python_eval(plugin_func)
941            return resp
942        except Exception as e:
943            self.logger.error("execute_plugin_block: %s" % e)
944            pass
945
946    def response_args_data(self, plugin_resp):
947        r"""
948        Parse the plugin function response.
949
950        plugin_resp       Response data from plugin function.
951        """
952        resp_list = []
953        resp_data = ""
        # There is nothing to update from the plugin response.
955        if len(global_plugin_list) == 0 or plugin_resp == 'None':
956            return
957
958        if isinstance(plugin_resp, str):
959            resp_data = plugin_resp.strip('\r\n\t')
960            resp_list.append(resp_data)
961        elif isinstance(plugin_resp, bytes):
962            resp_data = str(plugin_resp, 'UTF-8').strip('\r\n\t')
963            resp_list.append(resp_data)
964        elif isinstance(plugin_resp, tuple):
965            if len(global_plugin_list) == 1:
966                resp_list.append(plugin_resp)
967            else:
968                resp_list = list(plugin_resp)
969                resp_list = [x.strip('\r\n\t') for x in resp_list]
970        elif isinstance(plugin_resp, list):
971            if len(global_plugin_list) == 1:
972                resp_list.append([x.strip('\r\n\t') for x in plugin_resp])
973            else:
974                resp_list = [x.strip('\r\n\t') for x in plugin_resp]
975        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
976            resp_list.append(plugin_resp)
977
978        for idx, item in enumerate(resp_list, start=0):
979            # Exit loop
980            if idx >= len(global_plugin_list):
981                break
982            # Find the index of the return func in the list and
983            # update the global func return dictionary.
984            try:
985                dict_idx = global_plugin_list[idx]
986                global_plugin_dict[dict_idx] = item
987            except (IndexError, ValueError) as e:
                self.logger.warning("\tresponse_args_data: %s" % e)
989                pass
990
991        # Done updating plugin dict irrespective of pass or failed,
992        # clear all the list element.
993        global_plugin_list.clear()
994
995    def yaml_args_string(self, plugin_args):
996        r"""
997        Pack the args into string.
998
        plugin_args            arg list ['arg1', 'arg2', 'argn']
1000        """
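        # Example (illustrative): ['arg1', 10, 'arg3'] -> '"arg1",10,"arg3"'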
1001        args_str = ''
1002        for args in plugin_args:
1003            if args:
1004                if isinstance(args, int):
1005                    args_str += str(args)
1006                else:
1007                    args_str += '"' + str(args.strip('\r\n\t')) + '"'
            # Add a comma separator after every element except the last.
1009            if args != plugin_args[-1]:
1010                args_str += ","
1011        return args_str
1012
1013    def yaml_args_populate(self, yaml_arg_list):
1014        r"""
1015        Decode ${MY_VAR} and load env data when read from YAML.
1016
1017        Description of argument(s):
1018        yaml_arg_list         arg list read from YAML
1019
1020        Example:
1021          - plugin_args:
1022            - arg1
1023            - arg2
1024
                  yaml_arg_list:  [arg1, arg2]
1026        """
1027        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1028        env_vars_list = list(self.env_dict)
1029
1030        if isinstance(yaml_arg_list, list):
1031            tmp_list = []
1032            for arg in yaml_arg_list:
1033                if isinstance(arg, int):
1034                    tmp_list.append(arg)
1035                    continue
1036                elif isinstance(arg, str):
1037                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1038                    tmp_list.append(arg_str)
1039                else:
1040                    tmp_list.append(arg)
1041
1042            # return populated list.
1043            return tmp_list
1044
1045    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
1046        r"""
1047        Update ${MY_VAR} and my_plugin_vars
1048
1049        Description of argument(s):
1050        yaml_arg_str         arg string read from YAML
1051
1052        Example:
1053            - cat ${MY_VAR}
1054            - ls -AX my_plugin_var
1055        """
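        # Example (illustrative values): with os.environ['hostname'] == '1.2.3.4'
        # and global_plugin_dict == {'my_plugin_var': '/tmp/data'}, the string
        # 'cat ${hostname}; ls -AX my_plugin_var' becomes
        # 'cat 1.2.3.4; ls -AX /tmp/data'.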
1056        # Parse the string for env vars.
1057        try:
            # Example: list of matching env vars ['username', 'password', 'hostname'].
            # Extra backslash escapes are needed for the special characters; the
            # effective pattern is '\$\{([^\}]+)\}'.
1060            var_name_regex = '\\$\\{([^\\}]+)\\}'
1061            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
1062            for var in env_var_names_list:
1063                env_var = os.environ[var]
1064                env_replace = '${' + var + '}'
1065                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
1066        except Exception as e:
1067            self.logger.error("yaml_env_vars_populate: %s" % e)
1068            pass
1069
1070        # Parse the string for plugin vars.
1071        try:
1072            # Example, list of plugin vars ['my_username', 'my_data']
1073            plugin_var_name_list = global_plugin_dict.keys()
1074            for var in plugin_var_name_list:
1075                # If this plugin var exist but empty value in dict, don't replace.
                # This is either a YAML plugin statement incorrectly used or
1077                # user added a plugin var which is not populated.
1078                if str(global_plugin_dict[var]):
1079                    yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
1080        except (IndexError, ValueError) as e:
1081            self.logger.error("yaml_plugin_vars_populate: %s" % e)
1082            pass
1083
1084        return yaml_arg_str
1085