1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7from ssh_utility import SSHRemoteclient
8from telnet_utility import TelnetRemoteclient
9from errno import EACCES, EPERM
10
11import os
12import re
13import sys
14import yaml
15import json
16import time
17import logging
18import platform
19import subprocess
20
21script_dir = os.path.dirname(os.path.abspath(__file__))
22sys.path.append(script_dir)
23# Walk path and append to sys.path
24for root, dirs, files in os.walk(script_dir):
25    for dir in dirs:
26        sys.path.append(os.path.join(root, dir))
27
28
29r"""
30User define plugins python functions.
31
32It will imports files from directory plugins
33
34plugins
35├── file1.py
36└── file2.py
37
38Example how to define in YAML:
39 - plugin:
40   - plugin_name: plugin.foo_func.foo_func_yaml
41     - plugin_args:
42       - arg1
43       - arg2
44"""
plugin_dir = os.path.join(script_dir, 'plugins')
46sys.path.append(plugin_dir)
47try:
48    for module in os.listdir(plugin_dir):
49        if module == '__init__.py' or module[-3:] != '.py':
50            continue
51        plugin_module = "plugins." + module[:-3]
52        # To access the module plugin.<module name>.<function>
53        # Example: plugin.foo_func.foo_func_yaml()
54        try:
55            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            print("PLUGIN: Module import failed: %s (%s)" % (module, e))
except FileNotFoundError as e:
    print("PLUGIN: %s" % e)
62
63r"""
64This is for plugin functions returning data or responses to the caller
65in YAML plugin setup.
66
67Example:
68
69    - plugin:
70      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
71      - plugin_args:
72        - ${hostname}
73        - ${username}
74        - ${password}
75        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
76     - plugin:
77        - plugin_name: plugin.print_vars.print_vars
78        - plugin_args:
79          - version
80
81where first plugin "version" var is used by another plugin in the YAML
82block or plugin
83
84"""
85global global_log_store_path
86global global_plugin_dict
87global global_plugin_list
88
# Hold the plugin return values in a dict and the plugin return vars in a list.
# The dict is used to reference and update vars while the parser processes them,
# whereas the list holds the current vars from the plugin block that need processing.
92global_plugin_dict = {}
93global_plugin_list = []
94
# Hold the declared plugin return variable names whose returned values are a list or dict.
# Refer to this name list to look up the plugin dict when building eval() function args.
# Example: ['version']
98global_plugin_type_list = []
99
100# Path where logs are to be stored or written.
101global_log_store_path = ''
102
103# Plugin error state defaults.
104plugin_error_dict = {
105    'exit_on_error': False,
106    'continue_on_error': False,
107}
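
# Illustrative (hypothetical) YAML plugin entry that feeds the error state above:
#   - plugin:
#     - plugin_name: plugin.foo_func.foo_func_yaml
#     - plugin_error: exit_on_error
# plugin_error_check() looks the 'plugin_error' value up in plugin_error_dict to
# decide whether remaining plugin blocks should be skipped.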
108
109
110class ffdc_collector:
111
112    r"""
113    Execute commands from configuration file to collect log files.
114    Fetch and store generated files at the specified location.
115
116    """
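
    # Illustrative sketch (hypothetical names and commands) of the ffdc_config
    # YAML shape this class consumes; top-level keys are target types and each
    # sub-type block declares PROTOCOL, COMMANDS and FILES lists:
    #
    #   OPENBMC:
    #     GENERAL:
    #       PROTOCOL:
    #         - SSH
    #       COMMANDS:
    #         - cat /etc/os-release
    #       FILES:
    #         - os-release.txt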
117
118    def __init__(self,
119                 hostname,
120                 username,
121                 password,
122                 ffdc_config,
123                 location,
124                 remote_type,
125                 remote_protocol,
126                 env_vars,
127                 econfig,
128                 log_level):
129        r"""
130        Description of argument(s):
131
132        hostname            name/ip of the targeted (remote) system
133        username            user on the targeted system with access to FFDC files
134        password            password for user on targeted system
135        ffdc_config         configuration file listing commands and files for FFDC
136        location            where to store collected FFDC
137        remote_type         os type of the remote host
138        remote_protocol     Protocol to use to collect data
        env_vars            User-defined CLI env vars, e.g. '{"key" : "value"}'
        econfig             User-defined env vars YAML file
        log_level           Log level for this script run (e.g. DEBUG, INFO)
141
142        """
143
144        self.hostname = hostname
145        self.username = username
146        self.password = password
147        self.ffdc_config = ffdc_config
148        self.location = location + "/" + remote_type.upper()
149        self.ssh_remoteclient = None
150        self.telnet_remoteclient = None
151        self.ffdc_dir_path = ""
152        self.ffdc_prefix = ""
153        self.target_type = remote_type.upper()
154        self.remote_protocol = remote_protocol.upper()
155        self.env_vars = env_vars
156        self.econfig = econfig
157        self.start_time = 0
158        self.elapsed_time = ''
159        self.logger = None
160
161        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set here
        # to be sure that all files for this run have the same timestamp
        # and are saved in the same directory.
165        # self.location == local system for now
166        self.set_ffdc_default_store_path()
167
        # Logger for this run.  Must be created after set_ffdc_default_store_path().
169        self.script_logging(getattr(logging, log_level.upper()))
170
171        # Verify top level directory exists for storage
172        self.validate_local_store(self.location)
173
174        if self.verify_script_env():
            # Load the default or user-defined YAML configuration file.
176            with open(self.ffdc_config, 'r') as file:
177                try:
178                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
179                except yaml.YAMLError as e:
180                    self.logger.error(e)
181                    sys.exit(-1)
182
183            if self.target_type not in self.ffdc_actions.keys():
184                self.logger.error(
185                    "\n\tERROR: %s is not listed in %s.\n\n" % (self.target_type, self.ffdc_config))
186                sys.exit(-1)
187        else:
188            sys.exit(-1)
189
190        # Load ENV vars from user.
        self.logger.info("\n\tENV: User-defined input YAML variables")
192        self.env_dict = {}
193        self.load_env()
194
195    def verify_script_env(self):
196
197        # Import to log version
198        import click
199        import paramiko
200
201        run_env_ok = True
202
203        redfishtool_version = self.run_tool_cmd('redfishtool -V').split(' ')[2].strip('\n')
204        ipmitool_version = self.run_tool_cmd('ipmitool -V').split(' ')[2]
205
206        self.logger.info("\n\t---- Script host environment ----")
207        self.logger.info("\t{:<10}  {:<10}".format('Script hostname', os.uname()[1]))
208        self.logger.info("\t{:<10}  {:<10}".format('Script host os', platform.platform()))
209        self.logger.info("\t{:<10}  {:>10}".format('Python', platform.python_version()))
210        self.logger.info("\t{:<10}  {:>10}".format('PyYAML', yaml.__version__))
211        self.logger.info("\t{:<10}  {:>10}".format('click', click.__version__))
212        self.logger.info("\t{:<10}  {:>10}".format('paramiko', paramiko.__version__))
213        self.logger.info("\t{:<10}  {:>9}".format('redfishtool', redfishtool_version))
214        self.logger.info("\t{:<10}  {:>12}".format('ipmitool', ipmitool_version))
215
        if tuple(int(x) for x in yaml.__version__.split('.')[:3]) < (5, 3, 0):
217            self.logger.error("\n\tERROR: Python or python packages do not meet minimum version requirement.")
218            self.logger.error("\tERROR: PyYAML version 5.3.0 or higher is needed.\n")
219            run_env_ok = False
220
221        self.logger.info("\t---- End script host environment ----")
222        return run_env_ok
223
224    def script_logging(self,
225                       log_level_attr):
        r"""
        Create the logger.

        Description of argument(s):
        log_level_attr    Logging level attribute, e.g. logging.INFO.
        """
230        self.logger = logging.getLogger()
231        self.logger.setLevel(log_level_attr)
232        log_file_handler = logging.FileHandler(self.ffdc_dir_path + "collector.log")
233
234        stdout_handler = logging.StreamHandler(sys.stdout)
235        self.logger.addHandler(log_file_handler)
236        self.logger.addHandler(stdout_handler)
237
238        # Turn off paramiko INFO logging
239        logging.getLogger("paramiko").setLevel(logging.WARNING)
240
241    def target_is_pingable(self):
242        r"""
243        Check if target system is ping-able.
244
245        """
        response = os.system("ping -c 1 %s >/dev/null 2>&1" % self.hostname)
247        if response == 0:
248            self.logger.info("\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname)
249            return True
250        else:
251            self.logger.error(
252                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n" % self.hostname)
253            sys.exit(-1)
254
255    def collect_ffdc(self):
256        r"""
257        Initiate FFDC Collection depending on requested protocol.
258
259        """
260
261        self.logger.info("\n\t---- Start communicating with %s ----" % self.hostname)
262        self.start_time = time.time()
263
264        # Find the list of target and protocol supported.
265        check_protocol_list = []
266        config_dict = self.ffdc_actions
267
268        for target_type in config_dict.keys():
269            if self.target_type != target_type:
270                continue
271
272            for k, v in config_dict[target_type].items():
273                if config_dict[target_type][k]['PROTOCOL'][0] not in check_protocol_list:
274                    check_protocol_list.append(config_dict[target_type][k]['PROTOCOL'][0])
275
276        self.logger.info("\n\t %s protocol type: %s" % (self.target_type, check_protocol_list))
277
278        verified_working_protocol = self.verify_protocol(check_protocol_list)
279
280        if verified_working_protocol:
281            self.logger.info("\n\t---- Completed protocol pre-requisite check ----\n")
282
283        # Verify top level directory exists for storage
284        self.validate_local_store(self.location)
285
286        if ((self.remote_protocol not in verified_working_protocol) and (self.remote_protocol != 'ALL')):
287            self.logger.info("\n\tWorking protocol list: %s" % verified_working_protocol)
288            self.logger.error(
289                '\tERROR: Requested protocol %s is not in working protocol list.\n'
290                % self.remote_protocol)
291            sys.exit(-1)
292        else:
293            self.generate_ffdc(verified_working_protocol)
294
295    def ssh_to_target_system(self):
296        r"""
297        Open a ssh connection to targeted system.
298
299        """
300
301        self.ssh_remoteclient = SSHRemoteclient(self.hostname,
302                                                self.username,
303                                                self.password)
304
305        if self.ssh_remoteclient.ssh_remoteclient_login():
306            self.logger.info("\n\t[Check] %s SSH connection established.\t [OK]" % self.hostname)
307
308            # Check scp connection.
309            # If scp connection fails,
310            # continue with FFDC generation but skip scp files to local host.
311            self.ssh_remoteclient.scp_connection()
312            return True
313        else:
314            self.logger.info("\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]" % self.hostname)
315            return False
316
317    def telnet_to_target_system(self):
318        r"""
319        Open a telnet connection to targeted system.
320        """
321        self.telnet_remoteclient = TelnetRemoteclient(self.hostname,
322                                                      self.username,
323                                                      self.password)
324        if self.telnet_remoteclient.tn_remoteclient_login():
325            self.logger.info("\n\t[Check] %s Telnet connection established.\t [OK]" % self.hostname)
326            return True
327        else:
328            self.logger.info("\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]" % self.hostname)
329            return False
330
331    def generate_ffdc(self, working_protocol_list):
332        r"""
333        Determine actions based on remote host type
334
335        Description of argument(s):
336        working_protocol_list    list of confirmed working protocols to connect to remote host.
337        """
338
339        self.logger.info("\n\t---- Executing commands on " + self.hostname + " ----")
340        self.logger.info("\n\tWorking protocol list: %s" % working_protocol_list)
341
342        config_dict = self.ffdc_actions
343        for target_type in config_dict.keys():
344            if self.target_type != target_type:
345                continue
346
347            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
348            global_plugin_dict['global_log_store_path'] = self.ffdc_dir_path
349            self.logger.info("\tSystem Type: %s" % target_type)
350            for k, v in config_dict[target_type].items():
351
352                if self.remote_protocol not in working_protocol_list \
353                        and self.remote_protocol != 'ALL':
354                    continue
355
356                protocol = config_dict[target_type][k]['PROTOCOL'][0]
357
                if protocol not in working_protocol_list:
                    self.logger.error("\n\tERROR: %s is not available for %s." % (protocol, self.hostname))
                    continue

                if protocol == 'SSH' or protocol == 'SCP':
                    self.protocol_ssh(protocol, target_type, k)
                elif protocol == 'TELNET':
                    self.protocol_telnet(target_type, k)
                elif protocol == 'REDFISH' or protocol == 'IPMI' or protocol == 'SHELL':
                    self.protocol_execute(protocol, target_type, k)
370
371        # Close network connection after collecting all files
372        self.elapsed_time = time.strftime("%H:%M:%S", time.gmtime(time.time() - self.start_time))
373        if self.ssh_remoteclient:
374            self.ssh_remoteclient.ssh_remoteclient_disconnect()
375        if self.telnet_remoteclient:
376            self.telnet_remoteclient.tn_remoteclient_disconnect()
377
378    def protocol_ssh(self,
379                     protocol,
380                     target_type,
381                     sub_type):
382        r"""
383        Perform actions using SSH and SCP protocols.
384
385        Description of argument(s):
386        protocol            Protocol to execute.
387        target_type         OS Type of remote host.
388        sub_type            Group type of commands.
389        """
390
391        if protocol == 'SCP':
392            self.group_copy(self.ffdc_actions[target_type][sub_type])
393        else:
394            self.collect_and_copy_ffdc(self.ffdc_actions[target_type][sub_type])
395
396    def protocol_telnet(self,
397                        target_type,
398                        sub_type):
399        r"""
400        Perform actions using telnet protocol.
        Description of argument(s):
        target_type          OS Type of remote host.
        sub_type             Group type of commands.
        """
404        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, 'TELNET'))
405        telnet_files_saved = []
406        progress_counter = 0
407        list_of_commands = self.ffdc_actions[target_type][sub_type]['COMMANDS']
408        for index, each_cmd in enumerate(list_of_commands, start=0):
409            command_txt, command_timeout = self.unpack_command(each_cmd)
410            result = self.telnet_remoteclient.execute_command(command_txt, command_timeout)
411            if result:
412                try:
413                    targ_file = self.ffdc_actions[target_type][sub_type]['FILES'][index]
414                except IndexError:
415                    targ_file = command_txt
416                    self.logger.warning(
417                        "\n\t[WARN] Missing filename to store data from telnet %s." % each_cmd)
418                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
419                targ_file_with_path = (self.ffdc_dir_path
420                                       + self.ffdc_prefix
421                                       + targ_file)
                # Create a new file; the 'with' block closes it automatically.
                with open(targ_file_with_path, 'w') as fp:
                    fp.write(result)
                telnet_files_saved.append(targ_file)
427            progress_counter += 1
428            self.print_progress(progress_counter)
429        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
430        for file in telnet_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
432
433    def protocol_execute(self,
434                         protocol,
435                         target_type,
436                         sub_type):
437        r"""
438        Perform actions for a given protocol.
439
440        Description of argument(s):
441        protocol            Protocol to execute.
442        target_type         OS Type of remote host.
443        sub_type            Group type of commands.
444        """
445
        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, protocol))
447        executed_files_saved = []
448        progress_counter = 0
449        list_of_cmd = self.get_command_list(self.ffdc_actions[target_type][sub_type])
450        for index, each_cmd in enumerate(list_of_cmd, start=0):
451            plugin_call = False
452            if isinstance(each_cmd, dict):
453                if 'plugin' in each_cmd:
                    # If the plugin error state is set and the plugin explicitly
                    # requested to skip execution on error, skip this plugin.
456                    if plugin_error_dict['exit_on_error'] and \
457                            self.plugin_error_check(each_cmd['plugin']):
458                        self.logger.info("\n\t[PLUGIN-ERROR] exit_on_error: %s" %
459                                         plugin_error_dict['exit_on_error'])
460                        self.logger.info("\t[PLUGIN-SKIP] %s" %
461                                         each_cmd['plugin'][0])
462                        continue
463                    plugin_call = True
464                    # call the plugin
465                    self.logger.info("\n\t[PLUGIN-START]")
466                    result = self.execute_plugin_block(each_cmd['plugin'])
467                    self.logger.info("\t[PLUGIN-END]\n")
468            else:
469                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
470
471            if not plugin_call:
472                result = self.run_tool_cmd(each_cmd)
473            if result:
474                try:
475                    file_name = self.get_file_list(self.ffdc_actions[target_type][sub_type])[index]
476                    # If file is specified as None.
477                    if file_name == "None":
478                        continue
479                    targ_file = self.yaml_env_and_plugin_vars_populate(file_name)
480                except IndexError:
481                    targ_file = each_cmd.split('/')[-1]
482                    self.logger.warning(
483                        "\n\t[WARN] Missing filename to store data from %s." % each_cmd)
484                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
485
486                targ_file_with_path = (self.ffdc_dir_path
487                                       + self.ffdc_prefix
488                                       + targ_file)
489
                # Create a new file; the 'with' block closes it automatically.
                with open(targ_file_with_path, 'w') as fp:
                    if isinstance(result, dict):
                        fp.write(json.dumps(result))
                    else:
                        fp.write(result)
                executed_files_saved.append(targ_file)
498
499            progress_counter += 1
500            self.print_progress(progress_counter)
501
502        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
503
504        for file in executed_files_saved:
            self.logger.info("\n\t\tSuccessfully saved file " + file + ".")
506
507    def collect_and_copy_ffdc(self,
508                              ffdc_actions_for_target_type,
509                              form_filename=False):
510        r"""
511        Send commands in ffdc_config file to targeted system.
512
513        Description of argument(s):
514        ffdc_actions_for_target_type     commands and files for the selected remote host type.
515        form_filename                    if true, pre-pend self.target_type to filename
516        """
517
518        # Executing commands, if any
519        self.ssh_execute_ffdc_commands(ffdc_actions_for_target_type,
520                                       form_filename)
521
522        # Copying files
523        if self.ssh_remoteclient.scpclient:
524            self.logger.info("\n\n\tCopying FFDC files from remote system %s.\n" % self.hostname)
525
526            # Retrieving files from target system
527            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
528            self.scp_ffdc(self.ffdc_dir_path, self.ffdc_prefix, form_filename, list_of_files)
529        else:
530            self.logger.info("\n\n\tSkip copying FFDC files from remote system %s.\n" % self.hostname)
531
532    def get_command_list(self,
533                         ffdc_actions_for_target_type):
534        r"""
535        Fetch list of commands from configuration file
536
537        Description of argument(s):
538        ffdc_actions_for_target_type    commands and files for the selected remote host type.
539        """
540        try:
541            list_of_commands = ffdc_actions_for_target_type['COMMANDS']
542        except KeyError:
543            list_of_commands = []
544        return list_of_commands
545
546    def get_file_list(self,
547                      ffdc_actions_for_target_type):
548        r"""
        Fetch list of files from configuration file
550
551        Description of argument(s):
552        ffdc_actions_for_target_type    commands and files for the selected remote host type.
553        """
554        try:
555            list_of_files = ffdc_actions_for_target_type['FILES']
556        except KeyError:
557            list_of_files = []
558        return list_of_files
559
560    def unpack_command(self,
561                       command):
562        r"""
563        Unpack command from config file
564
565        Description of argument(s):
566        command    Command from config file.
567        """
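        # Illustrative (hypothetical) YAML command entries and how they unpack:
        #   - my_command arg1              ->  ('my_command arg1', 60)
        #   - my_long_running_command: 120 ->  ('my_long_running_command', 120)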
568        if isinstance(command, dict):
569            command_txt = next(iter(command))
570            command_timeout = next(iter(command.values()))
571        elif isinstance(command, str):
572            command_txt = command
573            # Default command timeout 60 seconds
574            command_timeout = 60
575
576        return command_txt, command_timeout
577
578    def ssh_execute_ffdc_commands(self,
579                                  ffdc_actions_for_target_type,
580                                  form_filename=False):
581        r"""
582        Send commands in ffdc_config file to targeted system.
583
584        Description of argument(s):
585        ffdc_actions_for_target_type    commands and files for the selected remote host type.
586        form_filename                    if true, pre-pend self.target_type to filename
587        """
588        self.logger.info("\n\t[Run] Executing commands on %s using %s"
589                         % (self.hostname, ffdc_actions_for_target_type['PROTOCOL'][0]))
590
591        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
        # If the command list is empty, return.
593        if not list_of_commands:
594            return
595
596        progress_counter = 0
597        for command in list_of_commands:
598            command_txt, command_timeout = self.unpack_command(command)
599
600            if form_filename:
601                command_txt = str(command_txt % self.target_type)
602
603            cmd_exit_code, err, response = \
604                self.ssh_remoteclient.execute_command(command_txt, command_timeout)
605
606            if cmd_exit_code:
607                self.logger.warning(
608                    "\n\t\t[WARN] %s exits with code %s." % (command_txt, str(cmd_exit_code)))
609                self.logger.warning("\t\t[WARN] %s " % err)
610
611            progress_counter += 1
612            self.print_progress(progress_counter)
613
614        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
615
616    def group_copy(self,
617                   ffdc_actions_for_target_type):
618        r"""
619        scp group of files (wild card) from remote host.
620
621        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
623        """
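        # Illustrative (hypothetical) YAML block routed here via PROTOCOL 'SCP':
        #   PROTOCOL:
        #     - SCP
        #   COMMANDS:
        #     - ls /tmp/*.log
        # Each command is run remotely and its output is treated as the list of
        # remote files to scp into self.ffdc_dir_path.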
624
625        if self.ssh_remoteclient.scpclient:
626            self.logger.info("\n\tCopying files from remote system %s via SCP.\n" % self.hostname)
627
628            list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
            # If the command list is empty, return.
630            if not list_of_commands:
631                return
632
633            for command in list_of_commands:
634                try:
635                    command = self.yaml_env_and_plugin_vars_populate(command)
636                except IndexError:
637                    self.logger.error("\t\tInvalid command %s" % command)
638                    continue
639
640                cmd_exit_code, err, response = \
641                    self.ssh_remoteclient.execute_command(command)
642
                # If the file does not exist, the code takes no action.
                # cmd_exit_code is ignored for this scenario.
645                if response:
646                    scp_result = \
647                        self.ssh_remoteclient.scp_file_from_remote(response.split('\n'),
648                                                                   self.ffdc_dir_path)
649                    if scp_result:
650                        self.logger.info("\t\tSuccessfully copied from " + self.hostname + ':' + command)
651                else:
652                    self.logger.info("\t\t%s has no result" % command)
653
654        else:
655            self.logger.info("\n\n\tSkip copying files from remote system %s.\n" % self.hostname)
656
657    def scp_ffdc(self,
658                 targ_dir_path,
659                 targ_file_prefix,
660                 form_filename,
661                 file_list=None,
662                 quiet=None):
        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be pre-pended to each
                                        target file's name.
        form_filename                   If true, pre-pend self.target_type to each file name.
        file_list                       A list of files to scp from the targeted system to this system.
        quiet                           If true, print only a progress counter instead of per-file results.

        """
673
674        progress_counter = 0
675        for filename in file_list:
676            if form_filename:
677                filename = str(filename % self.target_type)
678            source_file_path = filename
679            targ_file_path = targ_dir_path + targ_file_prefix + filename.split('/')[-1]
680
681            # If source file name contains wild card, copy filename as is.
682            if '*' in source_file_path:
683                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, self.ffdc_dir_path)
684            else:
685                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, targ_file_path)
686
687            if not quiet:
688                if scp_result:
689                    self.logger.info(
690                        "\t\tSuccessfully copied from " + self.hostname + ':' + source_file_path + ".\n")
691                else:
692                    self.logger.info(
693                        "\t\tFail to copy from " + self.hostname + ':' + source_file_path + ".\n")
694            else:
695                progress_counter += 1
696                self.print_progress(progress_counter)
697
698    def set_ffdc_default_store_path(self):
699        r"""
        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
        Collected ffdc files will be stored in the dir self.location/hostname_timestr/.
        Individual ffdc file names are prefixed with timestr_.
703
704        Description of class variables:
705        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.
706
707        self.ffdc_prefix    The prefix to be given to each ffdc file name.
708
709        """
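        # Illustrative example (hypothetical values): with self.location set to
        # '/tmp/logs/OPENBMC', hostname 'myhost' and a timestr of '20240101-093000',
        # ffdc_dir_path becomes '/tmp/logs/OPENBMC/myhost_20240101-093000/' and
        # ffdc_prefix becomes '20240101-093000_'.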
710
711        timestr = time.strftime("%Y%m%d-%H%M%S")
712        self.ffdc_dir_path = self.location + "/" + self.hostname + "_" + timestr + "/"
713        self.ffdc_prefix = timestr + "_"
714        self.validate_local_store(self.ffdc_dir_path)
715
    # Need to verify the local store path exists prior to instantiating this class.
    # This class method is used to share the same code between the CLI input parm
    # and Robot Framework "${EXECDIR}/logs" before referencing this class.
719    @classmethod
720    def validate_local_store(cls, dir_path):
721        r"""
722        Ensure path exists to store FFDC files locally.
723
724        Description of variable:
725        dir_path  The dir path where collected ffdc data files will be stored.
726
727        """
728
729        if not os.path.exists(dir_path):
730            try:
731                os.makedirs(dir_path, 0o755)
            except (IOError, OSError) as e:
                # PermissionError.  Note: self is not available in a classmethod,
                # so errors are reported via print().
                if e.errno == EPERM or e.errno == EACCES:
                    print(
                        '\tERROR: os.makedirs %s failed with PermissionError.\n' % dir_path)
                else:
                    print(
                        '\tERROR: os.makedirs %s failed with %s.\n' % (dir_path, e.strerror))
                sys.exit(-1)
741
742    def print_progress(self, progress):
743        r"""
        Print an activity progress indicator ('+').
745
746        Description of variable:
747        progress  Progress counter.
748
749        """
750
751        sys.stdout.write("\r\t" + "+" * progress)
752        sys.stdout.flush()
753        time.sleep(.1)
754
755    def verify_redfish(self):
756        r"""
757        Verify remote host has redfish service active
758
759        """
760        redfish_parm = 'redfishtool -r ' \
761                       + self.hostname + ' -S Always raw GET /redfish/v1/'
762        return (self.run_tool_cmd(redfish_parm, True))
763
764    def verify_ipmi(self):
765        r"""
766        Verify remote host has IPMI LAN service active
767
768        """
769        if self.target_type == 'OPENBMC':
770            ipmi_parm = 'ipmitool -I lanplus -C 17  -U ' + self.username + ' -P ' \
771                + self.password + ' -H ' + self.hostname + ' power status'
772        else:
773            ipmi_parm = 'ipmitool -I lanplus  -P ' \
774                + self.password + ' -H ' + self.hostname + ' power status'
775
776        return (self.run_tool_cmd(ipmi_parm, True))
777
778    def run_tool_cmd(self,
779                     parms_string,
780                     quiet=False):
781        r"""
782        Run CLI standard tool or scripts.
783
784        Description of variable:
785        parms_string         tool command options.
786        quiet                do not print tool error message if True
787        """
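        # Illustrative usage (as done elsewhere in this class):
        #   version_text = self.run_tool_cmd('redfishtool -V')
        # The command's stdout is returned as a string; stderr is logged unless
        # quiet is True.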
788
        result = subprocess.run(parms_string,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True,
                                universal_newlines=True)
794
795        if result.stderr and not quiet:
796            self.logger.error('\n\t\tERROR with %s ' % parms_string)
797            self.logger.error('\t\t' + result.stderr)
798
799        return result.stdout
800
801    def verify_protocol(self, protocol_list):
802        r"""
803        Perform protocol working check.
804
805        Description of argument(s):
806        protocol_list        List of protocol.
807        """
808
809        tmp_list = []
810        if self.target_is_pingable():
811            tmp_list.append("SHELL")
812
813        for protocol in protocol_list:
814            if self.remote_protocol != 'ALL':
815                if self.remote_protocol != protocol:
816                    continue
817
818            # Only check SSH/SCP once for both protocols
            if protocol in ('SSH', 'SCP') and protocol not in tmp_list:
820                if self.ssh_to_target_system():
821                    # Add only what user asked.
822                    if self.remote_protocol != 'ALL':
823                        tmp_list.append(self.remote_protocol)
824                    else:
825                        tmp_list.append('SSH')
826                        tmp_list.append('SCP')
827
828            if protocol == 'TELNET':
829                if self.telnet_to_target_system():
830                    tmp_list.append(protocol)
831
832            if protocol == 'REDFISH':
833                if self.verify_redfish():
834                    tmp_list.append(protocol)
835                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [OK]" % self.hostname)
836                else:
837                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]" % self.hostname)
838
839            if protocol == 'IPMI':
840                if self.verify_ipmi():
841                    tmp_list.append(protocol)
842                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [OK]" % self.hostname)
843                else:
844                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]" % self.hostname)
845
846        return tmp_list
847
848    def load_env(self):
849        r"""
        Load and export user-defined environment variables (CLI env_vars and the econfig YAML file).
851
852        """
        # These env vars can be referenced in the YAML config and are loaded at runtime.
854        # Example YAML:
855        # -COMMANDS:
856        #    - my_command ${hostname}  ${username}   ${password}
857        os.environ['hostname'] = self.hostname
858        os.environ['username'] = self.username
859        os.environ['password'] = self.password
860
861        # Append default Env.
862        self.env_dict['hostname'] = self.hostname
863        self.env_dict['username'] = self.username
864        self.env_dict['password'] = self.password
865
866        try:
867            tmp_env_dict = {}
868            if self.env_vars:
869                tmp_env_dict = json.loads(self.env_vars)
870                # Export ENV vars default.
871                for key, value in tmp_env_dict.items():
872                    os.environ[key] = value
873                    self.env_dict[key] = str(value)
874
875            if self.econfig:
876                with open(self.econfig, 'r') as file:
877                    try:
878                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
879                    except yaml.YAMLError as e:
880                        self.logger.error(e)
881                        sys.exit(-1)
882                # Export ENV vars.
883                for key, value in tmp_env_dict['env_params'].items():
884                    os.environ[key] = str(value)
885                    self.env_dict[key] = str(value)
886        except json.decoder.JSONDecodeError as e:
887            self.logger.error("\n\tERROR: %s " % e)
888            sys.exit(-1)
889
        # This is to mask the password so it is not displayed on the console.
891        mask_dict = self.env_dict.copy()
892        for k, v in mask_dict.items():
893            if k.lower().find("password") != -1:
894                hidden_text = []
895                hidden_text.append(v)
896                password_regex = '(' +\
897                    '|'.join([re.escape(x) for x in hidden_text]) + ')'
898                mask_dict[k] = re.sub(password_regex, "********", v)
899
900        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
901
902    def execute_python_eval(self, eval_string):
903        r"""
904        Execute qualified python function string using eval.
905
906        Description of argument(s):
        eval_string        The python expression string to evaluate.

        Example:
                eval("plugin.foo_func.foo_func(10)")
911        """
912        try:
913            self.logger.info("\tExecuting plugin func()")
914            self.logger.debug("\tCall func: %s" % eval_string)
915            result = eval(eval_string)
916            self.logger.info("\treturn: %s" % str(result))
917        except (ValueError,
918                SyntaxError,
919                NameError,
920                AttributeError,
921                TypeError) as e:
922            self.logger.error("\tERROR: execute_python_eval: %s" % e)
923            # Set the plugin error state.
924            plugin_error_dict['exit_on_error'] = True
925            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
926            return 'PLUGIN_EVAL_ERROR'
927
928        return result
929
930    def execute_plugin_block(self, plugin_cmd_list):
931        r"""
        Pack the plugin command into a qualified python string object.

        Description of argument(s):
        plugin_cmd_list       Plugin block read from YAML
                              [{'plugin_name': 'plugin.foo_func.my_func'},
                               {'plugin_args': [10]}]
938
939        Example:
940            - plugin:
941              - plugin_name: plugin.foo_func.my_func
942              - plugin_args:
943                - arg1
944                - arg2
945
946            - plugin:
947              - plugin_name: result = plugin.foo_func.my_func
948              - plugin_args:
949                - arg1
950                - arg2
951
952            - plugin:
953              - plugin_name: result1,result2 = plugin.foo_func.my_func
954              - plugin_args:
955                - arg1
956                - arg2
957        """
958        try:
959            idx = self.key_index_list_dict('plugin_name', plugin_cmd_list)
960            plugin_name = plugin_cmd_list[idx]['plugin_name']
961            # Equal separator means plugin function returns result.
962            if ' = ' in plugin_name:
963                # Ex. ['result', 'plugin.foo_func.my_func']
964                plugin_name_args = plugin_name.split(' = ')
965                # plugin func return data.
966                for arg in plugin_name_args:
967                    if arg == plugin_name_args[-1]:
968                        plugin_name = arg
969                    else:
970                        plugin_resp = arg.split(',')
971                        # ['result1','result2']
972                        for x in plugin_resp:
973                            global_plugin_list.append(x)
974                            global_plugin_dict[x] = ""
975
            # Walk the plugin args ['arg1', 'arg2'].
            # Handle the case where the YAML 'plugin_args' statement is not declared.
978            if any('plugin_args' in d for d in plugin_cmd_list):
979                idx = self.key_index_list_dict('plugin_args', plugin_cmd_list)
980                plugin_args = plugin_cmd_list[idx]['plugin_args']
981                if plugin_args:
982                    plugin_args = self.yaml_args_populate(plugin_args)
983                else:
984                    plugin_args = []
985            else:
986                plugin_args = self.yaml_args_populate([])
987
988            # Pack the args arg1, arg2, .... argn into
989            # "arg1","arg2","argn"  string as params for function.
990            parm_args_str = self.yaml_args_string(plugin_args)
991            if parm_args_str:
992                plugin_func = plugin_name + '(' + parm_args_str + ')'
993            else:
994                plugin_func = plugin_name + '()'
995
996            # Execute plugin function.
997            if global_plugin_dict:
998                resp = self.execute_python_eval(plugin_func)
999                # Update plugin vars dict if there is any.
1000                if resp != 'PLUGIN_EVAL_ERROR':
1001                    self.response_args_data(resp)
1002            else:
1003                resp = self.execute_python_eval(plugin_func)
        except Exception as e:
            # Set the plugin error state and flag the eval error so the
            # check below does not reference an undefined response.
            plugin_error_dict['exit_on_error'] = True
            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
            resp = 'PLUGIN_EVAL_ERROR'
1009
1010        # There is a real error executing the plugin function.
1011        if resp == 'PLUGIN_EVAL_ERROR':
1012            return resp
1013
1014        # Check if plugin_expects_return (int, string, list,dict etc)
1015        if any('plugin_expects_return' in d for d in plugin_cmd_list):
1016            idx = self.key_index_list_dict('plugin_expects_return', plugin_cmd_list)
1017            plugin_expects = plugin_cmd_list[idx]['plugin_expects_return']
1018            if plugin_expects:
1019                if resp:
1020                    if self.plugin_expect_type(plugin_expects, resp) == 'INVALID':
                        self.logger.warning("\tWARN: Plugin error check skipped")
1022                    elif not self.plugin_expect_type(plugin_expects, resp):
1023                        self.logger.error("\tERROR: Plugin expects return data: %s"
1024                                          % plugin_expects)
1025                        plugin_error_dict['exit_on_error'] = True
1026                elif not resp:
1027                    self.logger.error("\tERROR: Plugin func failed to return data")
1028                    plugin_error_dict['exit_on_error'] = True
1029
1030        return resp
1031
1032    def response_args_data(self, plugin_resp):
1033        r"""
1034        Parse the plugin function response and update plugin return variable.
1035
1036        plugin_resp       Response data from plugin function.
1037        """
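        # Illustrative example: with global_plugin_list == ['result1', 'result2']
        # and plugin_resp == ('a\n', 'b\n'), global_plugin_dict is updated to
        # {'result1': 'a', 'result2': 'b'} and global_plugin_list is cleared.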
1038        resp_list = []
1039        resp_data = ""
1040
        # There is nothing to update from the plugin response.
1042        if len(global_plugin_list) == 0 or plugin_resp == 'None':
1043            return
1044
1045        if isinstance(plugin_resp, str):
1046            resp_data = plugin_resp.strip('\r\n\t')
1047            resp_list.append(resp_data)
1048        elif isinstance(plugin_resp, bytes):
1049            resp_data = str(plugin_resp, 'UTF-8').strip('\r\n\t')
1050            resp_list.append(resp_data)
1051        elif isinstance(plugin_resp, tuple):
1052            if len(global_plugin_list) == 1:
1053                resp_list.append(plugin_resp)
1054            else:
1055                resp_list = list(plugin_resp)
1056                resp_list = [x.strip('\r\n\t') for x in resp_list]
1057        elif isinstance(plugin_resp, list):
1058            if len(global_plugin_list) == 1:
1059                resp_list.append([x.strip('\r\n\t') for x in plugin_resp])
1060            else:
1061                resp_list = [x.strip('\r\n\t') for x in plugin_resp]
1062        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
1063            resp_list.append(plugin_resp)
1064
1065        # Iterate if there is a list of plugin return vars to update.
1066        for idx, item in enumerate(resp_list, start=0):
            # Stop once all declared plugin return vars have been updated.
1068            if idx >= len(global_plugin_list):
1069                break
1070            # Find the index of the return func in the list and
1071            # update the global func return dictionary.
1072            try:
1073                dict_idx = global_plugin_list[idx]
1074                global_plugin_dict[dict_idx] = item
            except (IndexError, ValueError) as e:
                self.logger.warning("\tWARN: response_args_data: %s" % e)
1078
1079        # Done updating plugin dict irrespective of pass or failed,
1080        # clear all the list element for next plugin block execute.
1081        global_plugin_list.clear()
1082
1083    def yaml_args_string(self, plugin_args):
1084        r"""
1085        Pack the args into string.
1086
        plugin_args            arg list ['arg1', 'arg2', 'argn']
1088        """
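        # Illustrative example (assuming neither arg is a registered list/dict
        # plugin var): ['cat /tmp/foo', 5] is packed as '"cat /tmp/foo",5',
        # ready to be embedded in the eval() call string built by the caller.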
1089        args_str = ''
1090        for args in plugin_args:
1091            if args:
1092                if isinstance(args, (int, float)):
1093                    args_str += str(args)
1094                elif args in global_plugin_type_list:
1095                    args_str += str(global_plugin_dict[args])
1096                else:
1097                    args_str += '"' + str(args.strip('\r\n\t')) + '"'
            # Append a comma separator after every element except the last.
1099            if args != plugin_args[-1]:
1100                args_str += ","
1101        return args_str
1102
1103    def yaml_args_populate(self, yaml_arg_list):
1104        r"""
1105        Decode env and plugin vars and populate.
1106
1107        Description of argument(s):
1108        yaml_arg_list         arg list read from YAML
1109
1110        Example:
1111          - plugin_args:
1112            - arg1
1113            - arg2
1114
                  yaml_arg_list:  [arg1, arg2]
1116        """
1117        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1118        env_vars_list = list(self.env_dict)
1119
1120        if isinstance(yaml_arg_list, list):
1121            tmp_list = []
1122            for arg in yaml_arg_list:
1123                if isinstance(arg, (int, float)):
1124                    tmp_list.append(arg)
1125                    continue
1126                elif isinstance(arg, str):
1127                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1128                    tmp_list.append(arg_str)
1129                else:
1130                    tmp_list.append(arg)
1131
1132            # return populated list.
1133            return tmp_list
1134
1135    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
1136        r"""
1137        Update ${MY_VAR} and plugin vars.
1138
1139        Description of argument(s):
1140        yaml_arg_str         arg string read from YAML.
1141
1142        Example:
1143            - cat ${MY_VAR}
1144            - ls -AX my_plugin_var
1145        """
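        # Illustrative example (hypothetical values): with os.environ['hostname']
        # set to '1.2.3.4', the string 'ping -c 1 ${hostname}' becomes
        # 'ping -c 1 1.2.3.4'; plugin vars found in global_plugin_dict are
        # substituted the same way.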
1146        # Parse the string for env vars ${env_vars}.
1147        try:
            # Example: list of matching env vars ['username', 'password', 'hostname'].
            # Extra escaping (\) is needed for the special symbols; the pattern '\$\{([^\}]+)\}' works well.
1150            var_name_regex = '\\$\\{([^\\}]+)\\}'
1151            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
1152            for var in env_var_names_list:
1153                env_var = os.environ[var]
1154                env_replace = '${' + var + '}'
1155                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
1156        except Exception as e:
1157            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
1158            pass
1159
1160        # Parse the string for plugin vars.
1161        try:
1162            # Example, list of plugin vars ['my_username', 'my_data']
1163            plugin_var_name_list = global_plugin_dict.keys()
1164            for var in plugin_var_name_list:
                # Skip any var already populated by the env var block above.
                if var in env_var_names_list:
                    continue
                # If this plugin var exists but is empty in the dict, don't replace it.
                # This is either a YAML plugin statement used incorrectly or
                # the user added a plugin var which is not going to be populated.
1171                if yaml_arg_str in global_plugin_dict:
1172                    if isinstance(global_plugin_dict[var], (list, dict)):
1173                        # List data type or dict can't be replaced, use directly
1174                        # in eval function call.
1175                        global_plugin_type_list.append(var)
1176                    else:
1177                        yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
1178                # Just a string like filename or command.
1179                else:
1180                    yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
1181        except (IndexError, ValueError) as e:
1182            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
1183            pass
1184
1185        return yaml_arg_str
1186
1187    def plugin_error_check(self, plugin_dict):
1188        r"""
1189        Plugin error dict processing.
1190
1191        Description of argument(s):
        plugin_dict        The plugin block entries (list of dicts) to inspect for 'plugin_error'.
1193        """
1194        if any('plugin_error' in d for d in plugin_dict):
1195            for d in plugin_dict:
1196                if 'plugin_error' in d:
1197                    value = d['plugin_error']
1198                    # Reference if the error is set or not by plugin.
1199                    return plugin_error_dict[value]
1200
1201    def key_index_list_dict(self, key, list_dict):
1202        r"""
        Iterate over a list of dictionaries and return the index where the key is found.
1204
1205        Description of argument(s):
1206        key           Valid Key in a dict.
1207        list_dict     list of dictionary.
1208        """
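        # Illustrative example:
        #   key_index_list_dict('plugin_name',
        #                       [{'plugin_name': 'plugin.foo.bar'}, {'plugin_args': []}])
        # returns 0; None is returned when the key is not present.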
1209        for i, d in enumerate(list_dict):
1210            if key in d.keys():
1211                return i
1212
1213    def plugin_expect_type(self, type, data):
1214        r"""
1215        Plugin expect directive type check.
1216        """
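        # Illustrative example: plugin_expect_type('list', ['a']) returns True;
        # an unrecognized type string returns the sentinel 'INVALID', which the
        # caller treats as "skip the error check".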
1217        if type == 'int':
1218            return isinstance(data, int)
1219        elif type == 'float':
1220            return isinstance(data, float)
1221        elif type == 'str':
1222            return isinstance(data, str)
1223        elif type == 'list':
1224            return isinstance(data, list)
1225        elif type == 'dict':
1226            return isinstance(data, dict)
1227        elif type == 'tuple':
1228            return isinstance(data, tuple)
1229        else:
1230            self.logger.info("\tInvalid data type requested: %s" % type)
1231            return 'INVALID'
1232