1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7import os
8import re
9import sys
10import yaml
11import json
12import time
13import logging
14import platform
15from errno import EACCES, EPERM
16import subprocess
17
# Make this script's directory and all of its subdirectories importable,
# so sibling utility modules (ssh_utility, telnet_utility) resolve.
script_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(script_dir)
# Walk path and append to sys.path
for root, dirs, files in os.walk(script_dir):
    for dir_name in dirs:
        # 'dir_name' rather than 'dir' avoids shadowing the builtin dir().
        sys.path.append(os.path.join(root, dir_name))
24
25from ssh_utility import SSHRemoteclient
26from telnet_utility import TelnetRemoteclient
27
28r"""
User-defined plugin python functions.

This imports python files from the plugins directory.
32
33plugins
34├── file1.py
35└── file2.py
36
37Example how to define in YAML:
38 - plugin:
39   - plugin_name: plugin.foo_func.foo_func_yaml
40     - plugin_args:
41       - arg1
42       - arg2
43"""
# Directory holding user plugin modules, resolved relative to this script.
# os.path is more robust than the previous manual string splitting on "/",
# which broke if the script's filename also appeared earlier in its path.
plugin_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'plugins')
sys.path.append(plugin_dir)
try:
    for module in os.listdir(plugin_dir):
        # Skip the package marker and anything that is not a python module.
        if module == '__init__.py' or module[-3:] != '.py':
            continue
        plugin_module = "plugins." + module[:-3]
        # To access the module plugin.<module name>.<function>
        # Example: plugin.foo_func.foo_func_yaml()
        try:
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception as e:
            # A broken plugin must not abort the collector; report and move on.
            print("PLUGIN: Module import failed: %s" % module)
except FileNotFoundError as e:
    # Running without a plugins directory is a supported configuration.
    print("PLUGIN: %s" % e)
61
62r"""
63This is for plugin functions returning data or responses to the caller
64in YAML plugin setup.
65
66Example:
67
68    - plugin:
69      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
70      - plugin_args:
71        - ${hostname}
72        - ${username}
73        - ${password}
74        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
75     - plugin:
76        - plugin_name: plugin.print_vars.print_vars
77        - plugin_args:
78          - version
79
where the first plugin's "version" var is used by another plugin in a
later YAML block or plugin
82
83"""
# NOTE: the previous 'global' statements here were removed; 'global' at
# module scope is a no-op, since these names are module-level by definition.

# Hold the plugin return values in dict and plugin return vars in list.
# Dict is to reference and update vars processing in parser where as
# list is for current vars from the plugin block which needs processing.
global_plugin_dict = {}
global_plugin_list = []

# Hold the plugin return named declared if function returned values are list,dict.
# Refer this name list to look up the plugin dict for eval() args function
# Example ['version']
global_plugin_type_list = []

# Path where logs are to be stored or written.
global_log_store_path = ''

# Plugin error state defaults.
plugin_error_dict = {
    'exit_on_error': False,
    'continue_on_error': False,
}
107
108
109class ffdc_collector:
110
111    r"""
112    Execute commands from configuration file to collect log files.
113    Fetch and store generated files at the specified location.
114
115    """
116
    def __init__(self,
                 hostname,
                 username,
                 password,
                 ffdc_config,
                 location,
                 remote_type,
                 remote_protocol,
                 env_vars,
                 econfig,
                 log_level):
        r"""
        Initialize the collector and prepare the local environment.

        Creates the timestamped storage directory, sets up logging,
        verifies the script host environment, loads and validates the
        YAML FFDC configuration, then loads user env variables.
        Exits the process with -1 on any validation failure.

        Description of argument(s):

        hostname            name/ip of the targeted (remote) system
        username            user on the targeted system with access to FFDC files
        password            password for user on targeted system
        ffdc_config         configuration file listing commands and files for FFDC
        location            where to store collected FFDC
        remote_type         os type of the remote host
        remote_protocol     Protocol to use to collect data
        env_vars            User defined CLI env vars '{"key : "value"}'
        econfig             User defined env vars YAML file
        log_level           logging level name, e.g. 'INFO' or 'DEBUG'

        """

        self.hostname = hostname
        self.username = username
        self.password = password
        self.ffdc_config = ffdc_config
        # Collected data lands under <location>/<REMOTE_TYPE>/.
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ''
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set here
        # to be sure that all files for this run will have same timestamps
        # and they will be saved in the same directory.
        # self.location == local system for now
        self.set_ffdc_default_store_path()

        # Logger for this run.  Need to be after set_ffdc_default_store_path()
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify top level directory exists for storage
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load default or user define YAML configuration file.
            with open(self.ffdc_config, 'r') as file:
                try:
                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
                except yaml.YAMLError as e:
                    self.logger.error(e)
                    sys.exit(-1)

            # The config must contain a top-level section for this host type.
            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n" % (self.target_type, self.ffdc_config))
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from user.
        self.logger.info("\n\tENV: User define input YAML variables")
        self.env_dict = {}
        self.load_env()
193
194    def verify_script_env(self):
195
196        # Import to log version
197        import click
198        import paramiko
199
200        run_env_ok = True
201
202        redfishtool_version = self.run_tool_cmd('redfishtool -V').split(' ')[2].strip('\n')
203        ipmitool_version = self.run_tool_cmd('ipmitool -V').split(' ')[2]
204
205        self.logger.info("\n\t---- Script host environment ----")
206        self.logger.info("\t{:<10}  {:<10}".format('Script hostname', os.uname()[1]))
207        self.logger.info("\t{:<10}  {:<10}".format('Script host os', platform.platform()))
208        self.logger.info("\t{:<10}  {:>10}".format('Python', platform.python_version()))
209        self.logger.info("\t{:<10}  {:>10}".format('PyYAML', yaml.__version__))
210        self.logger.info("\t{:<10}  {:>10}".format('click', click.__version__))
211        self.logger.info("\t{:<10}  {:>10}".format('paramiko', paramiko.__version__))
212        self.logger.info("\t{:<10}  {:>9}".format('redfishtool', redfishtool_version))
213        self.logger.info("\t{:<10}  {:>12}".format('ipmitool', ipmitool_version))
214
215        if eval(yaml.__version__.replace('.', ',')) < (5, 3, 0):
216            self.logger.error("\n\tERROR: Python or python packages do not meet minimum version requirement.")
217            self.logger.error("\tERROR: PyYAML version 5.3.0 or higher is needed.\n")
218            run_env_ok = False
219
220        self.logger.info("\t---- End script host environment ----")
221        return run_env_ok
222
223    def script_logging(self,
224                       log_level_attr):
225        r"""
226        Create logger
227
228        """
229        self.logger = logging.getLogger()
230        self.logger.setLevel(log_level_attr)
231        log_file_handler = logging.FileHandler(self.ffdc_dir_path + "collector.log")
232
233        stdout_handler = logging.StreamHandler(sys.stdout)
234        self.logger.addHandler(log_file_handler)
235        self.logger.addHandler(stdout_handler)
236
237        # Turn off paramiko INFO logging
238        logging.getLogger("paramiko").setLevel(logging.WARNING)
239
240    def target_is_pingable(self):
241        r"""
242        Check if target system is ping-able.
243
244        """
245        response = os.system("ping -c 1 %s  2>&1 >/dev/null" % self.hostname)
246        if response == 0:
247            self.logger.info("\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname)
248            return True
249        else:
250            self.logger.error(
251                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n" % self.hostname)
252            sys.exit(-1)
253
    def collect_ffdc(self):
        r"""
        Initiate FFDC Collection depending on requested protocol.

        Builds the list of protocols declared in the YAML config for this
        target type, verifies which of them actually work, then starts
        generate_ffdc(). Exits with -1 when the user-requested protocol is
        not among the working ones.
        """

        self.logger.info("\n\t---- Start communicating with %s ----" % self.hostname)
        self.start_time = time.time()

        # Find the list of target and protocol supported.
        check_protocol_list = []
        config_dict = self.ffdc_actions

        for target_type in config_dict.keys():
            if self.target_type != target_type:
                continue

            # Collect each command group's first PROTOCOL entry, de-duplicated.
            for k, v in config_dict[target_type].items():
                if config_dict[target_type][k]['PROTOCOL'][0] not in check_protocol_list:
                    check_protocol_list.append(config_dict[target_type][k]['PROTOCOL'][0])

        self.logger.info("\n\t %s protocol type: %s" % (self.target_type, check_protocol_list))

        # Probe each candidate protocol (ping/SSH/telnet/redfish/IPMI).
        verified_working_protocol = self.verify_protocol(check_protocol_list)

        if verified_working_protocol:
            self.logger.info("\n\t---- Completed protocol pre-requisite check ----\n")

        # Verify top level directory exists for storage
        self.validate_local_store(self.location)

        if ((self.remote_protocol not in verified_working_protocol) and (self.remote_protocol != 'ALL')):
            self.logger.info("\n\tWorking protocol list: %s" % verified_working_protocol)
            self.logger.error(
                '\tERROR: Requested protocol %s is not in working protocol list.\n'
                % self.remote_protocol)
            sys.exit(-1)
        else:
            self.generate_ffdc(verified_working_protocol)
293
294    def ssh_to_target_system(self):
295        r"""
296        Open a ssh connection to targeted system.
297
298        """
299
300        self.ssh_remoteclient = SSHRemoteclient(self.hostname,
301                                                self.username,
302                                                self.password)
303
304        if self.ssh_remoteclient.ssh_remoteclient_login():
305            self.logger.info("\n\t[Check] %s SSH connection established.\t [OK]" % self.hostname)
306
307            # Check scp connection.
308            # If scp connection fails,
309            # continue with FFDC generation but skip scp files to local host.
310            self.ssh_remoteclient.scp_connection()
311            return True
312        else:
313            self.logger.info("\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]" % self.hostname)
314            return False
315
316    def telnet_to_target_system(self):
317        r"""
318        Open a telnet connection to targeted system.
319        """
320        self.telnet_remoteclient = TelnetRemoteclient(self.hostname,
321                                                      self.username,
322                                                      self.password)
323        if self.telnet_remoteclient.tn_remoteclient_login():
324            self.logger.info("\n\t[Check] %s Telnet connection established.\t [OK]" % self.hostname)
325            return True
326        else:
327            self.logger.info("\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]" % self.hostname)
328            return False
329
330    def generate_ffdc(self, working_protocol_list):
331        r"""
332        Determine actions based on remote host type
333
334        Description of argument(s):
335        working_protocol_list    list of confirmed working protocols to connect to remote host.
336        """
337
338        self.logger.info("\n\t---- Executing commands on " + self.hostname + " ----")
339        self.logger.info("\n\tWorking protocol list: %s" % working_protocol_list)
340
341        config_dict = self.ffdc_actions
342        for target_type in config_dict.keys():
343            if self.target_type != target_type:
344                continue
345
346            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
347            global_plugin_dict['global_log_store_path'] = self.ffdc_dir_path
348            self.logger.info("\tSystem Type: %s" % target_type)
349            for k, v in config_dict[target_type].items():
350
351                if self.remote_protocol not in working_protocol_list \
352                        and self.remote_protocol != 'ALL':
353                    continue
354
355                protocol = config_dict[target_type][k]['PROTOCOL'][0]
356
357                if protocol not in working_protocol_list:
358                    continue
359
360                if protocol in working_protocol_list:
361                    if protocol == 'SSH' or protocol == 'SCP':
362                        self.protocol_ssh(protocol, target_type, k)
363                    elif protocol == 'TELNET':
364                        self.protocol_telnet(target_type, k)
365                    elif protocol == 'REDFISH' or protocol == 'IPMI' or protocol == 'SHELL':
366                        self.protocol_execute(protocol, target_type, k)
367                else:
368                    self.logger.error("\n\tERROR: %s is not available for %s." % (protocol, self.hostname))
369
370        # Close network connection after collecting all files
371        self.elapsed_time = time.strftime("%H:%M:%S", time.gmtime(time.time() - self.start_time))
372        if self.ssh_remoteclient:
373            self.ssh_remoteclient.ssh_remoteclient_disconnect()
374        if self.telnet_remoteclient:
375            self.telnet_remoteclient.tn_remoteclient_disconnect()
376
377    def protocol_ssh(self,
378                     protocol,
379                     target_type,
380                     sub_type):
381        r"""
382        Perform actions using SSH and SCP protocols.
383
384        Description of argument(s):
385        protocol            Protocol to execute.
386        target_type         OS Type of remote host.
387        sub_type            Group type of commands.
388        """
389
390        if protocol == 'SCP':
391            self.group_copy(self.ffdc_actions[target_type][sub_type])
392        else:
393            self.collect_and_copy_ffdc(self.ffdc_actions[target_type][sub_type])
394
    def protocol_telnet(self,
                        target_type,
                        sub_type):
        r"""
        Perform actions using telnet protocol.

        Runs each entry of the group's COMMANDS list over the open telnet
        session and writes the output to the matching (positional) FILES
        entry under the FFDC directory.

        Description of argument(s):
        target_type          OS Type of remote host.
        sub_type             Group type of commands.
        """
        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, 'TELNET'))
        telnet_files_saved = []
        progress_counter = 0
        list_of_commands = self.ffdc_actions[target_type][sub_type]['COMMANDS']
        for index, each_cmd in enumerate(list_of_commands, start=0):
            command_txt, command_timeout = self.unpack_command(each_cmd)
            result = self.telnet_remoteclient.execute_command(command_txt, command_timeout)
            if result:
                try:
                    # FILES is positional: entry i names the output of command i.
                    targ_file = self.ffdc_actions[target_type][sub_type]['FILES'][index]
                except IndexError:
                    # No filename for this command; fall back to the command text.
                    targ_file = command_txt
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from telnet %s." % each_cmd)
                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
                targ_file_with_path = (self.ffdc_dir_path
                                       + self.ffdc_prefix
                                       + targ_file)
                # Creates a new file
                with open(targ_file_with_path, 'w') as fp:
                    fp.write(result)
                    # NOTE(review): 'fp.close' is missing '()' and is a no-op;
                    # the 'with' block closes the file anyway.
                    fp.close
                    telnet_files_saved.append(targ_file)
            progress_counter += 1
            self.print_progress(progress_counter)
        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
        for file in telnet_files_saved:
            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
431
    def protocol_execute(self,
                         protocol,
                         target_type,
                         sub_type):
        r"""
        Perform actions for a given protocol.

        Each entry in the group's COMMANDS list is either a plain command
        string (run locally via run_tool_cmd) or a dict with a 'plugin'
        key (dispatched to execute_plugin_block). Results are written to
        the positional FILES entry under the FFDC directory.

        Description of argument(s):
        protocol            Protocol to execute.
        target_type         OS Type of remote host.
        sub_type            Group type of commands.
        """

        self.logger.info("\n\t[Run] Executing commands to %s using %s" % (self.hostname, protocol))
        executed_files_saved = []
        progress_counter = 0
        list_of_cmd = self.get_command_list(self.ffdc_actions[target_type][sub_type])
        for index, each_cmd in enumerate(list_of_cmd, start=0):
            plugin_call = False
            if isinstance(each_cmd, dict):
                if 'plugin' in each_cmd:
                    # If the error is set and plugin explicitly
                    # requested to skip execution on error..
                    if plugin_error_dict['exit_on_error'] and \
                            self.plugin_error_check(each_cmd['plugin']):
                        self.logger.info("\n\t[PLUGIN-ERROR] exit_on_error: %s" %
                                         plugin_error_dict['exit_on_error'])
                        self.logger.info("\t[PLUGIN-SKIP] %s" %
                                         each_cmd['plugin'][0])
                        continue
                    plugin_call = True
                    # call the plugin
                    self.logger.info("\n\t[PLUGIN-START]")
                    result = self.execute_plugin_block(each_cmd['plugin'])
                    self.logger.info("\t[PLUGIN-END]\n")
            else:
                # Plain command string: substitute ${env}/plugin vars first.
                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)

            if not plugin_call:
                result = self.run_tool_cmd(each_cmd)
            if result:
                try:
                    # FILES is positional: entry 'index' names this command's output.
                    file_name = self.get_file_list(self.ffdc_actions[target_type][sub_type])[index]
                    # If file is specified as None.
                    if file_name == "None":
                        continue
                    targ_file = self.yaml_env_and_plugin_vars_populate(file_name)
                except IndexError:
                    # No FILES entry for this command; derive a name from it.
                    targ_file = each_cmd.split('/')[-1]
                    self.logger.warning(
                        "\n\t[WARN] Missing filename to store data from %s." % each_cmd)
                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)

                targ_file_with_path = (self.ffdc_dir_path
                                       + self.ffdc_prefix
                                       + targ_file)

                # Creates a new file
                with open(targ_file_with_path, 'w') as fp:
                    # dict results (e.g. from plugins) are serialized as JSON.
                    if isinstance(result, dict):
                        fp.write(json.dumps(result))
                    else:
                        fp.write(result)
                    # NOTE(review): 'fp.close' is missing '()' and is a no-op;
                    # the 'with' block closes the file anyway.
                    fp.close
                    executed_files_saved.append(targ_file)

            progress_counter += 1
            self.print_progress(progress_counter)

        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")

        for file in executed_files_saved:
            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
505
506    def collect_and_copy_ffdc(self,
507                              ffdc_actions_for_target_type,
508                              form_filename=False):
509        r"""
510        Send commands in ffdc_config file to targeted system.
511
512        Description of argument(s):
513        ffdc_actions_for_target_type     commands and files for the selected remote host type.
514        form_filename                    if true, pre-pend self.target_type to filename
515        """
516
517        # Executing commands, if any
518        self.ssh_execute_ffdc_commands(ffdc_actions_for_target_type,
519                                       form_filename)
520
521        # Copying files
522        if self.ssh_remoteclient.scpclient:
523            self.logger.info("\n\n\tCopying FFDC files from remote system %s.\n" % self.hostname)
524
525            # Retrieving files from target system
526            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
527            self.scp_ffdc(self.ffdc_dir_path, self.ffdc_prefix, form_filename, list_of_files)
528        else:
529            self.logger.info("\n\n\tSkip copying FFDC files from remote system %s.\n" % self.hostname)
530
531    def get_command_list(self,
532                         ffdc_actions_for_target_type):
533        r"""
534        Fetch list of commands from configuration file
535
536        Description of argument(s):
537        ffdc_actions_for_target_type    commands and files for the selected remote host type.
538        """
539        try:
540            list_of_commands = ffdc_actions_for_target_type['COMMANDS']
541        except KeyError:
542            list_of_commands = []
543        return list_of_commands
544
545    def get_file_list(self,
546                      ffdc_actions_for_target_type):
547        r"""
548        Fetch list of commands from configuration file
549
550        Description of argument(s):
551        ffdc_actions_for_target_type    commands and files for the selected remote host type.
552        """
553        try:
554            list_of_files = ffdc_actions_for_target_type['FILES']
555        except KeyError:
556            list_of_files = []
557        return list_of_files
558
559    def unpack_command(self,
560                       command):
561        r"""
562        Unpack command from config file
563
564        Description of argument(s):
565        command    Command from config file.
566        """
567        if isinstance(command, dict):
568            command_txt = next(iter(command))
569            command_timeout = next(iter(command.values()))
570        elif isinstance(command, str):
571            command_txt = command
572            # Default command timeout 60 seconds
573            command_timeout = 60
574
575        return command_txt, command_timeout
576
577    def ssh_execute_ffdc_commands(self,
578                                  ffdc_actions_for_target_type,
579                                  form_filename=False):
580        r"""
581        Send commands in ffdc_config file to targeted system.
582
583        Description of argument(s):
584        ffdc_actions_for_target_type    commands and files for the selected remote host type.
585        form_filename                    if true, pre-pend self.target_type to filename
586        """
587        self.logger.info("\n\t[Run] Executing commands on %s using %s"
588                         % (self.hostname, ffdc_actions_for_target_type['PROTOCOL'][0]))
589
590        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
591        # If command list is empty, returns
592        if not list_of_commands:
593            return
594
595        progress_counter = 0
596        for command in list_of_commands:
597            command_txt, command_timeout = self.unpack_command(command)
598
599            if form_filename:
600                command_txt = str(command_txt % self.target_type)
601
602            cmd_exit_code, err, response = \
603                self.ssh_remoteclient.execute_command(command_txt, command_timeout)
604
605            if cmd_exit_code:
606                self.logger.warning(
607                    "\n\t\t[WARN] %s exits with code %s." % (command_txt, str(cmd_exit_code)))
608                self.logger.warning("\t\t[WARN] %s " % err)
609
610            progress_counter += 1
611            self.print_progress(progress_counter)
612
613        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
614
    def group_copy(self,
                   ffdc_actions_for_target_type):
        r"""
        scp group of files (wild card) from remote host.

        Each COMMANDS entry is executed remotely (typically expanding a
        wild card into file names); the stdout lines are then scp'd to
        the FFDC directory.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info("\n\tCopying files from remote system %s via SCP.\n" % self.hostname)

            list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
            # If command list is empty, returns
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    # Substitute ${env}/plugin vars in the command first.
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                cmd_exit_code, err, response = \
                    self.ssh_remoteclient.execute_command(command)

                # If file does not exist, code take no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    scp_result = \
                        self.ssh_remoteclient.scp_file_from_remote(response.split('\n'),
                                                                   self.ffdc_dir_path)
                    if scp_result:
                        self.logger.info("\t\tSuccessfully copied from " + self.hostname + ':' + command)
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            self.logger.info("\n\n\tSkip copying files from remote system %s.\n" % self.hostname)
655
    def scp_ffdc(self,
                 targ_dir_path,
                 targ_file_prefix,
                 form_filename,
                 file_list=None,
                 quiet=None):
        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be pre-pended to each
                                        target file's name.
        form_filename                   If true, expand '%s' in each filename
                                        with self.target_type.
        file_list                       List of remote file paths to scp from the
                                        targeted system to this system.
        quiet                           If truthy, print progress marks instead of
                                        per-file success/failure messages.

        """

        progress_counter = 0
        for filename in file_list:
            if form_filename:
                filename = str(filename % self.target_type)
            source_file_path = filename
            # Local name: strip the remote directory part, keep the basename.
            targ_file_path = targ_dir_path + targ_file_prefix + filename.split('/')[-1]

            # If source file name contains wild card, copy filename as is.
            if '*' in source_file_path:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, self.ffdc_dir_path)
            else:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, targ_file_path)

            if not quiet:
                if scp_result:
                    self.logger.info(
                        "\t\tSuccessfully copied from " + self.hostname + ':' + source_file_path + ".\n")
                else:
                    self.logger.info(
                        "\t\tFail to copy from " + self.hostname + ':' + source_file_path + ".\n")
            else:
                progress_counter += 1
                self.print_progress(progress_counter)
696
697    def set_ffdc_default_store_path(self):
698        r"""
699        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
700        Collected ffdc file will be stored in dir /self.location/hostname_timestr/.
701        Individual ffdc file will have timestr_filename.
702
703        Description of class variables:
704        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.
705
706        self.ffdc_prefix    The prefix to be given to each ffdc file name.
707
708        """
709
710        timestr = time.strftime("%Y%m%d-%H%M%S")
711        self.ffdc_dir_path = self.location + "/" + self.hostname + "_" + timestr + "/"
712        self.ffdc_prefix = timestr + "_"
713        self.validate_local_store(self.ffdc_dir_path)
714
715    # Need to verify local store path exists prior to instantiate this class.
716    # This class method is used to share the same code between CLI input parm
717    # and Robot Framework "${EXECDIR}/logs" before referencing this class.
718    @classmethod
719    def validate_local_store(cls, dir_path):
720        r"""
721        Ensure path exists to store FFDC files locally.
722
723        Description of variable:
724        dir_path  The dir path where collected ffdc data files will be stored.
725
726        """
727
728        if not os.path.exists(dir_path):
729            try:
730                os.makedirs(dir_path, 0o755)
731            except (IOError, OSError) as e:
732                # PermissionError
733                if e.errno == EPERM or e.errno == EACCES:
734                    self.logger.error(
735                        '\tERROR: os.makedirs %s failed with PermissionError.\n' % dir_path)
736                else:
737                    self.logger.error(
738                        '\tERROR: os.makedirs %s failed with %s.\n' % (dir_path, e.strerror))
739                sys.exit(-1)
740
741    def print_progress(self, progress):
742        r"""
743        Print activity progress +
744
745        Description of variable:
746        progress  Progress counter.
747
748        """
749
750        sys.stdout.write("\r\t" + "+" * progress)
751        sys.stdout.flush()
752        time.sleep(.1)
753
754    def verify_redfish(self):
755        r"""
756        Verify remote host has redfish service active
757
758        """
759        redfish_parm = 'redfishtool -r ' \
760                       + self.hostname + ' -S Always raw GET /redfish/v1/'
761        return (self.run_tool_cmd(redfish_parm, True))
762
763    def verify_ipmi(self):
764        r"""
765        Verify remote host has IPMI LAN service active
766
767        """
768        if self.target_type == 'OPENBMC':
769            ipmi_parm = 'ipmitool -I lanplus -C 17  -U ' + self.username + ' -P ' \
770                + self.password + ' -H ' + self.hostname + ' power status'
771        else:
772            ipmi_parm = 'ipmitool -I lanplus  -P ' \
773                + self.password + ' -H ' + self.hostname + ' power status'
774
775        return (self.run_tool_cmd(ipmi_parm, True))
776
777    def run_tool_cmd(self,
778                     parms_string,
779                     quiet=False):
780        r"""
781        Run CLI standard tool or scripts.
782
783        Description of variable:
784        parms_string         tool command options.
785        quiet                do not print tool error message if True
786        """
787
788        result = subprocess.run([parms_string],
789                                stdout=subprocess.PIPE,
790                                stderr=subprocess.PIPE,
791                                shell=True,
792                                universal_newlines=True)
793
794        if result.stderr and not quiet:
795            self.logger.error('\n\t\tERROR with %s ' % parms_string)
796            self.logger.error('\t\t' + result.stderr)
797
798        return result.stdout
799
800    def verify_protocol(self, protocol_list):
801        r"""
802        Perform protocol working check.
803
804        Description of argument(s):
805        protocol_list        List of protocol.
806        """
807
808        tmp_list = []
809        if self.target_is_pingable():
810            tmp_list.append("SHELL")
811
812        for protocol in protocol_list:
813            if self.remote_protocol != 'ALL':
814                if self.remote_protocol != protocol:
815                    continue
816
817            # Only check SSH/SCP once for both protocols
818            if protocol == 'SSH' or protocol == 'SCP' and protocol not in tmp_list:
819                if self.ssh_to_target_system():
820                    # Add only what user asked.
821                    if self.remote_protocol != 'ALL':
822                        tmp_list.append(self.remote_protocol)
823                    else:
824                        tmp_list.append('SSH')
825                        tmp_list.append('SCP')
826
827            if protocol == 'TELNET':
828                if self.telnet_to_target_system():
829                    tmp_list.append(protocol)
830
831            if protocol == 'REDFISH':
832                if self.verify_redfish():
833                    tmp_list.append(protocol)
834                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [OK]" % self.hostname)
835                else:
836                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]" % self.hostname)
837
838            if protocol == 'IPMI':
839                if self.verify_ipmi():
840                    tmp_list.append(protocol)
841                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [OK]" % self.hostname)
842                else:
843                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]" % self.hostname)
844
845        return tmp_list
846
    def load_env(self):
        r"""
        Load default and user-supplied environment variables.

        Exports hostname, username and password, plus any variables declared
        via self.env_vars (a JSON object string) or self.econfig (a YAML file
        with an 'env_params' mapping), into both os.environ and
        self.env_dict so YAML statements can reference them as ${var} at
        runtime.  Exits the program on malformed JSON or YAML input.
        Finally logs the loaded variables with password values masked.
        """
        # This is for the env vars a user can use in YAML to load it at runtime.
        # Example YAML:
        # -COMMANDS:
        #    - my_command ${hostname}  ${username}   ${password}
        os.environ['hostname'] = self.hostname
        os.environ['username'] = self.username
        os.environ['password'] = self.password

        # Append default Env.
        self.env_dict['hostname'] = self.hostname
        self.env_dict['username'] = self.username
        self.env_dict['password'] = self.password

        try:
            tmp_env_dict = {}
            if self.env_vars:
                # self.env_vars is expected to be a JSON object string;
                # a parse failure is handled by the JSONDecodeError below.
                tmp_env_dict = json.loads(self.env_vars)
                # Export ENV vars default.
                for key, value in tmp_env_dict.items():
                    os.environ[key] = value
                    self.env_dict[key] = str(value)

            if self.econfig:
                with open(self.econfig, 'r') as file:
                    try:
                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
                    except yaml.YAMLError as e:
                        self.logger.error(e)
                        sys.exit(-1)
                # Export ENV vars.
                # NOTE(review): assumes the YAML file has a top-level
                # 'env_params' mapping; a KeyError here would propagate
                # uncaught -- confirm the expected config schema.
                for key, value in tmp_env_dict['env_params'].items():
                    os.environ[key] = str(value)
                    self.env_dict[key] = str(value)
        except json.decoder.JSONDecodeError as e:
            self.logger.error("\n\tERROR: %s " % e)
            sys.exit(-1)

        # This to mask the password from displaying on the console.
        mask_dict = self.env_dict.copy()
        for k, v in mask_dict.items():
            if k.lower().find("password") != -1:
                hidden_text = []
                hidden_text.append(v)
                # Build a regex of the escaped literal value and replace each
                # occurrence with asterisks before logging.
                password_regex = '(' +\
                    '|'.join([re.escape(x) for x in hidden_text]) + ')'
                mask_dict[k] = re.sub(password_regex, "********", v)

        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
900
901    def execute_python_eval(self, eval_string):
902        r"""
903        Execute qualified python function string using eval.
904
905        Description of argument(s):
906        eval_string        Execute the python object.
907
908        Example:
909                eval(plugin.foo_func.foo_func(10))
910        """
911        try:
912            self.logger.info("\tExecuting plugin func()")
913            self.logger.debug("\tCall func: %s" % eval_string)
914            result = eval(eval_string)
915            self.logger.info("\treturn: %s" % str(result))
916        except (ValueError,
917                SyntaxError,
918                NameError,
919                AttributeError,
920                TypeError) as e:
921            self.logger.error("\tERROR: execute_python_eval: %s" % e)
922            # Set the plugin error state.
923            plugin_error_dict['exit_on_error'] = True
924            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
925            return 'PLUGIN_EVAL_ERROR'
926
927        return result
928
929    def execute_plugin_block(self, plugin_cmd_list):
930        r"""
931        Pack the plugin command to qualifed python string object.
932
933        Description of argument(s):
934        plugin_list_dict      Plugin block read from YAML
935                              [{'plugin_name': 'plugin.foo_func.my_func'},
936                               {'plugin_args': [10]}]
937
938        Example:
939            - plugin:
940              - plugin_name: plugin.foo_func.my_func
941              - plugin_args:
942                - arg1
943                - arg2
944
945            - plugin:
946              - plugin_name: result = plugin.foo_func.my_func
947              - plugin_args:
948                - arg1
949                - arg2
950
951            - plugin:
952              - plugin_name: result1,result2 = plugin.foo_func.my_func
953              - plugin_args:
954                - arg1
955                - arg2
956        """
957        try:
958            idx = self.key_index_list_dict('plugin_name', plugin_cmd_list)
959            plugin_name = plugin_cmd_list[idx]['plugin_name']
960            # Equal separator means plugin function returns result.
961            if ' = ' in plugin_name:
962                # Ex. ['result', 'plugin.foo_func.my_func']
963                plugin_name_args = plugin_name.split(' = ')
964                # plugin func return data.
965                for arg in plugin_name_args:
966                    if arg == plugin_name_args[-1]:
967                        plugin_name = arg
968                    else:
969                        plugin_resp = arg.split(',')
970                        # ['result1','result2']
971                        for x in plugin_resp:
972                            global_plugin_list.append(x)
973                            global_plugin_dict[x] = ""
974
975            # Walk the plugin args ['arg1,'arg2']
976            # If the YAML plugin statement 'plugin_args' is not declared.
977            if any('plugin_args' in d for d in plugin_cmd_list):
978                idx = self.key_index_list_dict('plugin_args', plugin_cmd_list)
979                plugin_args = plugin_cmd_list[idx]['plugin_args']
980                if plugin_args:
981                    plugin_args = self.yaml_args_populate(plugin_args)
982                else:
983                    plugin_args = []
984            else:
985                plugin_args = self.yaml_args_populate([])
986
987            # Pack the args arg1, arg2, .... argn into
988            # "arg1","arg2","argn"  string as params for function.
989            parm_args_str = self.yaml_args_string(plugin_args)
990            if parm_args_str:
991                plugin_func = plugin_name + '(' + parm_args_str + ')'
992            else:
993                plugin_func = plugin_name + '()'
994
995            # Execute plugin function.
996            if global_plugin_dict:
997                resp = self.execute_python_eval(plugin_func)
998                # Update plugin vars dict if there is any.
999                if resp != 'PLUGIN_EVAL_ERROR':
1000                    self.response_args_data(resp)
1001            else:
1002                resp = self.execute_python_eval(plugin_func)
1003        except Exception as e:
1004            # Set the plugin error state.
1005            plugin_error_dict['exit_on_error'] = True
1006            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
1007            pass
1008
1009        # There is a real error executing the plugin function.
1010        if resp == 'PLUGIN_EVAL_ERROR':
1011            return resp
1012
1013        # Check if plugin_expects_return (int, string, list,dict etc)
1014        if any('plugin_expects_return' in d for d in plugin_cmd_list):
1015            idx = self.key_index_list_dict('plugin_expects_return', plugin_cmd_list)
1016            plugin_expects = plugin_cmd_list[idx]['plugin_expects_return']
1017            if plugin_expects:
1018                if resp:
1019                    if self.plugin_expect_type(plugin_expects, resp) == 'INVALID':
1020                        self.logger.error("\tWARN: Plugin error check skipped")
1021                    elif not self.plugin_expect_type(plugin_expects, resp):
1022                        self.logger.error("\tERROR: Plugin expects return data: %s"
1023                                          % plugin_expects)
1024                        plugin_error_dict['exit_on_error'] = True
1025                elif not resp:
1026                    self.logger.error("\tERROR: Plugin func failed to return data")
1027                    plugin_error_dict['exit_on_error'] = True
1028
1029        return resp
1030
    def response_args_data(self, plugin_resp):
        r"""
        Parse the plugin function response and update plugin return variable.

        plugin_resp       Response data from plugin function.

        The variable names queued in global_plugin_list (by
        execute_plugin_block) receive values from the response via
        global_plugin_dict; the queue is cleared afterwards regardless of
        success.
        """
        resp_list = []
        resp_data = ""

        # There is nothing to update the plugin response.
        if len(global_plugin_list) == 0 or plugin_resp == 'None':
            return

        # Normalize the response into resp_list: one element per declared
        # return variable where possible.
        if isinstance(plugin_resp, str):
            resp_data = plugin_resp.strip('\r\n\t')
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, bytes):
            resp_data = str(plugin_resp, 'UTF-8').strip('\r\n\t')
            resp_list.append(resp_data)
        elif isinstance(plugin_resp, tuple):
            if len(global_plugin_list) == 1:
                # A single declared var receives the whole tuple.
                resp_list.append(plugin_resp)
            else:
                # NOTE(review): assumes every tuple element is a string;
                # a non-str element would raise AttributeError here.
                resp_list = list(plugin_resp)
                resp_list = [x.strip('\r\n\t') for x in resp_list]
        elif isinstance(plugin_resp, list):
            if len(global_plugin_list) == 1:
                # A single declared var receives the whole (stripped) list.
                resp_list.append([x.strip('\r\n\t') for x in plugin_resp])
            else:
                resp_list = [x.strip('\r\n\t') for x in plugin_resp]
        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
            resp_list.append(plugin_resp)

        # Iterate if there is a list of plugin return vars to update.
        for idx, item in enumerate(resp_list, start=0):
            # Exit loop, done required loop.
            if idx >= len(global_plugin_list):
                break
            # Find the index of the return func in the list and
            # update the global func return dictionary.
            try:
                dict_idx = global_plugin_list[idx]
                global_plugin_dict[dict_idx] = item
            except (IndexError, ValueError) as e:
                self.logger.warn("\tWARN: response_args_data: %s" % e)
                pass

        # Done updating plugin dict irrespective of pass or failed,
        # clear all the list element for next plugin block execute.
        global_plugin_list.clear()
1081
1082    def yaml_args_string(self, plugin_args):
1083        r"""
1084        Pack the args into string.
1085
1086        plugin_args            arg list ['arg1','arg2,'argn']
1087        """
1088        args_str = ''
1089        for args in plugin_args:
1090            if args:
1091                if isinstance(args, (int, float)):
1092                    args_str += str(args)
1093                elif args in global_plugin_type_list:
1094                    args_str += str(global_plugin_dict[args])
1095                else:
1096                    args_str += '"' + str(args.strip('\r\n\t')) + '"'
1097            # Skip last list element.
1098            if args != plugin_args[-1]:
1099                args_str += ","
1100        return args_str
1101
1102    def yaml_args_populate(self, yaml_arg_list):
1103        r"""
1104        Decode env and plugin vars and populate.
1105
1106        Description of argument(s):
1107        yaml_arg_list         arg list read from YAML
1108
1109        Example:
1110          - plugin_args:
1111            - arg1
1112            - arg2
1113
1114                  yaml_arg_list:  [arg2, arg2]
1115        """
1116        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1117        env_vars_list = list(self.env_dict)
1118
1119        if isinstance(yaml_arg_list, list):
1120            tmp_list = []
1121            for arg in yaml_arg_list:
1122                if isinstance(arg, (int, float)):
1123                    tmp_list.append(arg)
1124                    continue
1125                elif isinstance(arg, str):
1126                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1127                    tmp_list.append(arg_str)
1128                else:
1129                    tmp_list.append(arg)
1130
1131            # return populated list.
1132            return tmp_list
1133
    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
        r"""
        Update ${MY_VAR} and plugin vars.

        Description of argument(s):
        yaml_arg_str         arg string read from YAML.

        Example:
            - cat ${MY_VAR}
            - ls -AX my_plugin_var

        Returns the string with ${env_var} references and known plugin
        variable names replaced by their current values.
        """
        # Parse the string for env vars ${env_vars}.
        try:
            # Example, list of matching env vars ['username', 'password', 'hostname']
            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works good.
            var_name_regex = '\\$\\{([^\\}]+)\\}'
            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
            for var in env_var_names_list:
                # An undeclared var raises KeyError from os.environ, which is
                # logged below and leaves the ${var} token unreplaced.
                env_var = os.environ[var]
                env_replace = '${' + var + '}'
                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
        except Exception as e:
            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
            pass

        # Parse the string for plugin vars.
        try:
            # Example, list of plugin vars ['my_username', 'my_data']
            plugin_var_name_list = global_plugin_dict.keys()
            for var in plugin_var_name_list:
                # skip env var list already populated above code block list.
                if var in env_var_names_list:
                    continue
                # If this plugin var exist but empty in dict, don't replace.
                # This is either a YAML plugin statement incorrectly used or
                # user added a plugin var which is not going to be populated.
                # NOTE(review): this tests the whole arg string as a dict key,
                # not 'var'; it looks like it may have been intended to test
                # the variable name -- confirm before changing.
                if yaml_arg_str in global_plugin_dict:
                    if isinstance(global_plugin_dict[var], (list, dict)):
                        # List data type or dict can't be replaced, use directly
                        # in eval function call.
                        global_plugin_type_list.append(var)
                    else:
                        yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
                # Just a string like filename or command.
                else:
                    yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
        except (IndexError, ValueError) as e:
            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
            pass

        return yaml_arg_str
1185
1186    def plugin_error_check(self, plugin_dict):
1187        r"""
1188        Plugin error dict processing.
1189
1190        Description of argument(s):
1191        plugin_dict        Dictionary of plugin error.
1192        """
1193        if any('plugin_error' in d for d in plugin_dict):
1194            for d in plugin_dict:
1195                if 'plugin_error' in d:
1196                    value = d['plugin_error']
1197                    # Reference if the error is set or not by plugin.
1198                    return plugin_error_dict[value]
1199
1200    def key_index_list_dict(self, key, list_dict):
1201        r"""
1202        Iterate list of dictionary and return index if the key match is found.
1203
1204        Description of argument(s):
1205        key           Valid Key in a dict.
1206        list_dict     list of dictionary.
1207        """
1208        for i, d in enumerate(list_dict):
1209            if key in d.keys():
1210                return i
1211
1212    def plugin_expect_type(self, type, data):
1213        r"""
1214        Plugin expect directive type check.
1215        """
1216        if type == 'int':
1217            return isinstance(data, int)
1218        elif type == 'float':
1219            return isinstance(data, float)
1220        elif type == 'str':
1221            return isinstance(data, str)
1222        elif type == 'list':
1223            return isinstance(data, list)
1224        elif type == 'dict':
1225            return isinstance(data, dict)
1226        elif type == 'tuple':
1227            return isinstance(data, tuple)
1228        else:
1229            self.logger.info("\tInvalid data type requested: %s" % type)
1230            return 'INVALID'
1231