1#!/usr/bin/env python3
2
3r"""
4See class prolog below for details.
5"""
6
7from errno import EACCES, EPERM
8
9import os
10import re
11import sys
12import yaml
13import json
14import time
15import logging
16import platform
17from errno import EACCES, EPERM
18import subprocess
19
# Make this script's directory and every sub-directory importable so the
# local utility modules resolve regardless of the caller's CWD.
script_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(script_dir)
# Walk path and append to sys.path
for root, dir_names, file_names in os.walk(script_dir):
    # 'dir' shadowed the builtin in the original; use a descriptive name.
    for dir_name in dir_names:
        sys.path.append(os.path.join(root, dir_name))
26
27from ssh_utility import SSHRemoteclient         # NOQA
28from telnet_utility import TelnetRemoteclient   # NOQA
29
30r"""
User-defined plugin python functions.

It imports python files from the plugins directory.
34
35plugins
36├── file1.py
37└── file2.py
38
39Example how to define in YAML:
40 - plugin:
41   - plugin_name: plugin.foo_func.foo_func_yaml
42     - plugin_args:
43       - arg1
44       - arg2
45"""
# Load user plugin modules from ./plugins next to this script so YAML
# references like plugin.<module>.<function> resolve at parse time.
# os.path.join replaces the original's fragile string arithmetic on
# __file__ (which mis-split when the filename occurred earlier in the path).
plugin_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'plugins')
sys.path.append(plugin_dir)
try:
    for module in os.listdir(plugin_dir):
        # Skip the package marker and anything that is not python source.
        if module == '__init__.py' or module[-3:] != '.py':
            continue
        plugin_module = "plugins." + module[:-3]
        # To access the module plugin.<module name>.<function>
        # Example: plugin.foo_func.foo_func_yaml()
        try:
            plugin = __import__(plugin_module, globals(), locals(), [], 0)
        except Exception:
            # Best-effort: one broken plugin must not abort the collector.
            print("PLUGIN: Module import failed: %s" % module)
except FileNotFoundError as e:
    # A missing plugins directory is a valid (plugin-less) configuration.
    print("PLUGIN: %s" % e)
63
64r"""
65This is for plugin functions returning data or responses to the caller
66in YAML plugin setup.
67
68Example:
69
70    - plugin:
71      - plugin_name: version = plugin.ssh_execution.ssh_execute_cmd
72      - plugin_args:
73        - ${hostname}
74        - ${username}
75        - ${password}
76        - "cat /etc/os-release | grep VERSION_ID | awk -F'=' '{print $2}'"
77     - plugin:
78        - plugin_name: plugin.print_vars.print_vars
79        - plugin_args:
80          - version
81
82where first plugin "version" var is used by another plugin in the YAML
83block or plugin
84
85"""
# NOTE(review): `global` at module scope is a no-op in Python; these three
# statements only signal intent — the names below are module-level anyway.
global global_log_store_path
global global_plugin_dict
global global_plugin_list

# Hold the plugin return values in dict and plugin return vars in list.
# Dict is to reference and update vars processing in parser where as
# list is for current vars from the plugin block which needs processing.
global_plugin_dict = {}
global_plugin_list = []

# Hold the plugin return named declared if function returned values are list,dict.
# Refer this name list to look up the plugin dict for eval() args function
# Example ['version']
global_plugin_type_list = []

# Path where logs are to be stored or written.
global_log_store_path = ''

# Plugin error state defaults.
plugin_error_dict = {
    'exit_on_error': False,
    'continue_on_error': False,
}
109
110
class ffdc_collector:

    r"""
    Execute commands from a YAML configuration file to collect FFDC
    (first failure data capture) log files from a remote host, and
    fetch and store the generated files at the specified local location.

    """
118
    def __init__(self,
                 hostname,
                 username,
                 password,
                 ffdc_config,
                 location,
                 remote_type,
                 remote_protocol,
                 env_vars,
                 econfig,
                 log_level):
        r"""
        Initialize the collector: set storage paths, logging, verify the
        script host environment, and load the YAML configuration for the
        requested remote type.  Exits the process on any setup failure.

        Description of argument(s):

        hostname            name/ip of the targeted (remote) system
        username            user on the targeted system with access to FFDC files
        password            password for user on targeted system
        ffdc_config         configuration file listing commands and files for FFDC
        location            where to store collected FFDC
        remote_type         os type of the remote host
        remote_protocol     Protocol to use to collect data
        env_vars            User define CLI env vars '{"key : "value"}'
        econfig             User define env vars YAML file
        log_level           logging level name, e.g. 'INFO' or 'DEBUG'

        """

        self.hostname = hostname
        self.username = username
        self.password = password
        self.ffdc_config = ffdc_config
        # Collected data lands under <location>/<REMOTE_TYPE>/.
        self.location = location + "/" + remote_type.upper()
        self.ssh_remoteclient = None
        self.telnet_remoteclient = None
        self.ffdc_dir_path = ""
        self.ffdc_prefix = ""
        self.target_type = remote_type.upper()
        self.remote_protocol = remote_protocol.upper()
        self.env_vars = env_vars
        self.econfig = econfig
        self.start_time = 0
        self.elapsed_time = ''
        self.logger = None

        # Set prefix values for scp files and directory.
        # Since the time stamp is at second granularity, these values are set here
        # to be sure that all files for this run will have same timestamps
        # and they will be saved in the same directory.
        # self.location == local system for now
        self.set_ffdc_default_store_path()

        # Logger for this run.  Need to be after set_ffdc_default_store_path()
        # because the log file is created inside self.ffdc_dir_path.
        self.script_logging(getattr(logging, log_level.upper()))

        # Verify top level directory exists for storage
        self.validate_local_store(self.location)

        if self.verify_script_env():
            # Load default or user define YAML configuration file.
            with open(self.ffdc_config, 'r') as file:
                try:
                    # SafeLoader avoids arbitrary object construction.
                    self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
                except yaml.YAMLError as e:
                    self.logger.error(e)
                    sys.exit(-1)

            if self.target_type not in self.ffdc_actions.keys():
                self.logger.error(
                    "\n\tERROR: %s is not listed in %s.\n\n" % (self.target_type, self.ffdc_config))
                sys.exit(-1)
        else:
            sys.exit(-1)

        # Load ENV vars from user.
        self.logger.info("\n\tENV: User define input YAML variables")
        self.env_dict = {}
        self.load_env()
195
196    def verify_script_env(self):
197
198        # Import to log version
199        import click
200        import paramiko
201
202        run_env_ok = True
203
204        redfishtool_version = self.run_tool_cmd('redfishtool -V').split(' ')[2].strip('\n')
205        ipmitool_version = self.run_tool_cmd('ipmitool -V').split(' ')[2]
206
207        self.logger.info("\n\t---- Script host environment ----")
208        self.logger.info("\t{:<10}  {:<10}".format('Script hostname', os.uname()[1]))
209        self.logger.info("\t{:<10}  {:<10}".format('Script host os', platform.platform()))
210        self.logger.info("\t{:<10}  {:>10}".format('Python', platform.python_version()))
211        self.logger.info("\t{:<10}  {:>10}".format('PyYAML', yaml.__version__))
212        self.logger.info("\t{:<10}  {:>10}".format('click', click.__version__))
213        self.logger.info("\t{:<10}  {:>10}".format('paramiko', paramiko.__version__))
214        self.logger.info("\t{:<10}  {:>9}".format('redfishtool', redfishtool_version))
215        self.logger.info("\t{:<10}  {:>12}".format('ipmitool', ipmitool_version))
216
217        if eval(yaml.__version__.replace('.', ',')) < (5, 3, 0):
218            self.logger.error("\n\tERROR: Python or python packages do not meet minimum version requirement.")
219            self.logger.error("\tERROR: PyYAML version 5.3.0 or higher is needed.\n")
220            run_env_ok = False
221
222        self.logger.info("\t---- End script host environment ----")
223        return run_env_ok
224
225    def script_logging(self,
226                       log_level_attr):
227        r"""
228        Create logger
229
230        """
231        self.logger = logging.getLogger()
232        self.logger.setLevel(log_level_attr)
233        log_file_handler = logging.FileHandler(self.ffdc_dir_path + "collector.log")
234
235        stdout_handler = logging.StreamHandler(sys.stdout)
236        self.logger.addHandler(log_file_handler)
237        self.logger.addHandler(stdout_handler)
238
239        # Turn off paramiko INFO logging
240        logging.getLogger("paramiko").setLevel(logging.WARNING)
241
242    def target_is_pingable(self):
243        r"""
244        Check if target system is ping-able.
245
246        """
247        response = os.system("ping -c 1 %s  2>&1 >/dev/null" % self.hostname)
248        if response == 0:
249            self.logger.info("\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname)
250            return True
251        else:
252            self.logger.error(
253                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n" % self.hostname)
254            sys.exit(-1)
255
256    def collect_ffdc(self):
257        r"""
258        Initiate FFDC Collection depending on requested protocol.
259
260        """
261
262        self.logger.info("\n\t---- Start communicating with %s ----" % self.hostname)
263        self.start_time = time.time()
264
265        # Find the list of target and protocol supported.
266        check_protocol_list = []
267        config_dict = self.ffdc_actions
268
269        for target_type in config_dict.keys():
270            if self.target_type != target_type:
271                continue
272
273            for k, v in config_dict[target_type].items():
274                if config_dict[target_type][k]['PROTOCOL'][0] not in check_protocol_list:
275                    check_protocol_list.append(config_dict[target_type][k]['PROTOCOL'][0])
276
277        self.logger.info("\n\t %s protocol type: %s" % (self.target_type, check_protocol_list))
278
279        verified_working_protocol = self.verify_protocol(check_protocol_list)
280
281        if verified_working_protocol:
282            self.logger.info("\n\t---- Completed protocol pre-requisite check ----\n")
283
284        # Verify top level directory exists for storage
285        self.validate_local_store(self.location)
286
287        if ((self.remote_protocol not in verified_working_protocol) and (self.remote_protocol != 'ALL')):
288            self.logger.info("\n\tWorking protocol list: %s" % verified_working_protocol)
289            self.logger.error(
290                '\tERROR: Requested protocol %s is not in working protocol list.\n'
291                % self.remote_protocol)
292            sys.exit(-1)
293        else:
294            self.generate_ffdc(verified_working_protocol)
295
296    def ssh_to_target_system(self):
297        r"""
298        Open a ssh connection to targeted system.
299
300        """
301
302        self.ssh_remoteclient = SSHRemoteclient(self.hostname,
303                                                self.username,
304                                                self.password)
305
306        if self.ssh_remoteclient.ssh_remoteclient_login():
307            self.logger.info("\n\t[Check] %s SSH connection established.\t [OK]" % self.hostname)
308
309            # Check scp connection.
310            # If scp connection fails,
311            # continue with FFDC generation but skip scp files to local host.
312            self.ssh_remoteclient.scp_connection()
313            return True
314        else:
315            self.logger.info("\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]" % self.hostname)
316            return False
317
318    def telnet_to_target_system(self):
319        r"""
320        Open a telnet connection to targeted system.
321        """
322        self.telnet_remoteclient = TelnetRemoteclient(self.hostname,
323                                                      self.username,
324                                                      self.password)
325        if self.telnet_remoteclient.tn_remoteclient_login():
326            self.logger.info("\n\t[Check] %s Telnet connection established.\t [OK]" % self.hostname)
327            return True
328        else:
329            self.logger.info("\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]" % self.hostname)
330            return False
331
332    def generate_ffdc(self, working_protocol_list):
333        r"""
334        Determine actions based on remote host type
335
336        Description of argument(s):
337        working_protocol_list    list of confirmed working protocols to connect to remote host.
338        """
339
340        self.logger.info("\n\t---- Executing commands on " + self.hostname + " ----")
341        self.logger.info("\n\tWorking protocol list: %s" % working_protocol_list)
342
343        config_dict = self.ffdc_actions
344        for target_type in config_dict.keys():
345            if self.target_type != target_type:
346                continue
347
348            self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
349            global_plugin_dict['global_log_store_path'] = self.ffdc_dir_path
350            self.logger.info("\tSystem Type: %s" % target_type)
351            for k, v in config_dict[target_type].items():
352
353                if self.remote_protocol not in working_protocol_list \
354                        and self.remote_protocol != 'ALL':
355                    continue
356
357                protocol = config_dict[target_type][k]['PROTOCOL'][0]
358
359                if protocol not in working_protocol_list:
360                    continue
361
362                if protocol in working_protocol_list:
363                    if protocol == 'SSH' or protocol == 'SCP':
364                        self.protocol_ssh(protocol, target_type, k)
365                    elif protocol == 'TELNET':
366                        self.protocol_telnet(target_type, k)
367                    elif protocol == 'REDFISH' or protocol == 'IPMI' or protocol == 'SHELL':
368                        self.protocol_execute(protocol, target_type, k)
369                else:
370                    self.logger.error("\n\tERROR: %s is not available for %s." % (protocol, self.hostname))
371
372        # Close network connection after collecting all files
373        self.elapsed_time = time.strftime("%H:%M:%S", time.gmtime(time.time() - self.start_time))
374        if self.ssh_remoteclient:
375            self.ssh_remoteclient.ssh_remoteclient_disconnect()
376        if self.telnet_remoteclient:
377            self.telnet_remoteclient.tn_remoteclient_disconnect()
378
379    def protocol_ssh(self,
380                     protocol,
381                     target_type,
382                     sub_type):
383        r"""
384        Perform actions using SSH and SCP protocols.
385
386        Description of argument(s):
387        protocol            Protocol to execute.
388        target_type         OS Type of remote host.
389        sub_type            Group type of commands.
390        """
391
392        if protocol == 'SCP':
393            self.group_copy(self.ffdc_actions[target_type][sub_type])
394        else:
395            self.collect_and_copy_ffdc(self.ffdc_actions[target_type][sub_type])
396
397    def protocol_telnet(self,
398                        target_type,
399                        sub_type):
400        r"""
401        Perform actions using telnet protocol.
402        Description of argument(s):
403        target_type          OS Type of remote host.
404        """
405        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, 'TELNET'))
406        telnet_files_saved = []
407        progress_counter = 0
408        list_of_commands = self.ffdc_actions[target_type][sub_type]['COMMANDS']
409        for index, each_cmd in enumerate(list_of_commands, start=0):
410            command_txt, command_timeout = self.unpack_command(each_cmd)
411            result = self.telnet_remoteclient.execute_command(command_txt, command_timeout)
412            if result:
413                try:
414                    targ_file = self.ffdc_actions[target_type][sub_type]['FILES'][index]
415                except IndexError:
416                    targ_file = command_txt
417                    self.logger.warning(
418                        "\n\t[WARN] Missing filename to store data from telnet %s." % each_cmd)
419                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
420                targ_file_with_path = (self.ffdc_dir_path
421                                       + self.ffdc_prefix
422                                       + targ_file)
423                # Creates a new file
424                with open(targ_file_with_path, 'w') as fp:
425                    fp.write(result)
426                    fp.close
427                    telnet_files_saved.append(targ_file)
428            progress_counter += 1
429            self.print_progress(progress_counter)
430        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
431        for file in telnet_files_saved:
432            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
433
434    def protocol_execute(self,
435                         protocol,
436                         target_type,
437                         sub_type):
438        r"""
439        Perform actions for a given protocol.
440
441        Description of argument(s):
442        protocol            Protocol to execute.
443        target_type         OS Type of remote host.
444        sub_type            Group type of commands.
445        """
446
447        self.logger.info("\n\t[Run] Executing commands to %s using %s" % (self.hostname, protocol))
448        executed_files_saved = []
449        progress_counter = 0
450        list_of_cmd = self.get_command_list(self.ffdc_actions[target_type][sub_type])
451        for index, each_cmd in enumerate(list_of_cmd, start=0):
452            plugin_call = False
453            if isinstance(each_cmd, dict):
454                if 'plugin' in each_cmd:
455                    # If the error is set and plugin explicitly
456                    # requested to skip execution on error..
457                    if plugin_error_dict['exit_on_error'] and \
458                            self.plugin_error_check(each_cmd['plugin']):
459                        self.logger.info("\n\t[PLUGIN-ERROR] exit_on_error: %s" %
460                                         plugin_error_dict['exit_on_error'])
461                        self.logger.info("\t[PLUGIN-SKIP] %s" %
462                                         each_cmd['plugin'][0])
463                        continue
464                    plugin_call = True
465                    # call the plugin
466                    self.logger.info("\n\t[PLUGIN-START]")
467                    result = self.execute_plugin_block(each_cmd['plugin'])
468                    self.logger.info("\t[PLUGIN-END]\n")
469            else:
470                each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
471
472            if not plugin_call:
473                result = self.run_tool_cmd(each_cmd)
474            if result:
475                try:
476                    file_name = self.get_file_list(self.ffdc_actions[target_type][sub_type])[index]
477                    # If file is specified as None.
478                    if file_name == "None":
479                        continue
480                    targ_file = self.yaml_env_and_plugin_vars_populate(file_name)
481                except IndexError:
482                    targ_file = each_cmd.split('/')[-1]
483                    self.logger.warning(
484                        "\n\t[WARN] Missing filename to store data from %s." % each_cmd)
485                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
486
487                targ_file_with_path = (self.ffdc_dir_path
488                                       + self.ffdc_prefix
489                                       + targ_file)
490
491                # Creates a new file
492                with open(targ_file_with_path, 'w') as fp:
493                    if isinstance(result, dict):
494                        fp.write(json.dumps(result))
495                    else:
496                        fp.write(result)
497                    fp.close
498                    executed_files_saved.append(targ_file)
499
500            progress_counter += 1
501            self.print_progress(progress_counter)
502
503        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
504
505        for file in executed_files_saved:
506            self.logger.info("\n\t\tSuccessfully save file " + file + ".")
507
508    def collect_and_copy_ffdc(self,
509                              ffdc_actions_for_target_type,
510                              form_filename=False):
511        r"""
512        Send commands in ffdc_config file to targeted system.
513
514        Description of argument(s):
515        ffdc_actions_for_target_type     commands and files for the selected remote host type.
516        form_filename                    if true, pre-pend self.target_type to filename
517        """
518
519        # Executing commands, if any
520        self.ssh_execute_ffdc_commands(ffdc_actions_for_target_type,
521                                       form_filename)
522
523        # Copying files
524        if self.ssh_remoteclient.scpclient:
525            self.logger.info("\n\n\tCopying FFDC files from remote system %s.\n" % self.hostname)
526
527            # Retrieving files from target system
528            list_of_files = self.get_file_list(ffdc_actions_for_target_type)
529            self.scp_ffdc(self.ffdc_dir_path, self.ffdc_prefix, form_filename, list_of_files)
530        else:
531            self.logger.info("\n\n\tSkip copying FFDC files from remote system %s.\n" % self.hostname)
532
533    def get_command_list(self,
534                         ffdc_actions_for_target_type):
535        r"""
536        Fetch list of commands from configuration file
537
538        Description of argument(s):
539        ffdc_actions_for_target_type    commands and files for the selected remote host type.
540        """
541        try:
542            list_of_commands = ffdc_actions_for_target_type['COMMANDS']
543        except KeyError:
544            list_of_commands = []
545        return list_of_commands
546
547    def get_file_list(self,
548                      ffdc_actions_for_target_type):
549        r"""
550        Fetch list of commands from configuration file
551
552        Description of argument(s):
553        ffdc_actions_for_target_type    commands and files for the selected remote host type.
554        """
555        try:
556            list_of_files = ffdc_actions_for_target_type['FILES']
557        except KeyError:
558            list_of_files = []
559        return list_of_files
560
561    def unpack_command(self,
562                       command):
563        r"""
564        Unpack command from config file
565
566        Description of argument(s):
567        command    Command from config file.
568        """
569        if isinstance(command, dict):
570            command_txt = next(iter(command))
571            command_timeout = next(iter(command.values()))
572        elif isinstance(command, str):
573            command_txt = command
574            # Default command timeout 60 seconds
575            command_timeout = 60
576
577        return command_txt, command_timeout
578
579    def ssh_execute_ffdc_commands(self,
580                                  ffdc_actions_for_target_type,
581                                  form_filename=False):
582        r"""
583        Send commands in ffdc_config file to targeted system.
584
585        Description of argument(s):
586        ffdc_actions_for_target_type    commands and files for the selected remote host type.
587        form_filename                    if true, pre-pend self.target_type to filename
588        """
589        self.logger.info("\n\t[Run] Executing commands on %s using %s"
590                         % (self.hostname, ffdc_actions_for_target_type['PROTOCOL'][0]))
591
592        list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
593        # If command list is empty, returns
594        if not list_of_commands:
595            return
596
597        progress_counter = 0
598        for command in list_of_commands:
599            command_txt, command_timeout = self.unpack_command(command)
600
601            if form_filename:
602                command_txt = str(command_txt % self.target_type)
603
604            cmd_exit_code, err, response = \
605                self.ssh_remoteclient.execute_command(command_txt, command_timeout)
606
607            if cmd_exit_code:
608                self.logger.warning(
609                    "\n\t\t[WARN] %s exits with code %s." % (command_txt, str(cmd_exit_code)))
610                self.logger.warning("\t\t[WARN] %s " % err)
611
612            progress_counter += 1
613            self.print_progress(progress_counter)
614
615        self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
616
    def group_copy(self,
                   ffdc_actions_for_target_type):
        r"""
        scp group of files (wild card) from remote host.

        Each configured command is run remotely and is expected to print
        matching file paths (one per line); those files are then copied
        back via SCP.

        Description of argument(s):
        ffdc_actions_for_target_type    commands and files for the selected remote host type.
        """

        if self.ssh_remoteclient.scpclient:
            self.logger.info("\n\tCopying files from remote system %s via SCP.\n" % self.hostname)

            list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
            # If command list is empty, returns
            if not list_of_commands:
                return

            for command in list_of_commands:
                try:
                    # Expand ${var} / plugin vars in the command string.
                    command = self.yaml_env_and_plugin_vars_populate(command)
                except IndexError:
                    self.logger.error("\t\tInvalid command %s" % command)
                    continue

                cmd_exit_code, err, response = \
                    self.ssh_remoteclient.execute_command(command)

                # If file does not exist, code take no action.
                # cmd_exit_code is ignored for this scenario.
                if response:
                    scp_result = \
                        self.ssh_remoteclient.scp_file_from_remote(response.split('\n'),
                                                                   self.ffdc_dir_path)
                    if scp_result:
                        self.logger.info("\t\tSuccessfully copied from " + self.hostname + ':' + command)
                else:
                    self.logger.info("\t\t%s has no result" % command)

        else:
            self.logger.info("\n\n\tSkip copying files from remote system %s.\n" % self.hostname)
657
    def scp_ffdc(self,
                 targ_dir_path,
                 targ_file_prefix,
                 form_filename,
                 file_list=None,
                 quiet=None):
        r"""
        SCP all files in file_list to the indicated directory on the local system.

        Description of argument(s):
        targ_dir_path                   The path of the directory to receive the files.
        targ_file_prefix                Prefix which will be pre-pended to each
                                        target file's name.
        form_filename                   If true, substitute self.target_type into
                                        each filename template before copying.
        file_list                       List of files to scp from targeted system to this system.
        quiet                           If truthy, show a progress bar instead of
                                        per-file success/failure messages.

        """

        progress_counter = 0
        for filename in file_list:
            if form_filename:
                filename = str(filename % self.target_type)
            source_file_path = filename
            targ_file_path = targ_dir_path + targ_file_prefix + filename.split('/')[-1]

            # If source file name contains wild card, copy filename as is.
            if '*' in source_file_path:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, self.ffdc_dir_path)
            else:
                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, targ_file_path)

            if not quiet:
                if scp_result:
                    self.logger.info(
                        "\t\tSuccessfully copied from " + self.hostname + ':' + source_file_path + ".\n")
                else:
                    self.logger.info(
                        "\t\tFail to copy from " + self.hostname + ':' + source_file_path + ".\n")
            else:
                progress_counter += 1
                self.print_progress(progress_counter)
698
699    def set_ffdc_default_store_path(self):
700        r"""
701        Set a default value for self.ffdc_dir_path and self.ffdc_prefix.
702        Collected ffdc file will be stored in dir /self.location/hostname_timestr/.
703        Individual ffdc file will have timestr_filename.
704
705        Description of class variables:
706        self.ffdc_dir_path  The dir path where collected ffdc data files should be put.
707
708        self.ffdc_prefix    The prefix to be given to each ffdc file name.
709
710        """
711
712        timestr = time.strftime("%Y%m%d-%H%M%S")
713        self.ffdc_dir_path = self.location + "/" + self.hostname + "_" + timestr + "/"
714        self.ffdc_prefix = timestr + "_"
715        self.validate_local_store(self.ffdc_dir_path)
716
717    # Need to verify local store path exists prior to instantiate this class.
718    # This class method is used to share the same code between CLI input parm
719    # and Robot Framework "${EXECDIR}/logs" before referencing this class.
720    @classmethod
721    def validate_local_store(cls, dir_path):
722        r"""
723        Ensure path exists to store FFDC files locally.
724
725        Description of variable:
726        dir_path  The dir path where collected ffdc data files will be stored.
727
728        """
729
730        if not os.path.exists(dir_path):
731            try:
732                os.makedirs(dir_path, 0o755)
733            except (IOError, OSError) as e:
734                # PermissionError
735                if e.errno == EPERM or e.errno == EACCES:
736                    self.logger.error(
737                        '\tERROR: os.makedirs %s failed with PermissionError.\n' % dir_path)
738                else:
739                    self.logger.error(
740                        '\tERROR: os.makedirs %s failed with %s.\n' % (dir_path, e.strerror))
741                sys.exit(-1)
742
743    def print_progress(self, progress):
744        r"""
745        Print activity progress +
746
747        Description of variable:
748        progress  Progress counter.
749
750        """
751
752        sys.stdout.write("\r\t" + "+" * progress)
753        sys.stdout.flush()
754        time.sleep(.1)
755
756    def verify_redfish(self):
757        r"""
758        Verify remote host has redfish service active
759
760        """
761        redfish_parm = 'redfishtool -r ' \
762                       + self.hostname + ' -S Always raw GET /redfish/v1/'
763        return (self.run_tool_cmd(redfish_parm, True))
764
765    def verify_ipmi(self):
766        r"""
767        Verify remote host has IPMI LAN service active
768
769        """
770        if self.target_type == 'OPENBMC':
771            ipmi_parm = 'ipmitool -I lanplus -C 17  -U ' + self.username + ' -P ' \
772                + self.password + ' -H ' + self.hostname + ' power status'
773        else:
774            ipmi_parm = 'ipmitool -I lanplus  -P ' \
775                + self.password + ' -H ' + self.hostname + ' power status'
776
777        return (self.run_tool_cmd(ipmi_parm, True))
778
779    def run_tool_cmd(self,
780                     parms_string,
781                     quiet=False):
782        r"""
783        Run CLI standard tool or scripts.
784
785        Description of variable:
786        parms_string         tool command options.
787        quiet                do not print tool error message if True
788        """
789
790        result = subprocess.run([parms_string],
791                                stdout=subprocess.PIPE,
792                                stderr=subprocess.PIPE,
793                                shell=True,
794                                universal_newlines=True)
795
796        if result.stderr and not quiet:
797            self.logger.error('\n\t\tERROR with %s ' % parms_string)
798            self.logger.error('\t\t' + result.stderr)
799
800        return result.stdout
801
802    def verify_protocol(self, protocol_list):
803        r"""
804        Perform protocol working check.
805
806        Description of argument(s):
807        protocol_list        List of protocol.
808        """
809
810        tmp_list = []
811        if self.target_is_pingable():
812            tmp_list.append("SHELL")
813
814        for protocol in protocol_list:
815            if self.remote_protocol != 'ALL':
816                if self.remote_protocol != protocol:
817                    continue
818
819            # Only check SSH/SCP once for both protocols
820            if protocol == 'SSH' or protocol == 'SCP' and protocol not in tmp_list:
821                if self.ssh_to_target_system():
822                    # Add only what user asked.
823                    if self.remote_protocol != 'ALL':
824                        tmp_list.append(self.remote_protocol)
825                    else:
826                        tmp_list.append('SSH')
827                        tmp_list.append('SCP')
828
829            if protocol == 'TELNET':
830                if self.telnet_to_target_system():
831                    tmp_list.append(protocol)
832
833            if protocol == 'REDFISH':
834                if self.verify_redfish():
835                    tmp_list.append(protocol)
836                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [OK]" % self.hostname)
837                else:
838                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]" % self.hostname)
839
840            if protocol == 'IPMI':
841                if self.verify_ipmi():
842                    tmp_list.append(protocol)
843                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [OK]" % self.hostname)
844                else:
845                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]" % self.hostname)
846
847        return tmp_list
848
849    def load_env(self):
850        r"""
851        Perform protocol working check.
852
853        """
854        # This is for the env vars a user can use in YAML to load it at runtime.
855        # Example YAML:
856        # -COMMANDS:
857        #    - my_command ${hostname}  ${username}   ${password}
858        os.environ['hostname'] = self.hostname
859        os.environ['username'] = self.username
860        os.environ['password'] = self.password
861
862        # Append default Env.
863        self.env_dict['hostname'] = self.hostname
864        self.env_dict['username'] = self.username
865        self.env_dict['password'] = self.password
866
867        try:
868            tmp_env_dict = {}
869            if self.env_vars:
870                tmp_env_dict = json.loads(self.env_vars)
871                # Export ENV vars default.
872                for key, value in tmp_env_dict.items():
873                    os.environ[key] = value
874                    self.env_dict[key] = str(value)
875
876            if self.econfig:
877                with open(self.econfig, 'r') as file:
878                    try:
879                        tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
880                    except yaml.YAMLError as e:
881                        self.logger.error(e)
882                        sys.exit(-1)
883                # Export ENV vars.
884                for key, value in tmp_env_dict['env_params'].items():
885                    os.environ[key] = str(value)
886                    self.env_dict[key] = str(value)
887        except json.decoder.JSONDecodeError as e:
888            self.logger.error("\n\tERROR: %s " % e)
889            sys.exit(-1)
890
891        # This to mask the password from displaying on the console.
892        mask_dict = self.env_dict.copy()
893        for k, v in mask_dict.items():
894            if k.lower().find("password") != -1:
895                hidden_text = []
896                hidden_text.append(v)
897                password_regex = '(' +\
898                    '|'.join([re.escape(x) for x in hidden_text]) + ')'
899                mask_dict[k] = re.sub(password_regex, "********", v)
900
901        self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
902
903    def execute_python_eval(self, eval_string):
904        r"""
905        Execute qualified python function string using eval.
906
907        Description of argument(s):
908        eval_string        Execute the python object.
909
910        Example:
911                eval(plugin.foo_func.foo_func(10))
912        """
913        try:
914            self.logger.info("\tExecuting plugin func()")
915            self.logger.debug("\tCall func: %s" % eval_string)
916            result = eval(eval_string)
917            self.logger.info("\treturn: %s" % str(result))
918        except (ValueError,
919                SyntaxError,
920                NameError,
921                AttributeError,
922                TypeError) as e:
923            self.logger.error("\tERROR: execute_python_eval: %s" % e)
924            # Set the plugin error state.
925            plugin_error_dict['exit_on_error'] = True
926            self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
927            return 'PLUGIN_EVAL_ERROR'
928
929        return result
930
931    def execute_plugin_block(self, plugin_cmd_list):
932        r"""
933        Pack the plugin command to qualifed python string object.
934
935        Description of argument(s):
936        plugin_list_dict      Plugin block read from YAML
937                              [{'plugin_name': 'plugin.foo_func.my_func'},
938                               {'plugin_args': [10]}]
939
940        Example:
941            - plugin:
942              - plugin_name: plugin.foo_func.my_func
943              - plugin_args:
944                - arg1
945                - arg2
946
947            - plugin:
948              - plugin_name: result = plugin.foo_func.my_func
949              - plugin_args:
950                - arg1
951                - arg2
952
953            - plugin:
954              - plugin_name: result1,result2 = plugin.foo_func.my_func
955              - plugin_args:
956                - arg1
957                - arg2
958        """
959        try:
960            idx = self.key_index_list_dict('plugin_name', plugin_cmd_list)
961            plugin_name = plugin_cmd_list[idx]['plugin_name']
962            # Equal separator means plugin function returns result.
963            if ' = ' in plugin_name:
964                # Ex. ['result', 'plugin.foo_func.my_func']
965                plugin_name_args = plugin_name.split(' = ')
966                # plugin func return data.
967                for arg in plugin_name_args:
968                    if arg == plugin_name_args[-1]:
969                        plugin_name = arg
970                    else:
971                        plugin_resp = arg.split(',')
972                        # ['result1','result2']
973                        for x in plugin_resp:
974                            global_plugin_list.append(x)
975                            global_plugin_dict[x] = ""
976
977            # Walk the plugin args ['arg1,'arg2']
978            # If the YAML plugin statement 'plugin_args' is not declared.
979            if any('plugin_args' in d for d in plugin_cmd_list):
980                idx = self.key_index_list_dict('plugin_args', plugin_cmd_list)
981                plugin_args = plugin_cmd_list[idx]['plugin_args']
982                if plugin_args:
983                    plugin_args = self.yaml_args_populate(plugin_args)
984                else:
985                    plugin_args = []
986            else:
987                plugin_args = self.yaml_args_populate([])
988
989            # Pack the args arg1, arg2, .... argn into
990            # "arg1","arg2","argn"  string as params for function.
991            parm_args_str = self.yaml_args_string(plugin_args)
992            if parm_args_str:
993                plugin_func = plugin_name + '(' + parm_args_str + ')'
994            else:
995                plugin_func = plugin_name + '()'
996
997            # Execute plugin function.
998            if global_plugin_dict:
999                resp = self.execute_python_eval(plugin_func)
1000                # Update plugin vars dict if there is any.
1001                if resp != 'PLUGIN_EVAL_ERROR':
1002                    self.response_args_data(resp)
1003            else:
1004                resp = self.execute_python_eval(plugin_func)
1005        except Exception as e:
1006            # Set the plugin error state.
1007            plugin_error_dict['exit_on_error'] = True
1008            self.logger.error("\tERROR: execute_plugin_block: %s" % e)
1009            pass
1010
1011        # There is a real error executing the plugin function.
1012        if resp == 'PLUGIN_EVAL_ERROR':
1013            return resp
1014
1015        # Check if plugin_expects_return (int, string, list,dict etc)
1016        if any('plugin_expects_return' in d for d in plugin_cmd_list):
1017            idx = self.key_index_list_dict('plugin_expects_return', plugin_cmd_list)
1018            plugin_expects = plugin_cmd_list[idx]['plugin_expects_return']
1019            if plugin_expects:
1020                if resp:
1021                    if self.plugin_expect_type(plugin_expects, resp) == 'INVALID':
1022                        self.logger.error("\tWARN: Plugin error check skipped")
1023                    elif not self.plugin_expect_type(plugin_expects, resp):
1024                        self.logger.error("\tERROR: Plugin expects return data: %s"
1025                                          % plugin_expects)
1026                        plugin_error_dict['exit_on_error'] = True
1027                elif not resp:
1028                    self.logger.error("\tERROR: Plugin func failed to return data")
1029                    plugin_error_dict['exit_on_error'] = True
1030
1031        return resp
1032
1033    def response_args_data(self, plugin_resp):
1034        r"""
1035        Parse the plugin function response and update plugin return variable.
1036
1037        plugin_resp       Response data from plugin function.
1038        """
1039        resp_list = []
1040        resp_data = ""
1041
1042        # There is nothing to update the plugin response.
1043        if len(global_plugin_list) == 0 or plugin_resp == 'None':
1044            return
1045
1046        if isinstance(plugin_resp, str):
1047            resp_data = plugin_resp.strip('\r\n\t')
1048            resp_list.append(resp_data)
1049        elif isinstance(plugin_resp, bytes):
1050            resp_data = str(plugin_resp, 'UTF-8').strip('\r\n\t')
1051            resp_list.append(resp_data)
1052        elif isinstance(plugin_resp, tuple):
1053            if len(global_plugin_list) == 1:
1054                resp_list.append(plugin_resp)
1055            else:
1056                resp_list = list(plugin_resp)
1057                resp_list = [x.strip('\r\n\t') for x in resp_list]
1058        elif isinstance(plugin_resp, list):
1059            if len(global_plugin_list) == 1:
1060                resp_list.append([x.strip('\r\n\t') for x in plugin_resp])
1061            else:
1062                resp_list = [x.strip('\r\n\t') for x in plugin_resp]
1063        elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
1064            resp_list.append(plugin_resp)
1065
1066        # Iterate if there is a list of plugin return vars to update.
1067        for idx, item in enumerate(resp_list, start=0):
1068            # Exit loop, done required loop.
1069            if idx >= len(global_plugin_list):
1070                break
1071            # Find the index of the return func in the list and
1072            # update the global func return dictionary.
1073            try:
1074                dict_idx = global_plugin_list[idx]
1075                global_plugin_dict[dict_idx] = item
1076            except (IndexError, ValueError) as e:
1077                self.logger.warn("\tWARN: response_args_data: %s" % e)
1078                pass
1079
1080        # Done updating plugin dict irrespective of pass or failed,
1081        # clear all the list element for next plugin block execute.
1082        global_plugin_list.clear()
1083
1084    def yaml_args_string(self, plugin_args):
1085        r"""
1086        Pack the args into string.
1087
1088        plugin_args            arg list ['arg1','arg2,'argn']
1089        """
1090        args_str = ''
1091        for args in plugin_args:
1092            if args:
1093                if isinstance(args, (int, float)):
1094                    args_str += str(args)
1095                elif args in global_plugin_type_list:
1096                    args_str += str(global_plugin_dict[args])
1097                else:
1098                    args_str += '"' + str(args.strip('\r\n\t')) + '"'
1099            # Skip last list element.
1100            if args != plugin_args[-1]:
1101                args_str += ","
1102        return args_str
1103
1104    def yaml_args_populate(self, yaml_arg_list):
1105        r"""
1106        Decode env and plugin vars and populate.
1107
1108        Description of argument(s):
1109        yaml_arg_list         arg list read from YAML
1110
1111        Example:
1112          - plugin_args:
1113            - arg1
1114            - arg2
1115
1116                  yaml_arg_list:  [arg2, arg2]
1117        """
1118        # Get the env loaded keys as list ['hostname', 'username', 'password'].
1119        env_vars_list = list(self.env_dict)
1120
1121        if isinstance(yaml_arg_list, list):
1122            tmp_list = []
1123            for arg in yaml_arg_list:
1124                if isinstance(arg, (int, float)):
1125                    tmp_list.append(arg)
1126                    continue
1127                elif isinstance(arg, str):
1128                    arg_str = self.yaml_env_and_plugin_vars_populate(str(arg))
1129                    tmp_list.append(arg_str)
1130                else:
1131                    tmp_list.append(arg)
1132
1133            # return populated list.
1134            return tmp_list
1135
    def yaml_env_and_plugin_vars_populate(self, yaml_arg_str):
        r"""
        Update ${MY_VAR} and plugin vars.

        Description of argument(s):
        yaml_arg_str         arg string read from YAML.

        Example:
            - cat ${MY_VAR}
            - ls -AX my_plugin_var
        """
        # Parse the string for env vars ${env_vars}.
        try:
            # Example, list of matching env vars ['username', 'password', 'hostname']
            # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works good.
            var_name_regex = '\\$\\{([^\\}]+)\\}'
            env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
            # Substitute each ${var} with its os.environ value; an unset
            # env var raises KeyError and lands in the handler below.
            for var in env_var_names_list:
                env_var = os.environ[var]
                env_replace = '${' + var + '}'
                yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
        except Exception as e:
            # Best-effort: log and continue with the partially-substituted
            # string rather than failing the whole arg population.
            self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
            pass

        # Parse the string for plugin vars.
        try:
            # Example, list of plugin vars ['my_username', 'my_data']
            plugin_var_name_list = global_plugin_dict.keys()
            # NOTE(review): if the try block above raised before findall(),
            # env_var_names_list is unbound here; the resulting NameError is
            # not in the except tuple below and would propagate — confirm.
            for var in plugin_var_name_list:
                # skip env var list already populated above code block list.
                if var in env_var_names_list:
                    continue
                # If this plugin var exist but empty in dict, don't replace.
                # This is either a YAML plugin statement incorrectly used or
                # user added a plugin var which is not going to be populated.
                # NOTE(review): this membership test uses the whole arg
                # string as the dict key, not the current var; both branches
                # perform the same replace — looks suspicious, confirm intent.
                if yaml_arg_str in global_plugin_dict:
                    if isinstance(global_plugin_dict[var], (list, dict)):
                        # List data type or dict can't be replaced, use directly
                        # in eval function call.
                        global_plugin_type_list.append(var)
                    else:
                        yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
                # Just a string like filename or command.
                else:
                    yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
        except (IndexError, ValueError) as e:
            self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
            pass

        return yaml_arg_str
1187
1188    def plugin_error_check(self, plugin_dict):
1189        r"""
1190        Plugin error dict processing.
1191
1192        Description of argument(s):
1193        plugin_dict        Dictionary of plugin error.
1194        """
1195        if any('plugin_error' in d for d in plugin_dict):
1196            for d in plugin_dict:
1197                if 'plugin_error' in d:
1198                    value = d['plugin_error']
1199                    # Reference if the error is set or not by plugin.
1200                    return plugin_error_dict[value]
1201
1202    def key_index_list_dict(self, key, list_dict):
1203        r"""
1204        Iterate list of dictionary and return index if the key match is found.
1205
1206        Description of argument(s):
1207        key           Valid Key in a dict.
1208        list_dict     list of dictionary.
1209        """
1210        for i, d in enumerate(list_dict):
1211            if key in d.keys():
1212                return i
1213
1214    def plugin_expect_type(self, type, data):
1215        r"""
1216        Plugin expect directive type check.
1217        """
1218        if type == 'int':
1219            return isinstance(data, int)
1220        elif type == 'float':
1221            return isinstance(data, float)
1222        elif type == 'str':
1223            return isinstance(data, str)
1224        elif type == 'list':
1225            return isinstance(data, list)
1226        elif type == 'dict':
1227            return isinstance(data, dict)
1228        elif type == 'tuple':
1229            return isinstance(data, tuple)
1230        else:
1231            self.logger.info("\tInvalid data type requested: %s" % type)
1232            return 'INVALID'
1233