Source code for ironic.common.utils

# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# Copyright (c) 2012 NTT DOCOMO, INC.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""Utilities and helper functions."""

from collections import abc
import contextlib
import datetime
import errno
import hashlib
import ipaddress
import os
import re
import shlex
import shutil
import tempfile
import time

import jinja2
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import fileutils
from oslo_utils import netutils
from oslo_utils import strutils
from oslo_utils import timeutils
import psutil

from ironic.common import exception
from ironic.common.i18n import _
from ironic.conf import CONF

LOG = logging.getLogger(__name__)

DATE_RE = r'(?P<year>-?\d{4,})-(?P<month>\d{2})-(?P<day>\d{2})'
TIME_RE = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})' + \
          r'(\.(?P<sec_frac>\d+))?'
TZ_RE = r'((?P<tz_sign>[+-])(?P<tz_hour>\d{2}):(?P<tz_min>\d{2}))' + \
        r'|(?P<tz_z>Z)'

DATETIME_RE = re.compile(
    '%sT%s(%s)?' % (DATE_RE, TIME_RE, TZ_RE))

USING_SQLITE = None


def execute(*cmd, **kwargs):
    """Convenience wrapper around oslo's execute() method.

    :param cmd: Passed to processutils.execute.
    :param use_standard_locale: True | False. Defaults to False. If set to
        True, execute command with standard locale added to environment
        variables.
    :returns: (stdout, stderr) from process execution
    :raises: UnknownArgumentError
    :raises: ProcessExecutionError
    """
    use_standard_locale = kwargs.pop('use_standard_locale', False)
    if use_standard_locale:
        env = kwargs.pop('env_variables', os.environ.copy())
        env['LC_ALL'] = 'C'
        kwargs['env_variables'] = env
    result = processutils.execute(*cmd, **kwargs)
    LOG.debug('Execution completed, command line is "%s"',
              ' '.join(map(str, cmd)))
    LOG.debug('Command stdout is: "%s"', result[0])
    LOG.debug('Command stderr is: "%s"', result[1])
    return result
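

# Illustrative usage sketch (not part of the original module); the command
# below is only an example and assumes the 'lsblk' binary exists on the host:
#
#     out, err = execute('lsblk', '--bytes', use_standard_locale=True)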


def is_valid_datapath_id(datapath_id):
    """Verify the format of an OpenFlow datapath_id.

    Check if a datapath_id is valid and contains 16 hexadecimal digits.
    Datapath ID format: the lower 48-bits are for a MAC address, while
    the upper 16-bits are implementer-defined.

    :param datapath_id: OpenFlow datapath_id to be validated.
    :returns: True if valid. False if not.
    """
    m = "^[0-9a-f]{16}$"
    return (isinstance(datapath_id, str)
            and re.match(m, datapath_id.lower()))


_is_valid_logical_name_re = re.compile(r'^[A-Z0-9-._~]+$', re.I)

# old is_hostname_safe() regex, retained for backwards compat
_is_hostname_safe_re = re.compile(r"""^
[a-z0-9]([a-z0-9\-]{0,61}[a-z0-9])?  # host
(\.[a-z0-9\-_]{0,62}[a-z0-9])*       # domain
\.?                                  # trailing dot
$""", re.X)


def is_valid_logical_name(hostname):
    """Determine if a logical name is valid.

    The logical name may only consist of RFC3986 unreserved
    characters:

        ALPHA / DIGIT / "-" / "." / "_" / "~"
    """
    if not isinstance(hostname, str) or len(hostname) > 255:
        return False

    return _is_valid_logical_name_re.match(hostname) is not None
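

# Illustrative examples (the node names are made up; results follow from the
# regex above):
#
#     >>> is_valid_logical_name('web-server-01.rack_2~test')
#     True
#     >>> is_valid_logical_name('web server 01')
#     False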


def is_hostname_safe(hostname):
    """Old check for valid logical node names.

    Retained for compatibility with REST API < 1.10.

    Nominally, checks that the supplied hostname conforms to:
        * http://en.wikipedia.org/wiki/Hostname
        * http://tools.ietf.org/html/rfc952
        * http://tools.ietf.org/html/rfc1123

    In practice, this check has several shortcomings and errors that
    are more thoroughly documented in bug #1468508.

    :param hostname: The hostname to be validated.
    :returns: True if valid. False if not.
    """
    if not isinstance(hostname, str) or len(hostname) > 255:
        return False

    return _is_hostname_safe_re.match(hostname) is not None


def is_valid_no_proxy(no_proxy):
    """Check no_proxy validity

    Check if the no_proxy value that will be written to an environment
    variable by ironic-python-agent is valid.

    :param no_proxy: the value that requires validity check. Expected to be a
        comma-separated list of host names, IP addresses and domain names
        (with optional :port).
    :returns: True if no_proxy is valid, False otherwise.
    """
    if not isinstance(no_proxy, str):
        return False
    hostname_re = re.compile('(?!-)[A-Z\\d-]{1,63}(?<!-)$', re.IGNORECASE)
    for hostname in no_proxy.split(','):
        hostname = hostname.strip().split(':')[0]
        if not hostname:
            continue
        max_length = 253
        if hostname.startswith('.'):
            # It is allowed to specify a dot at the beginning of the value to
            # indicate that it is a domain name, which means there will be at
            # least one additional character in the full hostname. *. is also
            # possible but may not be supported by some clients, so it is not
            # considered valid here.
            hostname = hostname[1:]
            max_length = 251

        if len(hostname) > max_length:
            return False

        if not all(hostname_re.match(part) for part in hostname.split('.')):
            return False

    return True
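

# Illustrative examples (the hosts are made up; results follow from the
# checks above):
#
#     >>> is_valid_no_proxy('.example.com,192.168.1.1,controller:3128')
#     True
#     >>> is_valid_no_proxy('bad_host.example.com')
#     False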


def validate_and_normalize_mac(address):
    """Validate a MAC address and return normalized form.

    Checks whether the supplied MAC address is formally correct and
    normalizes it to all lower case.

    :param address: MAC address to be validated and normalized.
    :returns: Normalized and validated MAC address.
    :raises: InvalidMAC if the MAC address is not valid.
    """
    if not netutils.is_valid_mac(address):
        raise exception.InvalidMAC(mac=address)
    return address.lower()
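

# Illustrative example (the MAC address is made up):
#
#     >>> validate_and_normalize_mac('52:54:00:CF:2D:31')
#     '52:54:00:cf:2d:31'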


def validate_and_normalize_datapath_id(datapath_id):
    """Validate an OpenFlow datapath_id and return normalized form.

    Checks whether the supplied OpenFlow datapath_id is formally correct
    and normalizes it to all lower case.

    :param datapath_id: OpenFlow datapath_id to be validated and normalized.
    :returns: Normalized and validated OpenFlow datapath_id.
    :raises: InvalidDatapathID if the OpenFlow datapath_id is not valid.
    """
    if not is_valid_datapath_id(datapath_id):
        raise exception.InvalidDatapathID(datapath_id=datapath_id)
    return datapath_id.lower()


def _get_hash_object(hash_algo_name):
    """Create a hash object based on given algorithm.

    :param hash_algo_name: name of the hashing algorithm.
    :raises: InvalidParameterValue, on unsupported or invalid input.
    :returns: a hash object based on the given named algorithm.
    """
    algorithms = hashlib.algorithms_guaranteed
    if hash_algo_name not in algorithms:
        msg = (_("Unsupported/Invalid hash name '%s' provided.")
               % hash_algo_name)
        LOG.error(msg)
        raise exception.InvalidParameterValue(msg)

    return getattr(hashlib, hash_algo_name)()


def file_has_content(path, content, hash_algo='sha256'):
    """Check that the content of a file matches the provided reference.

    :param path: path to file
    :param content: reference content to check against
    :param hash_algo: hashing algo from hashlib to use, default is 'sha256'
    :returns: True if the hash of reference content is the same as the
        hash of file's content, False otherwise
    """
    file_hash_hex = fileutils.compute_file_checksum(path, algorithm=hash_algo)
    ref_hash = _get_hash_object(hash_algo)
    encoded_content = (content.encode(encoding='utf-8')
                       if isinstance(content, str) else content)
    ref_hash.update(encoded_content)
    return file_hash_hex == ref_hash.hexdigest()
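

# Illustrative usage sketch (the path and variable name are hypothetical);
# a common pattern is to rewrite a generated file only when it has changed:
#
#     if not file_has_content('/tmp/example.conf', rendered_config):
#         write_to_file('/tmp/example.conf', rendered_config)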


@contextlib.contextmanager
def tempdir(**kwargs):
    tempfile.tempdir = CONF.tempdir
    tmpdir = tempfile.mkdtemp(**kwargs)
    try:
        yield tmpdir
    finally:
        try:
            shutil.rmtree(tmpdir)
        except OSError as e:
            LOG.error('Could not remove tmpdir: %s', e)


def rmtree_without_raise(path):
    try:
        if os.path.isdir(path):
            shutil.rmtree(path)
    except OSError as e:
        LOG.warning("Failed to remove dir %(path)s, error: %(e)s",
                    {'path': path, 'e': e})


def write_to_file(path, contents, permission=None):
    with open(path, 'w') as f:
        f.write(contents)
    if permission:
        os.chmod(path, permission)


def safe_rstrip(value, chars=None):
    """Removes trailing characters from a string if that does not make it
    empty

    :param value: A string value that will be stripped.
    :param chars: Characters to remove.
    :return: Stripped value.
    """
    if not isinstance(value, str):
        LOG.warning("Failed to remove trailing character. Returning "
                    "original object. Supplied object is not a string: "
                    "%s,", value)
        return value

    return value.rstrip(chars) or value


def check_dir(directory_to_check=None, required_space=1):
    """Check that a directory is usable.

    This function can be used by drivers to check that directories they
    need to write to are usable. It should be called from the driver's
    init function. This function checks that the directory exists and
    then calls check_dir_writable and check_dir_free_space.
    If directory_to_check is not provided the default is to use the
    temp directory.

    :param directory_to_check: the directory to check.
    :param required_space: amount of space to check for in MiB.
    :raises: PathNotFound if directory can not be found
    :raises: DirectoryNotWritable if user is unable to write to the
        directory
    :raises: InsufficientDiskSpace if free space is < required space
    """
    # check if directory_to_check is passed in, if not set to tempdir
    if directory_to_check is None:
        directory_to_check = CONF.tempdir

    LOG.debug("checking directory: %s", directory_to_check)

    if not os.path.exists(directory_to_check):
        raise exception.PathNotFound(dir=directory_to_check)

    _check_dir_writable(directory_to_check)
    _check_dir_free_space(directory_to_check, required_space)


def _check_dir_writable(chk_dir):
    """Check that the chk_dir is able to be written to.

    :param chk_dir: Directory to check
    :raises: DirectoryNotWritable if user is unable to write to the
        directory
    """
    is_writable = os.access(chk_dir, os.W_OK)
    if not is_writable:
        raise exception.DirectoryNotWritable(dir=chk_dir)


def _check_dir_free_space(chk_dir, required_space=1):
    """Check that the directory has some free space.

    :param chk_dir: Directory to check
    :param required_space: amount of space to check for in MiB.
    :raises: InsufficientDiskSpace if free space is < required space
    """
    # check that we have some free space
    stat = os.statvfs(chk_dir)
    # get dir free space in MiB.
    free_space = float(stat.f_bsize * stat.f_bavail) / 1024 / 1024
    # check for at least required_space MiB free
    if free_space < required_space:
        raise exception.InsufficientDiskSpace(path=chk_dir,
                                              required=required_space,
                                              actual=free_space)


def get_updated_capabilities(current_capabilities, new_capabilities):
    """Returns an updated capability string.

    This method updates the original (or current) capabilities with the new
    capabilities. The original capabilities would typically be from a node's
    properties['capabilities']. From new_capabilities, any new capabilities
    are added, and existing capabilities may have their values updated. This
    updated capabilities string is returned.

    :param current_capabilities: Current capability string
    :param new_capabilities: the dictionary of capabilities to be updated.
    :returns: An updated capability string with the new capabilities applied.
    :raises: ValueError, if current_capabilities is malformed or
        if new_capabilities is not a dictionary
    """
    if not isinstance(new_capabilities, dict):
        raise ValueError(
            _("Cannot update capabilities. The new capabilities should be in "
              "a dictionary. Provided value is %s") % new_capabilities)

    cap_dict = {}
    if current_capabilities:
        try:
            cap_dict = dict(x.split(':', 1)
                            for x in current_capabilities.split(','))
        except ValueError:
            # Capabilities can be filled by operator. ValueError can
            # occur in malformed capabilities like:
            # properties/capabilities='boot_mode:bios,boot_option'.
            raise ValueError(
                _("Invalid capabilities string '%s'.") % current_capabilities)

    cap_dict.update(new_capabilities)
    return ','.join('%(key)s:%(value)s' % {'key': key, 'value': value}
                    for key, value in cap_dict.items())
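

# Illustrative example (capability names and values are arbitrary):
#
#     >>> get_updated_capabilities('boot_mode:bios,secure_boot:false',
#     ...                          {'secure_boot': 'true'})
#     'boot_mode:bios,secure_boot:true'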


def is_regex_string_in_file(path, string):
    with open(path, 'r') as inf:
        return any(re.search(string, line) for line in inf.readlines())


def unix_file_modification_datetime(file_name):
    return timeutils.normalize_time(
        # normalize time to be UTC without timezone
        datetime.datetime.fromtimestamp(
            # fromtimestamp will return local time by default, make it UTC
            os.path.getmtime(file_name), tz=datetime.timezone.utc
        )
    )


def validate_network_port(port, port_name="Port"):
    """Validates the given port.

    :param port: TCP/UDP port.
    :param port_name: Name of the port.
    :returns: An integer port number.
    :raises: InvalidParameterValue, if the port is invalid.
    """
    if netutils.is_valid_port(port):
        return int(port)

    raise exception.InvalidParameterValue(_(
        '%(port_name)s "%(port)s" is not a valid port.') %
        {'port_name': port_name, 'port': port})


def render_template(template, params, is_file=True, strict=False):
    """Renders a Jinja2 template with the given parameters.

    :param template: full path to the Jinja2 template file
    :param params: dictionary with parameters to use when rendering
    :param is_file: whether template is a file or a string with the
        template itself
    :param strict: Enable strict template rendering. Default is False
    :returns: Rendered template
    :raises: jinja2.exceptions.UndefinedError
    """
    if is_file:
        tmpl_path, tmpl_name = os.path.split(template)
        loader = jinja2.FileSystemLoader(tmpl_path)
    else:
        tmpl_name = 'template'
        loader = jinja2.DictLoader({tmpl_name: template})
    # NOTE(pas-ha) bandit does not seem to cope with such syntax
    # and still complains with B701 for that line
    # NOTE(pas-ha) not using default_for_string=False as we set the name
    # of the template above for strings too.
    env = jinja2.Environment(  # nosec B701
        loader=loader,
        autoescape=jinja2.select_autoescape(),
        undefined=jinja2.StrictUndefined if strict else jinja2.Undefined
    )
    tmpl = env.get_template(tmpl_name)
    return tmpl.render(params, enumerate=enumerate)
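

# Illustrative example rendering a template passed in as a string (the
# template text and parameters are made up):
#
#     >>> render_template('ip={{ ip }} mask={{ mask }}',
#     ...                 {'ip': '192.0.2.10', 'mask': '255.255.255.0'},
#     ...                 is_file=False)
#     'ip=192.0.2.10 mask=255.255.255.0'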


def parse_instance_info_capabilities(node):
    """Parse the instance_info capabilities.

    One way of having these capabilities set is via Nova, where the
    capabilities are defined in the Flavor extra_spec and passed to
    Ironic by the Nova Ironic driver.

    NOTE: Although our API fully supports JSON fields, to maintain the
    backward compatibility with Juno the Nova Ironic driver is sending
    it as a string.

    :param node: a single Node.
    :raises: InvalidParameterValue if the capabilities string is not a
        dictionary or is malformed.
    :returns: A dictionary with the capabilities if found, otherwise an
        empty dictionary.
    """
    def parse_error():
        error_msg = (_('Error parsing capabilities from Node %s '
                       'instance_info field. A dictionary or a "jsonified" '
                       'dictionary is expected.') % node.uuid)
        raise exception.InvalidParameterValue(error_msg)

    capabilities = node.instance_info.get('capabilities', {})
    if isinstance(capabilities, str):
        try:
            capabilities = jsonutils.loads(capabilities)
        except (ValueError, TypeError):
            parse_error()

    if not isinstance(capabilities, dict):
        parse_error()

    return capabilities


def validate_conductor_group(conductor_group):
    if not isinstance(conductor_group, str):
        raise exception.InvalidConductorGroup(group=conductor_group)
    if not re.match(r'^[a-zA-Z0-9_\-\.]*$', conductor_group):
        raise exception.InvalidConductorGroup(group=conductor_group)


def set_node_nested_field(node, collection, field, value):
    """Set a value in a dictionary field of a node.

    :param node: Node object.
    :param collection: Name of the field with the dictionary.
    :param field: Nested field name.
    :param value: New value.
    """
    col = getattr(node, collection)
    col[field] = value
    setattr(node, collection, col)


def pop_node_nested_field(node, collection, field, default=None):
    """Pop a value from a dictionary field of a node.

    :param node: Node object.
    :param collection: Name of the field with the dictionary.
    :param field: Nested field name.
    :param default: The default value to return.
    :return: The removed value or the default.
    """
    col = getattr(node, collection)
    result = col.pop(field, default)
    setattr(node, collection, col)
    return result


def wrap_ipv6(ip):
    """Wrap the address in square brackets if it's an IPv6 address."""
    try:
        if ipaddress.ip_address(ip).version == 6:
            return "[%s]" % ip
    except ValueError:
        # host name
        pass

    return ip
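

# Illustrative examples (the addresses are from documentation ranges):
#
#     >>> wrap_ipv6('2001:db8::42')
#     '[2001:db8::42]'
#     >>> wrap_ipv6('192.0.2.1')
#     '192.0.2.1'
#     >>> wrap_ipv6('node.example.com')
#     'node.example.com'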


def file_mime_type(path):
    """Get the MIME type of the given file."""
    return execute('file', '--brief', '--mime-type', path,
                   use_standard_locale=True)[0].strip()


def _get_mb_ram_available():
    # NOTE(TheJulia): The .available value is the memory that can be given
    # to a process without this process beginning to swap itself.
    return psutil.virtual_memory().available / 1024 / 1024


def is_memory_insufficient(raise_if_fail=False):
    """Checks available system memory and holds the deployment process.

    Evaluates the amount of system memory that is currently available,
    i.e. that the kernel can allocate to a process upon request, and
    delays execution until memory has been freed or until the check has
    timed out.

    This method will issue a sleep, if the amount of available memory is
    insufficient. This is configured using the
    ``[DEFAULT]minimum_memory_wait_time`` and the
    ``[DEFAULT]minimum_memory_wait_retries`` options.

    :param raise_if_fail: Default False, but if set to True an
        InsufficientMemory exception is raised upon insufficient memory.
    :returns: True if the check has timed out, False if running in
        warning-only mode with insufficient memory, otherwise None.
    :raises: InsufficientMemory if the raise_if_fail parameter is set to True.
    """
    required_memory = CONF.minimum_required_memory
    loop_count = 0

    while _get_mb_ram_available() < required_memory:
        log_values = {
            'available': _get_mb_ram_available(),
            'required': required_memory,
        }
        if CONF.minimum_memory_warning_only:
            LOG.warning('Memory is at %(available)s MiB, required is '
                        '%(required)s. Ironic is in warning-only mode '
                        'which can be changed by altering the '
                        '[DEFAULT]minimum_memory_warning_only',
                        log_values)
            return False
        if loop_count >= CONF.minimum_memory_wait_retries:
            LOG.error('Memory is at %(available)s MiB, required is '
                      '%(required)s. Notifying caller that we have '
                      'exceeded retries.',
                      log_values)
            if raise_if_fail:
                raise exception.InsufficientMemory(
                    free=_get_mb_ram_available(),
                    required=required_memory)
            return True
        LOG.warning('Memory is at %(available)s MiB, required is '
                    '%(required)s, waiting.', log_values)
        # Sleep so the interpreter can switch threads.
        time.sleep(CONF.minimum_memory_wait_time)
        loop_count = loop_count + 1


_LARGE_KEYS = frozenset(['system_logs'])


def remove_large_keys(var):
    """Remove specific keys from the var, recursing into dicts and lists."""
    if isinstance(var, abc.Mapping):
        return {key: (remove_large_keys(value)
                      if key not in _LARGE_KEYS else '<...>')
                for key, value in var.items()}
    elif isinstance(var, abc.Sequence) and not isinstance(var, str):
        return var.__class__(map(remove_large_keys, var))
    else:
        return var
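

# Illustrative example (the payload is made up):
#
#     >>> remove_large_keys({'command': 'collect_system_logs',
#     ...                    'result': {'system_logs': '<very large blob>'}})
#     {'command': 'collect_system_logs', 'result': {'system_logs': '<...>'}}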


def fast_track_enabled(node):
    is_enabled = node.driver_info.get('fast_track')
    if is_enabled is None:
        return CONF.deploy.fast_track
    else:
        try:
            return strutils.bool_from_string(is_enabled, strict=True)
        except ValueError as exc:
            raise exception.InvalidParameterValue(
                _("Invalid value of fast_track: %s") % exc)


def is_fips_enabled():
    """Check if FIPS mode is enabled in the system."""
    try:
        with open('/proc/sys/crypto/fips_enabled', 'r') as f:
            content = f.read()
            if content == "1\n":
                return True
    except Exception:
        pass
    return False


def stop_after_retries(option, group=None):
    """A tenacity retry helper that stops after retries specified in conf."""
    # NOTE(dtantsur): fetch the option inside of the nested call, otherwise
    # it cannot be changed at runtime.
    def should_stop(retry_state):
        if group:
            conf = getattr(CONF, group)
        else:
            conf = CONF
        num_retries = getattr(conf, option)
        return retry_state.attempt_number >= num_retries + 1

    return should_stop
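

# Illustrative usage sketch with tenacity (the exception class, option name
# and group below are hypothetical, not real ironic configuration options):
#
#     @tenacity.retry(
#         retry=tenacity.retry_if_exception_type(SomeTransientError),
#         stop=stop_after_retries('connection_retries', group='example'),
#         wait=tenacity.wait_fixed(1))
#     def _do_request():
#         ...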


def is_loopback(hostname_or_ip):
    """Check if the provided hostname or IP address is a loopback."""
    try:
        return ipaddress.ip_address(hostname_or_ip).is_loopback
    except ValueError:
        # host name
        return hostname_or_ip in ('localhost', 'localhost.localdomain')


def parse_kernel_params(params):
    """Parse kernel parameters into a dictionary.

    ``None`` is used as a value for parameters that are not in the
    ``key=value`` format.

    :param params: kernel parameters as a space-delimited string.
    """
    result = {}
    for s in shlex.split(params):
        try:
            key, value = s.split('=', 1)
        except ValueError:
            result[s] = None
        else:
            result[key] = value
    return result
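

# Illustrative example:
#
#     >>> parse_kernel_params('nofb vga=normal console=ttyS0,115200')
#     {'nofb': None, 'vga': 'normal', 'console': 'ttyS0,115200'}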


def is_ironic_using_sqlite():
    """Return True if Ironic is configured to use SQLite."""
    global USING_SQLITE
    if USING_SQLITE is not None:
        return USING_SQLITE

    # We're being called for the first time, let's cache and
    # return the value.
    USING_SQLITE = 'sqlite' in CONF.database.connection.lower()
    return USING_SQLITE