From 9b9f2b69bf5f9f5c3971414a594a2971feff122f Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Thu, 22 Nov 2018 00:03:33 +0530 Subject: [PATCH 01/17] added availability flags and logic for genesis setting --- spyglass/data_extractor/base.py | 34 ++++++------------------ spyglass/data_extractor/formation.py | 22 +++++++-------- spyglass/parser/generate_intermediary.py | 2 +- 3 files changed, 20 insertions(+), 38 deletions(-) diff --git a/spyglass/data_extractor/base.py b/spyglass/data_extractor/base.py index 959595a..9ffeadc 100644 --- a/spyglass/data_extractor/base.py +++ b/spyglass/data_extractor/base.py @@ -277,7 +277,6 @@ def extract_baremetal_information(self): """ LOG.info("Extract baremetal information from plugin") baremetal = {} - is_genesis = False hosts = self.get_hosts(self.region) # For each host list fill host profile and network IPs @@ -301,40 +300,23 @@ def extract_baremetal_information(self): # Fill network IP for this host temp_host['ip'] = {} - temp_host['ip']['oob'] = temp_host_ips[host_name].get('oob', "") + temp_host['ip']['oob'] = temp_host_ips[host_name].get( + 'oob', "#CHANGE_ME") temp_host['ip']['calico'] = temp_host_ips[host_name].get( - 'calico', "") - temp_host['ip']['oam'] = temp_host_ips[host_name].get('oam', "") + 'calico', "#CHANGE_ME") + temp_host['ip']['oam'] = temp_host_ips[host_name].get( + 'oam', "#CHANGE_ME") temp_host['ip']['storage'] = temp_host_ips[host_name].get( - 'storage', "") + 'storage', "#CHANGE_ME") temp_host['ip']['overlay'] = temp_host_ips[host_name].get( - 'overlay', "") - # TODO(pg710r): Testing only. + 'overlay', "#CHANGE_ME") temp_host['ip']['pxe'] = temp_host_ips[host_name].get( 'pxe', "#CHANGE_ME") - - # TODO(nh863p): Can this logic goes into dervied plugin class - # How to determine genesis node?? - - # TODO(nh863p): If below logic is based on host profile name, then - # it should be part of design rule??? 
-            # Filling rack_type( compute/controller/genesis)
-            # "cp" host profile is controller
-            # "ns" host profile is compute
-            if (temp_host['host_profile'] == 'cp'):
-                # The controller node is designates as genesis"
-                if is_genesis is False:
-                    is_genesis = True
-                    temp_host['type'] = 'genesis'
-                else:
-                    temp_host['type'] = 'controller'
-            else:
-                temp_host['type'] = 'compute'
+            temp_host['type'] = host.get('type', "#CHANGE_ME")

             baremetal[rack_name][host_name] = temp_host
         LOG.debug("Baremetal information:\n{}".format(
             pprint.pformat(baremetal)))
-
         return baremetal

     def extract_site_information(self):
diff --git a/spyglass/data_extractor/formation.py b/spyglass/data_extractor/formation.py
index 76e9659..69ab488 100644
--- a/spyglass/data_extractor/formation.py
+++ b/spyglass/data_extractor/formation.py
@@ -313,13 +313,19 @@ def get_hosts(self, region, rack=None):
         control_hosts = device_api.zones_zone_id_control_nodes_get(zone_id)
         compute_hosts = device_api.zones_zone_id_devices_get(
             zone_id, type='KVM')
-
         hosts_list = []
+        genesis_set = False
         for host in control_hosts:
             self.device_name_id_mapping[host.aic_standard_name] = host.id
+            # The first control node is designated as the genesis node
+            if genesis_set is False:
+                node_type = 'genesis'
+                genesis_set = True
+            else:
+                node_type = 'controller'
             hosts_list.append({
                 'name': host.aic_standard_name,
                 'type': node_type,
                 'rack_name': host.rack_name,
                 'host_profile': host.host_profile_name
             })
@@ -352,8 +358,9 @@ def get_networks(self, region):
         vlan_api = swagger_client.VlansApi(self.formation_api_client)
         vlans = vlan_api.zones_zone_id_regions_region_id_vlans_get(
             zone_id, region_id)
-        # Case when vlans list is empty from
-        # zones_zone_id_regions_region_id_vlans_get
+        # TWEAK(pg710r): Case when vlans list is empty from
+        # zones_zone_id_regions_region_id_vlans_get.
Ideally this should not + # be the case if len(vlans) is 0: # get device-id from the first host and get the network details hosts = self.get_hosts(self.region) @@ -375,13 +382,6 @@ def get_networks(self, region): tmp_vlan['subnet_level'] = vlan_.vlan.subnet_level vlans_list.append(tmp_vlan) - # TODO(pg710r): hack to put dummy values for pxe - tmp_vlan = {} - tmp_vlan['name'] = 'pxe' - tmp_vlan['vlan'] = '43' - tmp_vlan['subnet'] = '172.30.4.0/25' - tmp_vlan['gateway'] = '172.30.4.1' - vlans_list.append(tmp_vlan) return vlans_list def get_ips(self, region, host=None): diff --git a/spyglass/parser/generate_intermediary.py b/spyglass/parser/generate_intermediary.py index 4a3af3e..1321b05 100644 --- a/spyglass/parser/generate_intermediary.py +++ b/spyglass/parser/generate_intermediary.py @@ -257,7 +257,6 @@ def load_extracted_data_from_data_source(self, extracted_data): """ LOG.info("Load extracted data from data source") - self._validate_extracted_data(extracted_data) self.data = extracted_data LOG.debug("Extracted data from plugin data source:\n{}".format( pprint.pformat(extracted_data))) @@ -266,6 +265,7 @@ def load_extracted_data_from_data_source(self, extracted_data): with open(extracted_file, 'w') as f: f.write(yaml_file) f.close() + self._validate_extracted_data(extracted_data) # Append region_data supplied from CLI to self.data self.data['region_name'] = self.region_name From f8740f4d60d3e3ba99bc1683f15b191edb9a1fa7 Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Fri, 23 Nov 2018 00:55:46 +0530 Subject: [PATCH 02/17] Added framework for extracting baremetal info for tugboat plugin --- setup.py | 2 +- spyglass/data_extractor/base.py | 2 + spyglass/data_extractor/formation.py | 2 +- spyglass/data_extractor/tugboat.py | 91 ------- spyglass/data_extractor/tugboat/tugboat.py | 267 +++++++++++++++++++++ spyglass/spyglass.py | 12 + 6 files changed, 283 insertions(+), 93 deletions(-) delete mode 100644 spyglass/data_extractor/tugboat.py create mode 100644 spyglass/data_extractor/tugboat/tugboat.py diff --git a/setup.py b/setup.py index e28ec9f..adf8c6c 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ ], 'data_extractor_plugins': ['formation=spyglass.data_extractor.formation:FormationPlugin', - 'tugboat=spyglass.data_extractor.tugboat:TugboatPlugin', + 'tugboat=spyglass.data_extractor.tugboat.tugboat:TugboatPlugin', ] }, include_package_data=True, diff --git a/spyglass/data_extractor/base.py b/spyglass/data_extractor/base.py index 47b5aa2..24cb894 100644 --- a/spyglass/data_extractor/base.py +++ b/spyglass/data_extractor/base.py @@ -278,6 +278,8 @@ def extract_baremetal_information(self): LOG.info("Extract baremetal information from plugin") baremetal = {} hosts = self.get_hosts(self.region) + import pdb + pdb.set_trace() # For each host list fill host profile and network IPs for host in hosts: diff --git a/spyglass/data_extractor/formation.py b/spyglass/data_extractor/formation.py index 69ab488..9beca40 100644 --- a/spyglass/data_extractor/formation.py +++ b/spyglass/data_extractor/formation.py @@ -76,7 +76,7 @@ def set_config_opts(self, conf): self._update_site_and_zone(self.region) def get_plugin_conf(self, kwargs): - """ Validates the plugin param and return if success""" + """ Validates the plugin param from CLI and return if correct""" try: assert (kwargs['formation_url'] ) is not None, "formation_url is Not Specified" diff --git a/spyglass/data_extractor/tugboat.py b/spyglass/data_extractor/tugboat.py deleted file mode 100644 index 6706afb..0000000 --- 
a/spyglass/data_extractor/tugboat.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2018 AT&T Intellectual Property. All other rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -# TODO(pg710r): The below lines will be uncommented when tugboat plugib -# code is added -""" -import pprint -import re -import requests -import swagger_client -import urllib3 - - -from spyglass.data_extractor.custom_exceptions import ( - ApiClientError, ConnectionError, MissingAttributeError, - TokenGenerationError) - -urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) -""" -from spyglass.data_extractor.base import BaseDataSourcePlugin -LOG = logging.getLogger(__name__) - - -class TugboatPlugin(BaseDataSourcePlugin): - def __init__(self, region): - LOG.error(" Tugboat currently not supported. Exiting!!") - exit() - - def set_config_opts(self, conf): - # TODO(pg710r): Code will be added later - pass - - def get_plugin_conf(self, kwargs): - # TODO(pg710r): Code will be added later - pass - - def get_zones(self, site=None): - # TODO(pg710r): Code will be added later - pass - - def get_regions(self, zone): - # TODO(pg710r): Code will be added later - pass - - def get_racks(self, region): - # TODO(pg710r): Code will be added later - pass - - def get_hosts(self, region, rack=None): - # TODO(pg710r): Code will be added later - pass - - def get_networks(self, region): - # TODO(pg710r): Code will be added later - pass - - def get_ips(self, region, host=None): - # TODO(pg710r): Code will be added later - pass - - def get_dns_servers(self, region): - # TODO(pg710r): Code will be added later - pass - - def get_ntp_servers(self, region): - # TODO(pg710r): Code will be added later - pass - - def get_ldap_information(self, region): - # TODO(pg710r): Code will be added later - pass - - def get_location_information(self, region): - # TODO(pg710r): Code will be added later - pass - - def get_domain_name(self, region): - # TODO(pg710r): Code will be added later - pass diff --git a/spyglass/data_extractor/tugboat/tugboat.py b/spyglass/data_extractor/tugboat/tugboat.py new file mode 100644 index 0000000..415056d --- /dev/null +++ b/spyglass/data_extractor/tugboat/tugboat.py @@ -0,0 +1,267 @@ +# Copyright 2018 AT&T Intellectual Property. All other rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import pprint +import re +from spyglass.data_extractor.base import BaseDataSourcePlugin +from spyglass.data_extractor.tugboat.excel_parser import ExcelParser +LOG = logging.getLogger(__name__) + + +class TugboatPlugin(BaseDataSourcePlugin): + def __init__(self, region): + LOG.info("Tugboat Initializing") + self.source_type = 'excel' + self.source_name = 'tugboat' + + # Configuration parameters + self.excel_path = None + self.excel_spec = None + + # Site related data + self.region = region + self.region_zone_map = {} + self.site_name_id_mapping = {} + self.zone_name_id_mapping = {} + self.region_name_id_mapping = {} + self.rack_name_id_mapping = {} + self.device_name_id_mapping = {} + + # Raw data from excel + self.parsed_xl_data = None + + # TODO(pg710r) currently hardcoder. will be removed later + self.sitetype = '5ec' + LOG.info("Initiated data extractor plugin:{}".format(self.source_name)) + + def set_config_opts(self, conf): + """ + Placeholder to set confgiuration options + specific to each plugin. + + :param dict conf: Configuration options as dict + + Example: conf = { 'excel_spec': 'spec1.yaml', + 'excel_path': 'excel.xls' } + + Each plugin will have their own config opts. + """ + self.excel_path = conf['excel_path'] + self.excel_spec = conf['excel_spec'] + + # Extract raw data from excel sheets + self._get_excel_obj() + self._extract_raw_data_from_excel() + return + + def _get_excel_obj(self): + """ Creation of an ExcelParser object to store site information. + + The information is obtained based on a excel spec yaml file. + This spec contains row, column and sheet information of + the excel file from where site specific data can be extracted. + """ + self.excel_obj = ExcelParser(self.excel_path, self.excel_spec) + + def _extract_raw_data_from_excel(self): + """ Extracts raw information from excel file based on excel spec""" + self.parsed_xl_data = self.excel_obj.get_data() + self.ipmi_data = self.parsed_xl_data['ipmi_data'][0] + self.hostnames = self.parsed_xl_data['ipmi_data'][1] + import pdb + pdb.set_trace() + """ + self.private_network_data = self._get_private_network_data( + self.parsed_xl_data['network_data']) + self.public_network_data = self._get_public_network_data( + self.parsed_xl_data['network_data']) + self.dns_ntp_ldap_data = self._get_dns_ntp_ldap_data( + self.parsed_xl_data['network_data']) + self.location_data = self._get_location_data( + self.parsed_xl_data['location_data']) + """ + + def get_plugin_conf(self, kwargs): + """ Validates the plugin param from CLI and return if correct + + + Ideally the CLICK module shall report an error if excel file + and excel specs are not specified. The below code has been + written as an additional safeguard. + """ + try: + assert (len(kwargs['excel']) != + 0), "Engineering Specification file not specified" + excel_file_info = kwargs['excel'] + assert (kwargs['excel_spec'] + ) is not None, "Excel Specification file not specified" + excel_spec_info = kwargs['excel_spec'] + except AssertionError: + LOG.error( + "Insufficient plugin parameter for Tugboat! 
Spyglass exited!") + raise + exit() + plugin_conf = { + 'excel_path': excel_file_info, + 'excel_spec': excel_spec_info + } + return plugin_conf + + def get_zones(self, site=None): + # TODO(pg710r): Code will be added later + pass + + def get_regions(self, zone): + # TODO(pg710r): Code will be added later + pass + + def get_racks(self, region): + # TODO(pg710r): Code will be added later + pass + + def get_hosts(self, region, rack=None): + """Return list of hosts in the region + :param string region: Region name + :param string rack: Rack name + :returns: list of hosts information + :rtype: list of dict + Example: [ + { + 'name': 'host01', + 'type': 'controller', + 'host_profile': 'hp_01' + }, + { + 'name': 'host02', + 'type': 'compute', + 'host_profile': 'hp_02'} + ] + """ + LOG.info("Get Host Information") + rackwise_hosts = self._get_rackwise_hosts() + host_list = [] + for rack in rackwise_hosts.keys(): + for host in rackwise_hosts[rack]: + host_list.append({ + 'rack_name': + rack, + 'name': + host, + 'host_profile': + self.ipmi_data[host]['host_profile'] + #'type': self.host_type[host] + }) + return host_list + + def _get_rack_data(self): + """ Format rack name """ + LOG.info("Getting rack data") + racks = {} + for host in self.hostnames: + rack = self._get_rack(host) + racks[rack] = rack.replace('r', 'rack') + return racks + + def _get_rack(self, host): + """ + Get rack id from the rack string extracted + from xl + """ + rack_pattern = '\w.*(r\d+)\w.*' + rack = re.findall(rack_pattern, host)[0] + if not self.region: + self.region = host.split(rack)[0] + return rack + + def _get_rackwise_hosts(self): + """ Mapping hosts with rack ids """ + rackwise_hosts = {} + racks = self._get_rack_data() + for rack in racks: + if rack not in rackwise_hosts: + rackwise_hosts[racks[rack]] = [] + for host in self.hostnames: + if rack in host: + rackwise_hosts[racks[rack]].append(host) + LOG.debug("rackwise hosts:\n%s", pprint.pformat(rackwise_hosts)) + return rackwise_hosts + + def _categorize_hosts(self): + """ + Categorize host as genesis, controller and compute based on + the hostname string extracted from xl + """ + """ loop through IPMI data and determine hosttype """ + is_genesis = False + sitetype = self.sitetype + ctrl_profile_type = \ + self.rules_data['hardware_profile'][sitetype]['profile_name']['ctrl'] + for host in sorted(self.ipmi_data.keys()): + if (self.ipmi_data[host]['host_profile'] == ctrl_profile_type): + if not is_genesis: + self.host_type[host] = 'genesis' + is_genesis = True + else: + self.host_type[host] = 'controller' + else: + self.host_type[host] = 'compute' + + def get_networks(self, region): + # TODO(pg710r): Code will be added later + pass + + def get_ips(self, region, host=None): + """Return list of IPs on the host + :param string region: Region name + :param string host: Host name + :returns: Dict of IPs per network on the host + :rtype: dict + Example: {'oob': {'ipv4': '192.168.1.10'}, + 'pxe': {'ipv4': '192.168.2.10'}} + The network name from get_networks is expected to be the keys of this + dict. In case some networks are missed, they are expected to be either + DHCP or internally generated n the next steps by the design rules. 
+ """ + + ip_ = {} + ip_[host] = { + 'oob': self.ipmi_data[host].get('ipmi_address', ''), + 'oam': self.ipmi_data[host].get('oam', ''), + 'calico': self.ipmi_data[host].get('calico', ''), + 'overlay': self.ipmi_data[host].get('overlay', ''), + 'pxe': self.ipmi_data[host].get('pxe', ''), + 'storage': self.ipmi_data[host].get('storage', '') + } + return ip_ + + def get_dns_servers(self, region): + # TODO(pg710r): Code will be added later + pass + + def get_ntp_servers(self, region): + # TODO(pg710r): Code will be added later + pass + + def get_ldap_information(self, region): + # TODO(pg710r): Code will be added later + pass + + def get_location_information(self, region): + # TODO(pg710r): Code will be added later + pass + + def get_domain_name(self, region): + # TODO(pg710r): Code will be added later + pass diff --git a/spyglass/spyglass.py b/spyglass/spyglass.py index faec7aa..036905e 100644 --- a/spyglass/spyglass.py +++ b/spyglass/spyglass.py @@ -75,6 +75,18 @@ def generate_manifest_files(intermediary, manifest_dir=None): '-mdir', type=click.Path(exists=True), help='The path where manifest files needs to be generated') +@click.option( + '--excel', + '-x', + multiple=True, + type=click.Path(exists=True), + help= + 'Path to engineering excel file, to be passed with generate_intermediary') +@click.option( + '--excel_spec', + '-e', + type=click.Path(exists=True), + help='Path to excel spec, to be passed with generate_intermediary') @click.option( '--loglevel', '-l', From 2ba0f372579d2ef0355b9e19024494c2c5fe3ef1 Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Sat, 24 Nov 2018 03:41:27 +0530 Subject: [PATCH 03/17] base version of working tugboat plugin --- spyglass/data_extractor/base.py | 10 +- .../tugboat/check_exceptions.py | 35 ++ .../data_extractor/tugboat/excel_parser.py | 433 ++++++++++++++++++ spyglass/data_extractor/tugboat/tugboat.py | 303 +++++++++--- spyglass/parser/generate_intermediary.py | 96 ++-- 5 files changed, 776 insertions(+), 101 deletions(-) create mode 100644 spyglass/data_extractor/tugboat/check_exceptions.py create mode 100644 spyglass/data_extractor/tugboat/excel_parser.py diff --git a/spyglass/data_extractor/base.py b/spyglass/data_extractor/base.py index 24cb894..54dee4c 100644 --- a/spyglass/data_extractor/base.py +++ b/spyglass/data_extractor/base.py @@ -278,8 +278,6 @@ def extract_baremetal_information(self): LOG.info("Extract baremetal information from plugin") baremetal = {} hosts = self.get_hosts(self.region) - import pdb - pdb.set_trace() # For each host list fill host profile and network IPs for host in hosts: @@ -398,12 +396,11 @@ def extract_network_information(self): 'calico', 'overlay', 'pxe', 'storage', 'oam', 'oob', 'ingress' ] network_data['vlan_network_data'] = {} - for net in networks: tmp_net = {} if net['name'] in networks_to_scan: - tmp_net['subnet'] = net['subnet'] - tmp_net['vlan'] = net['vlan'] + tmp_net['subnet'] = net.get('subnet', '') + tmp_net['vlan'] = net.get('vlan', '') network_data['vlan_network_data'][net['name']] = tmp_net @@ -420,9 +417,10 @@ def extract_data(self): LOG.info("Extract data from plugin") site_data = {} site_data['baremetal'] = self.extract_baremetal_information() - site_data['site_info'] = self.extract_site_information() site_data['network'] = self.extract_network_information() + site_data['site_info'] = self.extract_site_information() self.site_data = site_data + return site_data def apply_additional_data(self, extra_data): diff --git a/spyglass/data_extractor/tugboat/check_exceptions.py 
b/spyglass/data_extractor/tugboat/check_exceptions.py
new file mode 100644
index 0000000..d11d58a
--- /dev/null
+++ b/spyglass/data_extractor/tugboat/check_exceptions.py
@@ -0,0 +1,35 @@
+# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class BaseError(Exception):
+    pass
+
+
+class NotEnoughIp(BaseError):
+    def __init__(self, cidr, total_nodes):
+        self.cidr = cidr
+        self.total_nodes = total_nodes
+
+    def display_error(self):
+        print('{} can not handle {} nodes'.format(self.cidr, self.total_nodes))
+
+
+class NoSpecMatched(BaseError):
+    def __init__(self, excel_specs):
+        self.specs = excel_specs
+
+    def display_error(self):
+        print('No spec matched. Following are the available specs:\n{}'.format(
+            self.specs))
diff --git a/spyglass/data_extractor/tugboat/excel_parser.py b/spyglass/data_extractor/tugboat/excel_parser.py
new file mode 100644
index 0000000..264241d
--- /dev/null
+++ b/spyglass/data_extractor/tugboat/excel_parser.py
@@ -0,0 +1,433 @@
+# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +import json +import jsonschema +import logging +import pprint +import pkg_resources +import re +import sys +import yaml +from openpyxl import load_workbook +from openpyxl import Workbook +from .check_exceptions import ( + NoSpecMatched, ) +# from spyglass.data_extractor.custom_exceptions + +LOG = logging.getLogger(__name__) + + +class ExcelParser(): + """ Parse data from excel into a dict """ + + def __init__(self, file_name, excel_specs): + self.file_name = file_name + with open(excel_specs, 'r') as f: + spec_raw_data = f.read() + self.excel_specs = yaml.safe_load(spec_raw_data) + # A combined design spec, returns a workbok object after combining + # all the inputs excel specs + combined_design_spec = self.combine_excel_design_specs(file_name) + self.wb_combined = combined_design_spec + self.filenames = file_name + self.spec = 'xl_spec' + + @staticmethod + def sanitize(string): + """ Remove extra spaces and convert string to lower case """ + return string.replace(' ', '').lower() + + def compare(self, string1, string2): + """ Compare the strings """ + return bool(re.search(self.sanitize(string1), self.sanitize(string2))) + + def validate_sheet(self, spec, sheet): + """ Check if the sheet is correct or not """ + ws = self.wb_combined[sheet] + header_row = self.excel_specs['specs'][spec]['header_row'] + ipmi_header = self.excel_specs['specs'][spec]['ipmi_address_header'] + ipmi_column = self.excel_specs['specs'][spec]['ipmi_address_col'] + header_value = ws.cell(row=header_row, column=ipmi_column).value + import pdb + pdb.set_trace() + return bool(self.compare(ipmi_header, header_value)) + + def find_correct_spec(self): + """ Find the correct spec """ + for spec in self.excel_specs['specs']: + sheet_name = self.excel_specs['specs'][spec]['ipmi_sheet_name'] + for sheet in self.wb_combined.sheetnames: + if self.compare(sheet_name, sheet): + self.excel_specs['specs'][spec]['ipmi_sheet_name'] = sheet + if self.validate_sheet(spec, sheet): + return spec + raise NoSpecMatched(self.excel_specs) + + def get_ipmi_data(self): + """ Read IPMI data from the sheet """ + ipmi_data = {} + hosts = [] + provided_sheetname = self.excel_specs['specs'][self. + spec]['ipmi_sheet_name'] + workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname( + provided_sheetname) + if workbook_object is not None: + ws = workbook_object[extracted_sheetname] + else: + ws = self.wb_combined[provided_sheetname] + row = self.excel_specs['specs'][self.spec]['start_row'] + end_row = self.excel_specs['specs'][self.spec]['end_row'] + hostname_col = self.excel_specs['specs'][self.spec]['hostname_col'] + ipmi_address_col = self.excel_specs['specs'][self. + spec]['ipmi_address_col'] + host_profile_col = self.excel_specs['specs'][self. + spec]['host_profile_col'] + ipmi_gateway_col = self.excel_specs['specs'][self. 
+ spec]['ipmi_gateway_col'] + previous_server_gateway = None + while row <= end_row: + hostname = self.sanitize( + ws.cell(row=row, column=hostname_col).value) + hosts.append(hostname) + ipmi_address = ws.cell(row=row, column=ipmi_address_col).value + if '/' in ipmi_address: + ipmi_address = ipmi_address.split('/')[0] + ipmi_gateway = ws.cell(row=row, column=ipmi_gateway_col).value + if ipmi_gateway: + previous_server_gateway = ipmi_gateway + else: + ipmi_gateway = previous_server_gateway + tmp_host_profile = ws.cell(row=row, column=host_profile_col).value + try: + if tmp_host_profile is None: + raise RuntimeError("No value read from {} ".format( + self.file_name) + "sheet:{} row:{}, col:{}".format( + self.spec, row, host_profile_col)) + except RuntimeError as rerror: + LOG.critical(rerror) + sys.exit("Tugboat exited!!") + host_profile = tmp_host_profile.split('-')[1] + ipmi_data[hostname] = { + 'ipmi_address': ipmi_address, + 'ipmi_gateway': ipmi_gateway, + 'host_profile': host_profile, + 'type': type, + } + row += 1 + LOG.debug("ipmi data extracted from excel:\n%s", + [pprint.pformat(ipmi_data), + pprint.pformat(hosts)]) + return [ipmi_data, hosts] + + def get_private_vlan_data(self, ws): + """ Get private vlan data from private IP sheet """ + vlan_data = {} + row = self.excel_specs['specs'][self.spec]['vlan_start_row'] + end_row = self.excel_specs['specs'][self.spec]['vlan_end_row'] + type_col = self.excel_specs['specs'][self.spec]['net_type_col'] + vlan_col = self.excel_specs['specs'][self.spec]['vlan_col'] + while row <= end_row: + cell_value = ws.cell(row=row, column=type_col).value + if cell_value: + vlan = ws.cell(row=row, column=vlan_col).value + if vlan: + vlan = vlan.lower() + vlan_data[vlan] = cell_value + row += 1 + LOG.debug("vlan data extracted from excel:\n%s", + pprint.pformat(vlan_data)) + return vlan_data + + def get_private_network_data(self): + """ Read network data from the private ip sheet """ + provided_sheetname = self.excel_specs['specs'][ + self.spec]['private_ip_sheet'] + workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname( + provided_sheetname) + if workbook_object is not None: + ws = workbook_object[extracted_sheetname] + else: + ws = self.wb_combined[provided_sheetname] + vlan_data = self.get_private_vlan_data(ws) + network_data = {} + row = self.excel_specs['specs'][self.spec]['net_start_row'] + end_row = self.excel_specs['specs'][self.spec]['net_end_row'] + col = self.excel_specs['specs'][self.spec]['net_col'] + vlan_col = self.excel_specs['specs'][self.spec]['net_vlan_col'] + old_vlan = '' + while row <= end_row: + vlan = ws.cell(row=row, column=vlan_col).value + if vlan: + vlan = vlan.lower() + network = ws.cell(row=row, column=col).value + if vlan and network: + net_type = vlan_data[vlan] + if 'vlan' not in network_data: + network_data[net_type] = { + 'vlan': vlan, + 'subnet': [], + } + elif not vlan and network: + # If vlan is not present then assign old vlan to vlan as vlan + # value is spread over several rows + vlan = old_vlan + else: + row += 1 + continue + network_data[vlan_data[vlan]]['subnet'].append(network) + old_vlan = vlan + row += 1 + for network in network_data: + network_data[network]['is_common'] = True + """ + if len(network_data[network]['subnet']) > 1: + network_data[network]['is_common'] = False + else: + network_data[network]['is_common'] = True + LOG.debug( + "private network data extracted from\ + excel:\n%s", pprint.pformat(network_data)) + """ + return network_data + + def get_public_network_data(self): + """ 
Read public network data from public ip data """ + network_data = {} + provided_sheetname = self.excel_specs['specs'][self. + spec]['public_ip_sheet'] + workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname( + provided_sheetname) + if workbook_object is not None: + ws = workbook_object[extracted_sheetname] + else: + ws = self.wb_combined[provided_sheetname] + oam_row = self.excel_specs['specs'][self.spec]['oam_ip_row'] + oam_col = self.excel_specs['specs'][self.spec]['oam_ip_col'] + oam_vlan_col = self.excel_specs['specs'][self.spec]['oam_vlan_col'] + ingress_row = self.excel_specs['specs'][self.spec]['ingress_ip_row'] + oob_row = self.excel_specs['specs'][self.spec]['oob_net_row'] + col = self.excel_specs['specs'][self.spec]['oob_net_start_col'] + end_col = self.excel_specs['specs'][self.spec]['oob_net_end_col'] + network_data = { + 'oam': { + 'subnet': [ws.cell(row=oam_row, column=oam_col).value], + 'vlan': ws.cell(row=oam_row, column=oam_vlan_col).value, + }, + 'ingress': ws.cell(row=ingress_row, column=oam_col).value, + } + network_data['oob'] = { + 'subnet': [], + } + while col <= end_col: + cell_value = ws.cell(row=oob_row, column=col).value + if cell_value: + network_data['oob']['subnet'].append(self.sanitize(cell_value)) + col += 1 + LOG.debug( + "public network data extracted from\ + excel:\n%s", pprint.pformat(network_data)) + return network_data + + def get_site_info(self): + """ Read location, dns, ntp and ldap data""" + site_info = {} + provided_sheetname = self.excel_specs['specs'][ + self.spec]['dns_ntp_ldap_sheet'] + workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname( + provided_sheetname) + if workbook_object is not None: + ws = workbook_object[extracted_sheetname] + else: + ws = self.wb_combined[provided_sheetname] + dns_row = self.excel_specs['specs'][self.spec]['dns_row'] + dns_col = self.excel_specs['specs'][self.spec]['dns_col'] + ntp_row = self.excel_specs['specs'][self.spec]['ntp_row'] + ntp_col = self.excel_specs['specs'][self.spec]['ntp_col'] + domain_row = self.excel_specs['specs'][self.spec]['domain_row'] + domain_col = self.excel_specs['specs'][self.spec]['domain_col'] + login_domain_row = self.excel_specs['specs'][self. + spec]['login_domain_row'] + ldap_col = self.excel_specs['specs'][self.spec]['ldap_col'] + global_group = self.excel_specs['specs'][self.spec]['global_group'] + ldap_search_url_row = self.excel_specs['specs'][ + self.spec]['ldap_search_url_row'] + dns_servers = ws.cell(row=dns_row, column=dns_col).value + ntp_servers = ws.cell(row=ntp_row, column=ntp_col).value + try: + if dns_servers is None: + raise RuntimeError( + "No value for dns_server from:{} Sheet:'{}' Row:{} Col:{}". 
+ format(self.file_name, provided_sheetname, dns_row, + dns_col)) + raise RuntimeError( + "No value for ntp_server frome:{} Sheet:'{}' Row:{} Col:{}" + .format(self.file_name, provided_sheetname, ntp_row, + ntp_col)) + except RuntimeError as rerror: + LOG.critical(rerror) + sys.exit("Tugboat exited!!") + + dns_servers = dns_servers.replace('\n', ' ') + ntp_servers = ntp_servers.replace('\n', ' ') + if ',' in dns_servers: + dns_servers = dns_servers.split(',') + else: + dns_servers = dns_servers.split() + if ',' in ntp_servers: + ntp_servers = ntp_servers.split(',') + else: + ntp_servers = ntp_servers.split() + site_info = { + 'location': self.get_location_data(), + 'dns': dns_servers, + 'ntp': ntp_servers, + 'domain': ws.cell(row=domain_row, column=domain_col).value, + 'ldap': { + 'subdomain': ws.cell(row=login_domain_row, + column=ldap_col).value, + 'common_name': ws.cell(row=global_group, + column=ldap_col).value, + 'url': ws.cell(row=ldap_search_url_row, column=ldap_col).value, + } + } + LOG.debug( + "Site Info extracted from\ + excel:\n%s", pprint.pformat(site_info)) + return site_info + + def get_location_data(self): + """ Read location data from the site and zone sheet """ + provided_sheetname = self.excel_specs['specs'][self. + spec]['location_sheet'] + workbook_object, extracted_sheetname = self.get_xl_obj_and_sheetname( + provided_sheetname) + if workbook_object is not None: + ws = workbook_object[extracted_sheetname] + else: + ws = self.wb_combined[provided_sheetname] + corridor_row = self.excel_specs['specs'][self.spec]['corridor_row'] + column = self.excel_specs['specs'][self.spec]['column'] + site_name_row = self.excel_specs['specs'][self.spec]['site_name_row'] + state_name_row = self.excel_specs['specs'][self.spec]['state_name_row'] + country_name_row = self.excel_specs['specs'][self. + spec]['country_name_row'] + clli_name_row = self.excel_specs['specs'][self.spec]['clli_name_row'] + return { + 'corridor': ws.cell(row=corridor_row, column=column).value, + 'name': ws.cell(row=site_name_row, column=column).value, + 'state': ws.cell(row=state_name_row, column=column).value, + 'country': ws.cell(row=country_name_row, column=column).value, + 'physical_location': ws.cell(row=clli_name_row, + column=column).value, + } + + def validate_data(self, data): + LOG.info('Validating data read from sheet') + schema_dir = pkg_resources.resource_filename('tugboat', 'schemas/') + schema_file = schema_dir + "data_schema.json" + json_data = json.loads(json.dumps(data)) + with open(schema_file, 'r') as f: + json_schema = json.load(f) + try: + with open('data2.json', 'w') as outfile: + json.dump(data, outfile, sort_keys=True, indent=4) + jsonschema.validate(json_data, json_schema) + except jsonschema.exceptions.ValidationError as e: + LOG.error( + "Validation Failed:\n{}\n Please check excel spec(row,col)". 
+ format(e.message)) + sys.exit(1) + LOG.info("Data validation\ + OK!") + + def validate_sheet_names_with_spec(self): + spec = list(self.excel_specs['specs'].keys())[0] + spec_item = self.excel_specs['specs'][spec] + sheet_name_list = [] + ipmi_header_sheet_name = spec_item['ipmi_sheet_name'] + sheet_name_list.append(ipmi_header_sheet_name) + private_ip_sheet_name = spec_item['private_ip_sheet'] + sheet_name_list.append(private_ip_sheet_name) + public_ip_sheet_name = spec_item['public_ip_sheet'] + sheet_name_list.append(public_ip_sheet_name) + dns_ntp_ldap_sheet_name = spec_item['dns_ntp_ldap_sheet'] + sheet_name_list.append(dns_ntp_ldap_sheet_name) + location_sheet_name = spec_item['location_sheet'] + sheet_name_list.append(location_sheet_name) + try: + for sheetname in sheet_name_list: + workbook_object, extracted_sheetname = \ + self.get_xl_obj_and_sheetname(sheetname) + if workbook_object is not None: + wb = workbook_object + sheetname = extracted_sheetname + else: + wb = self.wb_combined + + if sheetname not in wb.sheetnames: + raise RuntimeError( + "SheetName '{}' not found ".format(sheetname)) + except RuntimeError as rerror: + LOG.critical(rerror) + sys.exit("Tugboat exited!!") + + LOG.info("Sheet name in excel spec validated with") + + def get_data(self): + """ Create a dict with combined data """ + self.validate_sheet_names_with_spec() + ipmi_data = self.get_ipmi_data() + network_data = self.get_private_network_data() + public_network_data = self.get_public_network_data() + site_info_data = self.get_site_info() + data = { + 'ipmi_data': ipmi_data, + 'network_data': { + 'private': network_data, + 'public': public_network_data, + }, + 'site_info': site_info_data, + } + LOG.debug( + "Location data extracted from\ + excel:\n%s", pprint.pformat(data)) + # TODO(pg710r) self.validate_data(data) + return data + + def combine_excel_design_specs(self, filenames): + design_spec = Workbook() + for exel_file in filenames: + loaded_workbook = load_workbook(exel_file, data_only=True) + for names in loaded_workbook.sheetnames: + design_spec_worksheet = design_spec.create_sheet(names) + loaded_workbook_ws = loaded_workbook[names] + for row in loaded_workbook_ws: + for cell in row: + design_spec_worksheet[cell. + coordinate].value = cell.value + return design_spec + + def get_xl_obj_and_sheetname(self, sheetname): + """ + The logic confirms if the sheetname is specified for example as: + "MTN57a_AEC_Network_Design_v1.6.xlsx:Public IPs" + """ + if (re.search('.xlsx', sheetname) or re.search('.xls', sheetname)): + """ Extract file name """ + source_xl_file = sheetname.split(':')[0] + wb = load_workbook(source_xl_file, data_only=True) + return [wb, sheetname.split(':')[1]] + else: + return [None, sheetname] diff --git a/spyglass/data_extractor/tugboat/tugboat.py b/spyglass/data_extractor/tugboat/tugboat.py index 415056d..b09bc5e 100644 --- a/spyglass/data_extractor/tugboat/tugboat.py +++ b/spyglass/data_extractor/tugboat/tugboat.py @@ -12,11 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import itertools import logging import pprint +import pkg_resources import re +import yaml from spyglass.data_extractor.base import BaseDataSourcePlugin from spyglass.data_extractor.tugboat.excel_parser import ExcelParser + LOG = logging.getLogger(__name__) @@ -32,18 +36,10 @@ def __init__(self, region): # Site related data self.region = region - self.region_zone_map = {} - self.site_name_id_mapping = {} - self.zone_name_id_mapping = {} - self.region_name_id_mapping = {} - self.rack_name_id_mapping = {} - self.device_name_id_mapping = {} # Raw data from excel self.parsed_xl_data = None - # TODO(pg710r) currently hardcoder. will be removed later - self.sitetype = '5ec' LOG.info("Initiated data extractor plugin:{}".format(self.source_name)) def set_config_opts(self, conf): @@ -78,20 +74,6 @@ def _get_excel_obj(self): def _extract_raw_data_from_excel(self): """ Extracts raw information from excel file based on excel spec""" self.parsed_xl_data = self.excel_obj.get_data() - self.ipmi_data = self.parsed_xl_data['ipmi_data'][0] - self.hostnames = self.parsed_xl_data['ipmi_data'][1] - import pdb - pdb.set_trace() - """ - self.private_network_data = self._get_private_network_data( - self.parsed_xl_data['network_data']) - self.public_network_data = self._get_public_network_data( - self.parsed_xl_data['network_data']) - self.dns_ntp_ldap_data = self._get_dns_ntp_ldap_data( - self.parsed_xl_data['network_data']) - self.location_data = self._get_location_data( - self.parsed_xl_data['location_data']) - """ def get_plugin_conf(self, kwargs): """ Validates the plugin param from CLI and return if correct @@ -102,11 +84,11 @@ def get_plugin_conf(self, kwargs): written as an additional safeguard. """ try: - assert (len(kwargs['excel']) != - 0), "Engineering Specification file not specified" + assert (len( + kwargs['excel'])), "Engineering Spec file not specified" excel_file_info = kwargs['excel'] assert (kwargs['excel_spec'] - ) is not None, "Excel Specification file not specified" + ) is not None, "Excel Spec file not specified" excel_spec_info = kwargs['excel_spec'] except AssertionError: LOG.error( @@ -119,18 +101,6 @@ def get_plugin_conf(self, kwargs): } return plugin_conf - def get_zones(self, site=None): - # TODO(pg710r): Code will be added later - pass - - def get_regions(self, zone): - # TODO(pg710r): Code will be added later - pass - - def get_racks(self, region): - # TODO(pg710r): Code will be added later - pass - def get_hosts(self, region, rack=None): """Return list of hosts in the region :param string region: Region name @@ -150,6 +120,7 @@ def get_hosts(self, region, rack=None): ] """ LOG.info("Get Host Information") + ipmi_data = self.parsed_xl_data['ipmi_data'][0] rackwise_hosts = self._get_rackwise_hosts() host_list = [] for rack in rackwise_hosts.keys(): @@ -160,8 +131,7 @@ def get_hosts(self, region, rack=None): 'name': host, 'host_profile': - self.ipmi_data[host]['host_profile'] - #'type': self.host_type[host] + ipmi_data[host]['host_profile'] }) return host_list @@ -169,7 +139,8 @@ def _get_rack_data(self): """ Format rack name """ LOG.info("Getting rack data") racks = {} - for host in self.hostnames: + hostnames = self.parsed_xl_data['ipmi_data'][1] + for host in hostnames: rack = self._get_rack(host) racks[rack] = rack.replace('r', 'rack') return racks @@ -179,7 +150,7 @@ def _get_rack(self, host): Get rack id from the rack string extracted from xl """ - rack_pattern = '\w.*(r\d+)\w.*' + rack_pattern = r'\w.*(r\d+)\w.*' rack = re.findall(rack_pattern, host)[0] if not self.region: 
self.region = host.split(rack)[0] @@ -188,11 +159,12 @@ def _get_rack(self, host): def _get_rackwise_hosts(self): """ Mapping hosts with rack ids """ rackwise_hosts = {} + hostnames = self.parsed_xl_data['ipmi_data'][1] racks = self._get_rack_data() for rack in racks: if rack not in rackwise_hosts: rackwise_hosts[racks[rack]] = [] - for host in self.hostnames: + for host in hostnames: if rack in host: rackwise_hosts[racks[rack]].append(host) LOG.debug("rackwise hosts:\n%s", pprint.pformat(rackwise_hosts)) @@ -206,10 +178,11 @@ def _categorize_hosts(self): """ loop through IPMI data and determine hosttype """ is_genesis = False sitetype = self.sitetype - ctrl_profile_type = \ - self.rules_data['hardware_profile'][sitetype]['profile_name']['ctrl'] - for host in sorted(self.ipmi_data.keys()): - if (self.ipmi_data[host]['host_profile'] == ctrl_profile_type): + ipmi_data = self.parsed_xl_data['ipmi_data'][0] + ctrl_profile_type = self.rules_data['hardware_profile'][sitetype][ + 'profile_name']['ctrl'] + for host in sorted(ipmi_data.keys()): + if (ipmi_data[host]['host_profile'] == ctrl_profile_type): if not is_genesis: self.host_type[host] = 'genesis' is_genesis = True @@ -219,8 +192,46 @@ def _categorize_hosts(self): self.host_type[host] = 'compute' def get_networks(self, region): - # TODO(pg710r): Code will be added later - pass + + vlan_list = [] + # Network data extracted from xl is formatted to have a predictable + # data type. For e.g VlAN 45 extracted from xl is formatted as 45 + vlan_pattern = r'\d+' + private_net = self.parsed_xl_data['network_data']['private'] + public_net = self.parsed_xl_data['network_data']['public'] + # Extract network information from private and public network data + for net_type, net_val in itertools.chain(private_net.items(), + public_net.items()): + tmp_vlan = {} + # Ingress is special network that has no vlan, only a subnet string + # So treatment for ingress is different + LOG.info(net_type) + LOG.info(net_val) + if net_type is not 'ingress': + # standardize the network name as net_type may ne different. + # For e.g insteas of pxe it may be PXE or instead of calico + # it may be ksn. Valid network names are pxe, calico, oob, oam, + # overlay, storage, ingress + tmp_vlan['name'] = self._get_network_name_from_vlan_name( + net_type) + + # extract vlan tag. 
It was extracted from xl file as 'VlAN 45' + # The code below extracts the numeric data fron net_val['vlan'] + if net_val.get('vlan', "") is not "": + value = re.findall(vlan_pattern, net_val['vlan']) + tmp_vlan['vlan'] = value[0] + else: + tmp_vlan['vlan'] = "" + + tmp_vlan['subnet'] = net_val.get('subnet', "") + tmp_vlan['gateway'] = net_val.get('gateway', "") + else: + tmp_vlan['name'] = 'ingress' + tmp_vlan['subnet'] = net_val + vlan_list.append(tmp_vlan) + LOG.debug("vlan list extracted from tugboat:\n{}".format( + pprint.pformat(vlan_list))) + return vlan_list def get_ips(self, region, host=None): """Return list of IPs on the host @@ -236,32 +247,192 @@ def get_ips(self, region, host=None): """ ip_ = {} + ipmi_data = self.parsed_xl_data['ipmi_data'][0] ip_[host] = { - 'oob': self.ipmi_data[host].get('ipmi_address', ''), - 'oam': self.ipmi_data[host].get('oam', ''), - 'calico': self.ipmi_data[host].get('calico', ''), - 'overlay': self.ipmi_data[host].get('overlay', ''), - 'pxe': self.ipmi_data[host].get('pxe', ''), - 'storage': self.ipmi_data[host].get('storage', '') + 'oob': ipmi_data[host].get('ipmi_address', ''), + 'oam': ipmi_data[host].get('oam', ''), + 'calico': ipmi_data[host].get('calico', ''), + 'overlay': ipmi_data[host].get('overlay', ''), + 'pxe': ipmi_data[host].get('pxe', ''), + 'storage': ipmi_data[host].get('storage', '') } return ip_ - def get_dns_servers(self, region): - # TODO(pg710r): Code will be added later - pass + def get_ldap_information(self, region): + """ Extract ldap information from excel and pass it""" + + ldap_raw_data = self.parsed_xl_data['site_info']['ldap'] + ldap_info = {} + # raw url is 'url: ldap://example.com' so we are converting to + # 'ldap://example.com' + ldap_info['url'] = ldap_raw_data['url'].split(' ')[1] + ldap_info['common_name'] = ldap_raw_data['common_name'] + ldap_info['domain'] = ldap_raw_data['url'].split('.')[1] + ldap_info['subdomain'] = ldap_raw_data['subdomain'] + + return ldap_info + + def _get_formatted_server_list(self, server_list): + """ Format dns and ntp server list as comma separated string """ + + # dns/ntp server info from excel is of the format + # 'xxx.xxx.xxx.xxx, (aaa.bbb.ccc.com)' + # The function returns a list of comma separated dns ip addresses + servers = [] + for data in server_list: + if '(' not in data: + servers.append(data) + formatted_server_list = ','.join(servers) + return formatted_server_list def get_ntp_servers(self, region): - # TODO(pg710r): Code will be added later - pass + """ Returns a comma separated list of ntp ip addresses""" - def get_ldap_information(self, region): - # TODO(pg710r): Code will be added later - pass + ntp_server_list = self._get_formatted_server_list( + self.parsed_xl_data['site_info']['ntp']) + return ntp_server_list - def get_location_information(self, region): - # TODO(pg710r): Code will be added later - pass + def get_dns_servers(self, region): + """ Returns a comma separated list of dns ip addresses""" + dns_server_list = self._get_formatted_server_list( + self.parsed_xl_data['site_info']['dns']) + return dns_server_list def get_domain_name(self, region): - # TODO(pg710r): Code will be added later + """ Returns domain name extracted from excel file""" + + return self.parsed_xl_data['site_info']['domain'] + + def _get_private_network_data(self, raw_data): + """ + Get private network data from information extracted + by ExcelParser(i.e raw data) + """ + network_data = {} + # Private Network Types are : pxe, storage, calico, overlay + private_network_types = { + 'pxe': 
'pxe', + 'storage': 'storage', + 'calico': 'calico', + 'overlay': 'overlay' + } + for net_type in private_network_types: + for key in raw_data['private']: + if net_type.lower() in key.lower(): + network_data[private_network_types[net_type]] = raw_data[ + 'private'][key] + LOG.debug("Private Network Data:\n%s", pprint.pformat(network_data)) + return network_data + + def _get_public_network_data(self, raw_data): + """ + Get public network data from information extracted + by ExcelParser(i.e raw data) + """ + network_data = raw_data['public'] + LOG.debug("Public Network Data:\n%s", pprint.pformat(network_data)) + return network_data + + def _get_dns_ntp_ldap_data(self, raw_data): + """ + Get dns, ntp and ldap data from information extracted + by ExcelParser(i.e raw data) + """ + network_data = raw_data['dns_ntp_ldap'] + network_data['dns'] = " ".join(network_data['dns']) + network_data['ntp'] = " ".join(network_data['ntp']) + LOG.debug("DNS, NTP, LDAP data:\n%s", pprint.pformat(network_data)) + return network_data + + def get_location_information(self, region): + """ + Prepare location data from information extracted + by ExcelParser(i.e raw data) + """ + location_data = self.parsed_xl_data['site_info']['location'] + + corridor_pattern = r'\d+' + corridor_number = re.findall(corridor_pattern, + location_data['corridor'])[0] + name = location_data.get('name', '') + state = location_data.get('state', '') + country = location_data.get('country', '') + physical_location_id = location_data.get('physical_location', '') + + return { + 'name': name, + 'physical_location_id': physical_location_id, + 'state': state, + 'country': country, + 'corridor': 'c{}'.format(corridor_number), + } + + def collect_design_rules(self, site_config): + """ The function applies global and site specific design rules to + a common design rule + """ + """ Load and save global tugboat design rules.yaml """ + global_config_dir = pkg_resources.resource_filename( + 'tugboat', 'config/') + global_config_file = global_config_dir + 'global_config.yaml' + global_config_data = self.read_file(global_config_file) + global_config_yaml = yaml.safe_load(global_config_data) + """ Load site specific design rules """ + site_config_data = self.read_file(site_config) + site_config_yaml = yaml.safe_load(site_config_data) + """ combine global and site design rules """ + rules_data = {} + rules_data.update(global_config_yaml) + rules_data.update(site_config_yaml) + + self.rules_data = rules_data + + self.HOST_TYPES = self.rules_data['host_types'] + self.PRIVATE_NETWORK_TYPES = self.rules_data['private_network_types'] + self.IPS_TO_LEAVE = self.rules_data['ips_to_leave'] + self.OOB_IPS_TO_LEAVE = self.rules_data['oob_ips_to_leave'] + self.sitetype = self.rules_data['sitetype'] + + def _get_network_name_from_vlan_name(self, vlan_name): + """ network names are ksn, oam, oob, overlay, storage, pxe + + + This is a utility function to determine the vlan acceptable + vlan from the name extracted from excel file + + The following mapping rules apply: + vlan_name contains "ksn or calico" the network name is "calico" + vlan_name contains "storage" the network name is "storage" + vlan_name contains "server" the network name is "oam" + vlan_name contains "ovs" the network name is "overlay" + vlan_name contains "oob" the network name is "oob" + vlan_name contains "pxe" the network name is "pxe" + """ + network_names = [ + 'ksn|calico', 'storage', 'oam|server', 'ovs|overlay', 'oob', 'pxe' + ] + for name in network_names: + # Make a pattern that would ignore 
case.
+            # if name is 'ksn' pattern name is '(?i)(ksn)'
+            name_pattern = "(?i)({})".format(name)
+            if re.search(name_pattern, vlan_name):
+                if name == 'ksn|calico':
+                    return 'calico'
+                if name == 'storage':
+                    return 'storage'
+                if name == 'oam|server':
+                    return 'oam'
+                if name == 'ovs|overlay':
+                    return 'overlay'
+                if name == 'oob':
+                    return 'oob'
+                if name == 'pxe':
+                    return 'pxe'
+        # if nothing matches
+        LOG.error(
+            "Unable to recognize VLAN name extracted from Plugin data source")
+        return ("")
+
+    def get_racks(self, region):
+        # TODO(pg710r)
         pass
diff --git a/spyglass/parser/generate_intermediary.py b/spyglass/parser/generate_intermediary.py
index 1321b05..f6a8d36 100644
--- a/spyglass/parser/generate_intermediary.py
+++ b/spyglass/parser/generate_intermediary.py
@@ -49,18 +49,22 @@ def _initialize_intermediary(self):
         self.sitetype = None
         self.genesis_node = None
         self.region_name = None
+        self.network_subnets = None

     def _get_network_subnets(self):
-        # Extract subnet information for networks
+        """ Extract subnet information for networks.
+
+
+        In some networks, there are multiple subnets; in that case
+        we assign only the first subnet """
         LOG.info("Extracting network subnets")
         network_subnets = {}
-        # self.format_network_data()
         for net_type in self.data['network']['vlan_network_data']:
             # One of the type is ingress and we don't want that here
             if (net_type != 'ingress'):
                 network_subnets[net_type] = netaddr.IPNetwork(
                     self.data['network']['vlan_network_data'][net_type]
-                    ['subnet'])
+                    ['subnet'][0])

         LOG.debug("Network subnets:\n{}".format(
             pprint.pformat(network_subnets)))
@@ -158,6 +162,41 @@ def _apply_rule_hardware_profile(self, rule_data):
         pass

     def _apply_rule_ip_alloc_offset(self, rule_data):
+        """ Apply offset rules to update baremetal host ip's and vlan network
+        data """
+
+        # Get network subnets
+        self.network_subnets = self._get_network_subnets()
+
+        self._update_vlan_net_data(rule_data)
+        self._update_baremetal_host_ip_data(rule_data)
+
+    def _update_baremetal_host_ip_data(self, rule_data):
+        """ Update baremetal host ip's for applicable networks.
+
+
+        The applicable networks are oob, oam, ksn, storage and overlay.
+        These IPs are assigned based on network subnet ranges.
+ If a particular ip exists it is overridden.""" + + # Ger defult ip offset + default_ip_offset = rule_data['default'] + + host_idx = 0 + LOG.info("Looping through baremetal hosts") + for racks in self.data['baremetal'].keys(): + rack_hosts = self.data['baremetal'][racks] + for host in rack_hosts: + host_networks = rack_hosts[host]['ip'] + for net in host_networks: + ips = list(self.network_subnets[net]) + host_networks[net] = str(ips[host_idx + default_ip_offset]) + host_idx = host_idx + 1 + + LOG.debug("Updated baremetal host:{}".format( + pprint.pformat(self.data['baremetal']))) + + def _update_vlan_net_data(self, rule_data): """ Offset allocation rules to determine ip address range(s) @@ -165,7 +204,6 @@ def _apply_rule_ip_alloc_offset(self, rule_data): network address, gateway ip and other address ranges """ LOG.info("Apply network design rules") - vlan_network_data = {} # Collect Rules default_ip_offset = rule_data['default'] @@ -190,26 +228,23 @@ def _apply_rule_ip_alloc_offset(self, rule_data): pprint.pformat(self.data['network']['bgp']))) LOG.info("Applying rule to vlan network data") - # Get network subnets - network_subnets = self._get_network_subnets() # Apply rules to vlan networks - for net_type in network_subnets: + for net_type in self.network_subnets: if net_type == 'oob': ip_offset = oob_ip_offset else: ip_offset = default_ip_offset - vlan_network_data[net_type] = {} - subnet = network_subnets[net_type] - ips = list(subnet) - vlan_network_data[net_type]['network'] = str( - network_subnets[net_type]) + subnet = self.network_subnets[net_type] + ips = list(subnet) - vlan_network_data[net_type]['gateway'] = str( - ips[gateway_ip_offset]) + self.data['network']['vlan_network_data'][net_type][ + 'gateway'] = str(ips[gateway_ip_offset]) - vlan_network_data[net_type]['reserved_start'] = str(ips[1]) - vlan_network_data[net_type]['reserved_end'] = str(ips[ip_offset]) + self.data['network']['vlan_network_data'][net_type][ + 'reserved_start'] = str(ips[1]) + self.data['network']['vlan_network_data'][net_type][ + 'reserved_end'] = str(ips[ip_offset]) static_start = str(ips[ip_offset + 1]) static_end = str(ips[static_ip_end_offset]) @@ -220,30 +255,32 @@ def _apply_rule_ip_alloc_offset(self, rule_data): dhcp_start = str(ips[mid]) dhcp_end = str(ips[dhcp_ip_end_offset]) - vlan_network_data[net_type]['dhcp_start'] = dhcp_start - vlan_network_data[net_type]['dhcp_end'] = dhcp_end + self.data['network']['vlan_network_data'][net_type][ + 'dhcp_start'] = dhcp_start + self.data['network']['vlan_network_data'][net_type][ + 'dhcp_end'] = dhcp_end - vlan_network_data[net_type]['static_start'] = static_start - vlan_network_data[net_type]['static_end'] = static_end + self.data['network']['vlan_network_data'][net_type][ + 'static_start'] = static_start + self.data['network']['vlan_network_data'][net_type][ + 'static_end'] = static_end # There is no vlan for oob network if (net_type != 'oob'): - vlan_network_data[net_type]['vlan'] = self.data['network'][ - 'vlan_network_data'][net_type]['vlan'] + self.data['network']['vlan_network_data'][net_type][ + 'vlan'] = self.data['network']['vlan_network_data'][ + net_type]['vlan'] # OAM have default routes. Only for cruiser. 
TBD if (net_type == 'oam'): routes = ["0.0.0.0/0"] else: routes = [] - vlan_network_data[net_type]['routes'] = routes - - # Update network data to self.data - self.data['network']['vlan_network_data'][ - net_type] = vlan_network_data[net_type] + self.data['network']['vlan_network_data'][net_type][ + 'routes'] = routes LOG.debug("Updated vlan network data:\n{}".format( - pprint.pformat(vlan_network_data))) + pprint.pformat(self.data['network']['vlan_network_data']))) def load_extracted_data_from_data_source(self, extracted_data): """ @@ -265,7 +302,8 @@ def load_extracted_data_from_data_source(self, extracted_data): with open(extracted_file, 'w') as f: f.write(yaml_file) f.close() - self._validate_extracted_data(extracted_data) + # TODO(pg710r): validation stopped temporarily + # self._validate_extracted_data(extracted_data) # Append region_data supplied from CLI to self.data self.data['region_name'] = self.region_name From 4ad83a61bd158f9619368171201dc71355a9b8e5 Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Mon, 26 Nov 2018 13:17:48 +0530 Subject: [PATCH 04/17] Added support for determining host type --- spyglass/parser/generate_intermediary.py | 27 ++++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/spyglass/parser/generate_intermediary.py b/spyglass/parser/generate_intermediary.py index f6a8d36..8f31d2c 100644 --- a/spyglass/parser/generate_intermediary.py +++ b/spyglass/parser/generate_intermediary.py @@ -158,8 +158,31 @@ def _apply_rule_host_profile_interfaces(self, rule_data): pass def _apply_rule_hardware_profile(self, rule_data): - # TODO(pg710r)Nothing to do as of now - pass + """ Apply rules to define host type from hardware profile info. + + + Host profile will define host types as "controller, compute or + genesis". The rule_data has pre-defined information to define + compute or controller based on host_profile. For defining 'genesis' + the first controller host is defined as genesis.""" + is_genesis = False + hardware_profile = rule_data[self.region_name] + # Getting individual racks. The racks are sorted to ensure that the + # first controller of the first rack is assigned as 'genesis' node. + for rack in sorted(self.data['baremetal'].keys()): + # Getting individual hosts in each rack. Sorting of the hosts are + # done to determine the genesis node. 
+ for host in sorted(self.data['baremetal'][rack].keys()): + host_info = self.data['baremetal'][rack][host] + if (host_info['host_profile'] == hardware_profile[ + 'profile_name']['ctrl']): + if not is_genesis: + host_info['type'] = 'genesis' + is_genesis = True + else: + host_info['type'] = 'controller' + else: + host_info['type'] = 'compute' def _apply_rule_ip_alloc_offset(self, rule_data): """ Apply offset rules to update baremetal host ip's and vlan network From ab2cbcb9f1b7a507808378bf8ada87f853e7d104 Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Tue, 27 Nov 2018 19:28:42 +0530 Subject: [PATCH 05/17] Formatting, Logging Changes --- spyglass/data_extractor/base.py | 2 +- spyglass/data_extractor/formation.py | 438 +++++++++--------- .../data_extractor/tugboat/excel_parser.py | 36 +- spyglass/data_extractor/tugboat/tugboat.py | 281 +++++------ spyglass/parser/generate_intermediary.py | 43 +- spyglass/schemas/data_schema.json | 34 +- spyglass/spyglass.py | 5 +- 7 files changed, 382 insertions(+), 457 deletions(-) diff --git a/spyglass/data_extractor/base.py b/spyglass/data_extractor/base.py index 54dee4c..494a347 100644 --- a/spyglass/data_extractor/base.py +++ b/spyglass/data_extractor/base.py @@ -432,7 +432,7 @@ def apply_additional_data(self, extra_data): If there is repetition of data then additional data supplied shall take precedence. """ - LOG.info("Update site data with additional input") + LOG.info("Merging site data with additional configuration") tmp_site_data = utils.dict_merge(self.site_data, extra_data) self.site_data = tmp_site_data return self.site_data diff --git a/spyglass/data_extractor/formation.py b/spyglass/data_extractor/formation.py index 9beca40..8945efd 100644 --- a/spyglass/data_extractor/formation.py +++ b/spyglass/data_extractor/formation.py @@ -63,6 +63,8 @@ def __init__(self, region): self.device_name_id_mapping = {} LOG.info("Initiated data extractor plugin:{}".format(self.source_name)) + # Implement Abstract functions + def set_config_opts(self, conf): """ Sets the config params passed by CLI""" LOG.info("Plugin params passed:\n{}".format(pprint.pformat(conf))) @@ -94,182 +96,6 @@ def get_plugin_conf(self, kwargs): plugin_conf = {'url': url, 'user': user, 'password': password} return plugin_conf - def _validate_config_options(self, conf): - """Validate the CLI params passed - - The method checks for missing parameters and terminates - Spyglass execution if found so. - """ - - missing_params = [] - for key in conf.keys(): - if conf[key] is None: - missing_params.append(key) - if len(missing_params) != 0: - LOG.error("Missing Plugin Params{}:".format(missing_params)) - exit() - - # Implement helper classes - - def _generate_token(self): - """Generate token for Formation - Formation API does not provide separate resource to generate - token. This is a workaround to call directly Formation API - to get token instead of using Formation client. 
- """ - # Create formation client config object - self.client_config = swagger_client.Configuration() - self.client_config.host = self.formation_api_url - self.client_config.username = self.user - self.client_config.password = self.password - self.client_config.verify_ssl = False - # self.client_config.debug = True - - # Assumes token is never expired in the execution of this tool - if self.token: - return self.token - - url = self.formation_api_url + '/zones' - try: - token_response = requests.get( - url, - auth=(self.user, self.password), - verify=self.client_config.verify_ssl) - except requests.exceptions.ConnectionError: - raise ConnectionError('Incorrect URL: {}'.format(url)) - - if token_response.status_code == 200: - self.token = token_response.json().get('X-Subject-Token', None) - else: - raise TokenGenerationError( - 'Unable to generate token because {}'.format( - token_response.reason)) - - return self.token - - def _get_formation_client(self): - """Create formation client object - - Formation uses X-Auth-Token for authentication and should be in - format "user|token". - Generate the token and add it formation config object. - """ - token = self._generate_token() - self.client_config.api_key = {'X-Auth-Token': self.user + '|' + token} - self.formation_api_client = swagger_client.ApiClient( - self.client_config) - - def _update_site_and_zone(self, region): - """Get Zone name and Site name from region""" - - # TODO(nh863p): Since the test environments taking lot of time - # to retrieve data, this is a tweak to determine zone name and - # rack name - zone = region[:-1] - # TODO(pg710r): site name is hardcoded - site = zone[:-1] - - # zone = self._get_zone_by_region_name(region) - # site = self._get_site_by_zone_name(zone) - - # TODO(nh863p): Raise exception if zone is None??? 
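
The _generate_token()/_get_formation_client() pair being relocated in this change boils down to the sketch below: one basic-auth GET against any Formation resource returns the session token, which is then combined with the user name for later calls. The function name here is illustrative; only the endpoint and header handling mirror the code above.

    import requests

    def fetch_formation_token(api_url, user, password, verify_ssl=False):
        # Any authenticated GET (the code above uses /zones) returns the
        # session token in the JSON body as 'X-Subject-Token'.
        resp = requests.get(api_url + '/zones',
                            auth=(user, password), verify=verify_ssl)
        if resp.status_code != 200:
            raise RuntimeError('Token request failed: ' + resp.reason)
        return resp.json().get('X-Subject-Token')

    # The swagger client is then configured with "user|token":
    # client_config.api_key = {'X-Auth-Token': user + '|' + token}
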
- - self.region_zone_map[region] = {} - self.region_zone_map[region]['zone'] = zone - self.region_zone_map[region]['site'] = site - - def _get_zone_by_region_name(self, region_name): - zone_api = swagger_client.ZonesApi(self.formation_api_client) - zones = zone_api.zones_get() - - # Walk through each zone and get regions - # Return when region name matches - for zone in zones: - self.zone_name_id_mapping[zone.name] = zone.id - zone_regions = self.get_regions(zone.name) - if region_name in zone_regions: - return zone.name - - return None - - def _get_site_by_zone_name(self, zone_name): - site_api = swagger_client.SitesApi(self.formation_api_client) - sites = site_api.sites_get() - - # Walk through each site and get zones - # Return when site name matches - for site in sites: - self.site_name_id_mapping[site.name] = site.id - site_zones = self.get_zones(site.name) - if zone_name in site_zones: - return site.name - - return None - - def _get_site_id_by_name(self, site_name): - if site_name in self.site_name_id_mapping: - return self.site_name_id_mapping.get(site_name) - - site_api = swagger_client.SitesApi(self.formation_api_client) - sites = site_api.sites_get() - for site in sites: - self.site_name_id_mapping[site.name] = site.id - if site.name == site_name: - return site.id - - def _get_zone_id_by_name(self, zone_name): - if zone_name in self.zone_name_id_mapping: - return self.zone_name_id_mapping.get(zone_name) - - zone_api = swagger_client.ZonesApi(self.formation_api_client) - zones = zone_api.zones_get() - for zone in zones: - if zone.name == zone_name: - self.zone_name_id_mapping[zone.name] = zone.id - return zone.id - - def _get_region_id_by_name(self, region_name): - if region_name in self.region_name_id_mapping: - return self.region_name_id_mapping.get(region_name) - - for zone in self.zone_name_id_mapping: - self.get_regions(zone) - - return self.region_name_id_mapping.get(region_name, None) - - def _get_rack_id_by_name(self, rack_name): - if rack_name in self.rack_name_id_mapping: - return self.rack_name_id_mapping.get(rack_name) - - for zone in self.zone_name_id_mapping: - self.get_racks(zone) - - return self.rack_name_id_mapping.get(rack_name, None) - - def _get_device_id_by_name(self, device_name): - if device_name in self.device_name_id_mapping: - return self.device_name_id_mapping.get(device_name) - - self.get_hosts(self.zone) - - return self.device_name_id_mapping.get(device_name, None) - - def _get_racks(self, zone, rack_type='compute'): - zone_id = self._get_zone_id_by_name(zone) - rack_api = swagger_client.RacksApi(self.formation_api_client) - racks = rack_api.zones_zone_id_racks_get(zone_id) - - racks_list = [] - for rack in racks: - rack_name = rack.name - self.rack_name_id_mapping[rack_name] = rack.id - if rack.rack_type.name == rack_type: - racks_list.append(rack_name) - - return racks_list - - # Functions that will be used internally within this plugin - def get_zones(self, site=None): zone_api = swagger_client.ZonesApi(self.formation_api_client) @@ -299,14 +125,11 @@ def get_regions(self, zone): return regions_list - # Implement Abstract functions - def get_racks(self, region): zone = self.region_zone_map[region]['zone'] return self._get_racks(zone, rack_type='compute') def get_hosts(self, region, rack=None): - # TODO(nh863p): Update the code to get rack wise hosts zone = self.region_zone_map[region]['zone'] zone_id = self._get_zone_id_by_name(zone) device_api = swagger_client.DevicesApi(self.formation_api_client) @@ -338,17 +161,6 @@ def get_hosts(self, region, 
rack=None): 'rack_name': host.rack_name, 'host_profile': host.host_profile_name }) - """ - for host in itertools.chain(control_hosts, compute_hosts): - self.device_name_id_mapping[host.aic_standard_name] = host.id - hosts_list.append({ - 'name': host.aic_standard_name, - 'type': host.categories[0], - 'rack_name': host.rack_name, - 'host_profile': host.host_profile_name - }) - """ - return hosts_list def get_networks(self, region): @@ -381,7 +193,6 @@ def get_networks(self, region): tmp_vlan['gateway'] = vlan_.ipv4_gateway tmp_vlan['subnet_level'] = vlan_.vlan.subnet_level vlans_list.append(tmp_vlan) - return vlans_list def get_ips(self, region, host=None): @@ -406,8 +217,7 @@ def get_ips(self, region, host=None): LOG.debug("Received VLAN Network Information\n{}".format(vlans)) ip_[host] = {} for vlan_ in vlans: - # TODO(pg710r) We need to handle the case when incoming ipv4 - # list is empty + # The plugin currently supports IPv4 if len(vlan_.vlan.ipv4) is not 0: name = self._get_network_name_from_vlan_name( vlan_.vlan.name) @@ -416,43 +226,10 @@ def get_ips(self, region, host=None): vlan_.vlan.vlan_id, name, ipv4, vlan_.vlan.name)) # TODD(pg710r) This code needs to extended to support ipv4 # and ipv6 - # ip_[host][name] = {'ipv4': ipv4} ip_[host][name] = ipv4 return ip_ - def _get_network_name_from_vlan_name(self, vlan_name): - """ network names are ksn, oam, oob, overlay, storage, pxe - - The following mapping rules apply: - vlan_name contains "ksn" the network name is "calico" - vlan_name contains "storage" the network name is "storage" - vlan_name contains "server" the network name is "oam" - vlan_name contains "ovs" the network name is "overlay" - vlan_name contains "ILO" the network name is "oob" - TODO(pg710r): need to find out for pxe - """ - network_names = ['ksn', 'storage', 'server', 'ovs', 'ILO', 'pxe'] - for name in network_names: - # Make a pattern that would ignore case. - # if name is 'ksn' pattern name is '(?i)(ksn)' - name_pattern = "(?i)({})".format(name) - if re.search(name_pattern, vlan_name): - if name is 'ksn': - return 'calico' - if name is 'storage': - return 'storage' - if name is 'server': - return 'oam' - if name is 'ovs': - return 'overlay' - if name is 'ILO': - return 'oob' - if name is 'pxe': - return 'pxe' - # if nothing matches - return ("") - def get_dns_servers(self, region): try: zone = self.region_zone_map[region]['zone'] @@ -473,9 +250,13 @@ def get_dns_servers(self, region): return dns_list def get_ntp_servers(self, region): + # These information are not available with the formation endpoint + # These will be supplied as site config parameters return [] def get_ldap_information(self, region): + # These information are not available with the formation endpoint + # These will be supplied as site config parameters return {} def get_location_information(self, region): @@ -511,3 +292,208 @@ def get_domain_name(self, region): return None return zone_.dns + + # Implement helper classes + # Functions that will be used internally within this plugin + + def _validate_config_options(self, conf): + """Validate Spyglass CLI params that are related to this plugin + + The method checks for missing parameters for this plugin and terminates + Spyglass execution if found so. 
+ """ + + missing_params = [] + for key in conf.keys(): + if conf[key] is None: + missing_params.append(key) + if len(missing_params) != 0: + LOG.error("Missing Plugin Params{}:".format(missing_params)) + exit() + + def _generate_token(self): + """Generate token for a session with Formation endpoint + + + Formation API does not provide separate resource to generate + token. This is a workaround to call directly Formation API + to get token instead of using Formation client. + """ + # Create formation client config object + self.client_config = swagger_client.Configuration() + self.client_config.host = self.formation_api_url + self.client_config.username = self.user + self.client_config.password = self.password + self.client_config.verify_ssl = False + # self.client_config.debug = True + + # Assumes token is never expired in the execution of this tool + if self.token: + return self.token + + url = self.formation_api_url + '/zones' + try: + token_response = requests.get( + url, + auth=(self.user, self.password), + verify=self.client_config.verify_ssl) + except requests.exceptions.ConnectionError: + raise ConnectionError('Incorrect URL: {}'.format(url)) + + if token_response.status_code == 200: + self.token = token_response.json().get('X-Subject-Token', None) + else: + raise TokenGenerationError( + 'Unable to generate token because {}'.format( + token_response.reason)) + + return self.token + + def _get_formation_client(self): + """Create formation client object + + + Formation uses X-Auth-Token for authentication and should be in + format "user|token". + Generate the token and add it formation config object. + """ + token = self._generate_token() + self.client_config.api_key = {'X-Auth-Token': self.user + '|' + token} + self.formation_api_client = swagger_client.ApiClient( + self.client_config) + + def _update_site_and_zone(self, region): + """Get Zone name and Site name from region""" + + try: + zone = self._get_zone_by_region_name(region) + assert(zone is not None), "zone can't be None" + except AssertionError as e: + LOG.error("zone:None:{}".format(e)) + + site = self._get_site_by_zone_name(zone) + + self.region_zone_map[region] = {} + self.region_zone_map[region]['site'] = site + + def _get_zone_by_region_name(self, region_name): + zone_api = swagger_client.ZonesApi(self.formation_api_client) + zones = zone_api.zones_get() + + # Walk through each zone and get regions + # Return when region name matches + for zone in zones: + self.zone_name_id_mapping[zone.name] = zone.id + zone_regions = self.get_regions(zone.name) + if region_name in zone_regions: + return zone.name + + return None + + def _get_site_by_zone_name(self, zone_name): + site_api = swagger_client.SitesApi(self.formation_api_client) + sites = site_api.sites_get() + + # Walk through each site and get zones + # Return when site name matches + for site in sites: + self.site_name_id_mapping[site.name] = site.id + site_zones = self.get_zones(site.name) + if zone_name in site_zones: + return site.name + + return None + + def _get_site_id_by_name(self, site_name): + if site_name in self.site_name_id_mapping: + return self.site_name_id_mapping.get(site_name) + + site_api = swagger_client.SitesApi(self.formation_api_client) + sites = site_api.sites_get() + for site in sites: + self.site_name_id_mapping[site.name] = site.id + if site.name == site_name: + return site.id + + def _get_zone_id_by_name(self, zone_name): + if zone_name in self.zone_name_id_mapping: + return self.zone_name_id_mapping.get(zone_name) + + zone_api = 
swagger_client.ZonesApi(self.formation_api_client) + zones = zone_api.zones_get() + for zone in zones: + if zone.name == zone_name: + self.zone_name_id_mapping[zone.name] = zone.id + return zone.id + + def _get_region_id_by_name(self, region_name): + if region_name in self.region_name_id_mapping: + return self.region_name_id_mapping.get(region_name) + + for zone in self.zone_name_id_mapping: + self.get_regions(zone) + + return self.region_name_id_mapping.get(region_name, None) + + def _get_rack_id_by_name(self, rack_name): + if rack_name in self.rack_name_id_mapping: + return self.rack_name_id_mapping.get(rack_name) + + for zone in self.zone_name_id_mapping: + self.get_racks(zone) + + return self.rack_name_id_mapping.get(rack_name, None) + + def _get_device_id_by_name(self, device_name): + if device_name in self.device_name_id_mapping: + return self.device_name_id_mapping.get(device_name) + + self.get_hosts(self.zone) + + return self.device_name_id_mapping.get(device_name, None) + + def _get_racks(self, zone, rack_type='compute'): + zone_id = self._get_zone_id_by_name(zone) + rack_api = swagger_client.RacksApi(self.formation_api_client) + racks = rack_api.zones_zone_id_racks_get(zone_id) + + racks_list = [] + for rack in racks: + rack_name = rack.name + self.rack_name_id_mapping[rack_name] = rack.id + if rack.rack_type.name == rack_type: + racks_list.append(rack_name) + + return racks_list + + def _get_network_name_from_vlan_name(self, vlan_name): + """ network names are ksn, oam, oob, overlay, storage, pxe + + The following mapping rules apply: + vlan_name contains "ksn" the network name is "calico" + vlan_name contains "storage" the network name is "storage" + vlan_name contains "server" the network name is "oam" + vlan_name contains "ovs" the network name is "overlay" + vlan_name contains "ILO" the network name is "oob" + vlan_name contains "pxe" the network name is "pxe" + """ + network_names = ['ksn', 'storage', 'server', 'ovs', 'ILO', 'pxe'] + for name in network_names: + # Make a pattern that would ignore case. + # if name is 'ksn' pattern name is '(?i)(ksn)' + name_pattern = "(?i)({})".format(name) + if re.search(name_pattern, vlan_name): + if name is 'ksn': + return 'calico' + if name is 'storage': + return 'storage' + if name is 'server': + return 'oam' + if name is 'ovs': + return 'overlay' + if name is 'ILO': + return 'oob' + if name is 'pxe': + return 'pxe' + # if nothing matches + return ("") diff --git a/spyglass/data_extractor/tugboat/excel_parser.py b/spyglass/data_extractor/tugboat/excel_parser.py index 264241d..7f74c2f 100644 --- a/spyglass/data_extractor/tugboat/excel_parser.py +++ b/spyglass/data_extractor/tugboat/excel_parser.py @@ -12,11 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
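
Stepping back to _get_network_name_from_vlan_name() above: the chain of `is` comparisons only works because CPython happens to intern those short string literals, so a plain `==` or a table-driven lookup would be the safer shape. A possible equivalent, shown only as an illustration and not as what the patch does:

    import re

    # Illustrative table-driven mapping of vlan-name substrings to networks.
    VLAN_NAME_MAP = {
        'ksn': 'calico',
        'storage': 'storage',
        'server': 'oam',
        'ovs': 'overlay',
        'ilo': 'oob',
        'pxe': 'pxe',
    }

    def network_name_from_vlan(vlan_name):
        for token, net_name in VLAN_NAME_MAP.items():
            # case-insensitive substring match, as in the code above
            if re.search('(?i)({})'.format(re.escape(token)), vlan_name):
                return net_name
        return ''

    # network_name_from_vlan('ILO_vlan_23') -> 'oob'
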
-import json -import jsonschema import logging import pprint -import pkg_resources import re import sys import yaml @@ -60,8 +57,6 @@ def validate_sheet(self, spec, sheet): ipmi_header = self.excel_specs['specs'][spec]['ipmi_address_header'] ipmi_column = self.excel_specs['specs'][spec]['ipmi_address_col'] header_value = ws.cell(row=header_row, column=ipmi_column).value - import pdb - pdb.set_trace() return bool(self.compare(ipmi_header, header_value)) def find_correct_spec(self): @@ -126,9 +121,10 @@ def get_ipmi_data(self): 'type': type, } row += 1 - LOG.debug("ipmi data extracted from excel:\n%s", - [pprint.pformat(ipmi_data), - pprint.pformat(hosts)]) + LOG.debug("ipmi data extracted from excel:\n{}".format( + pprint.pformat(ipmi_data))) + LOG.debug("host data extracted from excel:\n{}".format( + pprint.pformat(hosts))) return [ipmi_data, hosts] def get_private_vlan_data(self, ws): @@ -333,26 +329,8 @@ def get_location_data(self): column=column).value, } - def validate_data(self, data): - LOG.info('Validating data read from sheet') - schema_dir = pkg_resources.resource_filename('tugboat', 'schemas/') - schema_file = schema_dir + "data_schema.json" - json_data = json.loads(json.dumps(data)) - with open(schema_file, 'r') as f: - json_schema = json.load(f) - try: - with open('data2.json', 'w') as outfile: - json.dump(data, outfile, sort_keys=True, indent=4) - jsonschema.validate(json_data, json_schema) - except jsonschema.exceptions.ValidationError as e: - LOG.error( - "Validation Failed:\n{}\n Please check excel spec(row,col)". - format(e.message)) - sys.exit(1) - LOG.info("Data validation\ - OK!") - def validate_sheet_names_with_spec(self): + """ Checks is sheet name in spec file matches with excel file""" spec = list(self.excel_specs['specs'].keys())[0] spec_item = self.excel_specs['specs'][spec] sheet_name_list = [] @@ -383,7 +361,7 @@ def validate_sheet_names_with_spec(self): LOG.critical(rerror) sys.exit("Tugboat exited!!") - LOG.info("Sheet name in excel spec validated with") + LOG.info("Sheet names in excel spec validated") def get_data(self): """ Create a dict with combined data """ @@ -403,10 +381,10 @@ def get_data(self): LOG.debug( "Location data extracted from\ excel:\n%s", pprint.pformat(data)) - # TODO(pg710r) self.validate_data(data) return data def combine_excel_design_specs(self, filenames): + """ Combines multiple excel file to a single design spec""" design_spec = Workbook() for exel_file in filenames: loaded_workbook = load_workbook(exel_file, data_only=True) diff --git a/spyglass/data_extractor/tugboat/tugboat.py b/spyglass/data_extractor/tugboat/tugboat.py index b09bc5e..58fcb2e 100644 --- a/spyglass/data_extractor/tugboat/tugboat.py +++ b/spyglass/data_extractor/tugboat/tugboat.py @@ -15,9 +15,7 @@ import itertools import logging import pprint -import pkg_resources import re -import yaml from spyglass.data_extractor.base import BaseDataSourcePlugin from spyglass.data_extractor.tugboat.excel_parser import ExcelParser @@ -62,19 +60,6 @@ def set_config_opts(self, conf): self._extract_raw_data_from_excel() return - def _get_excel_obj(self): - """ Creation of an ExcelParser object to store site information. - - The information is obtained based on a excel spec yaml file. - This spec contains row, column and sheet information of - the excel file from where site specific data can be extracted. 
- """ - self.excel_obj = ExcelParser(self.excel_path, self.excel_spec) - - def _extract_raw_data_from_excel(self): - """ Extracts raw information from excel file based on excel spec""" - self.parsed_xl_data = self.excel_obj.get_data() - def get_plugin_conf(self, kwargs): """ Validates the plugin param from CLI and return if correct @@ -90,10 +75,8 @@ def get_plugin_conf(self, kwargs): assert (kwargs['excel_spec'] ) is not None, "Excel Spec file not specified" excel_spec_info = kwargs['excel_spec'] - except AssertionError: - LOG.error( - "Insufficient plugin parameter for Tugboat! Spyglass exited!") - raise + except AssertionError as e: + LOG.error("{}:Spyglass exited!".format(e)) exit() plugin_conf = { 'excel_path': excel_file_info, @@ -135,64 +118,8 @@ def get_hosts(self, region, rack=None): }) return host_list - def _get_rack_data(self): - """ Format rack name """ - LOG.info("Getting rack data") - racks = {} - hostnames = self.parsed_xl_data['ipmi_data'][1] - for host in hostnames: - rack = self._get_rack(host) - racks[rack] = rack.replace('r', 'rack') - return racks - - def _get_rack(self, host): - """ - Get rack id from the rack string extracted - from xl - """ - rack_pattern = r'\w.*(r\d+)\w.*' - rack = re.findall(rack_pattern, host)[0] - if not self.region: - self.region = host.split(rack)[0] - return rack - - def _get_rackwise_hosts(self): - """ Mapping hosts with rack ids """ - rackwise_hosts = {} - hostnames = self.parsed_xl_data['ipmi_data'][1] - racks = self._get_rack_data() - for rack in racks: - if rack not in rackwise_hosts: - rackwise_hosts[racks[rack]] = [] - for host in hostnames: - if rack in host: - rackwise_hosts[racks[rack]].append(host) - LOG.debug("rackwise hosts:\n%s", pprint.pformat(rackwise_hosts)) - return rackwise_hosts - - def _categorize_hosts(self): - """ - Categorize host as genesis, controller and compute based on - the hostname string extracted from xl - """ - """ loop through IPMI data and determine hosttype """ - is_genesis = False - sitetype = self.sitetype - ipmi_data = self.parsed_xl_data['ipmi_data'][0] - ctrl_profile_type = self.rules_data['hardware_profile'][sitetype][ - 'profile_name']['ctrl'] - for host in sorted(ipmi_data.keys()): - if (ipmi_data[host]['host_profile'] == ctrl_profile_type): - if not is_genesis: - self.host_type[host] = 'genesis' - is_genesis = True - else: - self.host_type[host] = 'controller' - else: - self.host_type[host] = 'compute' - def get_networks(self, region): - + """ Extracts vlan network info from raw network data from excel""" vlan_list = [] # Network data extracted from xl is formatted to have a predictable # data type. For e.g VlAN 45 extracted from xl is formatted as 45 @@ -205,8 +132,6 @@ def get_networks(self, region): tmp_vlan = {} # Ingress is special network that has no vlan, only a subnet string # So treatment for ingress is different - LOG.info(net_type) - LOG.info(net_val) if net_type is not 'ingress': # standardize the network name as net_type may ne different. 
# For e.g insteas of pxe it may be PXE or instead of calico @@ -259,32 +184,23 @@ def get_ips(self, region, host=None): return ip_ def get_ldap_information(self, region): - """ Extract ldap information from excel and pass it""" + """ Extract ldap information from excel""" ldap_raw_data = self.parsed_xl_data['site_info']['ldap'] ldap_info = {} # raw url is 'url: ldap://example.com' so we are converting to # 'ldap://example.com' - ldap_info['url'] = ldap_raw_data['url'].split(' ')[1] - ldap_info['common_name'] = ldap_raw_data['common_name'] - ldap_info['domain'] = ldap_raw_data['url'].split('.')[1] - ldap_info['subdomain'] = ldap_raw_data['subdomain'] + url = ldap_raw_data.get('url', '') + try: + ldap_info['url'] = url.split(' ')[1] + ldap_info['domain'] = url.split('.')[1] + except IndexError as e: + LOG.error("url.split:{}".format(e)) + ldap_info['common_name'] = ldap_raw_data.get('common_name', '') + ldap_info['subdomain'] = ldap_raw_data.get('subdomain', '') return ldap_info - def _get_formatted_server_list(self, server_list): - """ Format dns and ntp server list as comma separated string """ - - # dns/ntp server info from excel is of the format - # 'xxx.xxx.xxx.xxx, (aaa.bbb.ccc.com)' - # The function returns a list of comma separated dns ip addresses - servers = [] - for data in server_list: - if '(' not in data: - servers.append(data) - formatted_server_list = ','.join(servers) - return formatted_server_list - def get_ntp_servers(self, region): """ Returns a comma separated list of ntp ip addresses""" @@ -303,47 +219,6 @@ def get_domain_name(self, region): return self.parsed_xl_data['site_info']['domain'] - def _get_private_network_data(self, raw_data): - """ - Get private network data from information extracted - by ExcelParser(i.e raw data) - """ - network_data = {} - # Private Network Types are : pxe, storage, calico, overlay - private_network_types = { - 'pxe': 'pxe', - 'storage': 'storage', - 'calico': 'calico', - 'overlay': 'overlay' - } - for net_type in private_network_types: - for key in raw_data['private']: - if net_type.lower() in key.lower(): - network_data[private_network_types[net_type]] = raw_data[ - 'private'][key] - LOG.debug("Private Network Data:\n%s", pprint.pformat(network_data)) - return network_data - - def _get_public_network_data(self, raw_data): - """ - Get public network data from information extracted - by ExcelParser(i.e raw data) - """ - network_data = raw_data['public'] - LOG.debug("Public Network Data:\n%s", pprint.pformat(network_data)) - return network_data - - def _get_dns_ntp_ldap_data(self, raw_data): - """ - Get dns, ntp and ldap data from information extracted - by ExcelParser(i.e raw data) - """ - network_data = raw_data['dns_ntp_ldap'] - network_data['dns'] = " ".join(network_data['dns']) - network_data['ntp'] = " ".join(network_data['ntp']) - LOG.debug("DNS, NTP, LDAP data:\n%s", pprint.pformat(network_data)) - return network_data - def get_location_information(self, region): """ Prepare location data from information extracted @@ -367,31 +242,23 @@ def get_location_information(self, region): 'corridor': 'c{}'.format(corridor_number), } - def collect_design_rules(self, site_config): - """ The function applies global and site specific design rules to - a common design rule + def get_racks(self, region): + # This function is not required since the excel plugin + # already provide rack information. + pass + + def _get_excel_obj(self): + """ Creation of an ExcelParser object to store site information. 
+ + The information is obtained based on a excel spec yaml file. + This spec contains row, column and sheet information of + the excel file from where site specific data can be extracted. """ - """ Load and save global tugboat design rules.yaml """ - global_config_dir = pkg_resources.resource_filename( - 'tugboat', 'config/') - global_config_file = global_config_dir + 'global_config.yaml' - global_config_data = self.read_file(global_config_file) - global_config_yaml = yaml.safe_load(global_config_data) - """ Load site specific design rules """ - site_config_data = self.read_file(site_config) - site_config_yaml = yaml.safe_load(site_config_data) - """ combine global and site design rules """ - rules_data = {} - rules_data.update(global_config_yaml) - rules_data.update(site_config_yaml) - - self.rules_data = rules_data - - self.HOST_TYPES = self.rules_data['host_types'] - self.PRIVATE_NETWORK_TYPES = self.rules_data['private_network_types'] - self.IPS_TO_LEAVE = self.rules_data['ips_to_leave'] - self.OOB_IPS_TO_LEAVE = self.rules_data['oob_ips_to_leave'] - self.sitetype = self.rules_data['sitetype'] + self.excel_obj = ExcelParser(self.excel_path, self.excel_spec) + + def _extract_raw_data_from_excel(self): + """ Extracts raw information from excel file based on excel spec""" + self.parsed_xl_data = self.excel_obj.get_data() def _get_network_name_from_vlan_name(self, vlan_name): """ network names are ksn, oam, oob, overlay, storage, pxe @@ -433,6 +300,92 @@ def _get_network_name_from_vlan_name(self, vlan_name): "Unable to recognize VLAN name extracted from Plugin data source") return ("") - def get_racks(self, region): - # TODO(pg710r) - pass + def _get_private_network_data(self, raw_data): + """ + Get private network data from information extracted + by ExcelParser(i.e raw data) + """ + network_data = {} + # Private Network Types are : pxe, storage, calico, overlay + private_network_types = { + 'pxe': 'pxe', + 'storage': 'storage', + 'calico': 'calico', + 'overlay': 'overlay' + } + for net_type in private_network_types: + for key in raw_data['private']: + if net_type.lower() in key.lower(): + network_data[private_network_types[net_type]] = raw_data[ + 'private'][key] + LOG.debug("Private Network Data:\n%s", pprint.pformat(network_data)) + return network_data + + def _get_formatted_server_list(self, server_list): + """ Format dns and ntp server list as comma separated string """ + + # dns/ntp server info from excel is of the format + # 'xxx.xxx.xxx.xxx, (aaa.bbb.ccc.com)' + # The function returns a list of comma separated dns ip addresses + servers = [] + for data in server_list: + if '(' not in data: + servers.append(data) + formatted_server_list = ','.join(servers) + return formatted_server_list + + def _categorize_hosts(self): + """ + Categorize host as genesis, controller and compute based on + the hostname string extracted from xl + """ + """ loop through IPMI data and determine hosttype """ + is_genesis = False + sitetype = self.sitetype + ipmi_data = self.parsed_xl_data['ipmi_data'][0] + ctrl_profile_type = self.rules_data['hardware_profile'][sitetype][ + 'profile_name']['ctrl'] + for host in sorted(ipmi_data.keys()): + if (ipmi_data[host]['host_profile'] == ctrl_profile_type): + if not is_genesis: + self.host_type[host] = 'genesis' + is_genesis = True + else: + self.host_type[host] = 'controller' + else: + self.host_type[host] = 'compute' + + def _get_rack(self, host): + """ + Get rack id from the rack string extracted + from xl + """ + rack_pattern = r'\w.*(r\d+)\w.*' + rack = 
re.findall(rack_pattern, host)[0] + if not self.region: + self.region = host.split(rack)[0] + return rack + + def _get_rackwise_hosts(self): + """ Mapping hosts with rack ids """ + rackwise_hosts = {} + hostnames = self.parsed_xl_data['ipmi_data'][1] + racks = self._get_rack_data() + for rack in racks: + if rack not in rackwise_hosts: + rackwise_hosts[racks[rack]] = [] + for host in hostnames: + if rack in host: + rackwise_hosts[racks[rack]].append(host) + LOG.debug("rackwise hosts:\n%s", pprint.pformat(rackwise_hosts)) + return rackwise_hosts + + def _get_rack_data(self): + """ Format rack name """ + LOG.info("Getting rack data") + racks = {} + hostnames = self.parsed_xl_data['ipmi_data'][1] + for host in hostnames: + rack = self._get_rack(host) + racks[rack] = rack.replace('r', 'rack') + return racks diff --git a/spyglass/parser/generate_intermediary.py b/spyglass/parser/generate_intermediary.py index 8f31d2c..d82b89b 100644 --- a/spyglass/parser/generate_intermediary.py +++ b/spyglass/parser/generate_intermediary.py @@ -71,26 +71,25 @@ def _get_network_subnets(self): return network_subnets def _get_genesis_node_details(self): - # Returns the genesis node details - LOG.info("Getting Genesis Node Details") + # Get genesis host node details from the hosts based on host type for racks in self.data['baremetal'].keys(): rack_hosts = self.data['baremetal'][racks] for host in rack_hosts: if rack_hosts[host]['type'] == 'genesis': self.genesis_node = rack_hosts[host] self.genesis_node['name'] = host - LOG.debug("Genesis Node Details:{}".format( + LOG.debug("Genesis Node Details:\n{}".format( pprint.pformat(self.genesis_node))) - def _validate_extracted_data(self, data): - """ Validates the extracted data from input source. + def _validate_intermediary_data(self, data): + """ Validates the intermediary data before generating manifests. It checks wether the data types and data format are as expected. The method validates this with regex pattern defined for each data type. """ - LOG.info('Validating data read from extracted data') + LOG.info('Validating Intermediary data') temp_data = {} # Peforming a deep copy temp_data = copy.deepcopy(data) @@ -154,7 +153,10 @@ def _apply_design_rules(self): LOG.info("Applying rule:{}".format(rule_name)) def _apply_rule_host_profile_interfaces(self, rule_data): - # TODO(pg710r)Nothing to do as of now + # TODO(pg710r)Nothing to do as of now since host profile + # information is already present in plugin data. + # This function shall be defined if plugin data source + # doesn't provide host profile information. pass def _apply_rule_hardware_profile(self, rule_data): @@ -166,7 +168,7 @@ def _apply_rule_hardware_profile(self, rule_data): compute or controller based on host_profile. For defining 'genesis' the first controller host is defined as genesis.""" is_genesis = False - hardware_profile = rule_data[self.region_name] + hardware_profile = rule_data[self.data['site_info']['sitetype']] # Getting individual racks. The racks are sorted to ensure that the # first controller of the first rack is assigned as 'genesis' node. 
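
For the Tugboat helpers above, a quick worked example of the hostname parsing done by _get_rack() and friends, using a made-up hostname of the <region><rack><node> shape they expect:

    import re

    hostname = 'site1r02c003'                          # illustrative only
    rack = re.findall(r'\w.*(r\d+)\w.*', hostname)[0]  # -> 'r02'
    region = hostname.split(rack)[0]                   # -> 'site1'
    rack_name = rack.replace('r', 'rack')              # -> 'rack02'
    # _categorize_hosts() then applies the same "first ctrl-profile host
    # becomes genesis" rule sketched earlier for the parser.
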
for rack in sorted(self.data['baremetal'].keys()): @@ -206,7 +208,7 @@ def _update_baremetal_host_ip_data(self, rule_data): default_ip_offset = rule_data['default'] host_idx = 0 - LOG.info("Looping through baremetal hosts") + LOG.info("Update baremetal host ip's") for racks in self.data['baremetal'].keys(): rack_hosts = self.data['baremetal'][racks] for host in rack_hosts: @@ -216,7 +218,7 @@ def _update_baremetal_host_ip_data(self, rule_data): host_networks[net] = str(ips[host_idx + default_ip_offset]) host_idx = host_idx + 1 - LOG.debug("Updated baremetal host:{}".format( + LOG.debug("Updated baremetal host:\n{}".format( pprint.pformat(self.data['baremetal']))) def _update_vlan_net_data(self, rule_data): @@ -239,7 +241,7 @@ def _update_vlan_net_data(self, rule_data): dhcp_ip_end_offset = rule_data['dhcp_ip_end'] # Set ingress vip and CIDR for bgp - LOG.info("Applying rule to network bgp data") + LOG.info("Apply network design rules:bgp") subnet = netaddr.IPNetwork( self.data['network']['vlan_network_data']['ingress']['subnet'][0]) ips = list(subnet) @@ -250,7 +252,7 @@ def _update_vlan_net_data(self, rule_data): LOG.debug("Updated network bgp data:\n{}".format( pprint.pformat(self.data['network']['bgp']))) - LOG.info("Applying rule to vlan network data") + LOG.info("Apply network design rules:vlan") # Apply rules to vlan networks for net_type in self.network_subnets: if net_type == 'oob': @@ -316,23 +318,22 @@ def load_extracted_data_from_data_source(self, extracted_data): extracted_data = yaml.safe_load(raw_data) """ - LOG.info("Load extracted data from data source") + LOG.info("Loading plugin data source") self.data = extracted_data - LOG.debug("Extracted data from plugin data source:\n{}".format( + LOG.debug("Extracted data from plugin:\n{}".format( pprint.pformat(extracted_data))) extracted_file = "extracted_file.yaml" yaml_file = yaml.dump(extracted_data, default_flow_style=False) with open(extracted_file, 'w') as f: f.write(yaml_file) f.close() - # TODO(pg710r): validation stopped temporarily - # self._validate_extracted_data(extracted_data) + # Append region_data supplied from CLI to self.data self.data['region_name'] = self.region_name def dump_intermediary_file(self, intermediary_dir): - """ Dumping intermediary yaml """ - LOG.info("Dumping intermediary yaml") + """ Writing intermediary yaml """ + LOG.info("Writing intermediary yaml") intermediary_file = "{}_intermediary.yaml".format( self.data['region_name']) # Check of if output dir = intermediary_dir exists @@ -340,7 +341,7 @@ def dump_intermediary_file(self, intermediary_dir): outfile = "{}/{}".format(intermediary_dir, intermediary_file) else: outfile = intermediary_file - LOG.info("Intermediary file dir:{}".format(outfile)) + LOG.info("Intermediary file:{}".format(outfile)) yaml_file = yaml.dump(self.data, default_flow_style=False) with open(outfile, 'w') as f: f.write(yaml_file) @@ -348,9 +349,9 @@ def dump_intermediary_file(self, intermediary_dir): def generate_intermediary_yaml(self): """ Generating intermediary yaml """ - LOG.info("Generating intermediary yaml") + LOG.info("Start: Generate Intermediary") self._apply_design_rules() self._get_genesis_node_details() + self._validate_intermediary_data(self.data) self.intermediary_yaml = self.data - # TODO(pg710r):self._modify_intermediary() return self.intermediary_yaml diff --git a/spyglass/schemas/data_schema.json b/spyglass/schemas/data_schema.json index 53182cc..7be761f 100644 --- a/spyglass/schemas/data_schema.json +++ b/spyglass/schemas/data_schema.json @@ -140,8 
+140,11 @@ "properties": { "subnet": { "description": "Subnet address of the network", - "type": "string", - "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$" + "type": "array", + "items": { + "type": "string", + "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$" + } }, "vlan": { "description": "Vlan id of the network", @@ -166,13 +169,8 @@ "pattern":"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$" } ] + } }, - "vlan": { - "description": "Vlan id of the network", - "type": "string", - "pattern": "^([0-9]|[0-9][0-9]|[0-9][0-9][0-9]|[0-3][0-9][0-9][0-9]|40[0-9][0-5])$" - } - }, "required": [ "subnet" ] @@ -182,8 +180,11 @@ "properties": { "subnet": { "description": "Subnet address of the network", + "type": "array", + "items": { "type": "string", "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$" + } }, "vlan": { "description": "Vlan id of the network", @@ -201,18 +202,20 @@ "properties": { "subnet": { "description": "Subnet address of the network", + "type": "array", + "items": { "type": "string", "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$" + } }, "vlan": { "description": "Vlan id of the network", "type": "string", - "pattern": "^([0-9]|[0-9][0-9]|[0-9][0-9][0-9]|[0-3][0-9][0-9][0-9]|40[0-9][0-5])$" + "pattern": "^([0-9]|[0-9][0-9]|[0-9][0-9][0-9]|[0-3][0-9][0-9][0-9]|40[0-9][0-5])?$" } }, "required": [ - "subnet", - "vlan" + "subnet" ] }, "pxe": { @@ -220,8 +223,11 @@ "properties": { "subnet": { "description": "Subnet address of the network", + "type": "array", + "items": { "type": "string", "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$" + } }, "vlan": { "description": "Vlan id of the network", @@ -239,8 +245,11 @@ "properties": { "subnet": { "description": "Subnet address of the network", + "type": "array", + "items": { "type": "string", "pattern": "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])/([0-9]|[1-2][0-9]|3[0-2])$" + } }, "vlan": { "description": "Vlan id of the network", @@ -252,8 +261,7 @@ "subnet", "vlan" ] - } - + } }, "required" :[ "calico", diff --git a/spyglass/spyglass.py b/spyglass/spyglass.py index 036905e..e14fada 100644 --- a/spyglass/spyglass.py +++ b/spyglass/spyglass.py @@ -113,7 +113,7 @@ def main(*args, **kwargs): stream_handle.setFormatter(formatter) LOG.addHandler(stream_handle) LOG.info("Spyglass start") - LOG.debug("CLI Parameters passed:\n{}".format(kwargs)) + LOG.info("CLI Parameters passed:\n{}".format(kwargs)) # When intermediary file is specified, Spyglass will generate the # manifest without extracting any data from plugin data source @@ -140,14 +140,13 @@ def main(*args, **kwargs): raw_data = config.read() additional_config_data = yaml.safe_load(raw_data) - LOG.debug("Additional config data:\n{}".format( + LOG.debug("Additional site config data passed:\n{}".format( pprint.pformat(additional_config_data))) data_extractor.set_config_opts(plugin_conf) data_extractor.extract_data() LOG.info( "Apply additional configuration 
from:{}".format(additional_config)) data_extractor.apply_additional_data(additional_config_data) - LOG.debug(pprint.pformat(data_extractor.site_data)) """ Initialize ProcessDataSource object to process received data """ From b138e2df400277ad72853059534e77893fa8c3ad Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Tue, 27 Nov 2018 19:55:11 +0530 Subject: [PATCH 06/17] Documentation Updates --- doc/source/getting_started.rst | 56 ++++++++++++++++++++++------------ 1 file changed, 36 insertions(+), 20 deletions(-) diff --git a/doc/source/getting_started.rst b/doc/source/getting_started.rst index 05ee0ef..985e9fb 100644 --- a/doc/source/getting_started.rst +++ b/doc/source/getting_started.rst @@ -68,13 +68,13 @@ Basic Usage Before using Spyglass you must: -1. Clone the Tugboat repository: +1. Clone the Spyglass repository: .. code-block:: console git clone https://github.com/att-comdev/tugboat/tree/spyglass -2. Install the required packages in spyglass/: +2. Install the required packages in spyglass: .. code-block:: console @@ -87,24 +87,40 @@ CLI Options Usage: spyglass [OPTIONS] Options: - -s, --site TEXT Specify the site for which manifests to be - generated - -t, --type TEXT Specify the plugin type formation or tugboat - -f, --formation_url TEXT Specify the formation url - -u, --formation_user TEXT Specify the formation user id - -p, --formation_password TEXT Specify the formation user password - -d, --additional_config PATH Site specific configuraton details - -g, --generate_intermediary Dump intermediary file from passed excel and - excel spec - -m, --generate_manifests Generate manifests from the generated - intermediary file - -l, --loglevel INTEGER Loglevel NOTSET:0 ,DEBUG:10, INFO:20, - WARNING:30, ERROR:40, CRITICAL:50 [default: - 20] - --help Show this message and exit. - - -1. Running Spyglass with Remote Data Source + -s, --site TEXT Specify the site for which manifests to be + generated + -t, --type TEXT Specify the plugin type formation or tugboat + -f, --formation_url TEXT Specify the formation url + -u, --formation_user TEXT Specify the formation user id + -p, --formation_password TEXT Specify the formation user password + -i, --intermediary PATH Intermediary file path generate manifests, + use -m also with this option + -d, --additional_config PATH Site specific configuraton details + -g, --generate_intermediary Dump intermediary file from passed excel and + excel spec + -idir, --intermediary_dir PATH The path where intermediary file needs to be + generated + -e, --edit_intermediary / -nedit, --no_edit_intermediary + Flag to let user edit intermediary + -m, --generate_manifests Generate manifests from the generated + intermediary file + -mdir, --manifest_dir PATH The path where manifest files needs to be + generated + -x, --excel PATH Path to engineering excel file, to be passed + with generate_intermediary + -e, --excel_spec PATH Path to excel spec, to be passed with + generate_intermediary + -l, --loglevel INTEGER Loglevel NOTSET:0 ,DEBUG:10, INFO:20, + WARNING:30, ERROR:40, CRITICAL:50 [default: + 20] + --help Show this message and exit. + + +1. Running Spyglass with Remote Data Source Plugin spyglass -mg --type formation -f -u -p -d -s +2. 
Running Spyglass with Excel Plugin + +spyglass -mg --type tugboat -x -e -d -s + From 61b3c0879595f76e86e630c969c94902995dc536 Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Tue, 27 Nov 2018 20:04:05 +0530 Subject: [PATCH 07/17] Fixed bug to set node type in formation --- spyglass/data_extractor/formation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spyglass/data_extractor/formation.py b/spyglass/data_extractor/formation.py index 8945efd..8a20ab9 100644 --- a/spyglass/data_extractor/formation.py +++ b/spyglass/data_extractor/formation.py @@ -145,7 +145,7 @@ def get_hosts(self, region, rack=None): node_type = 'genesis' genesis_set = True else: - node_type = 'genesis' + node_type = 'controller' hosts_list.append({ 'name': host.aic_standard_name, 'type': node_type, From 06cc70939c4b3288ce689a01ccef179addee2e7d Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Tue, 27 Nov 2018 21:39:08 +0530 Subject: [PATCH 08/17] Adding a dummy site --- spyglass/config/rules.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/spyglass/config/rules.yaml b/spyglass/config/rules.yaml index 726464d..63c1af0 100644 --- a/spyglass/config/rules.yaml +++ b/spyglass/config/rules.yaml @@ -63,6 +63,14 @@ rule_host_profile_interfaces: rule_hardware_profile: name: hardware_profile hardware_profile: + dummy: + profile_name: + compute: cmp + ctrl: ctr + host_type: + compute: nc-cmp + ctrl: nc-ctr + hw_type: dell_r740_purley_nc nc: profile_name: compute: nsb From b35c4b0b4e842b4701ec135932c31e11527d6fbd Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Tue, 27 Nov 2018 21:39:56 +0530 Subject: [PATCH 09/17] Adding tags for missing values while dictionary get --- spyglass/data_extractor/base.py | 4 +-- spyglass/data_extractor/tugboat/tugboat.py | 30 +++++++++++----------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/spyglass/data_extractor/base.py b/spyglass/data_extractor/base.py index 494a347..cee358e 100644 --- a/spyglass/data_extractor/base.py +++ b/spyglass/data_extractor/base.py @@ -399,8 +399,8 @@ def extract_network_information(self): for net in networks: tmp_net = {} if net['name'] in networks_to_scan: - tmp_net['subnet'] = net.get('subnet', '') - tmp_net['vlan'] = net.get('vlan', '') + tmp_net['subnet'] = net.get('subnet', '#CHANGE_ME') + tmp_net['vlan'] = net.get('vlan', '#CHANGE_ME') network_data['vlan_network_data'][net['name']] = tmp_net diff --git a/spyglass/data_extractor/tugboat/tugboat.py b/spyglass/data_extractor/tugboat/tugboat.py index 58fcb2e..58f044e 100644 --- a/spyglass/data_extractor/tugboat/tugboat.py +++ b/spyglass/data_extractor/tugboat/tugboat.py @@ -146,10 +146,10 @@ def get_networks(self, region): value = re.findall(vlan_pattern, net_val['vlan']) tmp_vlan['vlan'] = value[0] else: - tmp_vlan['vlan'] = "" + tmp_vlan['vlan'] = "#CHANGE_ME" - tmp_vlan['subnet'] = net_val.get('subnet', "") - tmp_vlan['gateway'] = net_val.get('gateway', "") + tmp_vlan['subnet'] = net_val.get('subnet', "#CHANGE_ME") + tmp_vlan['gateway'] = net_val.get('gateway', "#CHANGE_ME") else: tmp_vlan['name'] = 'ingress' tmp_vlan['subnet'] = net_val @@ -174,12 +174,12 @@ def get_ips(self, region, host=None): ip_ = {} ipmi_data = self.parsed_xl_data['ipmi_data'][0] ip_[host] = { - 'oob': ipmi_data[host].get('ipmi_address', ''), - 'oam': ipmi_data[host].get('oam', ''), - 'calico': ipmi_data[host].get('calico', ''), - 'overlay': ipmi_data[host].get('overlay', ''), - 'pxe': ipmi_data[host].get('pxe', ''), - 'storage': ipmi_data[host].get('storage', '') + 'oob': 
ipmi_data[host].get('ipmi_address', '#CHANGE_ME'), + 'oam': ipmi_data[host].get('oam', '#CHANGE_ME'), + 'calico': ipmi_data[host].get('calico', '#CHANGE_ME'), + 'overlay': ipmi_data[host].get('overlay', '#CHANGE_ME'), + 'pxe': ipmi_data[host].get('pxe', '#CHANGE_ME'), + 'storage': ipmi_data[host].get('storage', '#CHANGE_ME') } return ip_ @@ -190,14 +190,14 @@ def get_ldap_information(self, region): ldap_info = {} # raw url is 'url: ldap://example.com' so we are converting to # 'ldap://example.com' - url = ldap_raw_data.get('url', '') + url = ldap_raw_data.get('url', '#CHANGE_ME') try: ldap_info['url'] = url.split(' ')[1] ldap_info['domain'] = url.split('.')[1] except IndexError as e: LOG.error("url.split:{}".format(e)) - ldap_info['common_name'] = ldap_raw_data.get('common_name', '') - ldap_info['subdomain'] = ldap_raw_data.get('subdomain', '') + ldap_info['common_name'] = ldap_raw_data.get('common_name', '#CHANGE_ME') + ldap_info['subdomain'] = ldap_raw_data.get('subdomain', '#CHANGE_ME') return ldap_info @@ -229,9 +229,9 @@ def get_location_information(self, region): corridor_pattern = r'\d+' corridor_number = re.findall(corridor_pattern, location_data['corridor'])[0] - name = location_data.get('name', '') - state = location_data.get('state', '') - country = location_data.get('country', '') + name = location_data.get('name', '#CHANGE_ME') + state = location_data.get('state', '#CHANGE_ME') + country = location_data.get('country', '#CHANGE_ME') physical_location_id = location_data.get('physical_location', '') return { From a714eed237d73bb6b2531056eabd6e56e7c3076a Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Tue, 27 Nov 2018 21:44:13 +0530 Subject: [PATCH 10/17] Added sample tugboat excel file and spec --- spyglass/sample/SiteDesignSpec_v0.1.xlsx | Bin 0 -> 17313 bytes spyglass/sample/excel_spec.yaml | 63 +++++++++++++++++++++++ spyglass/sample/site_config.yaml | 33 ++++++++++++ spyglass/spyglass.py | 2 +- 4 files changed, 97 insertions(+), 1 deletion(-) create mode 100644 spyglass/sample/SiteDesignSpec_v0.1.xlsx create mode 100644 spyglass/sample/excel_spec.yaml create mode 100644 spyglass/sample/site_config.yaml diff --git a/spyglass/sample/SiteDesignSpec_v0.1.xlsx b/spyglass/sample/SiteDesignSpec_v0.1.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..79917573d7630982dd43c22a9a1f2b8df886c26a GIT binary patch literal 17313 zcmeIaWq2JsvMy|9W@ct)h?&_jGcz;B%*@P=neEsyL(GnunVG%Jc0TVtv%fu)nR}o6 z|D68OdeqYUO06Yzl}c4hQ3ezY4G01V3J3^@7>GDpoW~g$2q+pH2nZDj3PfAP-p<9; z&P89<)4|kPm%+ogeo5h&x<$(zy{+)ul0U#9pz@W-5(p z%!s^meiqSXlh-9HM-fB3sZ-4tmhRd9=;o9z9gN&FZLBRB4Y}#!2qD3sNmKhpETyZN z3DQK10WV^H1WFZy4#e(y5M(&W1VO?PDon5kr?_ictgVi%E^erS9fxT11|`z&v^L*z zkrYRY1wSGQ{;UY^omQ#k*Ups3!#OD*q=O?DO=X>2&$~|fxNDYtJ>OX$zn~A+r6_Tk z=Yq898h`ESi-vp^2XWzt516_M~#_jH9V+Eh~^2*(4R8y%w!Unq*MNfkrLC3&r5i2+$go)7&t6 ztO1@ERynT@t^3BAf08tr<-eF(dJHcZ&MkX3lYu_$Y-GM%<1=hS40yu%tYX1!S!a~% z#6#k#Z(`GPE1K1f`sPC~pFONh!iMt5GAS`co)7TXYGS({%kdoGgzm2#z8VRPA}rhZ z5L^GbNTkb44WB3-pSg?-ro8Tk@nY`0sfF{A1wc~ua3kC zs{tms;LDJ%py^J>#COF}9&hT9&X!e3rh1exIUD10LC=@WShwa!f8+%PW|)ewj$)+0< z*X4&hb&U->5&@f(A4}g5X!)v#p$P)+nOmTfwB0`hswpbqh^J2WRi+J0<&|~Zh~=Ag zU;I4s1niKi08a_4HY2%nBMn@f@5ysLE!%|Py_4Fn6WISYbNIQ}tMflTt!5vp5aP#_ z_?S8WI%ld>_#=QW(%W$hNa5b3wV3z9AjpWgu1g)KpYER~87P@+9e7<*&6@{6mchsow$-voW zz@_4ldsi86{`-}^xv3ECsOx=;(p;aE=9=UNA5DD*)ZRlwSSqq@o^Giy9Rj`WU8)fH z^;sYnpw=5K67Y*Mid&p;T$K22q<6={e(o2PepdHr>(mXyQPH9B zlsJ14ha>?Pdac!SIv?NLPaJ*F|9&j?wSgjvfh}=TDI72ZhE)n(!L}oQJIal=hDPst 
[... remaining base85-encoded GIT binary patch payload omitted ...]

literal 0
HcmV?d00001

diff --git a/spyglass/sample/excel_spec.yaml b/spyglass/sample/excel_spec.yaml
new file mode 100644
index 0000000..62831a0
--- /dev/null
+++ b/spyglass/sample/excel_spec.yaml
@@ -0,0 +1,63 @@
+# Copyright 2018 The Openstack-Helm Authors.
+# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Important: Please modify the dictionary with appropriate
+# design spec file.
+--- +specs: + # Design Spec file name: SiteDesignSpec_v0.1.xlsx + xl_spec: + ipmi_sheet_name: 'Site-Information' + start_row: 4 + end_row: 15 + hostname_col: 2 + ipmi_address_col: 3 + host_profile_col: 5 + ipmi_gateway_col: 4 + private_ip_sheet: 'Site-Information' + net_type_col: 1 + vlan_col: 2 + vlan_start_row: 19 + vlan_end_row: 30 + net_start_row: 33 + net_end_row: 40 + net_col: 2 + net_vlan_col: 1 + public_ip_sheet: 'Site-Information' + oam_vlan_col: 1 + oam_ip_row: 43 + oam_ip_col: 2 + oob_net_row: 48 + oob_net_start_col: 2 + oob_net_end_col: 5 + ingress_ip_row: 45 + dns_ntp_ldap_sheet: 'Site-Information' + login_domain_row: 52 + ldap_col: 2 + global_group: 53 + ldap_search_url_row: 54 + ntp_row: 55 + ntp_col: 2 + dns_row: 56 + dns_col: 2 + domain_row: 51 + domain_col: 2 + location_sheet: 'Site-Information' + column: 2 + corridor_row: 59 + site_name_row: 58 + state_name_row: 60 + country_name_row: 61 + clli_name_row: 62 diff --git a/spyglass/sample/site_config.yaml b/spyglass/sample/site_config.yaml new file mode 100644 index 0000000..f51c1f0 --- /dev/null +++ b/spyglass/sample/site_config.yaml @@ -0,0 +1,33 @@ +################################## +# Site Specific Tugboat Settings # +################################## +--- +site_info: + ldap: + common_name: Test_Users + url: ldap://example.com + subdomain: testitservices + ntp: + servers: 10.10.10.10,20.20.20.20,30.30.30.30 + sitetype: dummy + domain: SAMPLE.example.com + dns: + servers: 10.10.10.10,20.20.20.20,30.30.30.30 +network: + vlan_network_data: + ingress: + subnet: + - 132.68.226.72/29 + bgp : + peers: + - '172.29.0.2' + - '172.29.0.3' + asnumber: 64671 + peer_asnumber: 64688 +storage: + ceph: + controller: + osd_count: 6 +... + + diff --git a/spyglass/spyglass.py b/spyglass/spyglass.py index 657bff8..2bc19d1 100644 --- a/spyglass/spyglass.py +++ b/spyglass/spyglass.py @@ -69,7 +69,7 @@ def generate_manifest_files(intermediary, manifest_dir=None): @click.option( '--edit_intermediary/--no_edit_intermediary', '-e/-nedit', - default=True, + default=False, help='Flag to let user edit intermediary') @click.option( '--generate_manifests', From 9e3b7a4f1270100efe19b1b177335ab47f894c0b Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Wed, 28 Nov 2018 00:11:10 +0530 Subject: [PATCH 11/17] Suppress VLAN for ingress and oob --- spyglass/data_extractor/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spyglass/data_extractor/base.py b/spyglass/data_extractor/base.py index cee358e..d211431 100644 --- a/spyglass/data_extractor/base.py +++ b/spyglass/data_extractor/base.py @@ -400,7 +400,8 @@ def extract_network_information(self): tmp_net = {} if net['name'] in networks_to_scan: tmp_net['subnet'] = net.get('subnet', '#CHANGE_ME') - tmp_net['vlan'] = net.get('vlan', '#CHANGE_ME') + if ((net['name'] != 'ingress') and (net['name'] != 'oob')): + tmp_net['vlan'] = net.get('vlan', '#CHANGE_ME') network_data['vlan_network_data'][net['name']] = tmp_net From 42521058895eef908e85d2fdd33912a2fd65fd16 Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Wed, 28 Nov 2018 00:13:18 +0530 Subject: [PATCH 12/17] modified host profile extraction rule from excel file --- spyglass/data_extractor/tugboat/excel_parser.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/spyglass/data_extractor/tugboat/excel_parser.py b/spyglass/data_extractor/tugboat/excel_parser.py index 7f74c2f..d19ea68 100644 --- a/spyglass/data_extractor/tugboat/excel_parser.py +++ b/spyglass/data_extractor/tugboat/excel_parser.py @@ -104,16 
+104,15 @@ def get_ipmi_data(self):
                 previous_server_gateway = ipmi_gateway
             else:
                 ipmi_gateway = previous_server_gateway
-            tmp_host_profile = ws.cell(row=row, column=host_profile_col).value
+            host_profile = ws.cell(row=row, column=host_profile_col).value
             try:
-                if tmp_host_profile is None:
+                if host_profile is None:
                     raise RuntimeError("No value read from {} ".format(
                         self.file_name) + "sheet:{} row:{}, col:{}".format(
                             self.spec, row, host_profile_col))
             except RuntimeError as rerror:
                 LOG.critical(rerror)
                 sys.exit("Tugboat exited!!")
-            host_profile = tmp_host_profile.split('-')[1]
             ipmi_data[hostname] = {
                 'ipmi_address': ipmi_address,
                 'ipmi_gateway': ipmi_gateway,

From d639abdb894da73681be0dea0ea788648c671fe0 Mon Sep 17 00:00:00 2001
From: Purnendu Ghosh
Date: Wed, 28 Nov 2018 00:13:52 +0530
Subject: [PATCH 13/17] Changes to vlan id's and host profile identifiers

---
 spyglass/sample/SiteDesignSpec_v0.1.xlsx | Bin 17313 -> 17291 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/spyglass/sample/SiteDesignSpec_v0.1.xlsx b/spyglass/sample/SiteDesignSpec_v0.1.xlsx
index 79917573d7630982dd43c22a9a1f2b8df886c26a..cdf827808bc2f887f635c8985579449e8234fb03 100644
GIT binary patch
delta 8830
[... base85-encoded binary delta payload omitted ...]
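The following is a minimal sketch -- not part of the patch series and not the actual spyglass parser -- of how the coordinates in the sample spyglass/sample/excel_spec.yaml drive the cell reads once the PATCH 12 change above is applied (the host profile cell value is used as-is instead of being split on '-'). It assumes openpyxl and the sample files added earlier in the series; load_spec and read_host_profiles are illustrative helper names, not spyglass APIs.

    # Illustrative sketch only: assumes openpyxl and the sample excel_spec.yaml
    # coordinates; 'load_spec' and 'read_host_profiles' are hypothetical helpers.
    import yaml
    from openpyxl import load_workbook


    def load_spec(spec_file, spec_name='xl_spec'):
        # Read the cell coordinates for the chosen spec from excel_spec.yaml
        with open(spec_file) as f:
            return yaml.safe_load(f)['specs'][spec_name]


    def read_host_profiles(xlsx_file, spec):
        # Open the engineering workbook and walk the IPMI rows named in the spec
        wb = load_workbook(xlsx_file, data_only=True)
        ws = wb[spec['ipmi_sheet_name']]
        profiles = {}
        for row in range(spec['start_row'], spec['end_row'] + 1):
            hostname = ws.cell(row=row, column=spec['hostname_col']).value
            # After PATCH 12 the profile cell is taken as-is; it is no longer
            # derived by splitting the cell value on '-'.
            host_profile = ws.cell(row=row, column=spec['host_profile_col']).value
            if hostname:
                profiles[hostname] = host_profile
        return profiles


    spec = load_spec('spyglass/sample/excel_spec.yaml')
    print(read_host_profiles('spyglass/sample/SiteDesignSpec_v0.1.xlsx', spec))

The real parser (spyglass/data_extractor/tugboat/excel_parser.py) performs considerably more validation; this only illustrates the coordinate-driven lookup.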

zJ?E~|ZckMApMQ;r?R)+~{+rJMYL|7YKf}YoIAOrR5d1qO>1u82^3g)a&Beyi%JuJp z&G)T?(~>B`v!u*Kx-$dKq2|7!`}Z)i(0d==P#Y zaaw1Ou0@zraowb-;200;q(|2Jv{kOH^!g-;vpFQkn<`ZsJv%)Jr*RPeRp@l?M%tB9 zKC2B47nx6C;^L`FMGnX2$~#WAMx%wDxJGekf`b}!^2*t=yvdiSTo0SoIb_b;K%?^Z zC|~?rgH`I5ZN26Vz}0QOt?WqJW}ft|eAhL;mS-WzDzJeSji%DsxM>*8B02;KI+Q|G z5<7O%foV`w`U;7|u7-{I`Lk3DSdz^PPpt9;O5XSA_?wcX&Wpk+aDjCdcI`uTBfXhp z;_7%@Rp~W(!Pm}9cCGF8atfA^iCntwFbj0 zj6^2xZtke!l)R=ky|T8DQ46*B3Dv6el9`{mG_pHEQx2AgB?eP8_XmP+uG?CbtLn>C z=2O46J$6jB0CpWm7`5$-Gy@LN--asUDfoz%V=?mq@k|Cdy@jE;jGH&N(_xu+~+$-yUXt%AlFQDR?b~~kI*9O!2QQ;OQ(gh~@ z%i%;RS{%3la+aJ>LhNnyOcFSWo}96$JjZ?dt3}}pfCTdi^-huWAWHFlQp*|sn>JVc z0MT*fN*&?rgK8bCc_EJM?oWfJ4WyMD)4$zsg-k zn&+!@#B>DI4GefyJkYk3s ztN9TA04a=-f9jHV1VF8UGZi0|?&?9!?sU}*Q89cid-v=Y8Ty%d^aXXnD zGiZ6}=9Hs`#*$I8ac`x1Z-w8OGJWX*&Y?`tG8}>C;`N1|uFIFad9xj+LB15_t^+-p z&{&fR-j44oE%|M;HItQa4s6`nbjq?V!$sFgB15X?fTn2%-!l`TyMk|Gy}B zhL_X;A;d_AknI1D!2g9!L!xDv$o|f|8Y~Qq!^_1H-2ZmI)IuRYWF+BG1t8c0ED#}C RR#*dwnJgU=)$6~g{{i5+{i*-} From 22acbab95addbe4e3d9fcb11634e04cbe4a61929 Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Wed, 28 Nov 2018 00:19:11 +0530 Subject: [PATCH 14/17] removed unused functions --- spyglass/data_extractor/tugboat/tugboat.py | 42 ---------------------- 1 file changed, 42 deletions(-) diff --git a/spyglass/data_extractor/tugboat/tugboat.py b/spyglass/data_extractor/tugboat/tugboat.py index 58f044e..6816bd8 100644 --- a/spyglass/data_extractor/tugboat/tugboat.py +++ b/spyglass/data_extractor/tugboat/tugboat.py @@ -300,27 +300,6 @@ def _get_network_name_from_vlan_name(self, vlan_name): "Unable to recognize VLAN name extracted from Plugin data source") return ("") - def _get_private_network_data(self, raw_data): - """ - Get private network data from information extracted - by ExcelParser(i.e raw data) - """ - network_data = {} - # Private Network Types are : pxe, storage, calico, overlay - private_network_types = { - 'pxe': 'pxe', - 'storage': 'storage', - 'calico': 'calico', - 'overlay': 'overlay' - } - for net_type in private_network_types: - for key in raw_data['private']: - if net_type.lower() in key.lower(): - network_data[private_network_types[net_type]] = raw_data[ - 'private'][key] - LOG.debug("Private Network Data:\n%s", pprint.pformat(network_data)) - return network_data - def _get_formatted_server_list(self, server_list): """ Format dns and ntp server list as comma separated string """ @@ -334,27 +313,6 @@ def _get_formatted_server_list(self, server_list): formatted_server_list = ','.join(servers) return formatted_server_list - def _categorize_hosts(self): - """ - Categorize host as genesis, controller and compute based on - the hostname string extracted from xl - """ - """ loop through IPMI data and determine hosttype """ - is_genesis = False - sitetype = self.sitetype - ipmi_data = self.parsed_xl_data['ipmi_data'][0] - ctrl_profile_type = self.rules_data['hardware_profile'][sitetype][ - 'profile_name']['ctrl'] - for host in sorted(ipmi_data.keys()): - if (ipmi_data[host]['host_profile'] == ctrl_profile_type): - if not is_genesis: - self.host_type[host] = 'genesis' - is_genesis = True - else: - self.host_type[host] = 'controller' - else: - self.host_type[host] = 'compute' - def _get_rack(self, host): """ Get rack id from the rack string extracted From 781c69f297914890fc92184ca70d72aa5a722a4e Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Wed, 28 Nov 2018 00:19:48 +0530 Subject: [PATCH 15/17] modification for airship-seaworthy --- spyglass/config/rules.yaml | 68 ++------------------------------ 
spyglass/sample/site_config.yaml | 12 +++--- 2 files changed, 10 insertions(+), 70 deletions(-) diff --git a/spyglass/config/rules.yaml b/spyglass/config/rules.yaml index 63c1af0..dfe4025 100644 --- a/spyglass/config/rules.yaml +++ b/spyglass/config/rules.yaml @@ -27,72 +27,12 @@ rule_ip_alloc_offset: ingress_vip: 1 static_ip_end: -2 dhcp_ip_end: -2 -rule_host_profile_interfaces: - name: host_profile_interfaces - host_profile_interfaces: - gv: - networks: - p1p1: sriov_nic01 - bond1: - - gp_nic01 - - gp_nic02 - p3p2: sriov_nic02 - nsb: - networks: - p1p1: sriov_nic01 - bond1: - - gp_nic01 - - gp_nic02 - pxe: en04 - p3p2: sriov_nic02 - cp: - networks: - p1p1: sriov_nic01 - bond1: - - gp_nic01 - - gp_nic02 - p3p2: sriov_nic02 - ns: - networks: - p1p1: sriov_nic01 - bond1: - - gp_nic01 - - gp_nic02 - pxe: en04 - p3p2: sriov_nic02 rule_hardware_profile: name: hardware_profile hardware_profile: - dummy: + foundry: profile_name: - compute: cmp - ctrl: ctr - host_type: - compute: nc-cmp - ctrl: nc-ctr - hw_type: dell_r740_purley_nc - nc: - profile_name: - compute: nsb - ctrl: cp - host_type: - compute: nc-ns-r740 - ctrl: nc-cp - hw_type: dell_r740_purley_nc - 5ec: - profile_name: - compute: nsb - ctrl: cp - host_type: - compute: 5ec-ns-r640 - ctrl: 5ec-cp - hw_type: dell_r640_purley_5ec - cruiser: - profile_name: - compute: nsb - ctrl: cp - host_type: - compute: nc-ns-r740 - ctrl: nc-cp - hw_type: dell_r740_purley_nc + compute: dp-r720 + ctrl: cp-r720 + hw_type: dell_r720 ... diff --git a/spyglass/sample/site_config.yaml b/spyglass/sample/site_config.yaml index f51c1f0..25fa990 100644 --- a/spyglass/sample/site_config.yaml +++ b/spyglass/sample/site_config.yaml @@ -4,15 +4,15 @@ --- site_info: ldap: - common_name: Test_Users - url: ldap://example.com - subdomain: testitservices + common_name: test + url: ldap://ldap.example.com + subdomain: test ntp: servers: 10.10.10.10,20.20.20.20,30.30.30.30 - sitetype: dummy - domain: SAMPLE.example.com + sitetype: foundry + domain: atlantafoundry.com dns: - servers: 10.10.10.10,20.20.20.20,30.30.30.30 + servers: 8.8.8.8,8.8.4.4,208.67.222.222 network: vlan_network_data: ingress: From 84b195f5c05165a432d68a771ecd0ade4e63007e Mon Sep 17 00:00:00 2001 From: Purnendu Ghosh Date: Wed, 28 Nov 2018 00:20:44 +0530 Subject: [PATCH 16/17] modification for pep8 confirmation --- spyglass/data_extractor/base.py | 2 +- spyglass/data_extractor/tugboat/tugboat.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/spyglass/data_extractor/base.py b/spyglass/data_extractor/base.py index d211431..a7a53da 100644 --- a/spyglass/data_extractor/base.py +++ b/spyglass/data_extractor/base.py @@ -400,7 +400,7 @@ def extract_network_information(self): tmp_net = {} if net['name'] in networks_to_scan: tmp_net['subnet'] = net.get('subnet', '#CHANGE_ME') - if ((net['name'] != 'ingress') and (net['name'] != 'oob')): + if ((net['name'] != 'ingress') and (net['name'] != 'oob')): tmp_net['vlan'] = net.get('vlan', '#CHANGE_ME') network_data['vlan_network_data'][net['name']] = tmp_net diff --git a/spyglass/data_extractor/tugboat/tugboat.py b/spyglass/data_extractor/tugboat/tugboat.py index 6816bd8..4a92657 100644 --- a/spyglass/data_extractor/tugboat/tugboat.py +++ b/spyglass/data_extractor/tugboat/tugboat.py @@ -196,7 +196,8 @@ def get_ldap_information(self, region): ldap_info['domain'] = url.split('.')[1] except IndexError as e: LOG.error("url.split:{}".format(e)) - ldap_info['common_name'] = ldap_raw_data.get('common_name', '#CHANGE_ME') + ldap_info['common_name'] = 
ldap_raw_data.get('common_name',
+                                                     '#CHANGE_ME')
         ldap_info['subdomain'] = ldap_raw_data.get('subdomain', '#CHANGE_ME')

         return ldap_info

From fbe8f9a197f80e666c87faef35701c824e309d54 Mon Sep 17 00:00:00 2001
From: Purnendu Ghosh
Date: Wed, 28 Nov 2018 00:46:03 +0530
Subject: [PATCH 17/17] Added tugboat execution command using sample input files

---
 doc/source/getting_started.rst | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/doc/source/getting_started.rst b/doc/source/getting_started.rst
index 985e9fb..5bb3382 100644
--- a/doc/source/getting_started.rst
+++ b/doc/source/getting_started.rst
@@ -124,3 +124,9 @@ spyglass -mg --type formation -f -u -p -d -e -d -s
+for example:
+spyglass -mg -t tugboat -x SiteDesignSpec_v0.1.xlsx -e excel_spec_upstream.yaml -d site_config.yaml -s airship-seaworthy
+Where 'excel_spec_upstream.yaml', 'SiteDesignSpec_v0.1.xlsx' and
+'site_config.yaml' are the sample excel specification, design spec and
+site config files from the spyglass/sample folder.
+
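As a closing illustration -- not part of the patch series -- here is a minimal sketch of the vlan_network_data assembly as it looks once PATCH 11 (suppress VLAN for ingress and oob) and the PATCH 16 pep8 fix are both applied. The standalone build_vlan_network_data function and the sample networks list are hypothetical stand-ins; in spyglass this condition lives inside extract_network_information() in spyglass/data_extractor/base.py.

    # Minimal sketch of the VLAN suppression rule from PATCH 11/16.
    # The standalone function and the sample 'networks' list are hypothetical;
    # only the ingress/oob condition mirrors the diffs above.
    def build_vlan_network_data(networks, networks_to_scan):
        vlan_network_data = {}
        for net in networks:
            if net['name'] not in networks_to_scan:
                continue
            tmp_net = {'subnet': net.get('subnet', '#CHANGE_ME')}
            # ingress and oob do not carry a VLAN id in the intermediary,
            # so the 'vlan' key is suppressed for those two networks.
            if net['name'] != 'ingress' and net['name'] != 'oob':
                tmp_net['vlan'] = net.get('vlan', '#CHANGE_ME')
            vlan_network_data[net['name']] = tmp_net
        return vlan_network_data


    networks = [
        {'name': 'calico', 'subnet': '10.1.1.0/24', 'vlan': '22'},
        {'name': 'ingress', 'subnet': '132.68.226.72/29'},
        {'name': 'oob', 'subnet': '10.0.0.0/24', 'vlan': '41'},
    ]
    print(build_vlan_network_data(networks, {'calico', 'ingress', 'oob'}))

Running the sketch shows calico keeping its 'vlan' key while ingress and oob emit only 'subnet', which is the behaviour the two patches introduce.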