diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 923c8ff7fad0a3187d3b9e969d67ae34472f9f64..72b30f67b1e3161b9dbb50a2b611973b7152b158 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -23,7 +23,7 @@ ci_test_image:
     - docker push ${CI_REGISTRY_IMAGE}/openappstack-ci:${CI_COMMIT_REF_NAME}
   only:
     changes:
-      - test/Dockerfile
+      - Dockerfile
       - requirements.txt
 
 bootstrap:
diff --git a/test/Dockerfile b/Dockerfile
similarity index 90%
rename from test/Dockerfile
rename to Dockerfile
index 0422d0d41d9699586dba97042747ab0919f0f77a..83c3a3ec51795f5d368bb1c5e8c115c406b363cb 100644
--- a/test/Dockerfile
+++ b/Dockerfile
@@ -20,6 +20,6 @@ RUN apk --no-cache add \
     python3-dev \
     rsync
 
-COPY ../requirements.txt /requirements.txt
+COPY ./test/requirements.txt /requirements.txt
 RUN pip3 install -r /requirements.txt
 RUN ln -s /usr/bin/python3 /usr/bin/python
diff --git a/openappstack/__main__.py b/openappstack/__main__.py
index f85238fb5346cf44fe9436bfbd22d573eecb4a86..4f18f56a07942f917b2fd3e8e380ef7ced424d76 100755
--- a/openappstack/__main__.py
+++ b/openappstack/__main__.py
@@ -192,7 +192,7 @@ def main(): # pylint: disable=too-many-statements,too-many-branches,too-many-lo
 
     if args.terminate_droplet:
         # In case none of the subparser's functions have been called, load data
-        clus.load_data(reload=False)
+        clus.load_data()
         cosmos.terminate_droplets_by_name(clus.hostname)
 
     if not hasattr(args, 'func') and not args.terminate_droplet:
@@ -318,6 +318,7 @@ def create_domain_records(domain, droplet_ip, subdomain=None):
     :param subdomain: Optional subdomain to host OAS on.
     :type subdomain: str or None
     """
+    subdomain_arg = subdomain
     if subdomain is None:
         subdomain_arg = "@"
 
@@ -327,7 +328,6 @@ def create_domain_records(domain, droplet_ip, subdomain=None):
     log.info('Domain record: %s', domain_record)
 
     subdomain_arg = '*'
-
     if subdomain is not None:
         subdomain_arg += '.' + subdomain
 
diff --git a/openappstack/cluster.py b/openappstack/cluster.py
index f03834f9d6eda12e405255d30e25ea27cb57196d..9d1094a00a9dab1d61a67cd03d12545f9a0ebf4f 100644
--- a/openappstack/cluster.py
+++ b/openappstack/cluster.py
@@ -40,36 +40,39 @@ class Cluster:
         self.domain = None
         # By default, use Let's Encrypt's staging environment
         self.acme_staging = True
-
+        # Set to True once cluster data has been loaded from file
+        self.data_loaded = False
         # Load data from inventory.yml and settings.yml
         if load_data:
             self.load_data()
 
-    def load_data(self, reload=True):
+    def load_data(self):
         """
         Loads cluster data from inventory.yml and settings.yml files
 
-        :param bool reload: If reload is True (default), load values from the
-            files even if they are already set in the class.
+        Set self.data_loaded to False if this function should re-read data
+        from file.
""" - with open(self.settings_file, 'r') as stream: - settings = yaml.safe_load(stream) - if self.ip_address is None or reload: + if not self.data_loaded: + with open(self.settings_file, 'r') as stream: + settings = yaml.safe_load(stream) self.ip_address = settings['ip_address'] - if self.domain is None or reload: self.domain = settings['domain'] - log.debug("""Read data from settings.yml: - ip address: %s - domain: %s""", self.ip_address, self.domain) + log.debug("""Read data from settings.yml: + ip address: %s + domain: %s""", self.ip_address, self.domain) - with open(self.inventory_file, 'r') as stream: - inventory = yaml.safe_load(stream) - # Work with the master node from the inventory - if self.hostname is None or reload: + with open(self.inventory_file, 'r') as stream: + inventory = yaml.safe_load(stream) + # Work with the master node from the inventory self.hostname = inventory['all']['children']['master']['hosts'] - log.debug('Read data from inventory.yml:\n\thostname: %s', self.hostname) + log.debug('Read data from inventory.yml:\n\thostname: %s', self.hostname) + else: + log.debug('Not loading cluster data from file. Set reload_data to ' + 'True if you want a reload.') + self.data_loaded = True def create_droplet(self, ssh_key_id=0, hostname=None): """ @@ -130,6 +133,10 @@ class Cluster: with open(self.settings_file, 'w') as stream: stream.write(file_contents) + # Set self.data_loaded to True because the data in the class now + # reflects the data in the file. + self.data_loaded = True + def make_cluster_directory(self): """Make sure the cluster's file directory exists""" os.makedirs(self.cluster_dir, exist_ok=True)