From 4f82079aa9325b47bae5dfad9873d7f4d39b09de Mon Sep 17 00:00:00 2001 From: Seyeong Kim Date: Wed, 21 Oct 2015 23:54:06 +0000 Subject: [PATCH 01/23] change string debug and verbose to boolean LP#1398783 --- config.yaml | 8 ++++---- hooks/keystone_context.py | 8 +++----- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/config.yaml b/config.yaml index b00c2f0..8167465 100644 --- a/config.yaml +++ b/config.yaml @@ -1,11 +1,11 @@ options: debug: - default: "false" - type: string + type: boolean + default: False description: Enable verbose logging. verbose: - default: "false" - type: string + type: boolean + default: False description: Enable debug logging. use-syslog: type: boolean diff --git a/hooks/keystone_context.py b/hooks/keystone_context.py index 0c02368..04918fd 100644 --- a/hooks/keystone_context.py +++ b/hooks/keystone_context.py @@ -198,10 +198,8 @@ def __call__(self): ctxt['public_port'] = determine_api_port(api_port('keystone-public'), singlenode_mode=True) - debug = config('debug') - ctxt['debug'] = debug and bool_from_string(debug) - verbose = config('verbose') - ctxt['verbose'] = verbose and bool_from_string(verbose) + ctxt['debug'] = config('debug') + ctxt['verbose'] = config('verbose') ctxt['token_expiration'] = config('token-expiration') ctxt['identity_backend'] = config('identity-backend') @@ -263,7 +261,7 @@ class KeystoneLoggingContext(context.OSContextGenerator): def __call__(self): ctxt = {} debug = config('debug') - if debug and bool_from_string(debug): + if debug: ctxt['root_level'] = 'DEBUG' return ctxt From 5c579613bfae5bfaa770dc48cfa36c6d51c82126 Mon Sep 17 00:00:00 2001 From: Edward Hope-Morley Date: Mon, 7 Dec 2015 15:04:38 +0100 Subject: [PATCH 02/23] [hopem,r=] Ensure ssl certs always synced. Partially-Closes-Bug: 1520339 --- hooks/keystone_context.py | 35 +++++++--------------- hooks/keystone_hooks.py | 43 ++++++++++++---------------- hooks/keystone_utils.py | 43 ++++++++++++++-------------- templates/kilo/keystone.conf | 2 -- unit_tests/test_keystone_contexts.py | 6 ---- unit_tests/test_keystone_hooks.py | 34 +++++++++++----------- unit_tests/test_keystone_utils.py | 30 ++----------------- 7 files changed, 67 insertions(+), 126 deletions(-) diff --git a/hooks/keystone_context.py b/hooks/keystone_context.py index 0c02368..5a5f5bb 100644 --- a/hooks/keystone_context.py +++ b/hooks/keystone_context.py @@ -189,7 +189,7 @@ class KeystoneContext(context.OSContextGenerator): def __call__(self): from keystone_utils import ( api_port, set_admin_token, endpoint_url, resolve_address, - PUBLIC, ADMIN, PKI_CERTS_DIR, SSH_USER, ensure_permissions, + PUBLIC, ADMIN, PKI_CERTS_DIR, ensure_pki_cert_paths, ) ctxt = {} ctxt['token'] = set_admin_token(config('admin-token')) @@ -219,32 +219,16 @@ def __call__(self): enable_pki = config('enable-pki') if enable_pki and bool_from_string(enable_pki): - ctxt['signing'] = True + log("Enabling PKI", level=DEBUG) ctxt['token_provider'] = 'pki' - if 'token_provider' in ctxt: - log("Configuring PKI token cert paths", level=DEBUG) - certs = os.path.join(PKI_CERTS_DIR, 'certs') - privates = os.path.join(PKI_CERTS_DIR, 'privates') - for path in [PKI_CERTS_DIR, certs, privates]: - perms = 0o755 - if not os.path.isdir(path): - mkdir(path=path, owner=SSH_USER, group='keystone', - perms=perms) - else: - # Ensure accessible by ssh user and group (for sync). 
- ensure_permissions(path, user=SSH_USER, - group='keystone', perms=perms) - - signing_paths = {'certfile': os.path.join(certs, - 'signing_cert.pem'), - 'keyfile': os.path.join(privates, - 'signing_key.pem'), - 'ca_certs': os.path.join(certs, 'ca.pem'), - 'ca_key': os.path.join(certs, 'ca_key.pem')} - - for key, val in signing_paths.iteritems(): - ctxt[key] = val + ensure_pki_cert_paths() + certs = os.path.join(PKI_CERTS_DIR, 'certs') + privates = os.path.join(PKI_CERTS_DIR, 'privates') + ctxt.update({'certfile': os.path.join(certs, 'signing_cert.pem'), + 'keyfile': os.path.join(privates, 'signing_key.pem'), + 'ca_certs': os.path.join(certs, 'ca.pem'), + 'ca_key': os.path.join(certs, 'ca_key.pem')}) # Base endpoint URL's which are used in keystone responses # to unauthenticated requests to redirect clients to the @@ -255,6 +239,7 @@ def __call__(self): ctxt['admin_endpoint'] = endpoint_url( resolve_address(ADMIN), api_port('keystone-admin')).rstrip('v2.0') + return ctxt diff --git a/hooks/keystone_hooks.py b/hooks/keystone_hooks.py index 6975ae0..2158c2d 100755 --- a/hooks/keystone_hooks.py +++ b/hooks/keystone_hooks.py @@ -75,7 +75,6 @@ clear_ssl_synced_units, is_db_initialised, update_certs_if_available, - is_pki_enabled, ensure_ssl_dir, ensure_pki_dir_permissions, ensure_permissions, @@ -84,6 +83,7 @@ ensure_ssl_dirs, REQUIRED_INTERFACES, check_optional_relations, + ensure_pki_cert_paths, ) from charmhelpers.contrib.hahelpers.cluster import ( @@ -177,8 +177,7 @@ def config_changed_postupgrade(): update_nrpe_config() CONFIGS.write_all() - if is_pki_enabled(): - initialise_pki() + initialise_pki() update_all_identity_relation_units() @@ -194,11 +193,14 @@ def config_changed_postupgrade(): @synchronize_ca_if_changed(fatal=True) def initialise_pki(): - """Create certs and keys required for PKI token signing. + """Create certs and keys required for token signing. + + Used for PKI and signing token revocation list. NOTE: keystone.conf [signing] section must be up-to-date prior to executing this. """ + ensure_pki_cert_paths() if not peer_units() or is_ssl_cert_master(): log("Ensuring PKI token certs created", level=DEBUG) cmd = ['keystone-manage', 'pki_setup', '--keystone-user', 'keystone', @@ -373,44 +375,36 @@ def send_ssl_sync_request(): Note the we do nothing if the setting is already applied. """ unit = local_unit().replace('/', '-') - count = 0 + # Start with core config (e.g. used for signing revoked token list) + ssl_config = 0b1 use_https = config('use-https') if use_https and bool_from_string(use_https): - count += 1 + ssl_config ^= 0b10 https_service_endpoints = config('https-service-endpoints') if (https_service_endpoints and bool_from_string(https_service_endpoints)): - count += 2 + ssl_config ^= 0b100 enable_pki = config('enable-pki') if enable_pki and bool_from_string(enable_pki): - count += 3 + ssl_config ^= 0b1000 key = 'ssl-sync-required-%s' % (unit) - settings = {key: count} - - # If all ssl is disabled ensure this is set to 0 so that cluster hook runs - # and endpoints are updated. 
- if not count: - log("Setting %s=%s" % (key, count), level=DEBUG) - for rid in relation_ids('cluster'): - relation_set(relation_id=rid, relation_settings=settings) - - return + settings = {key: ssl_config} - prev = 0 + prev = 0b0 rid = None for rid in relation_ids('cluster'): for unit in related_units(rid): - _prev = relation_get(rid=rid, unit=unit, attribute=key) or 0 + _prev = relation_get(rid=rid, unit=unit, attribute=key) or 0b0 if _prev and _prev > prev: - prev = _prev + prev = bin(_prev) - if rid and prev < count: + if rid and prev ^ ssl_config: clear_ssl_synced_units() - log("Setting %s=%s" % (key, count), level=DEBUG) + log("Setting %s=%s" % (key, bin(ssl_config)), level=DEBUG) relation_set(relation_id=rid, relation_settings=settings) @@ -455,8 +449,7 @@ def cluster_changed(): check_peer_actions() - if is_pki_enabled(): - initialise_pki() + initialise_pki() # Figure out if we need to mandate a sync units = get_ssl_sync_request_units() diff --git a/hooks/keystone_utils.py b/hooks/keystone_utils.py index 87e71b7..d6a0d03 100644 --- a/hooks/keystone_utils.py +++ b/hooks/keystone_utils.py @@ -963,20 +963,6 @@ def is_ssl_cert_master(votes=None): return False -def is_ssl_enabled(): - use_https = config('use-https') - https_service_endpoints = config('https-service-endpoints') - if ((use_https and bool_from_string(use_https)) or - (https_service_endpoints and - bool_from_string(https_service_endpoints)) or - is_pki_enabled()): - log("SSL/HTTPS is enabled", level=DEBUG) - return True - - log("SSL/HTTPS is NOT enabled", level=DEBUG) - return False - - def get_ssl_cert_master_votes(): """Returns a list of unique votes.""" votes = [] @@ -997,10 +983,6 @@ def ensure_ssl_cert_master(): Normally the cluster leader will take control but we allow for this to be ignored since this could be called before the cluster is ready. """ - # Don't do anything if we are not in ssl/https mode - if not is_ssl_enabled(): - return False - master_override = False elect = is_elected_leader(CLUSTER_RES) @@ -1060,6 +1042,23 @@ def is_pki_enabled(): return False +def ensure_pki_cert_paths(): + certs = os.path.join(PKI_CERTS_DIR, 'certs') + privates = os.path.join(PKI_CERTS_DIR, 'privates') + not_exists = [p for p in [PKI_CERTS_DIR, certs, privates] + if not os.path.exists(p)] + if not_exists: + log("Configuring token signing cert paths", level=DEBUG) + perms = 0o755 + for path in not_exists: + if not os.path.isdir(path): + mkdir(path=path, owner=SSH_USER, group='keystone', perms=perms) + else: + # Ensure accessible by ssh user and group (for sync). + ensure_permissions(path, user=SSH_USER, group='keystone', + perms=perms) + + def ensure_pki_dir_permissions(): # Ensure accessible by unison user and group (for sync). ensure_permissions(PKI_CERTS_DIR, user=SSH_USER, group='keystone', @@ -1131,10 +1130,10 @@ def synchronize_ca(fatal=False): peer_service_actions['restart'].append('apache2') peer_actions.append('update-ca-certificates') - if is_pki_enabled(): - log("Syncing token certs", level=DEBUG) - paths_to_sync.append(PKI_CERTS_DIR) - peer_actions.append('ensure-pki-permissions') + # NOTE: certs needed for token signing e.g. pki and revocation list query. 
+ log("Syncing token certs", level=DEBUG) + paths_to_sync.append(PKI_CERTS_DIR) + peer_actions.append('ensure-pki-permissions') if not paths_to_sync: log("Nothing to sync - skipping", level=DEBUG) diff --git a/templates/kilo/keystone.conf b/templates/kilo/keystone.conf index 28454a3..4ab52ff 100644 --- a/templates/kilo/keystone.conf +++ b/templates/kilo/keystone.conf @@ -70,8 +70,6 @@ driver = keystone.assignment.backends.{{ assignment_backend }}.Assignment [oauth1] -[signing] - [auth] methods = external,password,token,oauth1 password = keystone.auth.plugins.password.Password diff --git a/unit_tests/test_keystone_contexts.py b/unit_tests/test_keystone_contexts.py index ef73264..a9cc606 100644 --- a/unit_tests/test_keystone_contexts.py +++ b/unit_tests/test_keystone_contexts.py @@ -26,11 +26,9 @@ def setUp(self): @patch('keystone_utils.ensure_permissions') @patch('keystone_utils.determine_ports') @patch('keystone_utils.is_ssl_cert_master') - @patch('keystone_utils.is_ssl_enabled') @patch.object(context, 'log') def test_apache_ssl_context_ssl_not_master(self, mock_log, - mock_is_ssl_enabled, mock_is_ssl_cert_master, mock_determine_ports, mock_ensure_permissions, @@ -38,7 +36,6 @@ def test_apache_ssl_context_ssl_not_master(self, mock_mkdir, mock_cert_provided_in_config): mock_cert_provided_in_config.return_value = False - mock_is_ssl_enabled.return_value = True mock_is_ssl_cert_master.return_value = False context.ApacheSSLContext().configure_cert('foo') @@ -49,7 +46,6 @@ def test_apache_ssl_context_ssl_not_master(self, @patch('keystone_utils.determine_ports') @patch('keystone_utils.is_ssl_cert_master') - @patch('keystone_utils.is_ssl_enabled') @patch('charmhelpers.contrib.openstack.context.config') @patch('charmhelpers.contrib.openstack.context.is_clustered') @patch('charmhelpers.contrib.openstack.context.determine_apache_port') @@ -62,10 +58,8 @@ def test_apache_ssl_context_service_enabled(self, mock_https, mock_determine_apache_port, mock_is_clustered, mock_config, - mock_is_ssl_enabled, mock_is_ssl_cert_master, mock_determine_ports): - mock_is_ssl_enabled.return_value = True mock_is_ssl_cert_master.return_value = True mock_https.return_value = True mock_unit_get.return_value = '1.2.3.4' diff --git a/unit_tests/test_keystone_hooks.py b/unit_tests/test_keystone_hooks.py index b942754..70b95fd 100644 --- a/unit_tests/test_keystone_hooks.py +++ b/unit_tests/test_keystone_hooks.py @@ -312,9 +312,9 @@ def test_postgresql_db_changed(self, identity_changed, configs, @patch('keystone_utils.ensure_ssl_cert_master') @patch('keystone_utils.ensure_ssl_dirs') @patch.object(hooks, 'ensure_permissions') + @patch.object(hooks, 'ensure_pki_cert_paths') @patch.object(hooks, 'ensure_pki_dir_permissions') @patch.object(hooks, 'ensure_ssl_dir') - @patch.object(hooks, 'is_pki_enabled') @patch.object(hooks, 'is_ssl_cert_master') @patch.object(hooks, 'send_ssl_sync_request') @patch.object(hooks, 'peer_units') @@ -334,15 +334,14 @@ def test_config_changed_no_upgrade_leader(self, configure_https, mock_peer_units, mock_send_ssl_sync_request, mock_is_ssl_cert_master, - mock_is_pki_enabled, mock_ensure_ssl_dir, + mock_ensure_pki_cert_paths, mock_ensure_permissions, mock_ensure_pki_dir_permissions, mock_ensure_ssl_dirs, mock_ensure_ssl_cert_master, mock_log, git_requested): git_requested.return_value = False - mock_is_pki_enabled.return_value = True mock_is_ssl_cert_master.return_value = True self.is_db_initialised.return_value = True self.is_db_ready.return_value = True @@ -376,9 +375,9 @@ def 
test_config_changed_no_upgrade_leader(self, configure_https, @patch('keystone_utils.ensure_ssl_dirs') @patch.object(hooks, 'update_all_identity_relation_units') @patch.object(hooks, 'ensure_permissions') + @patch.object(hooks, 'ensure_pki_cert_paths') @patch.object(hooks, 'ensure_pki_dir_permissions') @patch.object(hooks, 'ensure_ssl_dir') - @patch.object(hooks, 'is_pki_enabled') @patch.object(hooks, 'peer_units') @patch.object(hooks, 'is_ssl_cert_master') @patch.object(hooks, 'cluster_joined') @@ -393,16 +392,15 @@ def test_config_changed_no_upgrade_not_leader(self, configure_https, ensure_user, cluster_joined, mock_is_ssl_cert_master, mock_peer_units, - mock_is_pki_enabled, mock_ensure_ssl_dir, mock_ensure_permissions, + mock_ensure_pki_cert_paths, mock_ensure_pki_permissions, mock_update_all_id_rel_units, ensure_ssl_dirs, mock_ensure_ssl_cert_master, mock_log, git_requested): git_requested.return_value = False - mock_is_pki_enabled.return_value = True mock_is_ssl_cert_master.return_value = True mock_peer_units.return_value = [] self.openstack_upgrade_available.return_value = False @@ -426,9 +424,9 @@ def test_config_changed_no_upgrade_not_leader(self, configure_https, @patch('keystone_utils.ensure_ssl_cert_master') @patch('keystone_utils.ensure_ssl_dirs') @patch.object(hooks, 'ensure_permissions') + @patch.object(hooks, 'ensure_pki_cert_paths') @patch.object(hooks, 'ensure_pki_dir_permissions') @patch.object(hooks, 'ensure_ssl_dir') - @patch.object(hooks, 'is_pki_enabled') @patch.object(hooks, 'is_ssl_cert_master') @patch.object(hooks, 'send_ssl_sync_request') @patch.object(hooks, 'peer_units') @@ -447,15 +445,14 @@ def test_config_changed_with_openstack_upgrade(self, configure_https, mock_peer_units, mock_send_ssl_sync_request, mock_is_ssl_cert_master, - mock_is_pki_enabled, mock_ensure_ssl_dir, mock_ensure_permissions, + mock_ensure_pki_cert_paths, mock_ensure_pki_permissions, mock_ensure_ssl_dirs, mock_ensure_ssl_cert_master, mock_log, git_requested): git_requested.return_value = False - mock_is_pki_enabled.return_value = True mock_is_ssl_cert_master.return_value = True self.is_db_ready.return_value = True self.is_db_initialised.return_value = True @@ -485,12 +482,12 @@ def test_config_changed_with_openstack_upgrade(self, configure_https, remote_unit='unit/0') admin_relation_changed.assert_called_with('identity-service:0') + @patch.object(hooks, 'initialise_pki') @patch.object(hooks, 'git_install_requested') @patch.object(hooks, 'config_value_changed') @patch('keystone_utils.log') @patch('keystone_utils.ensure_ssl_cert_master') @patch.object(hooks, 'ensure_ssl_dir') - @patch.object(hooks, 'is_pki_enabled') @patch.object(hooks, 'send_ssl_sync_request') @patch.object(hooks, 'is_db_initialised') @patch.object(hooks, 'is_db_ready') @@ -510,14 +507,13 @@ def test_config_changed_git_updated(self, configure_https, mock_is_db_ready, mock_is_db_initialised, mock_send_ssl_sync_request, - mock_is_pki_enabled, mock_ensure_ssl_dir, mock_ensure_ssl_cert_master, mock_log, config_val_changed, - git_requested): + git_requested, + mock_initialise_pki): git_requested.return_value = True mock_ensure_ssl_cert_master.return_value = False - mock_is_pki_enabled.return_value = False self.openstack_upgrade_available.return_value = False self.is_elected_leader.return_value = True mock_peer_units.return_value = [] @@ -544,11 +540,11 @@ def test_config_changed_git_updated(self, configure_https, self.assertFalse(self.openstack_upgrade_available.called) self.assertFalse(self.do_openstack_upgrade_reexec.called) + 
@patch.object(hooks, 'initialise_pki') @patch.object(hooks, 'git_install_requested') @patch.object(hooks, 'config_value_changed') @patch.object(hooks, 'ensure_ssl_dir') @patch.object(hooks, 'configure_https') - @patch.object(hooks, 'is_pki_enabled') @patch.object(hooks, 'is_ssl_cert_master') @patch.object(hooks, 'peer_units') @patch.object(unison, 'get_homedir') @@ -559,12 +555,12 @@ def test_config_changed_with_openstack_upgrade_action(self, ensure_user, get_home, peer_units, is_ssl, - is_pki, config_https, + config_https, ensure_ssl_dir, config_value_changed, - git_requested): + git_requested, + mock_initialise_pki): ensure_ssl_cert.return_value = False - is_pki.return_value = False peer_units.return_value = [] git_requested.return_value = False @@ -619,6 +615,7 @@ def test_cluster_joined(self, ssh_authorized_peers, mock_peer_units, user=self.ssh_user, group='juju_keystone', peer_interface='cluster', ensure_local_user=True) + @patch.object(hooks, 'initialise_pki') @patch.object(hooks, 'update_all_identity_relation_units') @patch.object(hooks, 'get_ssl_sync_request_units') @patch.object(hooks, 'is_ssl_cert_master') @@ -638,7 +635,8 @@ def test_cluster_changed(self, configs, ssh_authorized_peers, mock_peer_units, mock_is_ssl_cert_master, mock_get_ssl_sync_request_units, - mock_update_all_identity_relation_units): + mock_update_all_identity_relation_units, + mock_initialise_pki): relation_settings = {'foo_passwd': '123', 'identity-service:16_foo': 'bar'} diff --git a/unit_tests/test_keystone_utils.py b/unit_tests/test_keystone_utils.py index 0362623..1da2644 100644 --- a/unit_tests/test_keystone_utils.py +++ b/unit_tests/test_keystone_utils.py @@ -479,22 +479,11 @@ def fake_rel_get(attribute=None, *args, **kwargs): self.assertTrue(utils.is_db_ready()) @patch.object(utils, 'peer_units') - @patch.object(utils, 'is_ssl_enabled') - def test_ensure_ssl_cert_master_no_ssl(self, mock_is_ssl_enabled, - mock_peer_units): - mock_is_ssl_enabled.return_value = False - self.assertFalse(utils.ensure_ssl_cert_master()) - self.assertFalse(self.relation_set.called) - - @patch.object(utils, 'peer_units') - @patch.object(utils, 'is_ssl_enabled') - def test_ensure_ssl_cert_master_ssl_no_peers(self, mock_is_ssl_enabled, - mock_peer_units): + def test_ensure_ssl_cert_master_ssl_no_peers(self, mock_peer_units): def mock_rel_get(unit=None, **kwargs): return None self.relation_get.side_effect = mock_rel_get - mock_is_ssl_enabled.return_value = True self.relation_ids.return_value = ['cluster:0'] self.local_unit.return_value = 'unit/0' self.related_units.return_value = [] @@ -508,9 +497,7 @@ def mock_rel_get(unit=None, **kwargs): relation_settings=settings) @patch.object(utils, 'peer_units') - @patch.object(utils, 'is_ssl_enabled') def test_ensure_ssl_cert_master_ssl_master_no_peers(self, - mock_is_ssl_enabled, mock_peer_units): def mock_rel_get(unit=None, **kwargs): if unit == 'unit/0': @@ -519,7 +506,6 @@ def mock_rel_get(unit=None, **kwargs): return None self.relation_get.side_effect = mock_rel_get - mock_is_ssl_enabled.return_value = True self.relation_ids.return_value = ['cluster:0'] self.local_unit.return_value = 'unit/0' self.related_units.return_value = [] @@ -533,10 +519,7 @@ def mock_rel_get(unit=None, **kwargs): relation_settings=settings) @patch.object(utils, 'peer_units') - @patch.object(utils, 'is_ssl_enabled') - def test_ensure_ssl_cert_master_ssl_not_leader(self, mock_is_ssl_enabled, - mock_peer_units): - mock_is_ssl_enabled.return_value = True + def test_ensure_ssl_cert_master_ssl_not_leader(self, 
mock_peer_units): self.relation_ids.return_value = ['cluster:0'] self.local_unit.return_value = 'unit/0' mock_peer_units.return_value = ['unit/1'] @@ -546,9 +529,7 @@ def test_ensure_ssl_cert_master_ssl_not_leader(self, mock_is_ssl_enabled, self.assertFalse(self.relation_set.called) @patch.object(utils, 'peer_units') - @patch.object(utils, 'is_ssl_enabled') def test_ensure_ssl_cert_master_is_leader_new_peer(self, - mock_is_ssl_enabled, mock_peer_units): def mock_rel_get(unit=None, **kwargs): if unit == 'unit/0': @@ -557,7 +538,6 @@ def mock_rel_get(unit=None, **kwargs): return 'unknown' self.relation_get.side_effect = mock_rel_get - mock_is_ssl_enabled.return_value = True self.relation_ids.return_value = ['cluster:0'] self.local_unit.return_value = 'unit/0' mock_peer_units.return_value = ['unit/1'] @@ -570,9 +550,7 @@ def mock_rel_get(unit=None, **kwargs): relation_settings=settings) @patch.object(utils, 'peer_units') - @patch.object(utils, 'is_ssl_enabled') def test_ensure_ssl_cert_master_is_leader_no_new_peer(self, - mock_is_ssl_enabled, mock_peer_units): def mock_rel_get(unit=None, **kwargs): if unit == 'unit/0': @@ -581,7 +559,6 @@ def mock_rel_get(unit=None, **kwargs): return 'unit/0' self.relation_get.side_effect = mock_rel_get - mock_is_ssl_enabled.return_value = True self.relation_ids.return_value = ['cluster:0'] self.local_unit.return_value = 'unit/0' mock_peer_units.return_value = ['unit/1'] @@ -621,9 +598,7 @@ def test_ensure_initial_admin_public_name(self, ) @patch.object(utils, 'peer_units') - @patch.object(utils, 'is_ssl_enabled') def test_ensure_ssl_cert_master_is_leader_bad_votes(self, - mock_is_ssl_enabled, mock_peer_units): counter = {0: 0} @@ -637,7 +612,6 @@ def mock_rel_get(unit=None, **kwargs): return ret self.relation_get.side_effect = mock_rel_get - mock_is_ssl_enabled.return_value = True self.relation_ids.return_value = ['cluster:0'] self.local_unit.return_value = 'unit/0' mock_peer_units.return_value = ['unit/1'] From 67dfef9bf3e2211839a00158be65b1dda91bde80 Mon Sep 17 00:00:00 2001 From: Corey Bryant Date: Fri, 8 Jan 2016 02:35:15 +0000 Subject: [PATCH 03/23] [corey.bryant,r=trivial] Enable sync of payload.archive --- charm-helpers-hooks.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charm-helpers-hooks.yaml b/charm-helpers-hooks.yaml index 1c2d6c0..ee82b34 100644 --- a/charm-helpers-hooks.yaml +++ b/charm-helpers-hooks.yaml @@ -11,7 +11,7 @@ include: - cluster - contrib.python - contrib.unison - - payload.execd + - payload - contrib.peerstorage - contrib.network.ip - contrib.python.packages From b07ee44be3f4fa799709ec90ae7e4d2b56c99bd1 Mon Sep 17 00:00:00 2001 From: Corey Bryant Date: Fri, 8 Jan 2016 02:37:35 +0000 Subject: [PATCH 04/23] [corey.bryant,r=osci] Sync charm-helpers. 
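This sync pulls in, among other changes, a fix that replaces the hard-coded
haproxy stats credential ("admin:password") with a password generated per unit
and persisted in the charm's key/value store. A minimal sketch of that pattern,
using the kv() and pwgen() helpers that appear in the context.py hunk below
(the get_stat_password() wrapper name is illustrative only):

    from charmhelpers.core.host import pwgen
    from charmhelpers.core.unitdata import kv

    def get_stat_password():
        # Generate the haproxy stats password once, then reuse it on every
        # subsequent hook invocation by persisting it in unitdata.
        db = kv()
        password = db.get('stat-password')
        if not password:
            password = db.set('stat-password', pwgen(32))
            db.flush()
        return password

The haproxy.cfg template change below then renders this value via
"stats auth admin:{{ stat_password }}" instead of the previous fixed credential.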
--- charmhelpers/contrib/openstack/context.py | 14 ++++++++++++-- .../contrib/openstack/templates/haproxy.cfg | 5 +++-- charmhelpers/contrib/openstack/utils.py | 15 ++++++--------- charmhelpers/fetch/giturl.py | 3 --- 4 files changed, 21 insertions(+), 16 deletions(-) diff --git a/charmhelpers/contrib/openstack/context.py b/charmhelpers/contrib/openstack/context.py index 25125de..e930bf3 100644 --- a/charmhelpers/contrib/openstack/context.py +++ b/charmhelpers/contrib/openstack/context.py @@ -57,6 +57,7 @@ get_nic_hwaddr, mkdir, write_file, + pwgen, ) from charmhelpers.contrib.hahelpers.cluster import ( determine_apache_port, @@ -87,6 +88,8 @@ is_bridge_member, ) from charmhelpers.contrib.openstack.utils import get_host_ip +from charmhelpers.core.unitdata import kv + CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' ADDRESS_TYPES = ['admin', 'internal', 'public'] @@ -636,11 +639,18 @@ def __call__(self): ctxt['ipv6'] = True ctxt['local_host'] = 'ip6-localhost' ctxt['haproxy_host'] = '::' - ctxt['stat_port'] = ':::8888' else: ctxt['local_host'] = '127.0.0.1' ctxt['haproxy_host'] = '0.0.0.0' - ctxt['stat_port'] = ':8888' + + ctxt['stat_port'] = '8888' + + db = kv() + ctxt['stat_password'] = db.get('stat-password') + if not ctxt['stat_password']: + ctxt['stat_password'] = db.set('stat-password', + pwgen(32)) + db.flush() for frontend in cluster_hosts: if (len(cluster_hosts[frontend]['backends']) > 1 or diff --git a/charmhelpers/contrib/openstack/templates/haproxy.cfg b/charmhelpers/contrib/openstack/templates/haproxy.cfg index 8721d8a..32b6276 100644 --- a/charmhelpers/contrib/openstack/templates/haproxy.cfg +++ b/charmhelpers/contrib/openstack/templates/haproxy.cfg @@ -33,13 +33,14 @@ defaults timeout server 30000 {%- endif %} -listen stats {{ stat_port }} +listen stats + bind {{ local_host }}:{{ stat_port }} mode http stats enable stats hide-version stats realm Haproxy\ Statistics stats uri / - stats auth admin:password + stats auth admin:{{ stat_password }} {% if frontends -%} {% for service, ports in service_ports.items() -%} diff --git a/charmhelpers/contrib/openstack/utils.py b/charmhelpers/contrib/openstack/utils.py index 3cfc983..2af4476 100644 --- a/charmhelpers/contrib/openstack/utils.py +++ b/charmhelpers/contrib/openstack/utils.py @@ -593,7 +593,7 @@ def _git_yaml_load(projects_yaml): return yaml.load(projects_yaml) -def git_clone_and_install(projects_yaml, core_project, depth=1): +def git_clone_and_install(projects_yaml, core_project): """ Clone/install all specified OpenStack repositories. @@ -643,6 +643,9 @@ def git_clone_and_install(projects_yaml, core_project, depth=1): for p in projects['repositories']: repo = p['repository'] branch = p['branch'] + depth = '1' + if 'depth' in p.keys(): + depth = p['depth'] if p['name'] == 'requirements': repo_dir = _git_clone_and_install_single(repo, branch, depth, parent_dir, http_proxy, @@ -687,19 +690,13 @@ def _git_clone_and_install_single(repo, branch, depth, parent_dir, http_proxy, """ Clone and install a single git repository. """ - dest_dir = os.path.join(parent_dir, os.path.basename(repo)) - if not os.path.exists(parent_dir): juju_log('Directory already exists at {}. 
' 'No need to create directory.'.format(parent_dir)) os.mkdir(parent_dir) - if not os.path.exists(dest_dir): - juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) - repo_dir = install_remote(repo, dest=parent_dir, branch=branch, - depth=depth) - else: - repo_dir = dest_dir + juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) + repo_dir = install_remote(repo, dest=parent_dir, branch=branch, depth=depth) venv = os.path.join(parent_dir, 'venv') diff --git a/charmhelpers/fetch/giturl.py b/charmhelpers/fetch/giturl.py index bbf89d5..a7ce90a 100644 --- a/charmhelpers/fetch/giturl.py +++ b/charmhelpers/fetch/giturl.py @@ -22,7 +22,6 @@ filter_installed_packages, apt_install, ) -from charmhelpers.core.host import mkdir if filter_installed_packages(['git']) != []: apt_install(['git']) @@ -62,8 +61,6 @@ def install(self, source, branch="master", dest=None, depth=None): else: dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name) - if not os.path.exists(dest_dir): - mkdir(dest_dir, perms=0o755) try: self.clone(source, dest_dir, branch, depth) except OSError as e: From 8c7e482978c5d67a895ebc594b21c4d402f652f3 Mon Sep 17 00:00:00 2001 From: Corey Bryant Date: Fri, 8 Jan 2016 19:52:42 +0000 Subject: [PATCH 05/23] [corey.bryant,r=trivial] Fix unit test failures --- charmhelpers/contrib/openstack/context.py | 2 ++ unit_tests/test_keystone_contexts.py | 9 ++++++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/charmhelpers/contrib/openstack/context.py b/charmhelpers/contrib/openstack/context.py index e930bf3..7cffe77 100644 --- a/charmhelpers/contrib/openstack/context.py +++ b/charmhelpers/contrib/openstack/context.py @@ -647,7 +647,9 @@ def __call__(self): db = kv() ctxt['stat_password'] = db.get('stat-password') + print "ctxt[\'stat_password\'] = %s" % ctxt['stat_password'] if not ctxt['stat_password']: + print "HELLO AGAIN" ctxt['stat_password'] = db.set('stat-password', pwgen(32)) db.flush() diff --git a/unit_tests/test_keystone_contexts.py b/unit_tests/test_keystone_contexts.py index ef73264..c8cd132 100644 --- a/unit_tests/test_keystone_contexts.py +++ b/unit_tests/test_keystone_contexts.py @@ -97,10 +97,11 @@ def test_apache_ssl_context_service_enabled(self, mock_https, @patch('charmhelpers.contrib.openstack.context.related_units') @patch('charmhelpers.contrib.openstack.context.relation_get') @patch('charmhelpers.contrib.openstack.context.log') + @patch('charmhelpers.contrib.openstack.context.kv') @patch('__builtin__.open') def test_haproxy_context_service_enabled( - self, mock_open, mock_log, mock_relation_get, mock_related_units, - mock_unit_get, mock_relation_ids, mock_config, + self, mock_open, mock_kv, mock_log, mock_relation_get, + mock_related_units, mock_unit_get, mock_relation_ids, mock_config, mock_get_address_in_network, mock_get_netmask_for_address, mock_api_port): os.environ['JUJU_UNIT_NAME'] = 'keystone' @@ -114,6 +115,7 @@ def test_haproxy_context_service_enabled( mock_get_netmask_for_address.return_value = '255.255.255.0' self.determine_apache_port.return_value = '34' mock_api_port.return_value = '12' + mock_kv().get.return_value = 'abcdefghijklmnopqrstuvwxyz123456' ctxt = context.HAProxyContext() @@ -124,7 +126,8 @@ def test_haproxy_context_service_enabled( 'public_port': '12'}, 'local_host': '127.0.0.1', 'haproxy_host': '0.0.0.0', - 'stat_port': ':8888', + 'stat_port': '8888', + 'stat_password': 'abcdefghijklmnopqrstuvwxyz123456', 'service_ports': {'admin-port': ['12', '34'], 'public-port': ['12', '34']}, 
'default_backend': '1.2.3.4', From 8b702d752b49c8f366b6bea756a075f8f9f2e883 Mon Sep 17 00:00:00 2001 From: Ryan Beisner Date: Fri, 8 Jan 2016 21:44:33 +0000 Subject: [PATCH 06/23] remove amulet tests for unsupported releases --- tests/019-basic-vivid-kilo | 9 --------- 1 file changed, 9 deletions(-) delete mode 100755 tests/019-basic-vivid-kilo diff --git a/tests/019-basic-vivid-kilo b/tests/019-basic-vivid-kilo deleted file mode 100755 index 109abb5..0000000 --- a/tests/019-basic-vivid-kilo +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/python - -"""Amulet tests on a basic keystone deployment on vivid-kilo.""" - -from basic_deployment import KeystoneBasicDeployment - -if __name__ == '__main__': - deployment = KeystoneBasicDeployment(series='vivid') - deployment.run_tests() From 212acef05e7a7d95c31dde5a476b4f21e5fd143f Mon Sep 17 00:00:00 2001 From: Ryan Beisner Date: Fri, 8 Jan 2016 21:44:55 +0000 Subject: [PATCH 07/23] Move 00-setup to prevent extra, unnecessary bootstrap in test runs. --- Makefile | 1 + tests/{ => setup}/00-setup | 0 2 files changed, 1 insertion(+) rename tests/{ => setup}/00-setup (100%) diff --git a/Makefile b/Makefile index e94bd34..31a6980 100644 --- a/Makefile +++ b/Makefile @@ -13,6 +13,7 @@ test: functional_test: @echo Starting Amulet tests... + @tests/setup/00-setup @juju test -v -p AMULET_HTTP_PROXY,AMULET_OS_VIP --timeout 2700 bin/charm_helpers_sync.py: diff --git a/tests/00-setup b/tests/setup/00-setup similarity index 100% rename from tests/00-setup rename to tests/setup/00-setup From acee07803e25675eef10f336fa9cfb118066df52 Mon Sep 17 00:00:00 2001 From: Ryan Beisner Date: Fri, 8 Jan 2016 21:45:18 +0000 Subject: [PATCH 08/23] Re-number amulet test file names; add missing combos as not-executable for now. --- ...-basic-trusty-liberty => 018-basic-trusty-liberty} | 0 tests/019-basic-trusty-mitaka | 11 +++++++++++ ...{021-basic-wily-liberty => 020-basic-wily-liberty} | 0 tests/021-basic-xenial-mitaka | 9 +++++++++ 4 files changed, 20 insertions(+) rename tests/{020-basic-trusty-liberty => 018-basic-trusty-liberty} (100%) create mode 100644 tests/019-basic-trusty-mitaka rename tests/{021-basic-wily-liberty => 020-basic-wily-liberty} (100%) create mode 100644 tests/021-basic-xenial-mitaka diff --git a/tests/020-basic-trusty-liberty b/tests/018-basic-trusty-liberty similarity index 100% rename from tests/020-basic-trusty-liberty rename to tests/018-basic-trusty-liberty diff --git a/tests/019-basic-trusty-mitaka b/tests/019-basic-trusty-mitaka new file mode 100644 index 0000000..01f555c --- /dev/null +++ b/tests/019-basic-trusty-mitaka @@ -0,0 +1,11 @@ +#!/usr/bin/python + +"""Amulet tests on a basic keystone deployment on trusty-liberty.""" + +from basic_deployment import KeystoneBasicDeployment + +if __name__ == '__main__': + deployment = KeystoneBasicDeployment(series='trusty', + openstack='cloud:trusty-liberty', + source='cloud:trusty-updates/liberty') + deployment.run_tests() diff --git a/tests/021-basic-wily-liberty b/tests/020-basic-wily-liberty similarity index 100% rename from tests/021-basic-wily-liberty rename to tests/020-basic-wily-liberty diff --git a/tests/021-basic-xenial-mitaka b/tests/021-basic-xenial-mitaka new file mode 100644 index 0000000..1370acd --- /dev/null +++ b/tests/021-basic-xenial-mitaka @@ -0,0 +1,9 @@ +#!/usr/bin/python + +"""Amulet tests on a basic keystone deployment on xenial-mitaka.""" + +from basic_deployment import KeystoneBasicDeployment + +if __name__ == '__main__': + deployment = KeystoneBasicDeployment(series='xenial') + 
deployment.run_tests() From eded88a4baf55a6d38df6b1cdc529e09e6659800 Mon Sep 17 00:00:00 2001 From: Ryan Beisner Date: Fri, 8 Jan 2016 21:45:38 +0000 Subject: [PATCH 09/23] Update tests/README --- tests/README | 123 ++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 87 insertions(+), 36 deletions(-) diff --git a/tests/README b/tests/README index 2fd5430..79c5b06 100644 --- a/tests/README +++ b/tests/README @@ -1,62 +1,113 @@ -This directory provides Amulet tests that focus on verification of Keystone -deployments. +This directory provides Amulet tests to verify basic deployment functionality +from the perspective of this charm, its requirements and its features, as +exercised in a subset of the full OpenStack deployment test bundle topology. -test_* methods are called in lexical sort order. +Reference: lp:openstack-charm-testing for full test bundles. -Test name convention to ensure desired test order: +A single topology and configuration is defined and deployed, once for each of +the defined Ubuntu:OpenStack release combos. The ongoing goal is for this +charm to always possess tests and combo definitions for all currently-supported +release combinations of U:OS. + +test_* methods are called in lexical sort order, as with most runners. However, +each individual test method should be idempotent and expected to pass regardless +of run order or Ubuntu:OpenStack combo. When writing or modifying tests, +ensure that every individual test is not dependent on another test_ method. + +Test naming convention, purely for code organization purposes: 1xx service and endpoint checks 2xx relation checks 3xx config checks 4xx functional checks - 9xx restarts and other final checks + 9xx restarts, config changes, actions and other final checks -In order to run tests, you'll need charm-tools installed (in addition to -juju, of course): +In order to run tests, charm-tools and juju must be installed: sudo add-apt-repository ppa:juju/stable sudo apt-get update - sudo apt-get install charm-tools + sudo apt-get install charm-tools juju juju-deployer amulet + +Alternatively, tests may be exercised with proposed or development versions +of juju and related tools: + + # juju proposed version + sudo add-apt-repository ppa:juju/proposed + sudo apt-get update + sudo apt-get install charm-tools juju juju-deployer + + # juju development version + sudo add-apt-repository ppa:juju/devel + sudo apt-get update + sudo apt-get install charm-tools juju juju-deployer -If you use a web proxy server to access the web, you'll need to set the -AMULET_HTTP_PROXY environment variable to the http URL of the proxy server. +Some tests may need to download files. If a web proxy server is required in +the environment, the AMULET_HTTP_PROXY environment variable must be set and +passed into the juju test command. This is unrelated to juju's http proxy +settings or behavior. The following examples demonstrate different ways that tests can be executed. All examples are run from the charm's root directory. 
- * To run all tests (starting with 00-setup): + * To run all +x tests in the tests directory: - make test + bzr branch lp:charms/trusty/foo + cd foo + make functional_test - * To run a specific test module (or modules): + * To run the tests against a specific release combo as defined in tests/: - juju test -v -p AMULET_HTTP_PROXY 15-basic-trusty-icehouse + bzr branch lp:charms/trusty/foo + cd foo + juju test -v -p AMULET_HTTP_PROXY 015-basic-trusty-icehouse - * To run a specific test module (or modules), and keep the environment - deployed after a failure: + * To run tests and keep the juju environment deployed after a failure: - juju test --set-e -v -p AMULET_HTTP_PROXY 15-basic-trusty-icehouse + bzr branch lp:charms/trusty/foo + cd foo + juju test --set-e -v -p AMULET_HTTP_PROXY 015-basic-trusty-icehouse * To re-run a test module against an already deployed environment (one that was deployed by a previous call to 'juju test --set-e'): - ./tests/15-basic-trusty-icehouse - -For debugging and test development purposes, all code should be idempotent. -In other words, the code should have the ability to be re-run without changing -the results beyond the initial run. This enables editing and re-running of a -test module against an already deployed environment, as described above. - -Manual debugging tips: - - * Set the following env vars before using the OpenStack CLI as admin: - export OS_AUTH_URL=http://`juju-deployer -f keystone 2>&1 | tail -n 1`:5000/v2.0 - export OS_TENANT_NAME=admin + ./tests/015-basic-trusty-icehouse + + * Even with --set-e, `juju test` will tear down the deployment when all + tests pass. The following work flow may be more effective when + iterating on test writing. + + bzr branch lp:charms/trusty/foo + cd foo + ./tests/setup/00-setup + juju bootstrap + ./tests/015-basic-trusty-icehouse + # make some changes, run tests again + ./tests/015-basic-trusty-icehouse + # make some changes, run tests again + ./tests/015-basic-trusty-icehouse + + * There may be test definitions in the tests/ dir which are not set +x + executable. This is generally true for deprecated releases, or for + upcoming releases which are not yet validated and enabled. To enable + and run these tests: + bzr branch lp:charms/trusty/foo + cd foo + ls tests + chmod +x tests/017-basic-trusty-kilo + ./tests/setup/00-setup + juju bootstrap + ./tests/017-basic-trusty-kilo + + +Additional notes: + + * Use DEBUG to turn on debug logging, use ERROR otherwise. 
+ u = OpenStackAmuletUtils(ERROR) + u = OpenStackAmuletUtils(DEBUG) + + * To interact with the deployed environment: export OS_USERNAME=admin export OS_PASSWORD=openstack + export OS_TENANT_NAME=admin export OS_REGION_NAME=RegionOne - - * Set the following env vars before using the OpenStack CLI as demoUser: - export OS_AUTH_URL=http://`juju-deployer -f keystone 2>&1 | tail -n 1`:5000/v2.0 - export OS_TENANT_NAME=demoTenant - export OS_USERNAME=demoUser - export OS_PASSWORD=password - export OS_REGION_NAME=RegionOne + export OS_AUTH_URL=${OS_AUTH_PROTOCOL:-http}://`juju-deployer -e trusty -f keystone`:5000/v2.0 + keystone user-list + glance image-list From f64649dcd914dfb25bab92d1ebd1983cd9b6e5fe Mon Sep 17 00:00:00 2001 From: Ryan Beisner Date: Fri, 8 Jan 2016 21:45:53 +0000 Subject: [PATCH 10/23] Update bundletester testplan yaml file --- tests/tests.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/tests.yaml b/tests/tests.yaml index 64e3e2d..4d17631 100644 --- a/tests/tests.yaml +++ b/tests/tests.yaml @@ -1,5 +1,5 @@ bootstrap: true -reset: true +reset: false virtualenv: true makefile: - lint @@ -9,6 +9,7 @@ sources: packages: - amulet - distro-info-data + - python-ceilometerclient - python-cinderclient - python-distro-info - python-glanceclient From 1c32c99fd197f3191b43d3e19bb3730b07f1b11e Mon Sep 17 00:00:00 2001 From: Liam Young Date: Tue, 12 Jan 2016 11:09:46 +0000 Subject: [PATCH 11/23] Delete the old quantum catalog entry if a neutron entry is present --- hooks/keystone_hooks.py | 4 ++++ hooks/keystone_utils.py | 26 ++++++++++++++++++++++++++ hooks/manager.py | 10 +++++++--- unit_tests/test_keystone_hooks.py | 25 +++++++++++++++++++++++++ unit_tests/test_keystone_utils.py | 22 ++++++++++++++++++++++ 5 files changed, 84 insertions(+), 3 deletions(-) diff --git a/hooks/keystone_hooks.py b/hooks/keystone_hooks.py index 058abc0..c3aac1e 100755 --- a/hooks/keystone_hooks.py +++ b/hooks/keystone_hooks.py @@ -84,6 +84,8 @@ REQUIRED_INTERFACES, check_optional_relations, ensure_pki_cert_paths, + is_service_present, + delete_service_entry, ) from charmhelpers.contrib.hahelpers.cluster import ( @@ -341,6 +343,8 @@ def identity_changed(relation_id=None, remote_unit=None): return add_service_to_keystone(relation_id, remote_unit) + if is_service_present('neutron', 'network'): + delete_service_entry('quantum', 'network') settings = relation_get(rid=relation_id, unit=remote_unit) service = settings.get('service', None) if service: diff --git a/hooks/keystone_utils.py b/hooks/keystone_utils.py index d6a0d03..155ea64 100644 --- a/hooks/keystone_utils.py +++ b/hooks/keystone_utils.py @@ -244,6 +244,10 @@ "type": "network", "desc": "Quantum Networking Service" }, + "neutron": { + "type": "network", + "desc": "Neutron Networking Service" + }, "oxygen": { "type": "oxygen", "desc": "Oxygen Cloud Image Service" @@ -486,6 +490,28 @@ def get_admin_token(): error_out('Could not find admin_token line in %s' % KEYSTONE_CONF) +def is_service_present(service_name, service_type): + import manager + manager = manager.KeystoneManager(endpoint=get_local_endpoint(), + token=get_admin_token()) + service_id = manager.resolve_service_id(service_name, service_type) + return service_id is not None + + +def delete_service_entry(service_name, service_type): + """ Delete a service from keystone""" + import manager + manager = manager.KeystoneManager(endpoint=get_local_endpoint(), + token=get_admin_token()) + service_id = manager.resolve_service_id(service_name, service_type) + if service_id: + 
print "Hi!" + print service_id + print manager.api.services.delete + manager.api.services.delete(service_id) + log("Deleted service entry '%s'" % service_name, level=DEBUG) + + def create_service_entry(service_name, service_type, service_desc, owner=None): """ Add a new service entry to keystone if one does not already exist """ import manager diff --git a/hooks/manager.py b/hooks/manager.py index 8c8968d..17c6c02 100644 --- a/hooks/manager.py +++ b/hooks/manager.py @@ -28,12 +28,16 @@ def resolve_user_id(self, name): if name == u['name']: return u['id'] - def resolve_service_id(self, name): + def resolve_service_id(self, name, service_type=None): """Find the service_id of a given service""" services = [s._info for s in self.api.services.list()] for s in services: - if name == s['name']: - return s['id'] + if service_type: + if name == s['name'] and service_type == s['type']: + return s['id'] + else: + if name == s['name']: + return s['id'] def resolve_service_id_by_type(self, type): """Find the service_id of a given service""" diff --git a/unit_tests/test_keystone_hooks.py b/unit_tests/test_keystone_hooks.py index c6a05a2..dae936f 100644 --- a/unit_tests/test_keystone_hooks.py +++ b/unit_tests/test_keystone_hooks.py @@ -71,6 +71,8 @@ 'get_netmask_for_address', 'get_address_in_network', 'git_install', + 'is_service_present', + 'delete_service_entry', ] @@ -623,6 +625,7 @@ def test_identity_changed_leader(self, mock_send_notifications, mock_log, mock_is_db_initialised): mock_is_db_initialised.return_value = True self.is_db_ready.return_value = True + self.is_service_present.return_value = True mock_ensure_ssl_cert_master.return_value = False hooks.identity_changed( relation_id='identity-service:0', @@ -630,6 +633,28 @@ def test_identity_changed_leader(self, mock_send_notifications, self.add_service_to_keystone.assert_called_with( 'identity-service:0', 'unit/0') + self.delete_service_entry.assert_called_with( + 'quantum', + 'network') + + @patch.object(hooks, 'is_db_initialised') + @patch('keystone_utils.log') + @patch('keystone_utils.ensure_ssl_cert_master') + @patch.object(hooks, 'hashlib') + @patch.object(hooks, 'send_notifications') + def test_identity_changed_leader_no_neutron(self, mock_send_notifications, + mock_hashlib, + mock_ensure_ssl_cert_master, + mock_log, + mock_is_db_initialised): + mock_is_db_initialised.return_value = True + self.is_db_ready.return_value = True + self.is_service_present.return_value = False + mock_ensure_ssl_cert_master.return_value = False + hooks.identity_changed( + relation_id='identity-service:0', + remote_unit='unit/0') + self.assertFalse(self.delete_service_entry.called) @patch.object(hooks, 'local_unit') @patch('keystone_utils.log') diff --git a/unit_tests/test_keystone_utils.py b/unit_tests/test_keystone_utils.py index 1da2644..f8d75b0 100644 --- a/unit_tests/test_keystone_utils.py +++ b/unit_tests/test_keystone_utils.py @@ -703,3 +703,25 @@ def test_git_post_install(self, check_call, rmtree, copytree, symlink, ] self.assertEquals(render.call_args_list, expected) service_restart.assert_called_with('keystone') + + @patch.object(manager, 'KeystoneManager') + def test_is_service_present(self, KeystoneManager): + mock_keystone = MagicMock() + mock_keystone.resolve_service_id.return_value = 'sid1' + KeystoneManager.return_value = mock_keystone + self.assertTrue(utils.is_service_present('bob', 'bill')) + + @patch.object(manager, 'KeystoneManager') + def test_is_service_present_false(self, KeystoneManager): + mock_keystone = MagicMock() + 
mock_keystone.resolve_service_id.return_value = None + KeystoneManager.return_value = mock_keystone + self.assertFalse(utils.is_service_present('bob', 'bill')) + + @patch.object(manager, 'KeystoneManager') + def test_delete_service_entry(self, KeystoneManager): + mock_keystone = MagicMock() + mock_keystone.resolve_service_id.return_value = 'sid1' + KeystoneManager.return_value = mock_keystone + utils.delete_service_entry('bob', 'bill') + mock_keystone.api.services.delete.assert_called_with('sid1') From e654d7c84a3a05234e2839486b7afa49f998e504 Mon Sep 17 00:00:00 2001 From: Liam Young Date: Tue, 12 Jan 2016 15:50:54 +0000 Subject: [PATCH 12/23] Remove debug prints --- hooks/keystone_utils.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/hooks/keystone_utils.py b/hooks/keystone_utils.py index 155ea64..2612581 100644 --- a/hooks/keystone_utils.py +++ b/hooks/keystone_utils.py @@ -505,9 +505,6 @@ def delete_service_entry(service_name, service_type): token=get_admin_token()) service_id = manager.resolve_service_id(service_name, service_type) if service_id: - print "Hi!" - print service_id - print manager.api.services.delete manager.api.services.delete(service_id) log("Deleted service entry '%s'" % service_name, level=DEBUG) From cb79f3bf683c536d179ab2a591fa37eadc3d6751 Mon Sep 17 00:00:00 2001 From: Ryan Beisner Date: Wed, 13 Jan 2016 19:11:56 +0000 Subject: [PATCH 13/23] wait for status instead of sleep --- tests/basic_deployment.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/basic_deployment.py b/tests/basic_deployment.py index 1511033..88586eb 100644 --- a/tests/basic_deployment.py +++ b/tests/basic_deployment.py @@ -6,7 +6,6 @@ import amulet import os -import time import yaml from charmhelpers.contrib.openstack.amulet.deployment import ( @@ -36,6 +35,11 @@ def __init__(self, series=None, openstack=None, self._add_relations() self._configure_services() self._deploy() + + u.log.info('Waiting on extended status checks...') + exclude_services = ['mysql'] + self._auto_wait_for_status(exclude_services=exclude_services) + self._initialize_tests() def _assert_services(self, should_run): @@ -113,9 +117,6 @@ def _initialize_tests(self): u.log.debug('openstack release str: {}'.format( self._get_openstack_release_string())) - # Let things settle a bit before moving forward - time.sleep(30) - # Authenticate keystone admin self.keystone = u.authenticate_keystone_admin(self.keystone_sentry, user='admin', From 905e892dd9a0dcd161350284b4eecf41bc6cb783 Mon Sep 17 00:00:00 2001 From: Ryan Beisner Date: Wed, 13 Jan 2016 19:12:20 +0000 Subject: [PATCH 14/23] flip all releases on for amulet tests --- tests/018-basic-trusty-liberty | 0 tests/019-basic-trusty-mitaka | 0 tests/020-basic-wily-liberty | 0 tests/021-basic-xenial-mitaka | 0 4 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 tests/018-basic-trusty-liberty mode change 100644 => 100755 tests/019-basic-trusty-mitaka mode change 100644 => 100755 tests/020-basic-wily-liberty mode change 100644 => 100755 tests/021-basic-xenial-mitaka diff --git a/tests/018-basic-trusty-liberty b/tests/018-basic-trusty-liberty old mode 100644 new mode 100755 diff --git a/tests/019-basic-trusty-mitaka b/tests/019-basic-trusty-mitaka old mode 100644 new mode 100755 diff --git a/tests/020-basic-wily-liberty b/tests/020-basic-wily-liberty old mode 100644 new mode 100755 diff --git a/tests/021-basic-xenial-mitaka b/tests/021-basic-xenial-mitaka old mode 100644 new mode 100755 From 
bd442598edf5436c38010e032d60742833e61f22 Mon Sep 17 00:00:00 2001 From: Ryan Beisner Date: Wed, 13 Jan 2016 21:33:59 +0000 Subject: [PATCH 15/23] add services and relations to satisfy required interfaces / workload status --- tests/basic_deployment.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/basic_deployment.py b/tests/basic_deployment.py index 88586eb..258a751 100644 --- a/tests/basic_deployment.py +++ b/tests/basic_deployment.py @@ -56,6 +56,7 @@ def _add_services(self): """ this_service = {'name': 'keystone'} other_services = [{'name': 'mysql'}, + {'name': 'rabbitmq-server'}, # satisfy wrkload stat {'name': 'cinder'}] super(KeystoneBasicDeployment, self)._add_services(this_service, other_services) @@ -63,6 +64,8 @@ def _add_services(self): def _add_relations(self): """Add all of the relations for the services.""" relations = {'keystone:shared-db': 'mysql:shared-db', + 'cinder:shared-db': 'mysql:shared-db', + 'cinder:amqp': 'rabbitmq-server:amqp', 'cinder:identity-service': 'keystone:identity-service'} super(KeystoneBasicDeployment, self)._add_relations(relations) From 600899a25714822a95931b28e8e780a867f299aa Mon Sep 17 00:00:00 2001 From: Ryan Beisner Date: Thu, 14 Jan 2016 19:09:25 +0000 Subject: [PATCH 16/23] disable xenial test, not quite yet juju-deployable with current tools/streams --- tests/021-basic-xenial-mitaka | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 tests/021-basic-xenial-mitaka diff --git a/tests/021-basic-xenial-mitaka b/tests/021-basic-xenial-mitaka old mode 100755 new mode 100644 From 34248cf5b20d0c956cf75b427a5657e5e552fe8d Mon Sep 17 00:00:00 2001 From: Liam Young Date: Tue, 19 Jan 2016 08:56:29 +0000 Subject: [PATCH 17/23] Add dnsaas --- hooks/keystone_utils.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/hooks/keystone_utils.py b/hooks/keystone_utils.py index 2612581..f16dd95 100644 --- a/hooks/keystone_utils.py +++ b/hooks/keystone_utils.py @@ -279,6 +279,10 @@ "ironic": { "type": "baremetal", "desc": "Ironic bare metal provisioning service" + }, + "designate": { + "type": "designate", + "desc": "DNSaaS" } } From 12f730e83dd657138a895b4ce566d6ca3d334c8d Mon Sep 17 00:00:00 2001 From: Ryan Beisner Date: Tue, 19 Jan 2016 12:47:45 +0000 Subject: [PATCH 18/23] Fix typo in mitaka amulet test definition --- tests/019-basic-trusty-mitaka | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/019-basic-trusty-mitaka b/tests/019-basic-trusty-mitaka index 01f555c..8e1f168 100644 --- a/tests/019-basic-trusty-mitaka +++ b/tests/019-basic-trusty-mitaka @@ -1,11 +1,11 @@ #!/usr/bin/python -"""Amulet tests on a basic keystone deployment on trusty-liberty.""" +"""Amulet tests on a basic keystone deployment on trusty-mitaka.""" from basic_deployment import KeystoneBasicDeployment if __name__ == '__main__': deployment = KeystoneBasicDeployment(series='trusty', - openstack='cloud:trusty-liberty', - source='cloud:trusty-updates/liberty') + openstack='cloud:trusty-mitaka', + source='cloud:trusty-updates/mitaka') deployment.run_tests() From 9ec811306544c39154c0930e72f32e1d75af2b38 Mon Sep 17 00:00:00 2001 From: Liam Young Date: Fri, 22 Jan 2016 11:43:45 +0000 Subject: [PATCH 19/23] Fix designate endpoint type --- hooks/keystone_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hooks/keystone_utils.py b/hooks/keystone_utils.py index f16dd95..2a92ee3 100644 --- a/hooks/keystone_utils.py +++ b/hooks/keystone_utils.py @@ -281,7 +281,7 @@ "desc": "Ironic bare metal provisioning 
service" }, "designate": { - "type": "designate", + "type": "dns", "desc": "DNSaaS" } } From b93a4fcf3ad5520c25bc1c2fea9773fab4f4fb53 Mon Sep 17 00:00:00 2001 From: Adam Gandelman Date: Wed, 3 Feb 2016 11:43:18 -0800 Subject: [PATCH 20/23] Add astara to services --- hooks/keystone_utils.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/hooks/keystone_utils.py b/hooks/keystone_utils.py index 0135008..24e1113 100644 --- a/hooks/keystone_utils.py +++ b/hooks/keystone_utils.py @@ -286,7 +286,11 @@ "ironic": { "type": "baremetal", "desc": "Ironic bare metal provisioning service" - } + }, + "astara": { + "type": "astara", + "desc": "Astara Network Orchestration Service", + }, } # The interface is said to be satisfied if anyone of the interfaces in the From c617aa5624e99e5cb08e23ac9733eed9c7f99295 Mon Sep 17 00:00:00 2001 From: Edward Hope-Morley Date: Thu, 11 Feb 2016 14:43:33 +0000 Subject: [PATCH 21/23] [hopem,r=] Sync charmhelpers to get fix for bug 1518975 --- .../contrib/openstack/amulet/deployment.py | 5 +- charmhelpers/contrib/openstack/context.py | 18 +- charmhelpers/contrib/openstack/neutron.py | 24 ++- charmhelpers/contrib/openstack/utils.py | 155 +++++++++++++----- charmhelpers/contrib/python/packages.py | 29 +++- charmhelpers/core/host.py | 67 +++++--- charmhelpers/fetch/giturl.py | 4 +- .../contrib/openstack/amulet/deployment.py | 5 +- 8 files changed, 211 insertions(+), 96 deletions(-) diff --git a/charmhelpers/contrib/openstack/amulet/deployment.py b/charmhelpers/contrib/openstack/amulet/deployment.py index 58b1a79..d2ede32 100644 --- a/charmhelpers/contrib/openstack/amulet/deployment.py +++ b/charmhelpers/contrib/openstack/amulet/deployment.py @@ -121,11 +121,12 @@ def _add_services(self, this_service, other_services): # Charms which should use the source config option use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', - 'ceph-osd', 'ceph-radosgw'] + 'ceph-osd', 'ceph-radosgw', 'ceph-mon'] # Charms which can not use openstack-origin, ie. 
many subordinates no_origin = ['cinder-ceph', 'hacluster', 'neutron-openvswitch', 'nrpe', - 'openvswitch-odl', 'neutron-api-odl', 'odl-controller'] + 'openvswitch-odl', 'neutron-api-odl', 'odl-controller', + 'cinder-backup'] if self.openstack: for svc in services: diff --git a/charmhelpers/contrib/openstack/context.py b/charmhelpers/contrib/openstack/context.py index e930bf3..ff597c9 100644 --- a/charmhelpers/contrib/openstack/context.py +++ b/charmhelpers/contrib/openstack/context.py @@ -90,6 +90,12 @@ from charmhelpers.contrib.openstack.utils import get_host_ip from charmhelpers.core.unitdata import kv +try: + import psutil +except ImportError: + apt_install('python-psutil', fatal=True) + import psutil + CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' ADDRESS_TYPES = ['admin', 'internal', 'public'] @@ -1258,13 +1264,11 @@ class WorkerConfigContext(OSContextGenerator): @property def num_cpus(self): - try: - from psutil import NUM_CPUS - except ImportError: - apt_install('python-psutil', fatal=True) - from psutil import NUM_CPUS - - return NUM_CPUS + # NOTE: use cpu_count if present (16.04 support) + if hasattr(psutil, 'cpu_count'): + return psutil.cpu_count() + else: + return psutil.NUM_CPUS def __call__(self): multiplier = config('worker-multiplier') or 0 diff --git a/charmhelpers/contrib/openstack/neutron.py b/charmhelpers/contrib/openstack/neutron.py index d17c847..dbc489a 100644 --- a/charmhelpers/contrib/openstack/neutron.py +++ b/charmhelpers/contrib/openstack/neutron.py @@ -50,7 +50,7 @@ def determine_dkms_package(): if kernel_version() >= (3, 13): return [] else: - return ['openvswitch-datapath-dkms'] + return [headers_package(), 'openvswitch-datapath-dkms'] # legacy @@ -70,7 +70,7 @@ def quantum_plugins(): relation_prefix='neutron', ssl_dir=QUANTUM_CONF_DIR)], 'services': ['quantum-plugin-openvswitch-agent'], - 'packages': [[headers_package()] + determine_dkms_package(), + 'packages': [determine_dkms_package(), ['quantum-plugin-openvswitch-agent']], 'server_packages': ['quantum-server', 'quantum-plugin-openvswitch'], @@ -111,7 +111,7 @@ def neutron_plugins(): relation_prefix='neutron', ssl_dir=NEUTRON_CONF_DIR)], 'services': ['neutron-plugin-openvswitch-agent'], - 'packages': [[headers_package()] + determine_dkms_package(), + 'packages': [determine_dkms_package(), ['neutron-plugin-openvswitch-agent']], 'server_packages': ['neutron-server', 'neutron-plugin-openvswitch'], @@ -155,7 +155,7 @@ def neutron_plugins(): relation_prefix='neutron', ssl_dir=NEUTRON_CONF_DIR)], 'services': [], - 'packages': [[headers_package()] + determine_dkms_package(), + 'packages': [determine_dkms_package(), ['neutron-plugin-cisco']], 'server_packages': ['neutron-server', 'neutron-plugin-cisco'], @@ -174,7 +174,7 @@ def neutron_plugins(): 'neutron-dhcp-agent', 'nova-api-metadata', 'etcd'], - 'packages': [[headers_package()] + determine_dkms_package(), + 'packages': [determine_dkms_package(), ['calico-compute', 'bird', 'neutron-dhcp-agent', @@ -219,7 +219,7 @@ def neutron_plugins(): relation_prefix='neutron', ssl_dir=NEUTRON_CONF_DIR)], 'services': [], - 'packages': [[headers_package()] + determine_dkms_package()], + 'packages': [determine_dkms_package()], 'server_packages': ['neutron-server', 'python-neutron-plugin-midonet'], 'server_services': ['neutron-server'] @@ -233,6 +233,18 @@ def neutron_plugins(): 'neutron-plugin-ml2'] # NOTE: patch in vmware renames nvp->nsx for icehouse onwards plugins['nvp'] = plugins['nsx'] + if release >= 'kilo': + plugins['midonet']['driver'] = 
( + 'neutron.plugins.midonet.plugin.MidonetPluginV2') + if release >= 'liberty': + midonet_origin = config('midonet-origin') + if midonet_origin is not None and midonet_origin[4:5] == '1': + plugins['midonet']['driver'] = ( + 'midonet.neutron.plugin_v1.MidonetPluginV2') + plugins['midonet']['server_packages'].remove( + 'python-neutron-plugin-midonet') + plugins['midonet']['server_packages'].append( + 'python-networking-midonet') return plugins diff --git a/charmhelpers/contrib/openstack/utils.py b/charmhelpers/contrib/openstack/utils.py index 2af4476..8d9a01d 100644 --- a/charmhelpers/contrib/openstack/utils.py +++ b/charmhelpers/contrib/openstack/utils.py @@ -25,6 +25,7 @@ import re import six +import tempfile import traceback import uuid import yaml @@ -41,6 +42,7 @@ config, log as juju_log, charm_dir, + DEBUG, INFO, related_units, relation_ids, @@ -103,29 +105,28 @@ ('2016.1', 'mitaka'), ]) -# The ugly duckling +# The ugly duckling - must list releases oldest to newest SWIFT_CODENAMES = OrderedDict([ - ('1.4.3', 'diablo'), - ('1.4.8', 'essex'), - ('1.7.4', 'folsom'), - ('1.8.0', 'grizzly'), - ('1.7.7', 'grizzly'), - ('1.7.6', 'grizzly'), - ('1.10.0', 'havana'), - ('1.9.1', 'havana'), - ('1.9.0', 'havana'), - ('1.13.1', 'icehouse'), - ('1.13.0', 'icehouse'), - ('1.12.0', 'icehouse'), - ('1.11.0', 'icehouse'), - ('2.0.0', 'juno'), - ('2.1.0', 'juno'), - ('2.2.0', 'juno'), - ('2.2.1', 'kilo'), - ('2.2.2', 'kilo'), - ('2.3.0', 'liberty'), - ('2.4.0', 'liberty'), - ('2.5.0', 'liberty'), + ('diablo', + ['1.4.3']), + ('essex', + ['1.4.8']), + ('folsom', + ['1.7.4']), + ('grizzly', + ['1.7.6', '1.7.7', '1.8.0']), + ('havana', + ['1.9.0', '1.9.1', '1.10.0']), + ('icehouse', + ['1.11.0', '1.12.0', '1.13.0', '1.13.1']), + ('juno', + ['2.0.0', '2.1.0', '2.2.0']), + ('kilo', + ['2.2.1', '2.2.2']), + ('liberty', + ['2.3.0', '2.4.0', '2.5.0']), + ('mitaka', + ['2.5.0']), ]) # >= Liberty version->codename mapping @@ -227,6 +228,33 @@ def get_os_version_codename(codename, version_map=OPENSTACK_CODENAMES): error_out(e) +def get_os_version_codename_swift(codename): + '''Determine OpenStack version number of swift from codename.''' + for k, v in six.iteritems(SWIFT_CODENAMES): + if k == codename: + return v[-1] + e = 'Could not derive swift version for '\ + 'codename: %s' % codename + error_out(e) + + +def get_swift_codename(version): + '''Determine OpenStack codename that corresponds to swift version.''' + codenames = [k for k, v in six.iteritems(SWIFT_CODENAMES) if version in v] + if len(codenames) > 1: + # If more than one release codename contains this version we determine + # the actual codename based on the highest available install source. 
+ for codename in reversed(codenames): + releases = UBUNTU_OPENSTACK_RELEASE + release = [k for k, v in six.iteritems(releases) if codename in v] + ret = subprocess.check_output(['apt-cache', 'policy', 'swift']) + if codename in ret or release[0] in ret: + return codename + elif len(codenames) == 1: + return codenames[0] + return None + + def get_os_codename_package(package, fatal=True): '''Derive OpenStack release codename from an installed package.''' import apt_pkg as apt @@ -270,7 +298,7 @@ def get_os_codename_package(package, fatal=True): # < Liberty co-ordinated project versions try: if 'swift' in pkg.name: - return SWIFT_CODENAMES[vers] + return get_swift_codename(vers) else: return OPENSTACK_CODENAMES[vers] except KeyError: @@ -289,12 +317,14 @@ def get_os_version_package(pkg, fatal=True): if 'swift' in pkg: vers_map = SWIFT_CODENAMES + for cname, version in six.iteritems(vers_map): + if cname == codename: + return version[-1] else: vers_map = OPENSTACK_CODENAMES - - for version, cname in six.iteritems(vers_map): - if cname == codename: - return version + for version, cname in six.iteritems(vers_map): + if cname == codename: + return version # e = "Could not determine OpenStack version for package: %s" % pkg # error_out(e) @@ -318,13 +348,43 @@ def os_release(package, base='essex'): return os_rel -def import_key(keyid): - cmd = "apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 " \ - "--recv-keys %s" % keyid - try: - subprocess.check_call(cmd.split(' ')) - except subprocess.CalledProcessError: - error_out("Error importing repo key %s" % keyid) +def import_pgp_key(key): + key = key.strip() + if (key.startswith('-----BEGIN PGP PUBLIC KEY BLOCK-----') and + key.endswith('-----END PGP PUBLIC KEY BLOCK-----')): + juju_log("PGP key found (looks like ASCII Armor format)", level=DEBUG) + juju_log("Importing ASCII Armor PGP key", level=DEBUG) + with tempfile.NamedTemporaryFile() as keyfile: + with open(keyfile.name, 'w') as fd: + fd.write(key) + fd.write("\n") + + cmd = ['apt-key', 'add', keyfile.name] + try: + subprocess.check_call(cmd) + except subprocess.CalledProcessError: + error_out("Error importing PGP key '%s'" % key) + else: + juju_log("PGP key found (looks like Radix64 format)", level=DEBUG) + juju_log("Importing PGP key from keyserver", level=DEBUG) + cmd = ['apt-key', 'adv', '--keyserver', + 'hkp://keyserver.ubuntu.com:80', '--recv-keys', key] + try: + subprocess.check_call(cmd) + except subprocess.CalledProcessError: + error_out("Error importing PGP key '%s'" % key) + + +def get_source_and_pgp_key(input): + """Look for a pgp key ID or ascii-armor key in the given input.""" + index = input.strip() + index = input.rfind('|') + if index < 0: + return input, None + + key = input[index + 1:].strip('|') + source = input[:index] + return source, key def configure_installation_source(rel): @@ -336,16 +396,16 @@ def configure_installation_source(rel): with open('/etc/apt/sources.list.d/juju_deb.list', 'w') as f: f.write(DISTRO_PROPOSED % ubuntu_rel) elif rel[:4] == "ppa:": - src = rel + src, key = get_source_and_pgp_key(rel) + if key: + import_pgp_key(key) + subprocess.check_call(["add-apt-repository", "-y", src]) elif rel[:3] == "deb": - l = len(rel.split('|')) - if l == 2: - src, key = rel.split('|') - juju_log("Importing PPA key from keyserver for %s" % src) - import_key(key) - elif l == 1: - src = rel + src, key = get_source_and_pgp_key(rel) + if key: + import_pgp_key(key) + with open('/etc/apt/sources.list.d/juju_deb.list', 'w') as f: f.write(src) elif rel[:6] == 'cloud:': @@ 
-460,11 +520,16 @@ def openstack_upgrade_available(package): cur_vers = get_os_version_package(package) if "swift" in package: codename = get_os_codename_install_source(src) - available_vers = get_os_version_codename(codename, SWIFT_CODENAMES) + avail_vers = get_os_version_codename_swift(codename) else: - available_vers = get_os_version_install_source(src) + avail_vers = get_os_version_install_source(src) apt.init() - return apt.version_compare(available_vers, cur_vers) == 1 + if "swift" in package: + major_cur_vers = cur_vers.split('.', 1)[0] + major_avail_vers = avail_vers.split('.', 1)[0] + major_diff = apt.version_compare(major_avail_vers, major_cur_vers) + return avail_vers > cur_vers and (major_diff == 1 or major_diff == 0) + return apt.version_compare(avail_vers, cur_vers) == 1 def ensure_block_device(block_device): diff --git a/charmhelpers/contrib/python/packages.py b/charmhelpers/contrib/python/packages.py index 8dcd6dd..a2411c3 100644 --- a/charmhelpers/contrib/python/packages.py +++ b/charmhelpers/contrib/python/packages.py @@ -19,20 +19,35 @@ import os import subprocess +import sys from charmhelpers.fetch import apt_install, apt_update from charmhelpers.core.hookenv import charm_dir, log -try: - from pip import main as pip_execute -except ImportError: - apt_update() - apt_install('python-pip') - from pip import main as pip_execute - __author__ = "Jorge Niedbalski " +def pip_execute(*args, **kwargs): + """Overriden pip_execute() to stop sys.path being changed. + + The act of importing main from the pip module seems to cause add wheels + from the /usr/share/python-wheels which are installed by various tools. + This function ensures that sys.path remains the same after the call is + executed. + """ + try: + _path = sys.path + try: + from pip import main as _pip_execute + except ImportError: + apt_update() + apt_install('python-pip') + from pip import main as _pip_execute + _pip_execute(*args, **kwargs) + finally: + sys.path = _path + + def parse_options(given, available): """Given a set of options, check if available""" for key, value in sorted(given.items()): diff --git a/charmhelpers/core/host.py b/charmhelpers/core/host.py index 710fdab..a772090 100644 --- a/charmhelpers/core/host.py +++ b/charmhelpers/core/host.py @@ -138,7 +138,8 @@ def service_running(service_name): except subprocess.CalledProcessError: return False else: - if ("start/running" in output or "is running" in output): + if ("start/running" in output or "is running" in output or + "up and running" in output): return True else: return False @@ -160,13 +161,13 @@ def service_available(service_name): def init_is_systemd(): + """Return True if the host system uses systemd, False otherwise.""" return os.path.isdir(SYSTEMD_SYSTEM) def adduser(username, password=None, shell='/bin/bash', system_user=False, primary_group=None, secondary_groups=None): - """ - Add a user to the system. + """Add a user to the system. Will log but otherwise succeed if the user already exists. 
@@ -174,7 +175,7 @@ def adduser(username, password=None, shell='/bin/bash', system_user=False, :param str password: Password for user; if ``None``, create a system user :param str shell: The default shell for the user :param bool system_user: Whether to create a login or system user - :param str primary_group: Primary group for user; defaults to their username + :param str primary_group: Primary group for user; defaults to username :param list secondary_groups: Optional list of additional groups :returns: The password database entry struct, as returned by `pwd.getpwnam` @@ -300,14 +301,12 @@ def write_file(path, content, owner='root', group='root', perms=0o444): def fstab_remove(mp): - """Remove the given mountpoint entry from /etc/fstab - """ + """Remove the given mountpoint entry from /etc/fstab""" return Fstab.remove_by_mountpoint(mp) def fstab_add(dev, mp, fs, options=None): - """Adds the given device entry to the /etc/fstab file - """ + """Adds the given device entry to the /etc/fstab file""" return Fstab.add(dev, mp, fs, options=options) @@ -363,8 +362,7 @@ def fstab_mount(mountpoint): def file_hash(path, hash_type='md5'): - """ - Generate a hash checksum of the contents of 'path' or None if not found. + """Generate a hash checksum of the contents of 'path' or None if not found. :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`, such as md5, sha1, sha256, sha512, etc. @@ -379,10 +377,9 @@ def file_hash(path, hash_type='md5'): def path_hash(path): - """ - Generate a hash checksum of all files matching 'path'. Standard wildcards - like '*' and '?' are supported, see documentation for the 'glob' module for - more information. + """Generate a hash checksum of all files matching 'path'. Standard + wildcards like '*' and '?' are supported, see documentation for the 'glob' + module for more information. :return: dict: A { filename: hash } dictionary for all matched files. Empty if none found. @@ -394,8 +391,7 @@ def path_hash(path): def check_hash(path, checksum, hash_type='md5'): - """ - Validate a file using a cryptographic checksum. + """Validate a file using a cryptographic checksum. :param str checksum: Value of the checksum used to validate the file. :param str hash_type: Hash algorithm used to generate `checksum`. 
@@ -410,6 +406,7 @@ def check_hash(path, checksum, hash_type='md5'): class ChecksumError(ValueError): + """A class derived from Value error to indicate the checksum failed.""" pass @@ -515,7 +512,7 @@ def get_bond_master(interface): def list_nics(nic_type=None): - '''Return a list of nics of given type(s)''' + """Return a list of nics of given type(s)""" if isinstance(nic_type, six.string_types): int_types = [nic_type] else: @@ -557,12 +554,13 @@ def list_nics(nic_type=None): def set_nic_mtu(nic, mtu): - '''Set MTU on a network interface''' + """Set the Maximum Transmission Unit (MTU) on a network interface.""" cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] subprocess.check_call(cmd) def get_nic_mtu(nic): + """Return the Maximum Transmission Unit (MTU) for a network interface.""" cmd = ['ip', 'addr', 'show', nic] ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') mtu = "" @@ -574,6 +572,7 @@ def get_nic_mtu(nic): def get_nic_hwaddr(nic): + """Return the Media Access Control (MAC) for a network interface.""" cmd = ['ip', '-o', '-0', 'addr', 'show', nic] ip_output = subprocess.check_output(cmd).decode('UTF-8') hwaddr = "" @@ -584,7 +583,7 @@ def get_nic_hwaddr(nic): def cmp_pkgrevno(package, revno, pkgcache=None): - '''Compare supplied revno with the revno of the installed package + """Compare supplied revno with the revno of the installed package * 1 => Installed revno is greater than supplied arg * 0 => Installed revno is the same as supplied arg @@ -593,7 +592,7 @@ def cmp_pkgrevno(package, revno, pkgcache=None): This function imports apt_cache function from charmhelpers.fetch if the pkgcache argument is None. Be sure to add charmhelpers.fetch if you call this function, or pass an apt_pkg.Cache() instance. - ''' + """ import apt_pkg if not pkgcache: from charmhelpers.fetch import apt_cache @@ -603,19 +602,27 @@ def cmp_pkgrevno(package, revno, pkgcache=None): @contextmanager -def chdir(d): +def chdir(directory): + """Change the current working directory to a different directory for a code + block and return the previous directory after the block exits. Useful to + run commands from a specificed directory. + + :param str directory: The directory path to change to for this context. + """ cur = os.getcwd() try: - yield os.chdir(d) + yield os.chdir(directory) finally: os.chdir(cur) def chownr(path, owner, group, follow_links=True, chowntopdir=False): - """ - Recursively change user and group ownership of files and directories + """Recursively change user and group ownership of files and directories in given path. Doesn't chown path itself by default, only its children. + :param str path: The string path to start changing ownership. + :param str owner: The owner string to use when looking up the uid. + :param str group: The group string to use when looking up the gid. :param bool follow_links: Also Chown links if True :param bool chowntopdir: Also chown path itself if True """ @@ -639,15 +646,23 @@ def chownr(path, owner, group, follow_links=True, chowntopdir=False): def lchownr(path, owner, group): + """Recursively change user and group ownership of files and directories + in a given path, not following symbolic links. See the documentation for + 'os.lchown' for more information. + + :param str path: The string path to start changing ownership. + :param str owner: The owner string to use when looking up the uid. + :param str group: The group string to use when looking up the gid. 
+ """ chownr(path, owner, group, follow_links=False) def get_total_ram(): - '''The total amount of system RAM in bytes. + """The total amount of system RAM in bytes. This is what is reported by the OS, and may be overcommitted when there are multiple containers hosted on the same machine. - ''' + """ with open('/proc/meminfo', 'r') as f: for line in f.readlines(): if line: diff --git a/charmhelpers/fetch/giturl.py b/charmhelpers/fetch/giturl.py index 9ad8dc6..65ed531 100644 --- a/charmhelpers/fetch/giturl.py +++ b/charmhelpers/fetch/giturl.py @@ -15,7 +15,7 @@ # along with charm-helpers. If not, see . import os -from subprocess import check_call +from subprocess import check_call, CalledProcessError from charmhelpers.fetch import ( BaseFetchHandler, UnhandledSource, @@ -63,6 +63,8 @@ def install(self, source, branch="master", dest=None, depth=None): branch_name) try: self.clone(source, dest_dir, branch, depth) + except CalledProcessError as e: + raise UnhandledSource(e) except OSError as e: raise UnhandledSource(e.strerror) return dest_dir diff --git a/tests/charmhelpers/contrib/openstack/amulet/deployment.py b/tests/charmhelpers/contrib/openstack/amulet/deployment.py index 58b1a79..d2ede32 100644 --- a/tests/charmhelpers/contrib/openstack/amulet/deployment.py +++ b/tests/charmhelpers/contrib/openstack/amulet/deployment.py @@ -121,11 +121,12 @@ def _add_services(self, this_service, other_services): # Charms which should use the source config option use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', - 'ceph-osd', 'ceph-radosgw'] + 'ceph-osd', 'ceph-radosgw', 'ceph-mon'] # Charms which can not use openstack-origin, ie. many subordinates no_origin = ['cinder-ceph', 'hacluster', 'neutron-openvswitch', 'nrpe', - 'openvswitch-odl', 'neutron-api-odl', 'odl-controller'] + 'openvswitch-odl', 'neutron-api-odl', 'odl-controller', + 'cinder-backup'] if self.openstack: for svc in services: From 6031103ed8c204fb92a7c9222ef0d265a75945ff Mon Sep 17 00:00:00 2001 From: Edward Hope-Morley Date: Thu, 11 Feb 2016 15:44:13 +0000 Subject: [PATCH 22/23] charm-helpers sync --- charmhelpers/contrib/openstack/utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/charmhelpers/contrib/openstack/utils.py b/charmhelpers/contrib/openstack/utils.py index 8d9a01d..8077712 100644 --- a/charmhelpers/contrib/openstack/utils.py +++ b/charmhelpers/contrib/openstack/utils.py @@ -348,8 +348,8 @@ def os_release(package, base='essex'): return os_rel -def import_pgp_key(key): - key = key.strip() +def import_key(keyid): + key = keyid.strip() if (key.startswith('-----BEGIN PGP PUBLIC KEY BLOCK-----') and key.endswith('-----END PGP PUBLIC KEY BLOCK-----')): juju_log("PGP key found (looks like ASCII Armor format)", level=DEBUG) @@ -398,13 +398,13 @@ def configure_installation_source(rel): elif rel[:4] == "ppa:": src, key = get_source_and_pgp_key(rel) if key: - import_pgp_key(key) + import_key(key) subprocess.check_call(["add-apt-repository", "-y", src]) elif rel[:3] == "deb": src, key = get_source_and_pgp_key(rel) if key: - import_pgp_key(key) + import_key(key) with open('/etc/apt/sources.list.d/juju_deb.list', 'w') as f: f.write(src) From be5d1d2570993d18af0e7f3215c36ada15b52eb3 Mon Sep 17 00:00:00 2001 From: James Page Date: Tue, 16 Feb 2016 07:03:45 +0000 Subject: [PATCH 23/23] Tidy tox targets --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 4e328e4..e8bf7cf 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = lint,py27 
+envlist = pep8,py27 skipsdist = True [testenv] @@ -14,7 +14,7 @@ basepython = python2.7 deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt -[testenv:lint] +[testenv:pep8] basepython = python2.7 deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt
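
A minimal usage sketch for the renamed target (assuming nothing else in the
charm's Makefile or CI configuration still refers to the old "lint" name):
after this final patch the style checks and unit tests would presumably be
invoked as

    tox -e pep8
    tox -e py27

rather than "tox -e lint", bringing the environment names in line with the
pep8/py27 convention used across the other OpenStack charms.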