feat(tiller): Enhance Tiller pod search with labels
- Add labels to Tiller pod searching
- Add Tiller pod labels to default config options
- Add exception for case when Tiller pod cannot be found using labels
- Add exception for case when Tiller pod is not in running state
- Update exception documentation

Closes #172
Change-Id: I7e54c4b4a60638bca1073457c256030344832ef9
parent 4470f4ec64
commit 76906751f6
@@ -66,6 +66,11 @@ The Keystone project domain name used for authentication.
         default='/home/user/.ssh/',
         help=utils.fmt('Path to SSH private key.')),

+    cfg.StrOpt(
+        'tiller_pod_labels',
+        default='app=helm,name=tiller',
+        help=utils.fmt('Labels for the tiller pod.')),
+
     cfg.ListOpt(
         'tiller_release_roles',
         default=['admin'],
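For reference, a minimal standalone sketch (not part of this diff) of how the new tiller_pod_labels option behaves once registered with oslo.config; the registration below simply mirrors the StrOpt added in the hunk above:

    from oslo_config import cfg

    CONF = cfg.CONF
    # Hypothetical standalone registration mirroring the option added above.
    CONF.register_opts([
        cfg.StrOpt('tiller_pod_labels',
                   default='app=helm,name=tiller',
                   help='Labels for the tiller pod.'),
    ])

    # The value is a comma-separated Kubernetes label selector string.
    print(CONF.tiller_pod_labels)  # -> app=helm,name=tiller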
@@ -123,6 +123,23 @@ class GetReleaseContentException(TillerException):
         super(GetReleaseContentException, self).__init__(message)


+class TillerPodNotFoundException(TillerException):
+    '''Exception that occurs when a tiller pod cannot be found using the
+    labels specified in the Armada config.
+    '''
+
+    def __init__(self, labels):
+        message = 'Could not find tiller pod with labels "{}"'.format(labels)
+
+        super(TillerPodNotFoundException, self).__init__(message)
+
+
+class TillerPodNotRunningException(TillerException):
+    '''Exception that occurs when no tiller pod is found in a running state'''
+
+    message = 'No tiller pods found in running state'
+
+
 class TillerVersionException(TillerException):
     '''Exception that occurs during a failed Release Testing'''

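A short usage sketch showing how callers can distinguish the two new failure modes; the import path for Armada's exceptions module and the tiller instance are assumptions for illustration only:

    import logging

    # Assumed import path; in the handler below the module is aliased as `ex`.
    from armada import exceptions as ex

    LOG = logging.getLogger(__name__)

    try:
        pod = tiller._get_tiller_pod()
    except ex.TillerPodNotFoundException as e:
        # No pod matched CONF.tiller_pod_labels in the kube-system namespace.
        LOG.error('Tiller lookup failed: %s', e)
        raise
    except ex.TillerPodNotRunningException as e:
        # Pods matched the labels, but none reported phase == 'Running'.
        LOG.error('Tiller not ready: %s', e)
        raise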
@@ -99,12 +99,22 @@ class Tiller(object):

     def _get_tiller_pod(self):
         '''
-        Search all namespaces for a pod beginning with tiller-deploy*
+        Returns tiller pod using the tiller pod labels specified in the Armada
+        config
         '''
-        for i in self.k8s.get_namespace_pod('kube-system').items:
-            # TODO(alanmeadows): this is a bit loose
-            if i.metadata.name.startswith('tiller-deploy'):
-                return i
+        pods = self.k8s.get_namespace_pod('kube-system',
+                                          CONF.tiller_pod_labels).items
+        # No tiller pods found
+        if not pods:
+            raise ex.TillerPodNotFoundException(CONF.tiller_pod_labels)
+
+        # Return first tiller pod in running state
+        for pod in pods:
+            if pod.status.phase == 'Running':
+                return pod
+
+        # No tiller pod found in running state
+        raise ex.TillerPodNotRunningException()

     def _get_tiller_ip(self):
         '''
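The k8s.get_namespace_pod() call is Armada's own wrapper; as a rough sketch of what the label-filtered lookup corresponds to in the official Kubernetes Python client, assuming the wrapper passes the configured string through as a label_selector:

    from kubernetes import client, config

    # Load credentials from the local kubeconfig (or use
    # config.load_incluster_config() when running inside a pod).
    config.load_kube_config()
    v1 = client.CoreV1Api()

    # List pods in kube-system matching the configured label selector.
    pods = v1.list_namespaced_pod(
        'kube-system', label_selector='app=helm,name=tiller').items

    # Keep only pods that are actually running, as the new method does.
    running = [p for p in pods if p.status.phase == 'Running']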
@@ -31,6 +31,10 @@ Tiller Exceptions
 +------------------------------------+--------------------------------------------------------------------------------------------+
 | ReleaseUpdateException             | A release failed to update.                                                                  |
 +------------------------------------+--------------------------------------------------------------------------------------------+
+| TillerPodNotFoundException         | Tiller pod could not be found using the labels specified in the Armada config.              |
++------------------------------------+--------------------------------------------------------------------------------------------+
+| TillerPodNotRunningException       | Tiller pod was found but is not in a running state.                                         |
++------------------------------------+--------------------------------------------------------------------------------------------+
 | TillerServicesUnavailableException | Occurs when Tiller services are unavailable.                                                 |
 +------------------------------------+--------------------------------------------------------------------------------------------+

@@ -25,6 +25,9 @@
 # Path to SSH private key. (string value)
 #ssh_key_path = /home/user/.ssh/

+# Labels for the tiller pod. (string value)
+#tiller_pod_labels = app=helm,name=tiller
+
 # IDs of approved API access roles. (list value)
 #tiller_release_roles = admin

@@ -88,6 +91,10 @@
 # log_config_append is set. (string value)
 #syslog_log_facility = LOG_USER

+# Use JSON formatting for logging. This option is ignored if log_config_append
+# is set. (boolean value)
+#use_json = false
+
 # Log output to standard error. This option is ignored if log_config_append is
 # set. (boolean value)
 #use_stderr = false
@@ -439,3 +446,20 @@
 # directories to be searched. Missing or empty directories are ignored. (multi
 # valued)
 #policy_dirs = policy.d
+
+# Content Type to send and receive data for REST based policy check (string
+# value)
+# Allowed values: application/x-www-form-urlencoded, application/json
+#remote_content_type = application/x-www-form-urlencoded
+
+# server identity verification for REST based policy check (boolean value)
+#remote_ssl_verify_server_crt = false
+
+# Absolute path to ca cert file for REST based policy check (string value)
+#remote_ssl_ca_crt_file = <None>
+
+# Absolute path to client cert for REST based policy check (string value)
+#remote_ssl_client_crt_file = <None>
+
+# Absolute path client key file REST based policy check (string value)
+#remote_ssl_client_key_file = <None>