Add support to '/sync' aka sync all subscription
When adding a new subscription check for an existing matching one, considering the source uri hierarchy. Deny a new individual subscription if there is already a sync all subscription, and deny a new sync all if there is already an individual one. After a new sync all subscription is created, a set of event messages is sent to the client containing the initial state of each source down in the hierarchy. And, every time one of the source states changes a new message is sent. Test Plan: PASS: Build the container images PASS: Manually deploy them and test with v2 client PASS: Create a '/././sync' subscription and check the event messages PASS: Check current subscription list PASS: Change GNSS sync state and check the event messages PASS: Attempt to create a new individual subscription and check it fails PASS: Delete the '/././sync' subscription PASS: Check current subscription list again Closes-bug: 2009188 Signed-off-by: Andre Mauricio Zelak <andre.zelak@windriver.com> Change-Id: I90b642e73f30fb1798f4a93ab5313411c177949c
This commit is contained in:
parent
6ab1d6010f
commit
1ac45c8f43
@ -1,5 +1,5 @@
|
|||||||
#
|
#
|
||||||
# Copyright (c) 2021-2022 Wind River Systems, Inc.
|
# Copyright (c) 2021-2023 Wind River Systems, Inc.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
#
|
#
|
||||||
@ -24,9 +24,10 @@ def notify(subscriptioninfo, notification, timeout=2, retry=3):
|
|||||||
retry = retry - 1
|
retry = retry - 1
|
||||||
try:
|
try:
|
||||||
headers = {'Content-Type': 'application/json'}
|
headers = {'Content-Type': 'application/json'}
|
||||||
data = format_notification_data(subscriptioninfo, notification)
|
|
||||||
data = json.dumps(data)
|
|
||||||
url = subscriptioninfo.EndpointUri
|
url = subscriptioninfo.EndpointUri
|
||||||
|
for item in notification:
|
||||||
|
data = format_notification_data(subscriptioninfo, {item: notification[item]})
|
||||||
|
data = json.dumps(data)
|
||||||
response = requests.post(url, data=data, headers=headers,
|
response = requests.post(url, data=data, headers=headers,
|
||||||
timeout=timeout)
|
timeout=timeout)
|
||||||
response.raise_for_status()
|
response.raise_for_status()
|
||||||
|
@ -44,6 +44,13 @@ class InvalidSubscription(Exception):
|
|||||||
return "Subscription is invalid:{0}".format(
|
return "Subscription is invalid:{0}".format(
|
||||||
self.subscriptioninfo.to_dict())
|
self.subscriptioninfo.to_dict())
|
||||||
|
|
||||||
|
class SubscriptionAlreadyExists(Exception):
|
||||||
|
def __init__(self, subscriptioninfo):
|
||||||
|
self.subscriptioninfo = subscriptioninfo
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "Subscription already exists: {0}".format(
|
||||||
|
self.subscriptioninfo)
|
||||||
|
|
||||||
class ServiceError(Exception):
|
class ServiceError(Exception):
|
||||||
def __init__(self, code, *args):
|
def __init__(self, code, *args):
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
#
|
#
|
||||||
# Copyright (c) 2021-2022 Wind River Systems, Inc.
|
# Copyright (c) 2021-2023 Wind River Systems, Inc.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
#
|
#
|
||||||
@ -82,7 +82,9 @@ class NotificationHandler(NotificationHandlerBase):
|
|||||||
if entry.ResourceAddress:
|
if entry.ResourceAddress:
|
||||||
_, entry_node_name, entry_resource_path, _, _ = \
|
_, entry_node_name, entry_resource_path, _, _ = \
|
||||||
subscription_helper.parse_resource_address(entry.ResourceAddress)
|
subscription_helper.parse_resource_address(entry.ResourceAddress)
|
||||||
if entry_resource_path not in resource_address:
|
_, _, event_resource_path, _, _ = \
|
||||||
|
subscription_helper.parse_resource_address(resource_address)
|
||||||
|
if not event_resource_path.startswith(entry_resource_path):
|
||||||
continue
|
continue
|
||||||
subscription_dto2 = SubscriptionInfoV2(entry)
|
subscription_dto2 = SubscriptionInfoV2(entry)
|
||||||
else:
|
else:
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
#
|
#
|
||||||
# Copyright (c) 2021-2022 Wind River Systems, Inc.
|
# Copyright (c) 2021-2023 Wind River Systems, Inc.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
#
|
#
|
||||||
@ -127,75 +127,97 @@ class PtpService(object):
|
|||||||
notificationservice_client.cleanup()
|
notificationservice_client.cleanup()
|
||||||
del notificationservice_client
|
del notificationservice_client
|
||||||
|
|
||||||
|
def _match_resource_address(self, resource_address_a, resource_address_b):
|
||||||
|
|
||||||
|
clustername_a, nodename_a, resource_path_a, _, _ = \
|
||||||
|
subscription_helper.parse_resource_address(
|
||||||
|
resource_address_a)
|
||||||
|
|
||||||
|
clustername_b, nodename_b, resource_path_b, _, _ = \
|
||||||
|
subscription_helper.parse_resource_address(
|
||||||
|
resource_address_b)
|
||||||
|
|
||||||
|
# Compare cluster names
|
||||||
|
if clustername_a != clustername_b:
|
||||||
|
return False, "clusterName {0} is different from {1}".format(
|
||||||
|
clustername_a, clustername_b)
|
||||||
|
|
||||||
|
# Compare node names
|
||||||
|
# If one of them is '*' skip comparison
|
||||||
|
if nodename_a != constants.WILDCARD_ALL_NODES and \
|
||||||
|
nodename_b != constants.WILDCARD_ALL_NODES:
|
||||||
|
|
||||||
|
# If one of the nodename is '.' replace by
|
||||||
|
# current node.
|
||||||
|
if nodename_a == constants.WILDCARD_CURRENT_NODE:
|
||||||
|
nodename_a = self.daemon_control.get_residing_nodename()
|
||||||
|
if nodename_b == constants.WILDCARD_CURRENT_NODE:
|
||||||
|
nodename_b = self.daemon_control.get_residing_nodename()
|
||||||
|
|
||||||
|
if nodename_a != nodename_b:
|
||||||
|
return False, "nodeName {0} is different from {1}".format(
|
||||||
|
nodename_a, nodename_b)
|
||||||
|
|
||||||
|
# Compare resource path
|
||||||
|
if resource_path_a == resource_path_b:
|
||||||
|
return True, "resourceAddress {0} is equal to {1}".format(
|
||||||
|
resource_address_a, resource_address_b)
|
||||||
|
|
||||||
|
if resource_path_a.startswith(resource_path_b):
|
||||||
|
return True, "resourceAddress {1} contains {0}".format(
|
||||||
|
resource_address_a, resource_address_b)
|
||||||
|
|
||||||
|
if resource_path_b.startswith(resource_path_a):
|
||||||
|
return True, "resourceAddress {0} contains {1}".format(
|
||||||
|
resource_address_a, resource_address_b)
|
||||||
|
|
||||||
|
return False, "resourceAddress {0} is different from {1}".format(
|
||||||
|
resource_address_a, resource_address_b)
|
||||||
|
|
||||||
|
|
||||||
def add_subscription(self, subscription_dto):
|
def add_subscription(self, subscription_dto):
|
||||||
resource_address = None
|
resource_address = None
|
||||||
if hasattr(subscription_dto, 'ResourceAddress'):
|
if hasattr(subscription_dto, 'ResourceAddress'):
|
||||||
version = 2
|
version = 2
|
||||||
_, nodename, _, _, _ = subscription_helper.parse_resource_address(
|
endpoint = subscription_dto.EndpointUri
|
||||||
subscription_dto.ResourceAddress)
|
|
||||||
LOG.debug("nodename in ResourceAddress is '%s', residing is %s" %
|
|
||||||
(nodename, self.daemon_control.get_residing_nodename()))
|
|
||||||
|
|
||||||
resource_address = subscription_dto.ResourceAddress
|
resource_address = subscription_dto.ResourceAddress
|
||||||
|
|
||||||
LOG.debug('Looking for existing subscription for EndpointUri %s '
|
LOG.debug('Looking for existing subscription for EndpointUri %s '
|
||||||
'ResourceAddress %s' % (subscription_dto.EndpointUri,
|
'ResourceAddress %s' % (endpoint, resource_address))
|
||||||
resource_address))
|
|
||||||
entry = self.subscription_repo.get_one(
|
entry = self.subscription_repo.get_one(
|
||||||
EndpointUri=subscription_dto.EndpointUri,
|
EndpointUri=endpoint,
|
||||||
ResourceAddress=resource_address)
|
ResourceAddress=resource_address)
|
||||||
|
|
||||||
if entry is None:
|
|
||||||
# Did not find matched duplicated, but needs to look for other
|
# Did not find matched duplicated, but needs to look for other
|
||||||
# cases...
|
# cases...
|
||||||
if nodename != constants.WILDCARD_ALL_NODES:
|
if entry is None:
|
||||||
# There may be a subscription for all nodes already in
|
|
||||||
# place
|
|
||||||
resource_address_star = \
|
|
||||||
subscription_helper.set_nodename_in_resource_address(
|
|
||||||
resource_address, constants.WILDCARD_ALL_NODES)
|
|
||||||
LOG.debug('Additional lookup for existing subscription '
|
|
||||||
'for EndpointUri %s ResourceAddress %s'
|
|
||||||
% (subscription_dto.EndpointUri,
|
|
||||||
resource_address_star))
|
|
||||||
if self.subscription_repo.get_one(
|
|
||||||
EndpointUri=subscription_dto.EndpointUri,
|
|
||||||
ResourceAddress=resource_address_star) is not None:
|
|
||||||
LOG.debug('Found existing %s entry in subscription '
|
|
||||||
'repo' % constants.WILDCARD_ALL_NODES)
|
|
||||||
raise client_exception.ServiceError(409)
|
|
||||||
|
|
||||||
if nodename == constants.WILDCARD_CURRENT_NODE:
|
subscriptions = self.subscription_repo.get(
|
||||||
# There may be a subscription for the residing (current)
|
EndpointUri=endpoint)
|
||||||
# node already in place
|
|
||||||
resource_address_synonym = \
|
|
||||||
subscription_helper.set_nodename_in_resource_address(
|
|
||||||
resource_address,
|
|
||||||
self.daemon_control.get_residing_nodename())
|
|
||||||
LOG.debug('In addition, looking for existing subscription '
|
|
||||||
'for EndpointUri %s ResourceAddress %s' % (
|
|
||||||
subscription_dto.EndpointUri,
|
|
||||||
resource_address_synonym))
|
|
||||||
entry = self.subscription_repo.get_one(
|
|
||||||
EndpointUri=subscription_dto.EndpointUri,
|
|
||||||
ResourceAddress=resource_address_synonym)
|
|
||||||
|
|
||||||
if nodename == self.daemon_control.get_residing_nodename():
|
for subscription in subscriptions:
|
||||||
# There may be a subscription for '.' (current node)
|
match, message = self._match_resource_address(
|
||||||
# already in place
|
subscription.ResourceAddress, resource_address)
|
||||||
resource_address_synonym = \
|
|
||||||
subscription_helper.set_nodename_in_resource_address(
|
if match:
|
||||||
resource_address, constants.WILDCARD_CURRENT_NODE)
|
entry = subscription
|
||||||
LOG.debug('In addition, looking for existing subscription '
|
LOG.debug(message)
|
||||||
'for EndpointUri %s ResourceAddress %s' % (
|
break
|
||||||
subscription_dto.EndpointUri,
|
|
||||||
resource_address_synonym))
|
|
||||||
entry = self.subscription_repo.get_one(
|
|
||||||
EndpointUri=subscription_dto.EndpointUri,
|
|
||||||
ResourceAddress=resource_address_synonym)
|
|
||||||
|
|
||||||
if entry is not None:
|
if entry is not None:
|
||||||
LOG.debug('Found existing v2 entry in subscription repo')
|
LOG.debug('Found existing v2 entry in subscription repo')
|
||||||
raise client_exception.ServiceError(409)
|
subscriptioninfo = {
|
||||||
|
'SubscriptionId': entry.SubscriptionId,
|
||||||
|
'UriLocation': entry.UriLocation,
|
||||||
|
'EndpointUri': entry.EndpointUri,
|
||||||
|
'ResourceAddress': entry.ResourceAddress
|
||||||
|
}
|
||||||
|
raise client_exception.SubscriptionAlreadyExists(
|
||||||
|
subscriptioninfo)
|
||||||
|
|
||||||
|
_, nodename, _, _, _ = subscription_helper.parse_resource_address(
|
||||||
|
resource_address)
|
||||||
|
|
||||||
if nodename == constants.WILDCARD_ALL_NODES:
|
if nodename == constants.WILDCARD_ALL_NODES:
|
||||||
broker_names = self.daemon_control.list_of_service_nodenames()
|
broker_names = self.daemon_control.list_of_service_nodenames()
|
||||||
|
@ -63,6 +63,8 @@ class SubscriptionsControllerV2(rest.RestController):
|
|||||||
return subscription
|
return subscription
|
||||||
except client_exception.ServiceError as err:
|
except client_exception.ServiceError as err:
|
||||||
abort(int(str(err)))
|
abort(int(str(err)))
|
||||||
|
except client_exception.SubscriptionAlreadyExists as ex:
|
||||||
|
abort(409, str(ex))
|
||||||
except client_exception.InvalidSubscription:
|
except client_exception.InvalidSubscription:
|
||||||
abort(400)
|
abort(400)
|
||||||
except client_exception.InvalidEndpoint as ex:
|
except client_exception.InvalidEndpoint as ex:
|
||||||
|
@ -123,9 +123,8 @@ class PtpWatcherDefault:
|
|||||||
if resource_address:
|
if resource_address:
|
||||||
_, nodename, resource_path = utils.parse_resource_address(
|
_, nodename, resource_path = utils.parse_resource_address(
|
||||||
resource_address)
|
resource_address)
|
||||||
if resource_path == constants.SOURCE_SYNC_ALL:
|
if resource_path == constants.SOURCE_SYNC_GNSS_SYNC_STATUS or \
|
||||||
resource_path = constants.SOURCE_SYNC_SYNC_STATE
|
resource_path == constants.SOURCE_SYNC_ALL:
|
||||||
if resource_path == constants.SOURCE_SYNC_GNSS_SYNC_STATUS:
|
|
||||||
self.watcher.gnsstracker_context_lock.acquire()
|
self.watcher.gnsstracker_context_lock.acquire()
|
||||||
if optional and self.watcher.gnsstracker_context.get(optional):
|
if optional and self.watcher.gnsstracker_context.get(optional):
|
||||||
sync_state = \
|
sync_state = \
|
||||||
@ -135,7 +134,10 @@ class PtpWatcherDefault:
|
|||||||
self.watcher.gnsstracker_context[optional].get(
|
self.watcher.gnsstracker_context[optional].get(
|
||||||
'last_event_time', time.time())
|
'last_event_time', time.time())
|
||||||
lastStatus[optional] = self._build_event_response(
|
lastStatus[optional] = self._build_event_response(
|
||||||
resource_path, last_event_time, resource_address,
|
constants.SOURCE_SYNC_GNSS_SYNC_STATUS,
|
||||||
|
last_event_time,
|
||||||
|
utils.format_resource_address(nodename,
|
||||||
|
constants.SOURCE_SYNC_GNSS_SYNC_STATUS),
|
||||||
sync_state)
|
sync_state)
|
||||||
elif not optional:
|
elif not optional:
|
||||||
for config in self.daemon_context['GNSS_INSTANCES']:
|
for config in self.daemon_context['GNSS_INSTANCES']:
|
||||||
@ -146,12 +148,16 @@ class PtpWatcherDefault:
|
|||||||
self.watcher.gnsstracker_context[config].get(
|
self.watcher.gnsstracker_context[config].get(
|
||||||
'last_event_time', time.time())
|
'last_event_time', time.time())
|
||||||
lastStatus[config] = self._build_event_response(
|
lastStatus[config] = self._build_event_response(
|
||||||
resource_path, last_event_time,
|
constants.SOURCE_SYNC_GNSS_SYNC_STATUS,
|
||||||
resource_address, sync_state)
|
last_event_time,
|
||||||
|
utils.format_resource_address(nodename,
|
||||||
|
constants.SOURCE_SYNC_GNSS_SYNC_STATUS),
|
||||||
|
sync_state)
|
||||||
else:
|
else:
|
||||||
lastStatus = None
|
lastStatus = None
|
||||||
self.watcher.gnsstracker_context_lock.release()
|
self.watcher.gnsstracker_context_lock.release()
|
||||||
elif resource_path == constants.SOURCE_SYNC_PTP_CLOCK_CLASS:
|
if resource_path == constants.SOURCE_SYNC_PTP_CLOCK_CLASS or \
|
||||||
|
resource_path == constants.SOURCE_SYNC_ALL:
|
||||||
self.watcher.ptptracker_context_lock.acquire()
|
self.watcher.ptptracker_context_lock.acquire()
|
||||||
if optional and self.watcher.ptptracker_context.get(optional):
|
if optional and self.watcher.ptptracker_context.get(optional):
|
||||||
clock_class = \
|
clock_class = \
|
||||||
@ -161,9 +167,11 @@ class PtpWatcherDefault:
|
|||||||
self.watcher.ptptracker_context[optional].get(
|
self.watcher.ptptracker_context[optional].get(
|
||||||
'last_clock_class_event_time', time.time())
|
'last_clock_class_event_time', time.time())
|
||||||
lastStatus[optional] = self._build_event_response(
|
lastStatus[optional] = self._build_event_response(
|
||||||
resource_path, last_clock_class_event_time,
|
constants.SOURCE_SYNC_PTP_CLOCK_CLASS,
|
||||||
resource_address, clock_class,
|
last_clock_class_event_time,
|
||||||
constants.VALUE_TYPE_METRIC)
|
utils.format_resource_address(nodename,
|
||||||
|
constants.SOURCE_SYNC_PTP_CLOCK_CLASS),
|
||||||
|
clock_class, constants.VALUE_TYPE_METRIC)
|
||||||
elif not optional:
|
elif not optional:
|
||||||
for config in self.daemon_context['PTP4L_INSTANCES']:
|
for config in self.daemon_context['PTP4L_INSTANCES']:
|
||||||
clock_class = \
|
clock_class = \
|
||||||
@ -174,13 +182,16 @@ class PtpWatcherDefault:
|
|||||||
'last_clock_class_event_time',
|
'last_clock_class_event_time',
|
||||||
time.time())
|
time.time())
|
||||||
lastStatus[config] = self._build_event_response(
|
lastStatus[config] = self._build_event_response(
|
||||||
resource_path, last_clock_class_event_time,
|
constants.SOURCE_SYNC_PTP_CLOCK_CLASS,
|
||||||
resource_address, clock_class,
|
last_clock_class_event_time,
|
||||||
constants.VALUE_TYPE_METRIC)
|
utils.format_resource_address(nodename,
|
||||||
|
constants.SOURCE_SYNC_PTP_CLOCK_CLASS),
|
||||||
|
clock_class, constants.VALUE_TYPE_METRIC)
|
||||||
else:
|
else:
|
||||||
lastStatus = None
|
lastStatus = None
|
||||||
self.watcher.ptptracker_context_lock.release()
|
self.watcher.ptptracker_context_lock.release()
|
||||||
elif resource_path == constants.SOURCE_SYNC_PTP_LOCK_STATE:
|
if resource_path == constants.SOURCE_SYNC_PTP_LOCK_STATE or \
|
||||||
|
resource_path == constants.SOURCE_SYNC_ALL:
|
||||||
self.watcher.ptptracker_context_lock.acquire()
|
self.watcher.ptptracker_context_lock.acquire()
|
||||||
if optional and self.watcher.ptptracker_context.get(optional):
|
if optional and self.watcher.ptptracker_context.get(optional):
|
||||||
sync_state = \
|
sync_state = \
|
||||||
@ -190,7 +201,10 @@ class PtpWatcherDefault:
|
|||||||
self.watcher.ptptracker_context[optional].get(
|
self.watcher.ptptracker_context[optional].get(
|
||||||
'last_event_time', time.time())
|
'last_event_time', time.time())
|
||||||
lastStatus[optional] = self._build_event_response(
|
lastStatus[optional] = self._build_event_response(
|
||||||
resource_path, last_event_time, resource_address,
|
constants.SOURCE_SYNC_PTP_LOCK_STATE,
|
||||||
|
last_event_time,
|
||||||
|
utils.format_resource_address(nodename,
|
||||||
|
constants.SOURCE_SYNC_PTP_LOCK_STATE),
|
||||||
sync_state)
|
sync_state)
|
||||||
elif not optional:
|
elif not optional:
|
||||||
for config in self.daemon_context['PTP4L_INSTANCES']:
|
for config in self.daemon_context['PTP4L_INSTANCES']:
|
||||||
@ -201,13 +215,17 @@ class PtpWatcherDefault:
|
|||||||
self.watcher.ptptracker_context[config].get(
|
self.watcher.ptptracker_context[config].get(
|
||||||
'last_event_time', time.time())
|
'last_event_time', time.time())
|
||||||
lastStatus[config] = self._build_event_response(
|
lastStatus[config] = self._build_event_response(
|
||||||
resource_path, last_event_time,
|
constants.SOURCE_SYNC_PTP_LOCK_STATE,
|
||||||
resource_address, sync_state)
|
last_event_time,
|
||||||
|
utils.format_resource_address(nodename,
|
||||||
|
constants.SOURCE_SYNC_PTP_LOCK_STATE),
|
||||||
|
sync_state)
|
||||||
else:
|
else:
|
||||||
lastStatus = None
|
lastStatus = None
|
||||||
self.watcher.ptptracker_context_lock.release()
|
self.watcher.ptptracker_context_lock.release()
|
||||||
|
|
||||||
elif resource_path == constants.SOURCE_SYNC_OS_CLOCK:
|
if resource_path == constants.SOURCE_SYNC_OS_CLOCK or \
|
||||||
|
resource_path == constants.SOURCE_SYNC_ALL:
|
||||||
self.watcher.osclocktracker_context_lock.acquire()
|
self.watcher.osclocktracker_context_lock.acquire()
|
||||||
sync_state = \
|
sync_state = \
|
||||||
self.watcher.osclocktracker_context.get(
|
self.watcher.osclocktracker_context.get(
|
||||||
@ -217,9 +235,12 @@ class PtpWatcherDefault:
|
|||||||
'last_event_time', time.time())
|
'last_event_time', time.time())
|
||||||
self.watcher.osclocktracker_context_lock.release()
|
self.watcher.osclocktracker_context_lock.release()
|
||||||
lastStatus['os_clock_status'] = self._build_event_response(
|
lastStatus['os_clock_status'] = self._build_event_response(
|
||||||
resource_path, last_event_time, resource_address,
|
constants.SOURCE_SYNC_OS_CLOCK, last_event_time,
|
||||||
|
utils.format_resource_address(nodename,
|
||||||
|
constants.SOURCE_SYNC_OS_CLOCK),
|
||||||
sync_state)
|
sync_state)
|
||||||
elif resource_path == constants.SOURCE_SYNC_SYNC_STATE:
|
if resource_path == constants.SOURCE_SYNC_SYNC_STATE or \
|
||||||
|
resource_path == constants.SOURCE_SYNC_ALL:
|
||||||
self.watcher.overalltracker_context_lock.acquire()
|
self.watcher.overalltracker_context_lock.acquire()
|
||||||
sync_state = self.watcher.overalltracker_context.get(
|
sync_state = self.watcher.overalltracker_context.get(
|
||||||
'sync_state', OverallClockState.Freerun)
|
'sync_state', OverallClockState.Freerun)
|
||||||
@ -228,7 +249,9 @@ class PtpWatcherDefault:
|
|||||||
self.watcher.overalltracker_context_lock.release()
|
self.watcher.overalltracker_context_lock.release()
|
||||||
lastStatus['overall_sync_status'] = \
|
lastStatus['overall_sync_status'] = \
|
||||||
self._build_event_response(
|
self._build_event_response(
|
||||||
resource_path, last_event_time, resource_address,
|
constants.SOURCE_SYNC_SYNC_STATE, last_event_time,
|
||||||
|
utils.format_resource_address(nodename,
|
||||||
|
constants.SOURCE_SYNC_SYNC_STATE),
|
||||||
sync_state)
|
sync_state)
|
||||||
LOG.debug("query_status: {}".format(lastStatus))
|
LOG.debug("query_status: {}".format(lastStatus))
|
||||||
else:
|
else:
|
||||||
|
Loading…
x
Reference in New Issue
Block a user