Merge pull request #413 from macearl/master

Fix need for restart/reboot for sync settings changes to take effect
This commit is contained in:
mcarlton00 2020-10-19 22:01:37 -04:00 committed by GitHub
commit 8422b0d830
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@@ -20,8 +20,6 @@ from helper import LazyLogger
 #################################################################################################
 LOG = LazyLogger(__name__)
-LIMIT = min(int(settings('limitIndex') or 50), 50)
-DTHREADS = int(settings('limitThreads') or 3)
 #################################################################################################
@@ -246,6 +244,9 @@ def _get_items(query, server_id=None):
 'RestorePoint': {}
 }
+limit = min(int(settings('limitIndex') or 50), 50)
+dthreads = int(settings('limitThreads') or 3)
 url = query['url']
 query.setdefault('params', {})
 params = query['params']
@@ -270,16 +271,16 @@ def _get_items(query, server_id=None):
 return params_copy
 query_params = [
-get_query_params(params, offset, LIMIT)
+get_query_params(params, offset, limit)
 for offset
-in range(params['StartIndex'], items['TotalRecordCount'], LIMIT)
+in range(params['StartIndex'], items['TotalRecordCount'], limit)
 ]
 # multiprocessing.dummy.Pool completes all requests in multiple threads but has to
 # complete all tasks before allowing any results to be processed. ThreadPoolExecutor
 # allows for completed tasks to be processed while other tasks are completed on other
 # threads. Dont be a dummy.Pool, be a ThreadPoolExecutor
-p = concurrent.futures.ThreadPoolExecutor(DTHREADS)
+p = concurrent.futures.ThreadPoolExecutor(dthreads)
 results = p.map(lambda params: _get(url, params, server_id=server_id), query_params)