Parallelize multiple HTTP GET requests
Added a ThreadPoolExecutor and used it to process GET requests in multiple threads, so a chunk of data is always available for processing; processing can start as soon as the first chunk arrives. Refactored the code to support this: the request "params" are built in a batch and handed to the thread pool, which fetches the actual results.
This commit is contained in:
parent 5fc60fce6b
commit 302880f67a
2 changed files with 28 additions and 9 deletions
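The pattern is easier to see outside the add-on. Below is a minimal sketch of the same batched-params plus thread-pool idea in Python 3 (the diff itself still targets Kodi's Python 2 interpreter, which is why it pulls in the script.module.futures backport and uses xrange/izip). The fetch() helper, the requests library, the example URL, and the page-size/thread-count constants are illustrative stand-ins rather than the add-on's real _get() API.

# Minimal sketch: build one params dict per page, then let a thread pool fetch the
# pages concurrently while finished pages are processed immediately.
# fetch(), the constants and the URL below are hypothetical, not the add-on's API.
from concurrent.futures import ThreadPoolExecutor

import requests

LIMIT = 50      # page size, mirroring the add-on's limitIndex setting
DTHREADS = 3    # worker threads, mirroring the add-on's limitThreads setting


def fetch(url, params):
    # One paged GET request; returns the decoded JSON body.
    return requests.get(url, params=params, timeout=30).json()


def get_items(url, base_params, total):
    # Build every page's params up front ("params are built in batch").
    query_params = [
        dict(base_params, StartIndex=start, Limit=LIMIT)
        for start in range(base_params.get('StartIndex', 0), total, LIMIT)
    ]

    # Executor.map() hands back a lazy iterator in submission order, so the first
    # page can be consumed as soon as it finishes, while later pages are still
    # downloading on the other worker threads.
    with ThreadPoolExecutor(DTHREADS) as pool:
        for result in pool.map(lambda p: fetch(url, p), query_params):
            yield from (result or {}).get('Items', [])


# Hypothetical usage:
#   for item in get_items("http://server.example/Items", {"Recursive": True}, total=200):
#       print(item["Name"])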
@@ -10,6 +10,7 @@
         <import addon="script.module.six" />
         <import addon="script.module.kodi-six" />
         <import addon="script.module.addon.signals" version="0.0.1"/>
+        <import addon="script.module.futures" version="2.2.0"/>
     </requires>
     <extension point="xbmc.python.pluginsource"
                library="default.py">

@@ -19,6 +19,7 @@ from jellyfin.exceptions import HTTPException
 
 LOG = logging.getLogger("JELLYFIN." + __name__)
 LIMIT = min(int(settings('limitIndex') or 50), 50)
+DTHREADS = int(settings('limitThreads') or 3)
 
 #################################################################################################
 
@@ -243,7 +244,8 @@ def _get_items(query, server_id=None):
     }
 
     url = query['url']
-    params = query.get('params', {})
+    query.setdefault('params', {})
+    params = query['params']
 
     try:
         test_params = dict(params)
@@ -256,21 +258,37 @@
         LOG.exception("Failed to retrieve the server response %s: %s params:%s", url, error, params)
 
     else:
-        index = params.get('StartIndex', 0)
-        total = items['TotalRecordCount']
+        params.setdefault('StartIndex', 0)
 
-        while index < total:
+        def get_query_params(params, start, count):
+            params_copy = dict(params)
+            params_copy['StartIndex'] = start
+            params_copy['Limit'] = count
+            return params_copy
 
-            params['StartIndex'] = index
-            params['Limit'] = LIMIT
-            result = _get(url, params, server_id=server_id) or {'Items': []}
+        query_params = [get_query_params(params, offset, LIMIT) \
+                        for offset in xrange(params['StartIndex'], items['TotalRecordCount'], LIMIT)]
 
+        from itertools import izip
+        # multiprocessing.dummy.Pool completes all requests in multiple threads but has to
+        # complete all tasks before allowing any results to be processed. ThreadPoolExecutor
+        # allows for completed tasks to be processed while other tasks are completed on other
+        # threads. Dont be a dummy.Pool, be a ThreadPoolExecutor
+        import concurrent.futures
+        p = concurrent.futures.ThreadPoolExecutor(DTHREADS)
+
+        results = p.map(lambda params: _get(url, params, server_id=server_id), query_params)
+
+        for params, result in izip(query_params, results):
+            query['params'] = params
+
+            result = result or {'Items': []}
             items['Items'].extend(result['Items'])
+            # Using items to return data and communicate a restore point back to the callee is
+            # a violation of the SRP. TODO: Seperate responsibilities.
             items['RestorePoint'] = query
             yield items
 
             del items['Items'][:]
-            index += LIMIT
 
 
 class GetItemWorker(threading.Thread):
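
A note on the dummy.Pool comparison in the comment above: multiprocessing.dummy.Pool.map returns only after every task has finished, because it materialises the whole result list, whereas ThreadPoolExecutor.map returns a lazy iterator whose items become available one by one, in submission order. The timing sketch below illustrates that difference; slow_fetch and its delays are invented purely for the demonstration.

# Contrast Pool.map (blocks until the whole batch is done) with Executor.map
# (yields each result as soon as it is ready). Purely illustrative.
import time
from concurrent.futures import ThreadPoolExecutor
from multiprocessing.dummy import Pool


def slow_fetch(delay):
    time.sleep(delay)
    return delay


delays = [0.2, 1.0, 1.0]

start = time.time()
with Pool(3) as pool:
    for item in pool.map(slow_fetch, delays):
        # First item prints only after ~1.0s, once every task has completed.
        print("dummy.Pool got %s after %.1fs" % (item, time.time() - start))

start = time.time()
with ThreadPoolExecutor(3) as executor:
    for item in executor.map(slow_fetch, delays):
        # First item prints after ~0.2s, while the slower tasks are still running.
        print("ThreadPoolExecutor got %s after %.1fs" % (item, time.time() - start))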