From 09b0bdbc48026052dc698345a3e8cff124c9e22f Mon Sep 17 00:00:00 2001
From: mammo0
Date: Wed, 14 Oct 2020 08:24:37 +0200
Subject: [PATCH] use a semaphore to avoid fetching the complete library into memory

-> this happens if the processing of items is slower than the fetching of new ones
-> if a big library is synced, the old behavior could lead to extensive use of memory
-> the semaphore acts like a buffer that only allows fetching new items from the library once old ones have been processed
-> the current size of the 'buffer' is hard coded to 2 * [max. item fetch limit] * [number of download threads]
---
 jellyfin_kodi/downloader.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/jellyfin_kodi/downloader.py b/jellyfin_kodi/downloader.py
index 0dd37da9..41b5d747 100644
--- a/jellyfin_kodi/downloader.py
+++ b/jellyfin_kodi/downloader.py
@@ -281,7 +281,13 @@ def _get_items(query, server_id=None):
         # threads. Dont be a dummy.Pool, be a ThreadPoolExecutor
         p = concurrent.futures.ThreadPoolExecutor(DTHREADS)
 
-        results = p.map(lambda params: _get(url, params, server_id=server_id), query_params)
+        thread_buffer = threading.Semaphore(2 * LIMIT * DTHREADS)
+
+        def get_wrapper(params):
+            thread_buffer.acquire()
+            return _get(url, params, server_id=server_id)
+
+        results = p.map(get_wrapper, query_params)
 
         for params, result in zip(query_params, results):
             query['params'] = params
@@ -302,6 +308,7 @@ def _get_items(query, server_id=None):
             items['RestorePoint'] = query
             yield items
             del items['Items'][:]
+            thread_buffer.release()
 
 
 class GetItemWorker(threading.Thread):
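
For context, below is a minimal standalone sketch of the semaphore-as-buffer pattern this patch introduces: producers acquire a slot before fetching a page and the consumer releases it after processing, so the number of fetched-but-unprocessed pages stays bounded. The names fetch_page and process_page and the constant values are hypothetical placeholders, not identifiers from jellyfin_kodi; the real patch sizes the semaphore as 2 * LIMIT * DTHREADS.

import concurrent.futures
import threading

DTHREADS = 3            # placeholder for the add-on's download thread count
MAX_PENDING_PAGES = 4   # the patch sizes this as 2 * LIMIT * DTHREADS

# One slot per fetched-but-not-yet-processed page; when all slots are taken,
# further fetches block until the consumer catches up, bounding memory use.
buffer_slots = threading.Semaphore(MAX_PENDING_PAGES)

def fetch_page(page_number):
    buffer_slots.acquire()                      # wait for a free buffer slot
    return ["item-%d-%d" % (page_number, i) for i in range(10)]

def process_page(items):
    pass                                        # e.g. write the items to the Kodi DB

with concurrent.futures.ThreadPoolExecutor(DTHREADS) as pool:
    for page in pool.map(fetch_page, range(50)):
        process_page(page)
        buffer_slots.release()                  # free the slot once the page is handled

As in the patch, the consumer releases a slot only after a page has been fully handled, so a slow consumer throttles the download threads instead of letting results pile up in memory.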