Merge pull request #142 from oddstr13/pr-unicode-servername-2
Handle unicode characters without using the io library
Commit: 77074fcd1e
1 changed file with 19 additions and 13 deletions
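The change drops the text-mode file handles from io.open in favor of plain binary-mode open() calls, encoding and decoding UTF-8 explicitly so the same code runs on Python 2 and Python 3. The following is a minimal sketch of that round trip, not the add-on's actual code; the payload dict and file name are made up for illustration.

# -*- coding: utf-8 -*-
import json

# type(u"") is `unicode` on Python 2 and `str` on Python 3,
# so the alias works in isinstance() checks on both interpreters.
UNICODE = type(u"")

# Hypothetical payload; a non-ASCII server name is the case PR #142 fixes.
payload = {'Server': u'Jellyfin ÆØÅ'}

# Write: serialize, encode to UTF-8 bytes when the dump produced text,
# then hand bytes to a binary-mode file. No io.open needed.
data = json.dumps(payload, sort_keys=True, indent=4, ensure_ascii=False)
if isinstance(data, UNICODE):
    data = data.encode('utf-8')
with open('sync.json', 'wb') as outfile:
    outfile.write(data)

# Read: binary mode again; json decodes the UTF-8 bytes itself.
with open('sync.json', 'rb') as infile:
    restored = json.load(infile)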
@@ -7,7 +7,6 @@ import logging
 import json
 import os
 import sqlite3
-from io import open
 
 import xbmc
 import xbmcvfs
@@ -23,6 +22,9 @@ LOG = logging.getLogger("JELLYFIN." + __name__)
 #################################################################################################
 
 
+UNICODE = type(u"")
+
+
 class Database(object):
 
     ''' This should be called like a context.
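The new UNICODE alias avoids naming the `unicode` builtin directly: type(u"") evaluates to `unicode` under Python 2 and to `str` under Python 3, giving one name that isinstance() can test against on either interpreter.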
@@ -338,8 +340,8 @@ def get_sync():
         xbmcvfs.mkdirs(path)
 
     try:
-        with open(os.path.join(path, 'sync.json'), encoding='utf-8') as infile:
-            sync = json.load(infile)
+        with open(os.path.join(path, 'sync.json'), 'rb') as infile:
+            sync = json.load(infile, encoding='utf-8')
     except Exception:
         sync = {}
 
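The read path switches from a text-mode handle to 'rb', moving the encoding choice into json.load. Python 2's json module honors the `encoding` argument when decoding the byte stream; Python 3 accepted but ignored it at the time (the parameter was later removed in 3.9), since there json.load detects UTF-8 from bytes input on its own.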
@@ -360,9 +362,11 @@ def save_sync(sync):
 
     sync['Date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
 
-    with open(os.path.join(path, 'sync.json'), 'w', encoding='utf-8') as outfile:
+    with open(os.path.join(path, 'sync.json'), 'wb') as outfile:
         data = json.dumps(sync, sort_keys=True, indent=4, ensure_ascii=False)
-        outfile.write(unicode(data))
+        if isinstance(data, UNICODE):
+            data = data.encode('utf-8')
+        outfile.write(data)
 
 
 def get_credentials():
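The write path changes because outfile.write(unicode(data)) is a NameError on Python 3, where the `unicode` builtin no longer exists; the replacement encodes to UTF-8 only when the dump produced text, then writes plain bytes.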
@@ -373,13 +377,13 @@ def get_credentials():
         xbmcvfs.mkdirs(path)
 
     try:
-        with open(os.path.join(path, 'data.json'), encoding='utf8') as infile:
-            credentials = json.load(infile)
+        with open(os.path.join(path, 'data.json'), 'rb') as infile:
+            credentials = json.load(infile, encoding='utf8')
     except Exception:
 
         try:
-            with open(os.path.join(path, 'data.txt'), encoding='utf-8') as infile:
-                credentials = json.load(infile)
+            with open(os.path.join(path, 'data.txt'), 'rb') as infile:
+                credentials = json.load(infile, encoding='utf-8')
                 save_credentials(credentials)
 
             xbmcvfs.delete(os.path.join(path, 'data.txt'))
@@ -398,11 +402,13 @@ def save_credentials(credentials):
     if not xbmcvfs.exists(path):
         xbmcvfs.mkdirs(path)
     try:
-        with open(os.path.join(path, 'data.json'), 'w', encoding='utf8') as outfile:
+        with open(os.path.join(path, 'data.json'), 'wb') as outfile:
             data = json.dumps(credentials, sort_keys=True, indent=4, ensure_ascii=False)
-            outfile.write(unicode(data))
-    except Exception as e:
-        LOG.error("Failed to save credentials: {}".format(e))
+            if isinstance(data, UNICODE):
+                data = data.encode('utf-8')
+            outfile.write(data)
+    except Exception:
+        LOG.exception("Failed to save credentials:")
 
 
 def get_item(kodi_id, media):
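save_credentials() gets the same write pattern, and its error handler is tightened: LOG.exception records the message together with the active traceback, so binding the exception with `as e` and formatting it by hand is no longer needed.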