
Commit 764caec
Standardise input to SQL storage
- Input/output will always be JSON (de)serialized and (un)pickled
- Also set ensure_ascii=False
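
A minimal sketch of the convention this commit standardises, simplified from the _encode()/_decode() pair in storage.py below (the encode/decode helper names here are illustrative only): values are JSON-serialised with ensure_ascii=False before being pickled into the database BLOB, and unpickled then JSON-deserialised on the way back out, so callers exchange plain Python objects.

    import json
    import pickle
    import sqlite3

    def encode(obj):
        # Write path: JSON-dump first (ensure_ascii=False keeps unicode
        # text readable), then pickle the JSON text for the BLOB column.
        return sqlite3.Binary(pickle.dumps(
            json.dumps(obj, ensure_ascii=False),
            protocol=pickle.HIGHEST_PROTOCOL,
        ))

    def decode(blob, process=None):
        # Read path: unpickle back to JSON text, then either hand the raw
        # text to a custom post-processor or JSON-load it into an object.
        json_text = pickle.loads(blob)
        if process:
            return process(json_text)
        return json.loads(json_text)

    # Round trip: plain Python objects in, plain Python objects out.
    assert decode(bytes(encode({'query': 'café'}))) == {'query': 'café'}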
MoojMidge committed Dec 15, 2023
1 parent f504681 commit 764caec
Showing 9 changed files with 22 additions and 22 deletions.
resources/lib/youtube_plugin/kodion/abstract_provider.py (1 addition, 2 deletions)

@@ -247,8 +247,7 @@ def _internal_search(self, context, re_match):
         channel_id = context.get_param('channel_id', '')
 
         self._data_cache.set_item('search_query',
-                                  json.dumps({'query': quote(query)},
-                                             ensure_ascii=False))
+                                  {'query': quote(query)})
 
         if not incognito and not channel_id:
             try:
resources/lib/youtube_plugin/kodion/json_store/json_store.py (3 additions, 3 deletions)

@@ -56,7 +56,7 @@ def save(self, data, update=False, process=None):
         try:
             if not data:
                 raise ValueError
-            _data = json.loads(json.dumps(data))
+            _data = json.loads(json.dumps(data, ensure_ascii=False))
             with open(self.filename, mode='w', encoding='utf-8') as jsonfile:
                 jsonfile.write(to_unicode(json.dumps(_data,
                                                      ensure_ascii=False,
@@ -98,12 +98,12 @@ def get_data(self, process=None):
         try:
             if not self._data:
                 raise ValueError
-            _data = json.loads(json.dumps(self._data))
+            _data = json.loads(json.dumps(self._data, ensure_ascii=False))
             return process(_data) if process is not None else _data
         except (TypeError, ValueError):
             log_error('JSONStore.get_data - invalid data:\n|{data}|'.format(
                 data=self._data
             ))
             self.set_defaults(reset=True)
-            _data = json.loads(json.dumps(self._data))
+            _data = json.loads(json.dumps(self._data, ensure_ascii=False))
             return process(_data) if process is not None else _data
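
For reference, ensure_ascii=False only changes how non-ASCII characters are written, not what they parse back to; both forms are valid JSON and json.loads() round-trips them identically:

    import json

    print(json.dumps({'query': 'café'}))                      # {"query": "caf\u00e9"}
    print(json.dumps({'query': 'café'}, ensure_ascii=False))  # {"query": "café"}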
(file path not shown)

@@ -64,7 +64,7 @@ def get_items(self, properties=None, dumps=False):
                 result = response['result']['items']
             else:
                 result = []
-            return json.dumps(result) if dumps else result
+            return json.dumps(result, ensure_ascii=False) if dumps else result
 
         if 'error' in response:
             message = response['error']['message']
resources/lib/youtube_plugin/kodion/sql_store/data_cache.py (3 additions, 3 deletions)

@@ -26,7 +26,7 @@ def is_empty(self):
         return self._is_empty()
 
     def get_items(self, content_ids, seconds):
-        query_result = self._get_by_ids(content_ids, process=json.loads)
+        query_result = self._get_by_ids(content_ids)
         if not query_result:
             return {}
 
@@ -48,7 +48,7 @@ def get_item(self, content_id, seconds):
         if self.get_seconds_diff(query_result[1] or current_time) > seconds:
             return None
 
-        return json.loads(query_result[0])
+        return query_result[0]
 
     def set_item(self, content_id, item):
         self._set(content_id, item)
@@ -63,7 +63,7 @@ def remove(self, content_id):
         self._remove(content_id)
 
     def update(self, content_id, item):
-        self._set(str(content_id), json.dumps(item))
+        self._set(str(content_id), item)
 
     def _optimize_item_count(self):
         pass
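
With this change DataCache callers pass and receive plain Python objects; serialisation now happens entirely inside the storage layer. A hypothetical usage sketch (the cache instance, key, and stored dict are illustrative only):

    cache.set_item('video-meta', {'id': 'abc123', 'title': 'café'})
    item = cache.get_item('video-meta', seconds=3600)  # a dict, not a JSON string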
(file path not shown)

@@ -21,7 +21,7 @@ def is_empty(self):
 
     @staticmethod
     def _process_item(item):
-        return item.split(',')
+        return item.strip('"').split(',')
 
     def get_items(self, keys):
         query_result = self._get_by_ids(keys, process=self._process_item)
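
The added strip('"') compensates for the new storage format: values now reach a custom process callback as raw JSON text (see the _decode() change in storage.py below), so a comma-separated string arrives wrapped in the double quotes that json.dumps() adds. A small illustration:

    import json

    stored = json.dumps('123,456,789', ensure_ascii=False)
    print(stored)                        # "123,456,789"  (quotes included)
    print(stored.strip('"').split(','))  # ['123', '456', '789']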
resources/lib/youtube_plugin/kodion/sql_store/storage.py (5 additions, 4 deletions)

@@ -165,7 +165,7 @@ def _set(self, item_id, item):
         # add 1 microsecond, required for dbapi2
         now = since_epoch(datetime.now()) + 0.000001
         self._open()
-        self._execute(True, self._set_query, values=[item_id,
+        self._execute(True, self._set_query, values=[str(item_id),
                                                      now,
                                                      self._encode(item)])
         self._close()
@@ -176,7 +176,7 @@ def _set_all(self, items):
         now = since_epoch(datetime.now()) + 0.000001
         self._open()
         self._execute(True, self._set_query,
-                      values=[(key, now, self._encode(json.dumps(item)))
+                      values=[(str(key), now, self._encode(item))
                               for key, item in items.items()],
                       many=True)
         self._close()
@@ -222,12 +222,13 @@ def _decode(obj, process=None):
         decoded_obj = pickle.loads(obj)
         if process:
             return process(decoded_obj)
-        return decoded_obj
+        return json.loads(decoded_obj)
 
     @staticmethod
     def _encode(obj):
         return sqlite3.Binary(pickle.dumps(
-            obj, protocol=pickle.HIGHEST_PROTOCOL
+            json.dumps(obj, ensure_ascii=False),
+            protocol=pickle.HIGHEST_PROTOCOL
         ))
 
     def _get(self, item_id):
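
Note that _decode() applies a custom process callback to the unpickled JSON text before any json.loads() call, so processors such as the _process_item() above receive the raw JSON string rather than a decoded object.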
resources/lib/youtube_plugin/youtube/client/youtube.py (3 additions, 3 deletions)

@@ -381,7 +381,7 @@ def helper(video_id, responses):
 
             # Truncate items to keep it manageable, and cache
             items = items[:500]
-            cache.set_item(cache_items_key, json.dumps(items))
+            cache.set_item(cache_items_key, items)
 
             # Build the result set
             items.sort(
@@ -438,7 +438,7 @@ def _sort_by_date_time(item):
         }
         """
         # Update cache
-        cache.set_item(cache_home_key, json.dumps(payload))
+        cache.set_item(cache_home_key, payload)
 
         # If there are no sorted_items we fall back to default API behaviour
         return payload
@@ -884,7 +884,7 @@ def _sort_by_date_time(item):
         _result['items'].sort(reverse=True, key=_sort_by_date_time)
 
         # Update cache
-        cache.set_item(cache_items_key, json.dumps(_result['items']))
+        cache.set_item(cache_items_key, _result['items'])
         """ no cache, get uploads data from web """
 
         # trim result
resources/lib/youtube_plugin/youtube/helper/video_info.py (3 additions, 4 deletions)

@@ -733,7 +733,7 @@ def _get_player_js(self):
             return ''
 
         js_url = self._normalize_url(js_url)
-        self._data_cache.set_item('player_js_url', json_dumps({'url': js_url}))
+        self._data_cache.set_item('player_js_url', {'url': js_url})
 
         js_cache_key = quote(js_url)
         cached = self._data_cache.get_item(js_cache_key,
@@ -752,7 +752,7 @@ def _get_player_js(self):
            return ''
 
         javascript = result.text
-        self._data_cache.set_item(js_cache_key, json_dumps({'js': javascript}))
+        self._data_cache.set_item(js_cache_key, {'js': javascript})
         return javascript
 
     @staticmethod
@@ -938,8 +938,7 @@ def _process_signature_cipher(self, stream_map):
                     'Failed to extract URL from signatureCipher'
                 )
                 return None
-            self._data_cache.set_item(encrypted_signature,
-                                      json_dumps({'sig': signature}))
+            self._data_cache.set_item(encrypted_signature, {'sig': signature})
 
         if signature:
             url = '{0}&{1}={2}'.format(url, query_var, signature)
resources/lib/youtube_plugin/youtube/helper/yt_play.py (2 additions, 1 deletion)

@@ -125,7 +125,8 @@ def play_video(provider, context):
         'refresh_only': screensaver
     }
 
-    ui.set_property('playback_json', json.dumps(playback_json))
+    ui.set_property('playback_json', json.dumps(playback_json,
+                                                ensure_ascii=False))
     context.send_notification('PlaybackInit', {
         'video_id': video_id,
         'channel_id': playback_json.get('channel_id', ''),
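
playback_json stays a json.dumps() string here rather than a plain object because Kodi window properties hold text; only ensure_ascii=False is added for consistency with the rest of the commit.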
