Optimize concurrent access to media player image cache (#10345)

We now do locking to ensure that an image is only downloaded and added
once, even when requested by multiple media players at the same time.
This commit is contained in:
Anders Melchiorsen 2017-11-13 18:03:12 +01:00 committed by Paulus Schoutsen
parent f6d511ac1a
commit 46fe9ed200

View File

@@ -7,6 +7,7 @@ https://home-assistant.io/components/media_player/
 import asyncio
 from datetime import timedelta
 import functools as ft
+import collections
 import hashlib
 import logging
 import os
@@ -44,13 +45,14 @@ SCAN_INTERVAL = timedelta(seconds=10)
 ENTITY_ID_FORMAT = DOMAIN + '.{}'
 ENTITY_IMAGE_URL = '/api/media_player_proxy/{0}?token={1}&cache={2}'

-ATTR_CACHE_IMAGES = 'images'
-ATTR_CACHE_URLS = 'urls'
-ATTR_CACHE_MAXSIZE = 'maxsize'
+CACHE_IMAGES = 'images'
+CACHE_MAXSIZE = 'maxsize'
+CACHE_LOCK = 'lock'
+CACHE_URL = 'url'
+CACHE_CONTENT = 'content'
 ENTITY_IMAGE_CACHE = {
-    ATTR_CACHE_IMAGES: {},
-    ATTR_CACHE_URLS: [],
-    ATTR_CACHE_MAXSIZE: 16
+    CACHE_IMAGES: collections.OrderedDict(),
+    CACHE_MAXSIZE: 16
 }

 SERVICE_PLAY_MEDIA = 'play_media'
@@ -894,12 +896,15 @@ def _async_fetch_image(hass, url):
     Images are cached in memory (the images are typically 10-100kB in size).
     """
-    cache_images = ENTITY_IMAGE_CACHE[ATTR_CACHE_IMAGES]
-    cache_urls = ENTITY_IMAGE_CACHE[ATTR_CACHE_URLS]
-    cache_maxsize = ENTITY_IMAGE_CACHE[ATTR_CACHE_MAXSIZE]
+    cache_images = ENTITY_IMAGE_CACHE[CACHE_IMAGES]
+    cache_maxsize = ENTITY_IMAGE_CACHE[CACHE_MAXSIZE]

-    if url in cache_images:
-        return cache_images[url]
+    if url not in cache_images:
+        cache_images[url] = {CACHE_LOCK: asyncio.Lock(loop=hass.loop)}
+
+    with (yield from cache_images[url][CACHE_LOCK]):
+        if CACHE_CONTENT in cache_images[url]:
+            return cache_images[url][CACHE_CONTENT]

-    content, content_type = (None, None)
-    websession = async_get_clientsession(hass)
+        content, content_type = (None, None)
+        websession = async_get_clientsession(hass)
@@ -912,23 +917,13 @@ def _async_fetch_image(hass, url):
-                content_type = response.headers.get(CONTENT_TYPE)
-                if content_type:
-                    content_type = content_type.split(';')[0]
+                    content_type = response.headers.get(CONTENT_TYPE)
+                    if content_type:
+                        content_type = content_type.split(';')[0]
+                    cache_images[url][CACHE_CONTENT] = content, content_type

-    except asyncio.TimeoutError:
-        pass
+        except asyncio.TimeoutError:
+            pass

-    if not content:
-        return (None, None)
-
-    cache_images[url] = (content, content_type)
-    cache_urls.append(url)
-
-    while len(cache_urls) > cache_maxsize:
-        # remove oldest item from cache
-        oldest_url = cache_urls[0]
-        if oldest_url in cache_images:
-            del cache_images[oldest_url]
-        cache_urls = cache_urls[1:]
+        while len(cache_images) > cache_maxsize:
+            cache_images.popitem(last=False)

-    return content, content_type
+        return content, content_type