
Commit ea0a4b5

smarter sensors
Seer sensors will no longer show movies or shows that are already in Seer; fixed Jellyfin WebSocket issues and TMDB errors.
1 parent f7b4e9d · commit ea0a4b5

File tree

3 files changed (+137, -83 lines):
- mediarr/__init__.py
- custom_components/mediarr/discovery/seer_discovery.py
- custom_components/mediarr/server/jellyfin.py
mediarr/__init__.py

Lines changed: 15 additions & 0 deletions

@@ -0,0 +1,15 @@
+# mediarr/__init__.py
+"""The Mediarr integration."""
+
+DOMAIN = "mediarr"
+
+async def async_setup(hass, config):
+    """Set up the Mediarr component."""
+    return True
+
+async def async_setup_entry(hass, entry):
+    """Set up Mediarr from a config entry."""
+    hass.async_create_task(
+        hass.config_entries.async_forward_entry_setup(entry, "sensor")
+    )
+    return True
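
Note: the forward-setup call above is one half of the usual config-entry lifecycle; this commit does not add an unload hook. Purely as an illustration of what the matching half could look like with Home Assistant's forward-unload helper (not part of this commit), a minimal sketch:

async def async_unload_entry(hass, entry):
    """Unload a Mediarr config entry (illustrative only, not in this commit)."""
    # Mirror async_forward_entry_setup by tearing the sensor platform back down.
    return await hass.config_entries.async_forward_entry_unload(entry, "sensor")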

custom_components/mediarr/discovery/seer_discovery.py

Lines changed: 84 additions & 53 deletions

@@ -1,5 +1,6 @@
 """Jellyseerr/Overseerr discovery features for Mediarr."""
 import logging
+import asyncio
 from datetime import datetime
 from ..common.tmdb_sensor import TMDBMediaSensor
 import async_timeout
@@ -35,7 +36,7 @@ def unique_id(self):
         return f"seer_mediarr_{self._content_type}_{self._url}"
         return f"seer_mediarr_{self._content_type}_{self._url}"
 
-    async def _fetch_media_list(self):
+    async def _fetch_media_list(self, media_type=None):
         """Fetch media list from Seer."""
         try:
             headers = {'X-Api-Key': self._seer_api_key}
@@ -51,8 +52,8 @@ async def _fetch_media_list(self):
                 url = f"{self._url}/api/v1/discover/tv"
                 params["sortBy"] = "popularity.desc"
             elif self._content_type == "discover":
-                # For discover, use movies as default
-                media_type = "movies"
+                # Use provided media_type or default to movies
+                media_type = media_type or "movies"
                 url = f"{self._url}/api/v1/discover/{media_type}"
             else:
                 _LOGGER.error("Unknown content type: %s", self._content_type)
@@ -70,65 +71,95 @@ async def _fetch_media_list(self):
         except Exception as err:
             _LOGGER.error("Error fetching %s: %s", self._content_type, err)
             return None
+    async def _fetch_all_requests(self):
+        """Fetch all current requests from Overseerr/Jellyseerr."""
+        try:
+            url = f"{self._url}/api/v1/request"
+            headers = {"X-Api-Key": self._seer_api_key}
+            params = {"take": 100, "skip": 0}  # Adjust take value as needed
+            all_requests = set()
+
+            async with async_timeout.timeout(10):
+                async with self._session.get(url, headers=headers, params=params) as response:
+                    if response.status == 200:
+                        data = await response.json()
+                        if data.get('results'):
+                            for request in data['results']:
+                                if request.get('media'):
+                                    tmdb_id = request['media'].get('tmdbId')
+                                    if tmdb_id:
+                                        all_requests.add(str(tmdb_id))
+
+            return all_requests
+        except Exception as err:
+            _LOGGER.error("Error fetching all requests: %s", err)
+            return set()
+
+    async def _process_media_items(self, data, media_type, requested_ids):
+        """Process media items in parallel."""
+        if not data or not data.get('results'):
+            return []
+
+        async def process_item(item):
+            try:
+                tmdb_id = str(item.get('id'))
+                if tmdb_id in requested_ids:
+                    return None
+
+                details = await self._get_tmdb_details(tmdb_id, media_type)
+                if not details:
+                    return None
+
+                poster_url, backdrop_url, main_backdrop_url = await self._get_tmdb_images(tmdb_id, media_type)
+
+                return {
+                    'title': details['title'],
+                    'overview': details['overview'][:100] + '...' if details.get('overview') else 'No overview available',
+                    'year': details['year'],
+                    'poster': str(poster_url or ""),
+                    'fanart': str(main_backdrop_url or backdrop_url or ""),
+                    'banner': str(backdrop_url or ""),
+                    'release': details['year'],
+                    'type': 'Movie' if media_type == 'movie' else 'TV Show',
+                    'flag': 1,
+                    'id': tmdb_id
+                }
            except Exception as err:
+                _LOGGER.error("Error processing item %s: %s", tmdb_id, err)
+                return None
+
+        # Process items in parallel
+        tasks = [process_item(item) for item in data['results']]
+        results = await asyncio.gather(*tasks, return_exceptions=True)
+
+        # Filter out None values and handle any exceptions
+        return [item for item in results if item is not None and not isinstance(item, Exception)]
 
     async def async_update(self):
         """Update the sensor."""
         try:
+            # Fetch all current requests first
+            requested_ids = await self._fetch_all_requests()
             all_items = []
 
             if self._content_type == "discover":
-                # For discover, fetch both movies and TV
+                # Fetch both movies and TV
                 for media_type in ['movies', 'tv']:
-                    data = await self._fetch_media_list()
-                    if data and data.get('results'):
-                        for item in data['results']:
-                            tmdb_id = item.get('id')
-                            media_type = 'movie' if media_type == 'movies' else 'tv'
-
-                            details = await self._get_tmdb_details(tmdb_id, media_type)
-                            if not details:
-                                continue
-
-                            poster_url, backdrop_url, main_backdrop_url = await self._get_tmdb_images(tmdb_id, media_type)
-
-                            all_items.append({
-                                'title': details['title'],
-                                'overview': details['overview'][:100] + '...',
-                                'year': details['year'],
-                                'poster': str(poster_url or ""),
-                                'fanart': str(main_backdrop_url or backdrop_url or ""),
-                                'banner': str(backdrop_url or ""),
-                                'release': details['year'],
-                                'type': 'Movie' if media_type == 'movie' else 'TV Show',
-                                'flag': 1
-                            })
+                    data = await self._fetch_media_list(media_type)  # Pass media_type here
+                    processed_items = await self._process_media_items(
+                        data,
+                        'movie' if media_type == 'movies' else 'tv',
+                        requested_ids
+                    )
+                    all_items.extend(processed_items)
             else:
-                # For trending and popular, fetch single type
+                # Fetch single type (trending, popular movies, or popular TV)
                 data = await self._fetch_media_list()
-                if data and data.get('results'):
-                    for item in data['results']:
-                        media_type = 'movie' if self._content_type == 'popular_movies' else 'tv'
-                        tmdb_id = item.get('id')
-
-                        details = await self._get_tmdb_details(tmdb_id, media_type)
-                        if not details:
-                            continue
-
-                        poster_url, backdrop_url, main_backdrop_url = await self._get_tmdb_images(tmdb_id, media_type)
-
-                        all_items.append({
-                            'title': details['title'],
-                            'overview': details['overview'][:100] + '...',
-                            'year': details['year'],
-                            'poster': str(poster_url or ""),
-                            'fanart': str(main_backdrop_url or backdrop_url or ""),
-                            'banner': str(backdrop_url or ""),
-                            'release': details['year'],
-                            'type': 'Movie' if media_type == 'movie' else 'TV Show',
-                            'flag': 1
-                        })
-
-            # Sort and limit items
+                media_type = 'movie' if self._content_type == 'popular_movies' else 'tv'
+                processed_items = await self._process_media_items(data, media_type, requested_ids)
+                all_items.extend(processed_items)
+
+            # Ensure max_items limit is respected
             all_items = all_items[:self._max_items]
 
             if not all_items:
@@ -148,4 +179,4 @@ async def async_update(self):
             _LOGGER.error("Error updating %s sensor: %s", self._content_type, err)
             self._state = 0
             self._attributes = {'data': []}
-            self._available = False
+            self._available = False
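
The two new helpers above implement the behavior described in the commit message: _fetch_all_requests collects the TMDB IDs of everything already requested in Overseerr/Jellyseerr, and _process_media_items drops any discover result whose ID is in that set. A self-contained sketch of the same idea outside Home Assistant, using the same /api/v1/request and /api/v1/discover endpoints (SEER_URL and API_KEY are placeholders, not values from this repo):

# Sketch only: filter Overseerr/Jellyseerr discover results against existing requests.
import asyncio
import aiohttp

SEER_URL = "http://localhost:5055"   # placeholder
API_KEY = "your-api-key"             # placeholder

async def fetch_requested_tmdb_ids(session):
    """Return the set of TMDB IDs that already have a request in Seer."""
    headers = {"X-Api-Key": API_KEY}
    params = {"take": 100, "skip": 0}
    async with session.get(f"{SEER_URL}/api/v1/request", headers=headers, params=params) as resp:
        data = await resp.json()
    ids = set()
    for req in data.get("results", []):
        media = req.get("media") or {}
        if media.get("tmdbId"):
            ids.add(str(media["tmdbId"]))
    return ids

async def discover_new_movies(session):
    """Fetch discover results and keep only items that are not already requested."""
    requested = await fetch_requested_tmdb_ids(session)
    headers = {"X-Api-Key": API_KEY}
    async with session.get(f"{SEER_URL}/api/v1/discover/movies", headers=headers) as resp:
        data = await resp.json()
    return [item for item in data.get("results", []) if str(item.get("id")) not in requested]

async def main():
    async with aiohttp.ClientSession() as session:
        fresh = await discover_new_movies(session)
        print(f"{len(fresh)} discover items are not yet requested")

if __name__ == "__main__":
    asyncio.run(main())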

custom_components/mediarr/server/jellyfin.py

Lines changed: 38 additions & 30 deletions

@@ -3,6 +3,7 @@
 import json
 import aiohttp
 import asyncio
+import aiofiles
 import async_timeout
 import voluptuous as vol
 from datetime import timedelta
@@ -97,27 +98,32 @@ async def _schedule_reconnect(self):
     async def _listen(self):
         """Listen for WebSocket messages."""
         try:
-            async for msg in self._ws:
-                if msg.type == aiohttp.WSMsgType.TEXT:
-                    data = json.loads(msg.data)
-
-                    # Handle different message types
-                    if data.get("MessageType") == "Library":
-                        # Library changed, trigger an update
-                        if "ItemsAdded" in data.get("Data", {}) or "ItemsRemoved" in data.get("Data", {}):
-                            _LOGGER.debug("Library changed, triggering update")
-                            await self._sensor.async_update()
-
-                    elif data.get("MessageType") == "ForceKeepAlive":
-                        # Respond to keep-alive messages
-                        await self._ws.send_str(json.dumps({
-                            "MessageType": "KeepAlive"
-                        }))
-                elif msg.type in (aiohttp.WSMsgType.CLOSED, aiohttp.WSMsgType.ERROR):
-                    raise Exception("WebSocket connection closed or error")
-
+            async with async_timeout.timeout(30):  # Add timeout
+                async for msg in self._ws:
+                    if msg.type == aiohttp.WSMsgType.TEXT:
+                        data = json.loads(msg.data)
+
+                        # Handle different message types
+                        if data.get("MessageType") == "Library":
+                            # Library changed, trigger an update
+                            if "ItemsAdded" in data.get("Data", {}) or "ItemsRemoved" in data.get("Data", {}):
+                                _LOGGER.debug("Library changed, triggering update")
+                                await self._sensor.async_update()
+
+                        elif data.get("MessageType") == "ForceKeepAlive":
+                            # Respond to keep-alive messages
+                            await self._ws.send_str(json.dumps({
+                                "MessageType": "KeepAlive"
+                            }))
+                    elif msg.type in (aiohttp.WSMsgType.CLOSED, aiohttp.WSMsgType.ERROR):
+                        _LOGGER.warning("WebSocket connection closed or error")
+                        break
+
+        except asyncio.TimeoutError:
+            _LOGGER.warning("WebSocket listener timeout")
         except Exception as err:
             _LOGGER.error("WebSocket listener error: %s", err)
+        finally:
             self._connected = False
             await self.cleanup()
             await self._schedule_reconnect()
@@ -156,6 +162,13 @@ def __init__(self, hass, session, config, user_id):
         self._available = True
         self._remove_update_interval = None
 
+
+
+    @callback
+    def _update_callback(self, now):
+        """Handle the update interval callback."""
+        self.hass.loop.create_task(self.async_update())
+
     async def async_added_to_hass(self):
         """Handle entity which will be added."""
         await super().async_added_to_hass()
@@ -175,18 +188,10 @@ async def async_added_to_hass(self):
         # Set up periodic updates as fallback
         self._remove_update_interval = async_track_time_interval(
             self.hass,
-            lambda now: self.hass.async_create_task(self.async_update()),
+            self._update_callback,  # Use the callback method
             UPDATE_INTERVAL
         )
 
-    async def async_will_remove_from_hass(self):
-        """Clean up after entity before removal."""
-        await super().async_will_remove_from_hass()
-        if self._ws_client:
-            await self._ws_client.cleanup()
-        if self._remove_update_interval:
-            self._remove_update_interval()
-
     @property
     def name(self):
         """Return the name of the sensor."""
@@ -230,8 +235,11 @@ async def _download_and_cache_image(self, url, item_id, image_type):
                     cached_path = cache_dir / file_name
 
                     content = await response.read()
-                    with open(cached_path, 'wb') as f:
-                        f.write(content)
+
+                    # Use aiofiles for async file operations
+                    async with aiofiles.open(cached_path, 'wb') as f:
+                        await f.write(content)
+
                     _LOGGER.debug("Successfully cached image for %s: %s", item_id, image_type)
                     return f"/local/mediarr/cache/{file_name}"
                 else:
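
The reworked _listen loop answers Jellyfin's ForceKeepAlive messages and breaks out (rather than raising) on close or error, so the finally block can clean up and schedule a reconnect. A stripped-down sketch of that listen-and-reconnect pattern with plain aiohttp (WS_URL is a placeholder; the real integration builds its socket URL from the configured host and API key):

# Sketch only: Jellyfin-style WebSocket listener with keep-alive replies and reconnect.
import asyncio
import json
import aiohttp

WS_URL = "ws://jellyfin.local:8096/socket?api_key=YOUR_KEY"  # placeholder

async def listen_once(session):
    """Listen on one WebSocket connection until it closes or errors."""
    async with session.ws_connect(WS_URL) as ws:
        async for msg in ws:
            if msg.type == aiohttp.WSMsgType.TEXT:
                data = json.loads(msg.data)
                if data.get("MessageType") == "ForceKeepAlive":
                    # Answer keep-alive probes so the server keeps the socket open.
                    await ws.send_str(json.dumps({"MessageType": "KeepAlive"}))
                elif data.get("MessageType") == "Library":
                    print("library changed:", data.get("Data", {}))
            elif msg.type in (aiohttp.WSMsgType.CLOSED, aiohttp.WSMsgType.ERROR):
                break  # let the caller reconnect instead of raising

async def main():
    async with aiohttp.ClientSession() as session:
        while True:
            try:
                await listen_once(session)
            except Exception as err:
                print("listener error:", err)
            await asyncio.sleep(30)  # back off before reconnecting

if __name__ == "__main__":
    asyncio.run(main())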

0 commit comments
