7
7
8
8
# Module-level logger, named after this module per the logging convention.
_LOGGER = logging.getLogger(__name__)
9
9
10
+ # In seer_discovery.py
10
11
class SeerDiscoveryMediarrSensor (TMDBMediaSensor ):
11
12
"""Seer sensor for discover/trending/popular."""
12
13
13
- def __init__ (self , session , api_key , url , tmdb_api_key , max_items , content_type , media_type = None ):
14
+ def __init__ (self , session , api_key , url , tmdb_api_key , max_items , content_type , media_type = None , filters = None ):
14
15
"""Initialize the sensor."""
16
+ # Initialize TMDBMediaSensor with tmdb_api_key
15
17
super ().__init__ (session , tmdb_api_key )
18
+
16
19
self ._seer_api_key = api_key
17
20
self ._url = url .rstrip ('/' )
18
21
self ._max_items = max_items
19
22
self ._content_type = content_type
20
23
self ._media_type = media_type
24
+
25
+ # Initialize default filters
26
+ self ._filters = {
27
+ 'language' : 'en' ,
28
+ 'min_year' : 0 ,
29
+ 'exclude_talk_shows' : True ,
30
+ 'exclude_genres' : [10763 , 10764 , 10767 ], # News, Reality, Talk shows
31
+ 'exclude_non_english' : True
32
+ }
33
+
34
+ # Update with user-provided filters
35
+ if filters :
36
+ self ._filters .update (filters )
37
+
21
38
# Customize name based on content type and media type
22
39
if content_type in ["popular_movies" , "popular_tv" ]:
23
40
self ._name = f"Seer Mediarr Popular { 'Movies' if media_type == 'movies' else 'TV' } "
24
41
else :
25
42
self ._name = f"Seer Mediarr { content_type .title ()} "
43
+
44
+ def should_include_item (self , item , media_type ):
45
+ """Apply filters to determine if an item should be included."""
46
+ # Skip if no item
47
+ if not item :
48
+ return False
49
+
50
+ # Filter by year
51
+ year = None
52
+ if media_type == 'tv' and item .get ('first_air_date' ):
53
+ try :
54
+ year = int (item ['first_air_date' ].split ('-' )[0 ])
55
+ except (ValueError , IndexError , TypeError ):
56
+ pass
57
+ elif media_type == 'movie' and item .get ('release_date' ):
58
+ try :
59
+ year = int (item ['release_date' ].split ('-' )[0 ])
60
+ except (ValueError , IndexError , TypeError ):
61
+ pass
62
+
63
+ if year and year < self ._filters .get ('min_year' , 0 ):
64
+ return False
65
+
66
+ # Filter by language
67
+ if self ._filters .get ('exclude_non_english' , True ) and item .get ('original_language' ) != 'en' :
68
+ return False
69
+
70
+ # Filter by genre
71
+ excluded_genres = self ._filters .get ('exclude_genres' , [])
72
+ if excluded_genres and any (genre_id in excluded_genres for genre_id in item .get ('genre_ids' , [])):
73
+ return False
74
+
75
+ # Filter for TV talk shows
76
+ if media_type == 'tv' and self ._filters .get ('exclude_talk_shows' , True ):
77
+ title = item .get ('name' , '' ) or item .get ('title' , '' )
78
+ if self .is_talk_show (title ):
79
+ return False
80
+
81
+ return True
82
+
83
+ def is_talk_show (self , title ):
84
+ """Check if a show title appears to be a talk show or similar format."""
85
+ if not self ._filters .get ('exclude_talk_shows' , True ) or not title :
86
+ return False
87
+
88
+ keywords = [
89
+ 'tonight show' , 'late show' , 'late night' , 'daily show' ,
90
+ 'talk show' , 'with seth meyers' , 'with james corden' ,
91
+ 'with jimmy' , 'with stephen' , 'with trevor' , 'news' ,
92
+ 'live with' , 'watch what happens live' , 'the view' ,
93
+ 'good morning' , 'today show' , 'kimmel' , 'colbert' ,
94
+ 'fallon' , 'ellen' , 'conan' , 'graham norton' , 'meet the press' ,
95
+ 'face the nation' , 'last week tonight' , 'real time' ,
96
+ 'kelly and' , 'kelly &' , 'jeopardy' , 'wheel of fortune' ,
97
+ 'daily mail' , 'entertainment tonight' , 'zeiten' , 'schlechte'
98
+ ]
99
+
100
+ title_lower = title .lower ()
101
+ return any (keyword in title_lower for keyword in keywords )
26
102
27
103
@property
28
104
def name (self ):
@@ -96,22 +172,45 @@ async def _fetch_all_requests(self):
96
172
return set ()
97
173
98
174
async def _process_media_items (self , data , media_type , requested_ids ):
99
- """Process media items in parallel."""
175
+ """Process media items in parallel with filtering ."""
100
176
if not data or not data .get ('results' ):
177
+ _LOGGER .debug ("No data or results to process for %s" , media_type )
101
178
return []
102
179
180
+ filtered_count = 0
181
+ requested_count = 0
182
+ detail_failure_count = 0
183
+ success_count = 0
184
+
103
185
async def process_item (item ):
186
+ nonlocal filtered_count , requested_count , detail_failure_count , success_count
187
+
104
188
try :
105
189
tmdb_id = str (item .get ('id' ))
190
+ if not tmdb_id :
191
+ _LOGGER .debug ("Item has no TMDB ID" )
192
+ return None
193
+
106
194
if tmdb_id in requested_ids :
195
+ requested_count += 1
196
+ _LOGGER .debug ("Item %s already requested, skipping" , tmdb_id )
197
+ return None
198
+
199
+ # Apply filters
200
+ if not self .should_include_item (item , media_type ):
201
+ filtered_count += 1
202
+ _LOGGER .debug ("Item %s filtered out by criteria" , tmdb_id )
107
203
return None
108
204
109
205
details = await self ._get_tmdb_details (tmdb_id , media_type )
110
206
if not details :
207
+ detail_failure_count += 1
208
+ _LOGGER .debug ("Failed to get TMDB details for %s" , tmdb_id )
111
209
return None
112
210
113
211
poster_url , backdrop_url , main_backdrop_url = await self ._get_tmdb_images (tmdb_id , media_type )
114
212
213
+ success_count += 1
115
214
return {
116
215
'title' : details ['title' ],
117
216
'overview' : details ['overview' ][:100 ] + '...' if details .get ('overview' ) else 'No overview available' ,
@@ -125,44 +224,87 @@ async def process_item(item):
125
224
'id' : tmdb_id
126
225
}
127
226
except Exception as err :
128
- _LOGGER .error ("Error processing item %s: %s" , tmdb_id , err )
227
+ _LOGGER .error ("Error processing item %s: %s" , tmdb_id if 'tmdb_id' in locals () else 'unknown' , err )
129
228
return None
130
229
131
230
# Process items in parallel
231
+ _LOGGER .debug ("Processing %d items for %s" , len (data ['results' ]), media_type )
132
232
tasks = [process_item (item ) for item in data ['results' ]]
133
233
results = await asyncio .gather (* tasks , return_exceptions = True )
134
234
135
- # Filter out None values and handle any exceptions
136
- return [item for item in results if item is not None and not isinstance (item , Exception )]
235
+ # Handle exceptions
236
+ exceptions = [r for r in results if isinstance (r , Exception )]
237
+ if exceptions :
238
+ _LOGGER .error ("Got %d exceptions during processing" , len (exceptions ))
239
+ for exc in exceptions [:3 ]: # Log first 3 exceptions
240
+ _LOGGER .error ("Exception: %s" , exc )
241
+
242
+ # Filter out None values and exceptions
243
+ processed_results = [item for item in results if item is not None and not isinstance (item , Exception )]
244
+
245
+ _LOGGER .debug ("Processing summary for %s: %d items total, %d already requested, %d filtered out, "
246
+ "%d failed to get details, %d successful" ,
247
+ media_type , len (data ['results' ]), requested_count , filtered_count ,
248
+ detail_failure_count , success_count )
249
+
250
+ return processed_results
137
251
138
252
async def async_update (self ):
139
253
"""Update the sensor."""
140
254
try :
141
255
# Fetch all current requests first
142
256
requested_ids = await self ._fetch_all_requests ()
257
+ _LOGGER .debug ("Fetched %d requested IDs from Seer" , len (requested_ids ))
258
+
143
259
all_items = []
144
260
145
261
if self ._content_type == "discover" :
146
262
# Fetch both movies and TV
147
263
for media_type in ['movies' , 'tv' ]:
148
- data = await self ._fetch_media_list (media_type ) # Pass media_type here
264
+ _LOGGER .debug ("Fetching %s data from Seer for discover" , media_type )
265
+ data = await self ._fetch_media_list (media_type )
266
+
267
+ if data and 'results' in data :
268
+ _LOGGER .debug ("Received %d %s items from Seer" , len (data ['results' ]), media_type )
269
+ # Debug the first item to see its structure
270
+ if data ['results' ]:
271
+ _LOGGER .debug ("Sample item structure: %s" , data ['results' ][0 ])
272
+ else :
273
+ _LOGGER .debug ("No %s data or no results received from Seer" , media_type )
274
+
275
+ _LOGGER .debug ("Processing %s items through filters" , media_type )
149
276
processed_items = await self ._process_media_items (
150
277
data ,
151
278
'movie' if media_type == 'movies' else 'tv' ,
152
279
requested_ids
153
280
)
281
+ _LOGGER .debug ("After filtering: %d %s items remaining" , len (processed_items ), media_type )
154
282
all_items .extend (processed_items )
155
283
else :
156
284
# Fetch single type (trending, popular movies, or popular TV)
285
+ _LOGGER .debug ("Fetching %s data from Seer" , self ._content_type )
157
286
data = await self ._fetch_media_list ()
287
+
288
+ if data and 'results' in data :
289
+ _LOGGER .debug ("Received %d items from Seer for %s" , len (data ['results' ]), self ._content_type )
290
+ # Debug the first item to see its structure
291
+ if data ['results' ]:
292
+ _LOGGER .debug ("Sample item structure: %s" , data ['results' ][0 ])
293
+ else :
294
+ _LOGGER .debug ("No data or no results received from Seer for %s" , self ._content_type )
295
+
158
296
media_type = 'movie' if self ._content_type == 'popular_movies' else 'tv'
297
+ _LOGGER .debug ("Processing %s items through filters" , self ._content_type )
159
298
processed_items = await self ._process_media_items (data , media_type , requested_ids )
299
+ _LOGGER .debug ("After filtering: %d items remaining" , len (processed_items ))
160
300
all_items .extend (processed_items )
161
301
162
302
# Ensure max_items limit is respected
163
303
all_items = all_items [:self ._max_items ]
304
+ _LOGGER .debug ("Final number of items after max_items limit: %d" , len (all_items ))
164
305
165
306
if not all_items :
307
+ _LOGGER .warning ("No items passed filters for %s, using fallback" , self ._content_type )
166
308
all_items .append ({
167
309
'title_default' : '$title' ,
168
310
'line1_default' : '$type' ,
0 commit comments