 from . import __version__
 
 if TYPE_CHECKING:
+    from redgifs import API
     from redgifs.models import GIF, Image
 
-def download_gif(client, url: yarl.URL, quality: str, folder: Optional[Path], *, skip_check: bool = False):
+def download_gif(client: API, url: yarl.URL, quality: str, folder: Optional[Path], *, skip_check: bool = False):
     # If skip_check is true then this will be the GIF's ID and splitting the URL is not required
     id = str(url).lower() if skip_check else url.path.split('/')[-1]
     click.echo(f'Downloading {id}...')
     gif = client.get_gif(id)
-    gif_url = gif.urls.sd if quality == 'sd' else gif.urls.hd
+    gif_url = gif.urls.sd if quality == 'sd' else (gif.urls.hd or gif.urls.sd)
     filename = f'{gif_url.split("/")[3].split(".")[0]}.mp4'
     dir_ = f'{folder}/{filename}' if folder else filename
     client.download(gif_url, dir_)
     click.echo('Download complete.')
 
-def _dl_with_args(client, gif: GIF | Image, quality: str, folder: Optional[Path], is_image: bool):
+def _dl_with_args(client: API, gif: GIF | Image, quality: str, folder: Optional[Path], is_image: bool):
     gif_url = gif.urls.sd if quality == 'sd' else gif.urls.hd or gif.urls.sd
     filename = f'{gif_url.split("/")[3].split(".")[0]}.mp4'
     if is_image:
@@ -64,59 +65,45 @@ def _dl_with_args(client, gif: GIF | Image, quality: str, folder: Optional[Path]
     else:
         client.download(gif_url, f'{filename}')
 
-def download_users_gifs(client, url: yarl.URL, quality: str, folder: Optional[Path], images_only: bool):
+def download_users_gifs(client: API, url: yarl.URL, quality: str, folder: Optional[Path], images_only: bool):
     match = re.match(r'https://(www\.)?redgifs\.com\/users\/(?P<username>\w+)', str(url))
     if not match:
         click.UsageError(f'Not a valid redgifs user URL: {url}')
         exit(1)
 
     is_image = images_only
-
     user = match.groupdict()['username']
-    data = client.search_creator(user, media_type=MediaType.IMAGE if is_image else MediaType.GIF)
+
+    media_type = MediaType.IMAGE if is_image else MediaType.GIF
+    data = client.search_creator(user, type=media_type)
+
     curr_page = data.page
     total_pages = data.pages
-    total_gifs_in_page = data.gifs if not is_image else data.images
+    media_items = data.images if is_image else data.gifs
     total = data.total
     done = 0
 
-    # Case where there is only 1 page
-    if curr_page == total_pages:
-        spinner = itertools.cycle(["-", "\\", "|", "/"])
-        for gif in total_gifs_in_page:
-            try:
-                _dl_with_args(client, gif, quality, folder, is_image)
-                done += 1
-                click.echo(f'\r{next(spinner)} Downloaded {done}/{total} {"GIFs" if not is_image else "image"}', nl=False)
-            except Exception as e:
-                click.UsageError(f'[!] An error occurred when downloading {url}:\n{e}. Continuing...')
-                continue
-
-        click.echo(f'\r[-] Downloaded {done}/{total} {"videos" if not is_image else "images"} of user {user} {f"to folder '{folder}'" if folder else ""} sucessfully!')
-        exit(0)
+    spinner = itertools.cycle(["-", "\\", "|", "/"])
 
-    # Case where there is more than 1 page
     while curr_page <= total_pages:
-        spinner = itertools.cycle(["-", "\\", "|", "/"])
-        for gif in total_gifs_in_page:
+        for item in media_items:
             try:
-                _dl_with_args(client, gif, quality, folder, is_image)
+                _dl_with_args(client, item, quality, folder, is_image)
                 done += 1
-                click.echo(f'\r{next(spinner)} Downloaded {done}/{total} {"GIFs" if not is_image else "image"}', nl=False)
+                click.echo(f'\r{next(spinner)} Downloading {done}/{total} {"images" if is_image else "GIFs"}...', nl=False)
             except Exception as e:
-                click.echo(f'[!] An error occurred when downloading {url}:\n{e}. Continuing...')
+                click.echo(f'[!] An error occurred when downloading {url}: {e}\nContinuing...')
                 continue
 
-        # If we are in the last page, break the loop
         if curr_page == total_pages:
             break
 
         curr_page += 1
-        total_gifs_in_page.clear()
-        data = client.search_creator(user, page=curr_page)
-        total_gifs_in_page.extend(data.gifs)
+        data = client.search_creator(user, page=curr_page, type=media_type)
+        media_items = data.images if is_image else data.gifs
 
-    click.echo(f'\r[-] Downloaded {done}/{total} {"videos" if not is_image else "images"} of user {user} {f"to folder '{folder}'" if folder else ""} sucessfully!')
+    folder_info = f"to folder '{folder}'" if folder else ""
+    click.echo(f"\r[-] Downloaded {done}/{total} {'images' if is_image else 'GIFs'} of user {user} {folder_info} successfully!")
 
 @click.command()
 @click.argument('urls', nargs=-1)