11 changes: 6 additions & 5 deletions pygooglenews/__init__.py
@@ -7,10 +7,11 @@


 class GoogleNews:
-    def __init__(self, lang = 'en', country = 'US'):
+    def __init__(self, lang = 'en', country = 'US', session = None):
         self.lang = lang.lower()
         self.country = country.upper()
         self.BASE_URL = 'https://news.google.com/rss'
+        self._session = session if session is not None else requests.Session()

     def __top_news_parser(self, text):
         """Return subarticles from the main and topic feeds"""
@@ -43,7 +44,7 @@ def __add_sub_articles(self, entries):
         return entries

     def __scaping_bee_request(self, api_key, url):
-        response = requests.get(
+        response = self._session.get(
             url="https://app.scrapingbee.com/api/v1/",
             params={
                 "api_key": api_key,
@@ -62,14 +63,14 @@ def __parse_feed(self, feed_url, proxies=None, scraping_bee = None):
             raise Exception("Pick either ScrapingBee or proxies. Not both!")

         if proxies:
-            r = requests.get(feed_url, proxies = proxies)
+            r = self._session.get(feed_url, proxies = proxies)
         else:
-            r = requests.get(feed_url)
+            r = self._session.get(feed_url)

         if scraping_bee:
             r = self.__scaping_bee_request(url = feed_url, api_key = scraping_bee)
         else:
-            r = requests.get(feed_url)
+            r = self._session.get(feed_url)


         if 'https://news.google.com/rss/unsupported' in r.url:
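To illustrate what the change enables, here is a minimal usage sketch assuming this branch is installed: the caller builds its own requests.Session (the custom User-Agent and retry settings below are arbitrary examples, not part of this diff) and injects it, so every feed request made by the library reuses that session. If no session is passed, the constructor falls back to a fresh requests.Session(), as shown in the diff above.

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    from pygooglenews import GoogleNews

    # Caller-owned session: custom headers, retries, and connection pooling
    # apply to all feed requests the library makes through it.
    session = requests.Session()
    session.headers.update({"User-Agent": "my-app/1.0"})  # hypothetical header value
    session.mount("https://", HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5)))

    gn = GoogleNews(lang="en", country="US", session=session)
    top = gn.top_news()  # fetched via the injected session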