Commit

Update dependencies and improve code formatting
areed1192 committed Jan 13, 2025
1 parent 0b45296 commit 9577f91
Showing 4 changed files with 6,179 additions and 9,699 deletions.
37 changes: 11 additions & 26 deletions finnews/cnbc.py
@@ -9,8 +9,7 @@
 from finnews.fields import cnbc_rss_feeds_id


-class CNBC():
-
+class CNBC:
     """
     ### Overview:
     ----
@@ -21,13 +20,13 @@ def __init__(self):
         """Initializes the `CNBC` client."""

         # Define the URL used to query feeds.
-        self.url = 'https://www.cnbc.com/id/{topic_id}/device/rss/rss.html'
+        self.url = "https://www.cnbc.com/id/{topic_id}/device/rss/rss.html"

         # Define the topic categories.
         self.topic_categories: dict = cnbc_rss_feeds_id

         # Define the parser client.
-        self.news_parser = NewsParser(client='cnbc')
+        self.news_parser = NewsParser(client="cnbc")

     def __repr__(self) -> str:
         """Represents the string representation of the client object.
@@ -57,9 +56,7 @@ def _check_key(self, topic_id: str) -> str:

         if topic_id in self.topic_categories:

-            full_url = self.url.format(
-                topic_id=self.topic_categories[topic_id]
-            )
+            full_url = self.url.format(topic_id=self.topic_categories[topic_id])
             return full_url
         else:
             raise KeyError("The value you're searching for does not exist.")
@@ -90,7 +87,7 @@ def all_feeds(self) -> Dict:
         # Loop through all the topics.
         for topic_key in self.topic_categories:

-            print(f'PULLING TOPIC: {topic_key}')
+            print(f"PULLING TOPIC: {topic_key}")

             # Grab the data.
             try:
@@ -137,9 +134,7 @@ def news_feed(self, topic: Union[str, Enum]) -> List[Dict]:
             topic = topic.name.lower()

         # Grab the data.
-        data = self.news_parser._make_request(
-            url=self._check_key(topic_id=topic)
-        )
+        data = self.news_parser._make_request(url=self._check_key(topic_id=topic))

         return data

@@ -174,9 +169,7 @@ def investing_feeds(self, topic: str) -> List[Dict]:
             topic = topic.name.lower()

         # Grab the data.
-        data = self.news_parser._make_request(
-            url=self._check_key(topic_id=topic)
-        )
+        data = self.news_parser._make_request(url=self._check_key(topic_id=topic))

         return data

@@ -211,9 +204,7 @@ def blogs(self, topic: str) -> List[Dict]:
             topic = topic.name.lower()

         # Grab the data.
-        data = self.news_parser._make_request(
-            url=self._check_key(topic_id=topic)
-        )
+        data = self.news_parser._make_request(url=self._check_key(topic_id=topic))

         return data

@@ -248,9 +239,7 @@ def videos_and_tv(self, topic: str) -> List[Dict]:
             topic = topic.name.lower()

         # Grab the data.
-        data = self.news_parser._make_request(
-            url=self._check_key(topic_id=topic)
-        )
+        data = self.news_parser._make_request(url=self._check_key(topic_id=topic))

         return data

@@ -287,9 +276,7 @@ def tv_programs_europe(self, topic: str) -> List[Dict]:
             topic = topic.name.lower()

         # Grab the data.
-        data = self.news_parser._make_request(
-            url=self._check_key(topic_id=topic)
-        )
+        data = self.news_parser._make_request(url=self._check_key(topic_id=topic))

         return data

@@ -326,8 +313,6 @@ def tv_programs_asia(self, topic: str) -> List[Dict]:
             topic = topic.name.lower()

         # Grab the data.
-        data = self.news_parser._make_request(
-            url=self._check_key(topic_id=topic)
-        )
+        data = self.news_parser._make_request(url=self._check_key(topic_id=topic))

         return data
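
For reference, this is roughly what the reformatted client looks like in use. It is a minimal sketch: the topic key "top_news" is an assumption, since the valid keys live in cnbc_rss_feeds_id and are not shown in this diff.

# Minimal usage sketch of the reformatted CNBC client.
# NOTE: "top_news" is an assumed topic key; the real keys come from cnbc_rss_feeds_id.
from finnews.cnbc import CNBC

cnbc_client = CNBC()

# news_feed() builds the RSS URL via _check_key() and returns the parsed entries.
stories = cnbc_client.news_feed(topic="top_news")

for story in stories[:5]:
    print(story)
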
4 changes: 2 additions & 2 deletions requirements.txt
@@ -1,2 +1,2 @@
-fake_useragent==0.1.11
-requests==2.24.0
+fake_useragent==2.0.3
+requests==2.32.3
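
The two pins travel together because a feed request pairs a requests GET with a browser-like User-Agent from fake_useragent. A minimal sketch of that pairing follows; whether NewsParser does exactly this internally is an assumption, and the feed ID shown is purely illustrative.

# Sketch of the two updated dependencies working together.
# ASSUMPTION: NewsParser's internals are not shown in this diff; this only illustrates the libraries.
import requests
from fake_useragent import UserAgent

# Rotate a browser-like User-Agent so the RSS endpoint treats this as a normal client.
headers = {"User-Agent": UserAgent().random}

# Illustrative URL following the template from cnbc.py; the feed ID is an example value.
url = "https://www.cnbc.com/id/100003114/device/rss/rss.html"

response = requests.get(url, headers=headers, timeout=10)
print(response.status_code)
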