Merge pull request #52 from Hungry-Dolphin/HowToContribute
Implemented issue #30: the Nyaa search now returns a Torrent object instead of a dict.
JuanjoSalvador authored Dec 13, 2020
2 parents 3d8406a + 9db8af3 commit 0a732b3
Showing 11 changed files with 502 additions and 316 deletions.
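
For context on the change described in the commit message: search results are now meant to be accessed as attributes instead of dict keys. A minimal usage sketch follows; the attribute names (name and so on) depend on the dict keys produced by utils.parse_nyaa(), which is not part of this diff, so they are assumptions here.

from NyaaPy.nyaa import Nyaa

nyaa = Nyaa()

# Each result used to be a plain dict (result["name"]); after this change the
# utils.parse_nyaa() output is wrapped by torrent.json_to_class(), so every
# result is a Torrent object with one attribute per parsed field.
results = nyaa.search("Shingeki no Kyojin")

for result in results:
    # "name" is an assumed key; the real field names come from utils.parse_nyaa().
    print(result.name)
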
5 changes: 4 additions & 1 deletion .gitignore
@@ -3,4 +3,7 @@ dist/
 nyaapy.egg-info
 .vscode
 env/
-*.pyc
+*.pyc
+test_files
+venv
+.idea
8 changes: 2 additions & 6 deletions NyaaPy/__init__.py
@@ -1,11 +1,7 @@
 # Info about the module
-__version__ = '0.6.0'
+__version__ = '0.6.3'
 __author__ = 'Juanjo Salvador'
 __email__ = '[email protected]'
 __url__ = 'http://juanjosalvador.me'
 __copyright__ = '2017 Juanjo Salvador'
-__license__ = 'MIT license'
-
-from NyaaPy.nyaa import Nyaa
-from NyaaPy.pantsu import Pantsu
-from NyaaPy.sukebei import SukebeiNyaa, SukebeiPantsu
+__license__ = 'MIT license'
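
Because the package-level re-exports are removed above, code that previously did "from NyaaPy import Nyaa" presumably has to import from the submodules instead:

# Before (relied on the re-exports that this diff removes):
# from NyaaPy import Nyaa, Pantsu

# After:
from NyaaPy.nyaa import Nyaa
from NyaaPy.pantsu import Pantsu
from NyaaPy.sukebei import SukebeiNyaa, SukebeiPantsu
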
62 changes: 41 additions & 21 deletions NyaaPy/nyaa.py
@@ -1,54 +1,74 @@
 import requests
-import urllib.parse
-from bs4 import BeautifulSoup
 from NyaaPy import utils
+from NyaaPy import torrent


 class Nyaa:

     def __init__(self):
-        self.URI = "http://nyaa.si"
+        self.SITE = utils.TorrentSite.NYAASI
+        self.URL = "https://nyaa.si"

     def last_uploads(self, number_of_results):
-        r = requests.get(self.URI)
-        soup = BeautifulSoup(r.text, 'html.parser')
-        rows = soup.select('table tr')
+        r = requests.get(self.URL)
+
+        # If anything up with nyaa servers let the user know.
+        r.raise_for_status()

-        return utils.parse_nyaa(table_rows=rows, limit=number_of_results + 1)
+        json_data = utils.parse_nyaa(
+            request_text=r.text,
+            limit=number_of_results + 1,
+            site=self.SITE
+        )
+        return torrent.json_to_class(json_data)

     def search(self, keyword, **kwargs):
+        url = self.URL
+
         user = kwargs.get('user', None)
         category = kwargs.get('category', 0)
         subcategory = kwargs.get('subcategory', 0)
         filters = kwargs.get('filters', 0)
         page = kwargs.get('page', 0)

         if user:
-            user_uri = "user/{}".format(user)
+            user_uri = f"user/{user}"
         else:
             user_uri = ""

         if page > 0:
             r = requests.get("{}/{}?f={}&c={}_{}&q={}&p={}".format(
-                self.URI, user_uri, filters, category, subcategory, keyword,
+                url, user_uri, filters, category, subcategory, keyword,
                 page))
         else:
             r = requests.get("{}/{}?f={}&c={}_{}&q={}".format(
-                self.URI, user_uri, filters, category, subcategory, keyword))
+                url, user_uri, filters, category, subcategory, keyword))

+        r.raise_for_status()
+
+        json_data = utils.parse_nyaa(
+            request_text=r.text,
+            limit=None,
+            site=self.SITE
+        )
+
-        soup = BeautifulSoup(r.text, 'html.parser')
-        rows = soup.select('table tr')
+        return torrent.json_to_class(json_data)

-        return utils.parse_nyaa(rows, limit=None)
+    def get(self, view_id):
+        r = requests.get(f'{self.URL}/view/{view_id}')
+        r.raise_for_status()

-    def get(self, id):
-        r = requests.get("{}/view/{}".format(self.URI, id))
-        soup = BeautifulSoup(r.text, 'html.parser')
-        content = soup.findAll("div", {"class": "panel", "id": None})
+        json_data = utils.parse_single(request_text=r.text, site=self.SITE)

-        return utils.parse_single(content)
+        return torrent.json_to_class(json_data)

     def get_user(self, username):
-        r = requests.get("{}/user/{}".format(self.URI, username))
-        soup = BeautifulSoup(r.text, 'html.parser')
+        r = requests.get(f'{self.URL}/user/{username}')
+        r.raise_for_status()

-        return utils.parse_nyaa(soup.select('table tr'), limit=None)
+        json_data = utils.parse_nyaa(
+            request_text=r.text,
+            limit=None,
+            site=self.SITE
+        )
+        return torrent.json_to_class(json_data)
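
Since every request in the class above now calls r.raise_for_status(), a failing request surfaces as an exception rather than being parsed into empty results. A rough sketch of how a caller might handle that (raise_for_status() raises requests.exceptions.HTTPError on 4xx/5xx responses):

import requests

from NyaaPy.nyaa import Nyaa

nyaa = Nyaa()

try:
    torrents = nyaa.last_uploads(10)
except requests.exceptions.HTTPError as error:
    # Raised by r.raise_for_status() when nyaa.si answers with a 4xx/5xx status.
    print("Nyaa request failed: {}".format(error))
else:
    print("Fetched {} torrents".format(len(torrents)))
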
22 changes: 14 additions & 8 deletions NyaaPy/pantsu.py
@@ -1,18 +1,24 @@
 import requests
 from NyaaPy import utils


 class Pantsu:

     def __init__(self):
         self.BASE_URL = "https://nyaa.pantsu.cat/api"
-
-    def last_uploads(self, number_of_results):
-        r = requests.get(self.URI)
-        soup = BeautifulSoup(r.text, 'html.parser')
-        rows = soup.select('table tr')
+        self.SITE = utils.TorrentSite.NYAANET

-        return utils.parse_nyaa(rows, limit=number_of_results + 1)
+    def last_uploads(self, number_of_results):
+        r = requests.get(self.SITE.value)
+        r.raise_for_status()
+        with open("test.html", "w") as f:
+            f.write(r.text)
+
+        return utils.parse_nyaa(
+            request_text=r.text,
+            limit=number_of_results + 1,
+            site=self.SITE
+        )

     # Torrents - GET
     def search(self, keyword, **kwargs):
@@ -23,18 +29,18 @@ def search(self, keyword, **kwargs):
     def view(self, item_id):
         request = requests.get("{}/view/{}".format(self.BASE_URL, item_id))

+        request.raise_for_status()
+
         return request.json()

     # Torrents - POST
-
     def upload(self):
         return "Work in progress!"

     def update(self):
         return "Work in progress!"

     # Users
-
     def login(self, username, password):
         login = requests.post("{}/login/".format(
             self.BASE_URL), data={'username': username, 'password': password})
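
Note that Pantsu keeps returning plain API responses: view() passes the decoded request.json() straight through, and the new raise_for_status() call means HTTP errors now raise instead of being returned. A hedged usage sketch (the item id is made up, and the shape of the response depends on the pantsu API, not on this diff):

from NyaaPy.pantsu import Pantsu

pantsu = Pantsu()

# view() returns whatever the API sends back, decoded by request.json(),
# i.e. plain Python data rather than a Torrent object.
item = pantsu.view(123456)  # made-up torrent id
print(item)
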
43 changes: 24 additions & 19 deletions NyaaPy/sukebei.py
@@ -1,47 +1,52 @@
 import requests
-from bs4 import BeautifulSoup
 from NyaaPy import utils


 class SukebeiNyaa:

+    def __init__(self):
+        self.SITE = utils.TorrentSite.SUKEBEINYAASI
+
     def search(self, keyword, **kwargs):
+        uri = self.SITE.value
         category = kwargs.get('category', 0)
         subcategory = kwargs.get('subcategory', 0)
         filters = kwargs.get('filters', 0)
         page = kwargs.get('page', 0)

         if page > 0:
             r = requests.get("{}/?f={}&c={}_{}&q={}&p={}".format(
-                "http://sukebei.nyaa.si", filters, category, subcategory,
+                uri, filters, category, subcategory,
                 keyword, page))
         else:
             r = requests.get("{}/?f={}&c={}_{}&q={}".format(
-                "http://sukebei.nyaa.si", filters, category, subcategory,
+                uri, filters, category, subcategory,
                 keyword))

-        soup = BeautifulSoup(r.text, 'html.parser')
-        rows = soup.select('table tr')
-
-        return utils.parse_nyaa(rows, limit=None)
+        r.raise_for_status()
+        return utils.parse_nyaa(r.text, limit=None, site=self.SITE)

     def get(self, id):
-        r = requests.get("http://sukebei.nyaa.si/view/{}".format(id))
-        soup = BeautifulSoup(r.text, 'html.parser')
-        content = soup.findAll("div", {"class": "panel", "id": None})
+        r = requests.get("{}/view/{}".format(self.SITE.value, id))
+        r.raise_for_status()

-        return utils.parse_single(content)
+        return utils.parse_single(r.text, self.SITE)

     def get_user(self, username):
-        r = requests.get("http://sukebei.nyaa.si/user/{}".format(username))
-        soup = BeautifulSoup(r.text, 'html.parser')
+        r = requests.get("{}/user/{}".format(self.SITE.value, username))
+        r.raise_for_status()

-        return utils.parse_nyaa(soup.select('table tr'), limit=None)
+        return utils.parse_nyaa(r.text, limit=None, site=self.SITE)

-    def news(self, number_of_results):
-        r = requests.get("http://sukebei.nyaa.si/")
-        soup = BeautifulSoup(r.text, 'html.parser')
-        rows = soup.select('table tr')
+    def last_uploads(self, number_of_results):
+        r = requests.get(self.SITE.value)
+        r.raise_for_status()

-        return utils.parse_sukebei(rows, limit=number_of_results + 1)
+        return utils.parse_nyaa(
+            r.text,
+            limit=number_of_results + 1,
+            site=self.SITE
+        )


 class SukebeiPantsu:
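
Unlike the Nyaa class, SukebeiNyaa is switched to the new utils.parse_nyaa()/parse_single() signatures but its results are not passed through torrent.json_to_class(), so callers presumably still receive the parser's dict-based output rather than Torrent objects. A small sketch of the difference, with the Nyaa-side behaviour assumed as above:

from NyaaPy.nyaa import Nyaa
from NyaaPy.sukebei import SukebeiNyaa

nyaa_results = Nyaa().search("one piece")
sukebei_results = SukebeiNyaa().search("one piece")

# Nyaa wraps the parsed rows in Torrent objects...
print(type(nyaa_results[0]))     # NyaaPy.torrent.Torrent
# ...while SukebeiNyaa returns the utils.parse_nyaa() output as-is
# (presumably a list of dicts, since json_to_class() is not applied here).
print(type(sukebei_results[0]))
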
17 changes: 17 additions & 0 deletions NyaaPy/torrent.py
@@ -0,0 +1,17 @@
+def json_to_class(data):
+    # We check if the data passed is a list or not
+    if isinstance(data, list):
+        object_list = []
+        for item in data:
+            object_list.append(Torrent(item))
+        # Return a list of Torrent objects
+        return object_list
+    else:
+        return Torrent(data)
+
+
+# This deals with converting the dict to an object
+class Torrent(object):
+    def __init__(self, my_dict):
+        for key in my_dict:
+            setattr(self, key, my_dict[key])
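
The helper above just copies dict keys onto a Torrent instance, so each object exposes exactly the fields the parser produced. A small self-contained illustration with made-up fields:

from NyaaPy.torrent import json_to_class

# A single dict becomes one Torrent object...
single = json_to_class({"name": "Some release", "seeders": "12"})
print(single.name, single.seeders)

# ...and a list of dicts becomes a list of Torrent objects.
many = json_to_class([{"name": "Episode 01"}, {"name": "Episode 02"}])
print([t.name for t in many])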