Skip to content

Commit

Permalink
Attempt to fix some download anomalies
Browse files · Browse the repository at this point in the history
  • Loading branch information
AcidWeb committed Jan 21, 2020
1 parent 84a3964 commit 2788825
Show file tree
Hide file tree
Showing 5 changed files with 10 additions and 6 deletions.
4 changes: 2 additions & 2 deletions CB/Core.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
import zipfile
import datetime
import requests
import cfscrape
import cloudscraper
from tqdm import tqdm
from pathlib import Path
from checksumdir import dirhash
Expand Down Expand Up @@ -340,7 +340,7 @@ def parse_cf_id(self, url):
if slug in self.cfIDs:
project = self.cfIDs[slug]
else:
scraper = cfscrape.create_scraper()
scraper = cloudscraper.create_scraper()
payload = scraper.get(url + '/download-client')
if payload.status_code == 404:
raise RuntimeError(slug)
Expand Down
4 changes: 3 additions & 1 deletion CB/CurseForge.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import io
import zipfile
import requests
import cloudscraper
from . import retry, HEADERS
from operator import itemgetter

Expand All @@ -27,6 +28,7 @@ def __init__(self, project, checkcache, clienttype, allowdev):
self.currentVersion = None
self.archive = None
self.directories = []
self.scraper = cloudscraper.create_scraper()
self.get_current_version()

def get_current_version(self):
Expand All @@ -45,7 +47,7 @@ def get_current_version(self):

@retry()
def get_addon(self):
self.archive = zipfile.ZipFile(io.BytesIO(requests.get(self.downloadUrl, headers=HEADERS).content))
self.archive = zipfile.ZipFile(io.BytesIO(self.scraper.get(self.downloadUrl).content))
for file in self.archive.namelist():
if '/' not in os.path.dirname(file):
self.directories.append(os.path.dirname(file))
Expand Down
4 changes: 3 additions & 1 deletion CB/WoWInterface.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import re
import zipfile
import requests
import cloudscraper
from . import retry, HEADERS


Expand All @@ -24,10 +25,11 @@ def __init__(self, url, checkcache):
self.currentVersion = self.payload['UIVersion']
self.archive = None
self.directories = []
self.scraper = cloudscraper.create_scraper()

@retry()
def get_addon(self):
self.archive = zipfile.ZipFile(io.BytesIO(requests.get(self.downloadUrl, headers=HEADERS).content))
self.archive = zipfile.ZipFile(io.BytesIO(self.scraper.get(self.downloadUrl).content))
for file in self.archive.namelist():
if '/' not in os.path.dirname(file):
self.directories.append(os.path.dirname(file))
Expand Down
2 changes: 1 addition & 1 deletion CB/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import string
import random

__version__ = '3.1.2'
__version__ = '3.1.3'
__license__ = 'GPLv3'
__copyright__ = '2019-2020, Paweł Jastrzębski <[email protected]>'
__docformat__ = 'restructuredtext en'
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
terminaltables
prompt_toolkit
cloudscraper
checksumdir
cfscrape
requests
tqdm
lupa

0 comments on commit 2788825

Please sign in to comment.