Replace pycurl with requests
Signed-off-by: Patrick Uiterwijk <[email protected]>
puiterwijk authored and clalancette committed Feb 16, 2017
1 parent ac95dd2 commit 32a4edd
Showing 4 changed files with 22 additions and 86 deletions.
2 changes: 1 addition & 1 deletion debian/control
@@ -19,7 +19,7 @@ Depends: ${misc:Depends}, ${python:Depends},
  python-guestfs,
  python-lxml,
  python-libvirt (>= 0.9.7),
- python-pycurl,
+ python-requests,
  python-m2crypto,
 Description: installing guest OSs with only minimal input the user
  Oz is a tool for automatically installing guest OSs with only minimal
2 changes: 1 addition & 1 deletion oz.spec.in
@@ -22,7 +22,7 @@ Requires: libvirt-daemon-config-network
 %else
 Requires: libvirt >= 0.9.7
 %endif
-Requires: python-pycurl
+Requires: python-requests
 Requires: genisoimage
 Requires: mtools
 %if 0%{?fedora} < 26
102 changes: 19 additions & 83 deletions oz/ozutil.py
@@ -1,5 +1,5 @@
 # Copyright (C) 2010,2011 Chris Lalancette <[email protected]>
-# Copyright (C) 2012-2016 Chris Lalancette <[email protected]>
+# Copyright (C) 2012-2017 Chris Lalancette <[email protected]>

 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -25,10 +25,11 @@
 import errno
 import stat
 import shutil
-import pycurl
+import requests
 import gzip
 import time
 import select
+import contextlib
 try:
     import configparser
 except ImportError:
@@ -754,94 +755,29 @@ def http_get_header(url, redirect=True):
     'Redirect-URL' will always be None in the redirect=True case, and may be
     None in the redirect=False case if no redirects were required.
     """
-    info = {}
-    def _header(buf):
-        """
-        Internal function that is called back from pycurl perform() for
-        header data.
-        """
-        buf = buf.strip()
-        if len(buf) == 0:
-            return
-
-        split = buf.split(':')
-        if len(split) < 2:
-            # not a valid header; skip
-            return
-        key = split[0].strip()
-        value = split[1].strip()
-        info[key] = value
-
-    def _data(buf):
-        """
-        Empty function that is called back from pycurl perform() for body data.
-        """
-        pass
-
-    c = pycurl.Curl()
-    c.setopt(c.URL, url)
-    c.setopt(c.NOBODY, True)
-    c.setopt(c.HEADERFUNCTION, _header)
-    c.setopt(c.HEADER, True)
-    c.setopt(c.WRITEFUNCTION, _data)
-    if redirect:
-        c.setopt(c.FOLLOWLOCATION, True)
-    c.perform()
-    info['HTTP-Code'] = c.getinfo(c.HTTP_CODE)
-    if info['HTTP-Code'] == 0:
-        # if this was a file:/// URL, then the HTTP_CODE returned 0.
-        # set it to 200 to be compatible with http
-        info['HTTP-Code'] = 200
-    if not redirect:
-        info['Redirect-URL'] = c.getinfo(c.REDIRECT_URL)
-
-    c.close()
+    with contextlib.closing(requests.post(url, allow_redirects=redirect, stream=True, timeout=10)) as r:
+        info = r.headers
+        info['HTTP-Code'] = r.status_code
+        if not redirect:
+            info['Redirect-URL'] = r.headers.get('Location')
+        else:
+            info['Redirect-URL'] = None

     return info
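
An editorial aside on the hunk above: the pycurl version suppressed the response body with NOBODY (effectively a HEAD request), while the replacement issues a streamed POST with a 10-second timeout. Assuming the helper keeps its home in oz.ozutil, using it looks like this (the URL is a made-up example):

    from oz.ozutil import http_get_header

    # With redirect=False the helper reports where the server wanted to
    # send us instead of following the redirect itself.
    info = http_get_header('http://example.com/install.iso', redirect=False)
    print(info['HTTP-Code'])     # e.g. 302 for a mirror redirect
    print(info['Redirect-URL'])  # the Location header value, or None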

 def http_download_file(url, fd, show_progress, logger):
     """
     Function to download a file from url to file descriptor fd.
     """
-    class Progress(object):
-        """
-        Internal class to represent progress on the connection. This is only
-        required so that we have somewhere to store the "last_mb" variable
-        that is not global.
-        """
-        def __init__(self):
-            self.last_mb = -1
-
-        def progress(self, down_total, down_current, up_total, up_current):
-            """
-            Function that is called back from the pycurl perform() method to
-            update the progress information.
-            """
-            if down_total == 0:
-                return
-            current_mb = int(down_current) / 10485760
-            if current_mb > self.last_mb or down_current == down_total:
-                self.last_mb = current_mb
-                logger.debug("%dkB of %dkB" % (down_current/1024, down_total/1024))
-
-    def _data(buf):
-        """
-        Function that is called back from the pycurl perform() method to
-        actually write data to disk.
-        """
-        write_bytes_to_fd(fd, buf)
-
-    progress = Progress()
-    c = pycurl.Curl()
-    c.setopt(c.URL, url)
-    c.setopt(c.CONNECTTIMEOUT, 5)
-    c.setopt(c.WRITEFUNCTION, _data)
-    c.setopt(c.FOLLOWLOCATION, 1)
-    if show_progress:
-        c.setopt(c.NOPROGRESS, 0)
-        c.setopt(c.PROGRESSFUNCTION, progress.progress)
-    c.perform()
-    c.close()
+    with contextlib.closing(requests.get(url, stream=True, allow_redirects=True)) as r:
+        file_size = int(r.headers.get('Content-Length'))
+        chunk_size = 10*1024*1024
+        i = 0
+        for chunk in r.iter_content(chunk_size):
+            i = i + 1
+            write_bytes_to_fd(fd, chunk)
+            if show_progress:
+                logger.debug("%dkB of %dkB" % ((i * chunk_size) / 1024, file_size / 1024))

 def ftp_download_directory(server, username, password, basepath, destination):
     """
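Two caveats in the new http_download_file body are worth flagging: Content-Length is an optional HTTP header, so int(r.headers.get('Content-Length')) raises a TypeError when a server omits it, and the progress message multiplies the chunk counter by the full chunk size even when the final chunk is shorter. A more defensive variant might look like the sketch below (not part of the commit; http_download_file_safe is a hypothetical name, and write_bytes_to_fd is the helper already defined in ozutil.py):

    import contextlib

    import requests

    def http_download_file_safe(url, fd, show_progress, logger):
        """Sketch of a more defensive download loop."""
        with contextlib.closing(requests.get(url, stream=True, allow_redirects=True)) as r:
            # Fall back to 0 when the server does not send Content-Length.
            file_size = int(r.headers.get('Content-Length', 0))
            downloaded = 0
            for chunk in r.iter_content(chunk_size=10 * 1024 * 1024):
                downloaded += len(chunk)  # count real bytes, not chunk slots
                write_bytes_to_fd(fd, chunk)
                if show_progress:
                    logger.debug("%dkB of %dkB" % (downloaded / 1024, file_size / 1024))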
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,4 +1,4 @@
-pycurl
+requests
 m2crypto
 libvirt-python
 lxml

3 comments on commit 32a4edd

@yiya1989gh commented on 32a4edd, Mar 2, 2017
We could use an install location like "file:///test.iso" before, but now it raises an error: "requests.exceptions.InvalidSchema: No connection adapters."

I think this commit causes a severe bug that directly affects users.
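
The failure is easy to reproduce with requests alone, since it only ships connection adapters for http:// and https:// (a minimal repro, using the same example path as the report above):

    import requests

    # No adapter is registered for the file:// scheme, so this raises
    # requests.exceptions.InvalidSchema before any I/O is attempted.
    requests.get('file:///test.iso')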

@clalancette (Owner)
Oh, hm, very good point. I forgot to test that out. That definitely needs to work, as it is an important use case. @puiterwijk can you look into this and see if you can convince requests to deal with file:// URLs?

@clalancette (Owner)
So, I just pushed a commit that adds a custom adapter for requests that re-enables file:// URLs. Please test it out. If it still doesn't work for you, please open up a new Issue so we can track it properly. Thanks for the report!
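
For readers following along, the general shape of such an adapter is sketched below. This is a minimal illustration, not the code from the follow-up commit: the FileAdapter name is hypothetical, and error handling is reduced to a bare 404 fallback.

    import io
    import os
    try:                                   # Python 3
        from urllib.parse import urlparse
        from urllib.request import url2pathname
    except ImportError:                    # Python 2
        from urlparse import urlparse
        from urllib import url2pathname

    import requests
    from requests.adapters import BaseAdapter

    class FileAdapter(BaseAdapter):
        """Serve file:// URLs from the local filesystem (sketch)."""
        def send(self, request, **kwargs):
            resp = requests.Response()
            resp.url = request.url
            resp.request = request
            path = url2pathname(urlparse(request.url).path)
            try:
                resp.raw = open(path, 'rb')
                resp.headers['Content-Length'] = str(os.path.getsize(path))
                resp.status_code = 200
            except IOError:
                resp.raw = io.BytesIO(b'')
                resp.status_code = 404
            return resp

        def close(self):
            pass

    # Mounting the adapter on a Session keeps the rest of the requests
    # calls unchanged:
    session = requests.Session()
    session.mount('file://', FileAdapter())
    r = session.get('file:///test.iso', stream=True)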
