Commits

Anonymous committed 3ecda51

Last changes for the switch from urllib to our own HTTP download helpers in zine.utils.net. This also works around a bug in Werkzeug from inside Zine for people running older Werkzeug versions.

  • Parent commits f4f4fb5

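In practice the importers and plugins now go through the open_url helper from zine.utils.net instead of calling urllib directly. A minimal sketch of the new calling convention, pieced together from the hunks below (the URL and the error handling are illustrative only, not part of this commit):

    from zine.utils.net import open_url, NetException

    # before: f = urllib.urlopen(download_url); body = f.read(); f.close()
    # after:  open_url returns a response object instead of a bare file
    try:
        response = open_url('http://example.com/feed.xml')  # illustrative URL
    except NetException, e:
        print 'download failed:', e
    else:
        try:
            body = response.data      # whole body as a string
            feed = response.stream    # file-like object for feed/dump parsers
        finally:
            response.close()

POST requests work the same way; the Akismet plugin below passes the form-encoded payload as open_url(url, data=url_encode(data)).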

Files changed (6)

File zine/importers/feed.py

 from zine.utils.http import redirect_to
 from zine.utils.zeml import load_parser_data
 from zine.utils.exceptions import UserException
+from zine.utils.net import open_url
 from zine.zxa import ZINE_NS, ATOM_NS, XML_NS, ZINE_TAG_URI, ZINE_CATEGORY_URI
 
 
             feed = request.files.get('feed')
             if form.data['download_url']:
                 try:
-                    feed = urllib.urlopen(form.data['download_url'])
+                    feed = open_url(form.data['download_url']).stream
                 except Exception, e:
                     error = _(u'Error downloading from URL: %s') % e
             elif not feed:

File zine/importers/wordpress.py

     :license: BSD, see LICENSE for more details.
 """
 import re
-import urllib
 from time import strptime
 from datetime import datetime
 from lxml import etree
 from zine.utils.xml import Namespace, html_entities, escape
 from zine.utils.zeml import parse_html, inject_implicit_paragraphs
 from zine.utils.http import redirect_to
+from zine.utils.net import open_url
 from zine.models import COMMENT_UNMODERATED, COMMENT_MODERATED, \
      STATUS_DRAFT, STATUS_PUBLISHED
 
             dump = request.files.get('dump')
             if form.data['download_url']:
                 try:
-                    dump = urllib.urlopen(form.data['download_url'])
+                    dump = open_url(form.data['download_url']).stream
                 except Exception, e:
                     error = _(u'Error downloading from URL: %s') % e
             elif not dump:

File zine/pingback.py

     # next we check if the source URL does indeed exist
     try:
         response = open_url(source_uri)
-    except urllib2.HTTPError:
+    except NetException:
         raise Fault(16, 'The source URL does not exist.')
 
     # we only accept pingbacks for links below our blog URL

File zine/plugins/akismet_spam_filter/__init__.py

     :license: BSD, see LICENSE for more details.
 """
 from os.path import dirname, join
-from urllib import urlopen
 
 from werkzeug import escape, url_encode
 
 from zine.privileges import BLOG_ADMIN, MODERATE_COMMENTS, require_privilege
 from zine.utils.validators import ValidationError, check
 from zine.utils.http import redirect_to
+from zine.utils.net import open_url
 from zine.utils import forms
 
 
         endpoint
     )
     try:
-        f = urlopen(url, url_encode(data))
+        response = open_url(url, data=url_encode(data))
     except:
         return
     try:
-        return f.read().strip()
+        return response.data.strip()
     finally:
-        f.close()
+        response.close()
 
 
 def is_valid_key(message=None, memorize=False):

File zine/utils/admin.py

 
 from zine.privileges import ENTER_ADMIN_PANEL, require_privilege
 from zine.utils import local, load_json
+from zine.utils.net import open_url
 from zine.i18n import _
 
 
 
 def load_zine_reddit():
     """Load the zine reddit."""
-    import urllib
     reddit_url = 'http://www.reddit.com'
     reddit_zine_url = reddit_url + '/r/zine'
 
-    f = urllib.urlopen(reddit_zine_url + '.json')
+    response = open_url(reddit_zine_url + '.json')
     try:
-        data = load_json(f.read())
+        data = load_json(response.data)
     finally:
-        f.close()
+        response.close()
 
     result = []
     for item in islice(data['data']['children'], 20):

File zine/utils/net.py

 import socket
 import httplib
 
-from werkzeug import Response, Headers, url_decode
+from werkzeug import Response, Headers, url_decode, cached_property
+from werkzeug.contrib.iterio import IterO
 
 from zine.application import Response, get_application
 from zine.utils.datastructures import OrderedDict
     can be disabled by setting `allow_internal_requests` to False.
     """
     app = get_application()
-    blog_url = urlparse.urlsplit(app.cfg['blog_url'])
     parts = urlparse.urlsplit(url)
-    if allow_internal_requests and \
-       parts.scheme in ('http', 'https') and \
-       blog_url.netloc == parts.netloc and \
-       parts.path.startswith(blog_url.path):
-        path = parts.path[len(blog_url.path):].lstrip('/')
-        method = kwargs.pop('method', None)
-        if method is None:
-            method = data is not None and 'POST' or 'GET'
-        make_response = lambda *a: URLResponse(url, *a)
-        return app.perform_subrequest(path.decode('utf-8'),
-                                      url_decode(parts.query),
-                                      method, data, timeout=timeout,
-                                      response_wrapper=make_response,
-                                      **kwargs)
+    if app is not None:
+        blog_url = urlparse.urlsplit(app.cfg['blog_url'])
+        if allow_internal_requests and \
+           parts.scheme in ('http', 'https') and \
+           blog_url.netloc == parts.netloc and \
+           parts.path.startswith(blog_url.path):
+            path = parts.path[len(blog_url.path):].lstrip('/')
+            method = kwargs.pop('method', None)
+            if method is None:
+                method = data is not None and 'POST' or 'GET'
+            make_response = lambda *a: URLResponse(url, *a)
+            return app.perform_subrequest(path.decode('utf-8'),
+                                          url_decode(parts.query),
+                                          method, data, timeout=timeout,
+                                          response_wrapper=make_response,
+                                          **kwargs)
     handler = _url_handlers.get(parts.scheme)
     if handler is None:
         raise URLError('unsupported URL schema %r' % parts.scheme)
             pass
 
 
+class StreamBuffer(IterO):
+    """Provides a stream interface to an iterator.
+
+    This class includes a fix for a bug in Werkzeug < 0.5.  Once we require
+    Werkzeug 0.5 or higher this subclass can go away.
+    """
+
+    def read(self, n=-1):
+        if self.closed:
+            raise ValueError('I/O operation on closed file')
+        if n < 0:
+            self._buf += ''.join(self._gen)
+            return self._buf[self.pos:]
+        new_pos = self.pos + n
+        buf = []
+        try:
+            tmp_end_pos = len(self._buf)
+            while new_pos > tmp_end_pos:
+                item = self._gen.next()
+                tmp_end_pos += len(item)
+                buf.append(item)
+        except StopIteration:
+            pass
+        if buf:
+            self._buf += ''.join(buf)
+        new_pos = max(0, new_pos)
+        try:
+            return self._buf[self.pos:new_pos]
+        finally:
+            self.pos = new_pos
+
 class NetException(ZineException):
     pass
 
         Response.__init__(self, body, status, headers)
         self.url = url
 
+    @cached_property
+    def stream(self):
+        return StreamBuffer(self.data)
+
 
 class HTTPResponse(URLResponse):
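
The StreamBuffer subclass added to zine/utils/net.py is the Werkzeug workaround mentioned in the commit message: it reimplements IterO.read() so that URLResponse.stream also works on Werkzeug releases older than 0.5 (the docstring notes the subclass can go away once Werkzeug 0.5 or higher is required). A rough usage sketch; the iterator contents and expected values are illustrative, not taken from the commit:

    from zine.utils.net import StreamBuffer

    # StreamBuffer exposes any iterator of strings as a read()-able stream,
    # which is what URLResponse.stream hands to the feed and dump importers.
    buf = StreamBuffer(iter(['<feed>', '<entry/>', '</feed>']))

    print buf.read(6)   # '<feed>'          -- bounded read consumes only what it needs
    print buf.read()    # '<entry/></feed>' -- the rest of the iterator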