Commits

Paul Tan committed de970a0

Preliminary Python2.6+ support. ++Ugliness.

  • Participants
  • Parent commits 4e5f08e

Comments (0)

Files changed (2)

 pypixiv
 =======
 
-pypixiv is a Python 3 API for Pixiv. 
+pypixiv is a Python API for Pixiv. 
 It aims to be a simple, easy to use,
 and lightweight alternative to other libraries,
 and to be especially suited for interactive Python use.
 
 Installation uses `distutils`::
 
-    python3 setup.py build install
+    python setup.py build install
+
+Running `2to3` is not required.
 
 Dependencies
 ==============
 
-* Python **3**
+* Python 2.6+/3.2+
 * BeautifulSoup_ 
 * requests_
 
 >>> context.logout()
 
 """
+from __future__ import print_function
+from __future__ import unicode_literals
 from collections import namedtuple
 from abc import ABCMeta
-from functools import total_ordering, lru_cache
+from functools import update_wrapper
+from threading import Lock
+
 #TODO: Write tests
 
 __all__ = ["Illust", "Manga", "Pixiv", "Sizes"]
 """
 
 """
+Code for Python 2.6 compatibility.
+====================================
+Nothing nice to see here. At all. 
+"""
+
+#Source: http://code.activestate.com/recipes/578078-py26-and-py30-backport-of-python-33s-lru-cache/
+#License: MIT
+_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
+
+def lru_cache(maxsize=100, typed=False):
+    """Least-recently-used cache decorator.
+
+    If *maxsize* is set to None, the LRU features are disabled and the cache
+    can grow without bound.
+
+    If *typed* is True, arguments of different types will be cached separately.
+    For example, f(3.0) and f(3) will be treated as distinct calls with
+    distinct results.
+
+    Arguments to the cached function must be hashable.
+
+    View the cache statistics named tuple (hits, misses, maxsize, currsize) with
+    f.cache_info().  Clear the cache and statistics with f.cache_clear().
+    Access the underlying function with f.__wrapped__.
+
+    See:  http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
+
+    """
+
+    # Users should only access the lru_cache through its public API:
+    #       cache_info, cache_clear, and f.__wrapped__
+    # The internals of the lru_cache are encapsulated for thread safety and
+    # to allow the implementation to change (including a possible C version).
+
+    def decorating_function(user_function):
+
+        cache = dict()
+        stats = [0, 0]                  # make statistics updateable non-locally
+        HITS, MISSES = 0, 1             # names for the stats fields
+        kwd_mark = (object(),)          # separate positional and keyword args
+        cache_get = cache.get           # bound method to lookup key or return None
+        _len = len                      # localize the global len() function
+        lock = Lock()                   # because linkedlist updates aren't threadsafe
+        root = []                       # root of the circular doubly linked list
+        nonlocal_root = [root]                  # make updateable non-locally
+        root[:] = [root, root, None, None]      # initialize by pointing to self
+        PREV, NEXT, KEY, RESULT = 0, 1, 2, 3    # names for the link fields
+        # Each cached entry is a 4-item list [PREV, NEXT, KEY, RESULT] acting
+        # as a node of a circular doubly linked list with `root` as sentinel:
+        # the oldest entry sits at root[NEXT], the most recent at root[PREV].
+
+        def make_key(args, kwds, typed, tuple=tuple, sorted=sorted, type=type):
+            # helper function to build a cache key from positional and keyword args
+            # (kwd_mark is a unique object, so the positional/keyword boundary
+            # cannot collide with any user-supplied argument value)
+            key = args
+            if kwds:
+                sorted_items = tuple(sorted(kwds.items()))
+                key += kwd_mark + sorted_items
+            if typed:
+                key += tuple(type(v) for v in args)
+                if kwds:
+                    key += tuple(type(v) for k, v in sorted_items)
+            return key
+
+        # One of three wrapper variants is selected up front so the per-call
+        # fast path carries no maxsize branching.
+        if maxsize == 0:
+
+            def wrapper(*args, **kwds):
+                # no caching, just do a statistics update after a successful call
+                result = user_function(*args, **kwds)
+                stats[MISSES] += 1
+                return result
+
+        elif maxsize is None:
+
+            def wrapper(*args, **kwds):
+                # simple caching without ordering or size limit
+                key = make_key(args, kwds, typed) if kwds or typed else args
+                result = cache_get(key, root)   # root used here as a unique not-found sentinel
+                if result is not root:
+                    stats[HITS] += 1
+                    return result
+                result = user_function(*args, **kwds)
+                cache[key] = result
+                stats[MISSES] += 1
+                return result
+
+        else:
+
+            def wrapper(*args, **kwds):
+                # size limited caching that tracks accesses by recency
+                key = make_key(args, kwds, typed) if kwds or typed else args
+                with lock:
+                    link = cache_get(key)
+                    if link is not None:
+                        # record recent use of the key by moving it to the front of the list
+                        root, = nonlocal_root
+                        link_prev, link_next, key, result = link
+                        link_prev[NEXT] = link_next
+                        link_next[PREV] = link_prev
+                        last = root[PREV]
+                        last[NEXT] = root[PREV] = link
+                        link[PREV] = last
+                        link[NEXT] = root
+                        stats[HITS] += 1
+                        return result
+                # miss: user_function runs outside the lock, so concurrent
+                # misses on the same key may compute the result more than once.
+                result = user_function(*args, **kwds)
+                with lock:
+                    root = nonlocal_root[0]
+                    if _len(cache) < maxsize:
+                        # put result in a new link at the front of the list
+                        last = root[PREV]
+                        link = [last, root, key, result]
+                        cache[key] = last[NEXT] = root[PREV] = link
+                    else:
+                        # use root to store the new key and result
+                        root[KEY] = key
+                        root[RESULT] = result
+                        cache[key] = root
+                        # empty the oldest link and make it the new root
+                        root = nonlocal_root[0] = root[NEXT]
+                        del cache[root[KEY]]
+                        root[KEY] = None
+                        root[RESULT] = None
+                    stats[MISSES] += 1
+                return result
+
+        def cache_info():
+            """Report cache statistics"""
+            with lock:
+                return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
+
+        def cache_clear():
+            """Clear the cache and cache statistics"""
+            with lock:
+                cache.clear()
+                root = nonlocal_root[0]
+                root[:] = [root, root, None, None]
+                stats[:] = [0, 0]
+
+        wrapper.__wrapped__ = user_function
+        wrapper.cache_info = cache_info
+        wrapper.cache_clear = cache_clear
+        return update_wrapper(wrapper, user_function)
+
+    return decorating_function
+
+
+# Total Ordering: Class Decorator for Filling in Rich Comparison Methods When
+# Only One is Implemented (Python recipe) 
+# Source: http://code.activestate.com/recipes/576529-total-ordering-class-decorator-for-filling-in-rich/
+# By Christian Muirhead, Menno Smits and Michael Foord 2008
+# WTF license
+# http://voidspace.org.uk/blog
+
+import sys as _sys
+
+# _has_method(cls, name): True when any class in cls's MRO other than
+# `object` defines `name` directly in its __dict__ — i.e. the method is
+# user-provided rather than object's built-in default.
+if _sys.version_info[0] == 3:
+    def _has_method(cls, name):
+        # Python 3: every class exposes __mro__.
+        for B in cls.__mro__:
+            if B is object:
+                continue
+            if name in B.__dict__:
+                return True
+        return False
+else:
+    def _has_method(cls, name):
+        # Python 2: use the mro() method instead.
+        # NOTE(review): mro() exists only on new-style classes; an old-style
+        # class would raise AttributeError here — confirm callers only
+        # decorate new-style classes.
+        for B in cls.mro():
+            if B is object:
+                continue
+            if name in B.__dict__:
+                return True
+        return False
+
+
+def _ordering(cls, overwrite):
+    """Fill in the rich-comparison methods that `cls` is missing.
+
+    When `overwrite` is false, only methods not already defined by the
+    class (or a non-object base) are installed.  Returns `cls`.
+    """
+    def setter(name, value):
+        # Install `value` as cls.name unless the class already provides it
+        # (or `overwrite` forces replacement).
+        if overwrite or not _has_method(cls, name):
+            value.__name__ = name
+            setattr(cls, name, value)
+            
+    # Derive __lt__ from whichever of __gt__/__le__/__ge__ the class defines.
+    comparison = None
+    if not _has_method(cls, '__lt__'):
+        for name in 'gt le ge'.split():
+            if not _has_method(cls, '__' + name + '__'):
+                continue
+            comparison = getattr(cls, '__' + name + '__')
+            # For the reflexive ops (le/ge), equality means the relation holds
+            # both ways; for the strict op (gt), that it holds neither way.
+            if name.endswith('e'):
+                eq = lambda s, o: comparison(s, o) and comparison(o, s)
+            else:
+                eq = lambda s, o: not comparison(s, o) and not comparison(o, s)
+            ne = lambda s, o: not eq(s, o)
+            if name.startswith('l'):
+                setter('__lt__', lambda s, o: comparison(s, o) and ne(s, o))
+            else:
+                setter('__lt__', lambda s, o: comparison(o, s) and ne(s, o))
+            # break makes the late-bound `comparison` in the lambdas safe:
+            # it is never rebound after the first match.
+            break
+        # NOTE(review): assert is stripped under -O; raising ValueError would
+        # enforce this precondition unconditionally.
+        assert comparison is not None, 'must have at least one of ge, gt, le, lt'
+
+    # The remaining methods are all expressed via __lt__ / __gt__.
+    setter('__ne__', lambda s, o: s < o or o < s)
+    setter('__eq__', lambda s, o: not s != o)
+    setter('__gt__', lambda s, o: o < s)
+    setter('__ge__', lambda s, o: not (s < o))
+    setter('__le__', lambda s, o: not (s > o))
+    return cls
+
+
+def total_ordering(cls):
+    """Class decorator filling in missing rich-comparison methods from the
+    one(s) `cls` already defines (recipe backport; never overwrites)."""
+    return _ordering(cls, False)
+
+
+"""
 Low-level API
 ===============
 Kept closely in sync with the Pixiv Website.
 the Mid-Level API.
 """
 
-"""
-API Annotation functions
-"""
 
-class OneOf:
-    def __init__(self, *classes):
-        self.classes = classes
-    def __repr__(self):
-        return "OneOf({})".format(
-                ", ".join([repr(x) for x in self.classes]))
 
 """
 Base Definitions
 # Models data in illust URL
 IllustUrl = namedtuple('IllustUrl', ('id', 'mode', 'page'))
 
-def parse_illust_url(url) -> IllustUrl:
+def parse_illust_url(url):
     """Parse illustration URL, returning Illustration Id and mode"""
     #Form: http://www.pixiv.net/member_illust.php?mode=medium&illust_id=ID
     import re
-    from urllib.parse import urlsplit, parse_qs
+    try:
+        from urllib.parse import urlsplit, parse_qs
+    except ImportError:
+        from urlparse import urlsplit, parse_qs
     url = urlsplit(url)
     query = dict((k, v[0]) for k,v in parse_qs(url.query).items())
     if url.scheme == 'http' and \
 
 # URL Construction
 def gen_illust_url(id, mode = 'medium', page = None):
-    from urllib.parse import urlencode
+    try:
+        from urllib.parse import urlencode
+    except ImportError:
+        from urllib import urlencode
     p = {
             'illust_id': id,
             'mode': mode,
             'page': page
             }
     p = dict(x for x in p.items() if x[1] is not None)
-    return 'http://www.pixiv.net/member_illust.php?{}'.format(urlencode(p))
+    return 'http://www.pixiv.net/member_illust.php?{0}'.format(urlencode(p))
 
 """
 Illust Content Parsers
 IllustBigContentParseResult = namedtuple('IllustBigContentParseResult',
         ('imgurl',))
 
-def parse_illust_big_content(content) -> IllustBigContentParseResult:
+def parse_illust_big_content(content):
     from bs4 import BeautifulSoup
     p = BeautifulSoup(content)
     raise_for_illust_not_logged_in_content(p)
     'IllustMangaContentParseResult', 
         ('pages',))
 
-def parse_illust_manga_content(content) -> IllustMangaContentParseResult:
+def parse_illust_manga_content(content):
     import re
     #NOTE: No login check because pixiv displays the
     #same content as logged in user (with slight differences
         'IllustMangaBigContentParseResult',
         ('imgurl',))
 
-def parse_illust_manga_big_content(content) -> str:
+def parse_illust_manga_big_content(content):
     from bs4 import BeautifulSoup
     p = BeautifulSoup(content)
     raise_for_illust_manga_big_not_logged_in_content(p)
                 headers = {'Referer': gen_illust_referer(id, mode, page)},
                 cookies = {'PHPSESSID': session.id})
     else:
-        raise ValueError("modes must be one of {}. Got: {}".format(modes,
+        raise ValueError("modes must be one of {0}. Got: {1}".format(modes,
             mode))
 
 """
                 headers = {'Referer': gen_illust_img_referer(id, mode, page)},
                 cookies = {'PHPSESSID': session.id})
     else:
-        raise ValueError("modes must be one of {}".format(modes))
+        raise ValueError("modes must be one of {0}".format(modes))
 
 
 """
 
 def gen_illust_manga_permalink(id, page):
     """page is 0-index."""
-    from urllib.parse import urlencode
+    try:
+        from urllib.parse import urlencode
+    except ImportError:
+        from urllib import urlencode
     p = {'mode': 'manga_big',
             'illust_id': int(id),
             'page': int(page) }
     p = dict(x for x in p.items() if x[1] is not None)
-    return 'http://www.pixiv.net/member_illust.php?{}'.format(urlencode(p))
+    return 'http://www.pixiv.net/member_illust.php?{0}'.format(urlencode(p))
 
 """
 Session Authentication: Login
         def __getitem__(self, i):  
             return constants[i]
         def __repr__(self):        
-            return '{}({})'.format(classname,names)
+            return '{0}({1})'.format(classname,names)
         def __contains__(self, key):
             if hasattr(key, 'EnumType'):
                 return self is key.EnumType
         def __nonzero__(self):     
             return bool(self.__value)
         def __repr__(self):        
-            return '{}.{}'.format(classname, names[self.__value])
+            return '{0}.{1}'.format(classname, names[self.__value])
     maximum = len(names) - 1
     constants = [None] * len(names)
     for i, each in enumerate(names):
 """
 
 # Medium-level methods which perform the actual HTTP Requests
-def login(username_or_email, password, remember_me = False, ssl = True) -> Session:
+def login(username_or_email, password, remember_me = False, ssl = True):
     """Login using requests. Returns PHPSESSID on success."""
     #On login success, Header location set
     import requests
     else:
         raise Exception('Could not detect successful logout')
 
-def req_download(request:dict):
+def req_download(request):
     """Downloads the request using the requests library into the 
     current directory"""
-    from urllib.parse import urlsplit
+    try:
+        from urllib.parse import urlsplit
+    except ImportError:
+        from urlparse import urlsplit
     from os.path import basename
     import requests
     url = request['url']
         def imgurls(self):
             return [self.imgurl]
 
-    def __init__(self, context, id:int):
+    def __init__(self, context, id):
         self.context = context
         self.id = id
         self.sizes = dict((x, Illust.Size(self, x)) for x in ('medium', 'big'))
             raise ValueError('Not a Illust URL')
     
     def __str__(self):
-        return 'Illust({})'.format(self.id)
+        return 'Illust({0})'.format(self.id)
 
     def __repr__(self):
-        return 'Illust({}, {})'.format(repr(self.context), 
+        return 'Illust({0}, {1})'.format(repr(self.context), 
                 repr(self.id))
 
 class Manga:
         else:
             raise ValueError('Not a manga URL')
     def __str__(self):
-        return 'Manga({})'.format(self.id)
+        return 'Manga({0})'.format(self.id)
     def __repr__(self):
-        return 'Manga({}, {})'.format(repr(self.context), 
+        return 'Manga({0}, {1})'.format(repr(self.context), 
                 repr(self.id))
 
 
 class Pixiv:
     """High level API for Pixiv"""
-    def __init__(self, username_or_email:str, password:str, 
-            remember_me:bool = False, ssl:bool = True) -> None:
+    def __init__(self, username_or_email, password, 
+            remember_me = False, ssl = True):
         self.username_or_email = username_or_email
         self.remember_me = remember_me
         self.ssl = ssl
         self.session = login(username_or_email, password,
                 remember_me, ssl)
 
-    def get_url(self, url:str) -> OneOf(Illust, Manga):
+    def get_url(self, url):
         """Returns one of the following:
         - Illust
         - Manga
                 pass
         raise ValueError('Invalid URL')
 
-    def download(self, x, size = None) -> [str]:
+    def download(self, x, size = None):
         """Downloads an Illust or Manga `x` to the
         current directory. `size` can be used to select
         the sizes of the images. 
         filenames = [req_download(x._asdict()) for x in reqs]
         return filenames
 
-    def download_url(self, url:str, size = None) -> [str]:
+    def download_url(self, url, size = None):
         """
         Parses the URL, and if it is an Illust or Manga,
         downloads it to the current directory.
         x = self.get_url(url)
         return self.download(x, size)
 
-    def logout(self) -> None:
+    def logout(self):
         """Logs out the session. This instance is invalid
         once this function is called."""
         return logout(self.session)
 
     def __str__(self):
-        return 'Pixiv({})'.format(self.username_or_email)
+        return 'Pixiv({0})'.format(self.username_or_email)
 
     def __repr__(self):
-        return 'Pixiv({}, ..., {}, {})'.format(
+        return 'Pixiv({0}, ..., {1}, {2})'.format(
                 repr(self.username_or_email),
                 repr(self.remember_me),
                 repr(self.ssl))