Commits

Anonymous committed a9b457d

[svn r20] Moved s3 stuff into aws folder

Comments (0)

Files changed (4)

+'''
+    Requires Amazon S3 library!
+'''
+import os
+import sys
+import S3
+import time
+import mimetypes
+
+
class S3Error(Exception):
    '''Raised when an S3 request fails or a local file cannot be read.'''
+
+
class S3Proc(object):
    ''' Minimal wrapper around the Amazon S3 sample library (``import S3``).

        Holds credentials and a target bucket name; ``connect()`` must be
        called before any of the transfer methods are used.
    '''

    def __init__(self, aws_key, aws_secret_key, bucket, default_perm='private'):
        '''
            aws_key / aws_secret_key: AWS credentials.
            bucket: bucket name; created on connect() if it does not exist.
            default_perm: canned ACL applied when none is given; silently
                falls back to 'private' if the value is not a known ACL.
        '''
        self.aws_key = aws_key
        self.aws_secret_key = aws_secret_key
        self.bucket = bucket
        self.conn = None  # set by connect()
        # Canned ACL names S3 accepts in the x-amz-acl header.
        self.perm_tuple = (
            'private',
            'public-read',
            'public-read-write',
            'authenticated-read',
        )
        if default_perm not in self.perm_tuple:
            default_perm = 'private'
        self.default_perm = default_perm

    def connect(self):
        ''' Open the authenticated connection and ensure the bucket exists.

            Raises S3Error if the bucket is missing and cannot be created.
            Returns True on success.
        '''
        self.conn = S3.AWSAuthConnection(self.aws_key, self.aws_secret_key)
        buckets = self.conn.list_all_my_buckets()
        if self.bucket not in [b.name for b in buckets.entries]:
            res = self.conn.create_bucket(self.bucket)
            if res.http_response.status != 200:
                # Call-style raise: valid on both Python 2 and Python 3
                # (the original "raise X, msg" form is Python-2-only).
                raise S3Error('Unable to create bucket %s' % (self.bucket))

        return True

    def _put_once(self, filename, data, headers):
        ''' Single PUT attempt; returns the raw library response. '''
        return self.conn.put(self.bucket, filename, S3.S3Object(data), headers)

    def put(self, filename, data, perm=None, fail_silently=True):
        ''' Upload ``data`` under ``filename``; retries once on failure.

            Returns True on success.  On a second failure returns False,
            or raises S3Error when fail_silently is False.
        '''
        content_type = mimetypes.guess_type(filename)[0]
        if content_type is None:
            content_type = 'text/plain'

        # None, and any unknown ACL name, fall back to the default;
        # "not in" covers both halves of the original compound check.
        if perm not in self.perm_tuple:
            perm = self.default_perm

        headers = {'x-amz-acl': perm, 'Content-Type': content_type}
        res = self._put_once(filename, data, headers)
        if res.http_response.status != 200:
            # Failed. Pause, then try exactly once more.
            time.sleep(1.0)
            res = self._put_once(filename, data, headers)
            if res.http_response.status != 200:
                # Failed again. Raise exception unless told to be quiet.
                if not fail_silently:
                    raise S3Error('Unable to upload file %s' % (filename))
                return False

        return True

    def put_from_file(self, filename, remote_filename=None,
                                                perm=None, fail_silently=True):
        ''' Upload file from disk. If you want a different
            remote filename, specify in remote_filename
        '''
        try:
            # "with" guarantees the handle is closed even if read() fails
            # (the original leaked the handle on a mid-read IOError).
            with open(filename, 'rb') as fp:
                data = fp.read()
        except IOError as err:
            raise S3Error('Unable to read %s: %s' % (filename, err.strerror))

        return self.put(remote_filename or filename, data, perm, fail_silently)

    def get(self, filename):
        ''' Returns file data. Raises S3Error if the fetch fails. '''
        res = self.conn.get(self.bucket, filename)
        if res.http_response.status != 200:
            raise S3Error('Unable to fetch file %s' % (filename))
        return res.body

    def delete(self, filename, fail_silently=True):
        ''' Returns true/false on delete
            Raises S3Error if fail_silently is set to False
        '''
        res = self.conn.delete(self.bucket, filename)
        # S3 answers a successful DELETE with 204 No Content, not 200.
        if not fail_silently:
            if res.http_response.status != 204:
                raise S3Error('Unable to delete file %s' % (filename))

        return (res.http_response.status == 204)

    def get_perm(self, filename):
        ''' Returns the ACL document for the given file. '''
        res = self.conn.get_acl(self.bucket, filename)
        if res.http_response.status != 200:
            raise S3Error(
                'Unable to fetch permissions for file %s' % (filename))
        return res.object.data

    def set_perm(self, filename, perm, fail_silently=True):
        ''' Sets permissions on file. '''
        # TODO: unimplemented in the original as well; kept as a stub so
        # the public interface is unchanged.
        pass
+import hmac
+import time
+import base64
+import urllib
+import hashlib
+
+
def gen_signature(secret_key, string_to_sign):
    ''' Return the base64-encoded HMAC-SHA1 of *string_to_sign* keyed
        with *secret_key*, as S3 query-string authentication requires.
    '''
    digest = hmac.new(secret_key, string_to_sign, hashlib.sha1).digest()
    return base64.b64encode(digest)
+
+
def return_secure_link(key, secret_key, bucket,
                                filename, expires=300, timestamp=None):
    ''' Return a secure S3 link with an expiration on the download.

        key: S3 Access Key (login)
        secret_key: S3 Secret Access Key (password)
        bucket: Bucket name
        filename: file path
        expires: Seconds from NOW the link expires
        timestamp: Epoch timestamp. If present, "expires" will not be used.
    '''
    filename = urllib.quote_plus(filename)
    # quote_plus also escapes the path separators; restore them.
    filename = filename.replace('%2F', '/')
    path = '/%s/%s' % (bucket, filename)

    if timestamp is not None:
        expire_time = timestamp
    else:
        expire_time = time.time() + expires

    # The Expires value inside the signed string must match the Expires
    # query parameter byte-for-byte: S3 recomputes the signature from the
    # URL's value.  The original signed '%f' ("...0000.000000") but sent
    # '%.0f', so every generated link failed signature validation.
    expires_str = '%.0f' % (expire_time)
    string_to_sign = 'GET\n\n\n%s\n%s' % (expires_str, path)
    params = {
        'AWSAccessKeyId': key,
        'Expires': expires_str,
        'Signature': gen_signature(secret_key, string_to_sign),
    }

    return 'http://%s.s3.amazonaws.com/%s?%s' % (
                                    bucket, filename, urllib.urlencode(params))

s3proc.py

-'''
-    Requires Amazon S3 library!
-'''
-import os
-import sys
-import S3
-import time
-import mimetypes
-
-
-class S3Error(Exception):
-    "Misc. S3 Service Error"
-    pass
-
-
-class S3Proc(object):
-    def __init__(self, aws_key, aws_secret_key, bucket, default_perm='private'):
-        self.aws_key = aws_key
-        self.aws_secret_key = aws_secret_key
-        self.bucket = bucket
-        self.conn = None
-        self.perm_tuple = (
-            'private',
-            'public-read',
-            'public-read-write',
-            'authenticated-read',
-        )
-        if default_perm not in self.perm_tuple:
-            default_perm = 'private'
-        self.default_perm = default_perm
-
-    def connect(self):
-        self.conn = S3.AWSAuthConnection(self.aws_key, self.aws_secret_key)
-        buckets = self.conn.list_all_my_buckets()
-        if self.bucket not in [b.name for b in buckets.entries]:
-            res = self.conn.create_bucket(self.bucket)
-            if res.http_response.status != 200:
-                raise S3Error, 'Unable to create bucket %s' % (self.bucket)
-
-        return True
-
-    def put(self, filename, data, perm=None, fail_silently=True):
-        content_type = mimetypes.guess_type(filename)[0]
-        if content_type is None:
-            content_type = 'text/plain'
-
-        if perm is None or (perm is not None and perm not in self.perm_tuple):
-            perm = self.default_perm
-
-        res = self.conn.put(self.bucket, filename, S3.S3Object(data),
-                       {'x-amz-acl': perm, 'Content-Type': content_type})
-        if res.http_response.status != 200:
-            # Failed. Pause. Try once more.
-            time.sleep(1.0)
-            res = self.conn.put(self.bucket, filename, S3.S3Object(data),
-                             {'x-amz-acl': perm, 'Content-Type': content_type})
-            if res.http_response.status != 200:
-                # Failed again. Raise exception
-                if not fail_silently:
-                    raise S3Error, 'Unable to upload file %s' % (filename)
-                return False
-
-        return True
-
-    def put_from_file(self, filename, remote_filename=None, 
-                                                perm=None, fail_silently=True):
-        ''' Upload file from disk. If you want a different 
-            remote filename, specify in remote_filename
-        '''
-        try:
-            fp = open(filename, 'rb')
-            data = fp.read()
-            fp.close()
-        except IOError, err:
-            raise S3Error, 'Unable to read %s: %s' % (filename, err.strerror)
-
-        return self.put(remote_filename or filename, data, perm, fail_silently)
-
-    def get(self, filename):
-        ''' Returns file data '''
-        res = self.conn.get(self.bucket, filename)
-        if res.http_response.status != 200:
-            raise S3Error, 'Unable to fetch file %s' % (filename)
-        return res.body
-
-    def delete(self, filename, fail_silently=True):
-        ''' Returns true/false on delete 
-            Raises S3Error if fail_silently is set to False
-        '''
-        res = self.conn.delete(self.bucket, filename)
-        if not fail_silently:
-            if res.http_response.status != 204:
-                raise S3Error, 'Unable to delete file %s' % (filename)
-
-        return (res.http_response.status == 204)
-
-    def get_perm(self, filename):
-        ''' Returns permissions for set file. '''
-        res = self.conn.get_acl(self.bucket, filename)
-        if res.http_response.status != 200:
-            raise S3Error, \
-                'Unable to fetch permissions for file %s' % (filename)
-        return res.object.data
-
-    def set_perm(self, filename, perm, fail_silently=True):
-        ''' Sets permissions on file. '''
-        pass

secures3.py

-import hmac
-import time
-import base64
-import urllib
-import hashlib
-
-
-def gen_signature(secret_key, string_to_sign):
-    return base64.b64encode(
-        hmac.new(
-            secret_key,
-            string_to_sign,
-            hashlib.sha1
-        ).digest()
-    )
-
-
-def return_secure_link(key, secret_key, bucket, 
-                                filename, expires=300, timestamp=None):
-    ''' Return a secure S3 link with an expiration on the download.
-
-        key: S3 Access Key (login)
-        secret_key: S3 Secret Access Key (password)
-        bucket: Bucket name
-        filename: file path
-        expires: Seconds from NOW the link expires
-        timestamp: Epoch timestamp. If present, "expires" will not be used.
-    '''
-    filename = urllib.quote_plus(filename)
-    filename = filename.replace('%2F', '/')
-    path = '/%s/%s' % (bucket, filename)
-
-    if timestamp is not None:
-        expire_time = timestamp
-    else:
-        expire_time = time.time() + expires
-
-    string_to_sign = 'GET\n\n\n%f\n%s' % (expire_time, path)
-    params = {
-        'AWSAccessKeyId': key,
-        'Expires': '%.0f' % (expire_time),
-        'Signature': gen_signature(secret_key, string_to_sign),
-    }
-
-    return 'http://%s.s3.amazonaws.com/%s?%s' % (
-                                    bucket, filename, urllib.urlencode(params))