fix #63
yami committed May 2, 2017
1 parent ff98b0d commit 3dfaf95
Showing 6 changed files with 62 additions and 25 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.rst
@@ -6,6 +6,7 @@ The version number of the Python SDK follows the `Semantic Versioning <http://semver.org/>`_ rules
Version 2.3.1
-------------

- Fixed: #63 Added the `oss2.defaults.logger` setting; users can set this variable to replace the default `logger` (which defaults to the `root` logger)
- Fixed: #66 The `__len__()` method on oss2's Adapter classes caused requests' `super_len()` to fail to upload files larger than 2GB on 32-bit Windows.


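The new `oss2.defaults.logger` option described in the changelog entry above can be used as in the sketch below, which mirrors the test added in tests/test_utils.py later in this commit; the logger name 'oss2' and the stdout handler are illustrative choices, not requirements.

    import logging
    import sys

    import oss2

    # By default the SDK logs through the root logger; point it at a
    # dedicated logger so its debug output can be controlled
    # independently of the application's own logging.
    custom_logger = logging.getLogger('oss2')
    custom_logger.addHandler(logging.StreamHandler(sys.stdout))
    custom_logger.setLevel(logging.DEBUG)

    oss2.defaults.logger = custom_logger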
7 changes: 4 additions & 3 deletions oss2/auth.py
@@ -3,11 +3,12 @@
import hmac
import hashlib
import time
import logging

from . import utils
from .compat import urlquote, to_bytes

from .defaults import get_logger


class Auth(object):
"""Object that stores the user's AccessKeyId and AccessKeySecret and computes request signatures."""
@@ -48,7 +49,7 @@ def _sign_url(self, req, bucket_name, key, expires):
def __make_signature(self, req, bucket_name, key):
string_to_sign = self.__get_string_to_sign(req, bucket_name, key)

logging.debug('string_to_sign={0}'.format(string_to_sign))
get_logger().debug('string_to_sign={0}'.format(string_to_sign))

h = hmac.new(to_bytes(self.secret), to_bytes(string_to_sign), hashlib.sha1)
return utils.b64encode_as_string(h.digest())
@@ -128,7 +129,7 @@ def _sign_rtmp_url(self, url, bucket_name, channel_name, playlist_name, expires,

p = params if params else {}
string_to_sign = str(expiration_time) + "\n" + canon_params_str + canonicalized_resource
logging.debug('string_to_sign={0}'.format(string_to_sign))
get_logger().debug('string_to_sign={0}'.format(string_to_sign))

h = hmac.new(to_bytes(self.secret), to_bytes(string_to_sign), hashlib.sha1)
signature = utils.b64encode_as_string(h.digest())
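With auth.py now emitting its string_to_sign messages through get_logger(), an application can also redirect that output away from the root logger entirely. A minimal sketch, assuming only the new oss2.defaults.logger attribute added by this commit; the 'oss2.quiet' logger name is hypothetical.

    import logging

    import oss2

    # Route the SDK's signing debug messages to a logger that drops
    # everything below WARNING and does not propagate to the root logger.
    quiet_logger = logging.getLogger('oss2.quiet')  # illustrative name
    quiet_logger.addHandler(logging.NullHandler())
    quiet_logger.setLevel(logging.WARNING)
    quiet_logger.propagate = False

    oss2.defaults.logger = quiet_logger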
11 changes: 10 additions & 1 deletion oss2/defaults.py
@@ -8,6 +8,8 @@
"""

import logging


def get(value, default_value):
if value is None:
@@ -43,4 +45,11 @@ def get(value, default_value):
multiget_num_threads = 4

#: Default part size for parallel download (multiget)
multiget_part_size = 10 * 1024 * 1024
multiget_part_size = 10 * 1024 * 1024

#: Default logger
logger = logging.getLogger()


def get_logger():
return logger
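Note how the call sites fetch the logger through get_logger() instead of importing the logger object directly: the module attribute is read at call time, so a later reassignment of oss2.defaults.logger takes effect immediately. A minimal sketch of that behaviour, with a hypothetical logger name:

    import logging

    import oss2.defaults

    # Reassigning the module attribute is picked up by subsequent calls,
    # because get_logger() re-reads oss2.defaults.logger each time.
    app_logger = logging.getLogger('my_app.oss')
    oss2.defaults.logger = app_logger
    assert oss2.defaults.get_logger() is app_logger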
37 changes: 18 additions & 19 deletions oss2/resumable.py
@@ -17,9 +17,8 @@
from .models import PartInfo
from .compat import json, stringify, to_unicode
from .task_queue import TaskQueue
from .defaults import get_logger


import logging
import functools
import threading
import shutil
@@ -188,7 +187,7 @@ def __init__(self, bucket, key, filename, size, store,

self.__store = store
self.__record_key = self.__store.make_store_key(bucket.bucket_name, key, self._abspath)
logging.info('key is {0}'.format(self.__record_key))
get_logger().info('key is {0}'.format(self.__record_key))

# protect self.__progress_callback
self.__plock = threading.Lock()
@@ -332,20 +331,20 @@ def is_record_sane(record):
try:
for key in ('etag', 'tmp_suffix', 'abspath', 'bucket', 'key'):
if not isinstance(record[key], str):
logging.info('{0} is not a string: {1}, but {2}'.format(key, record[key], record[key].__class__))
get_logger().info('{0} is not a string: {1}, but {2}'.format(key, record[key], record[key].__class__))
return False

for key in ('part_size', 'size', 'mtime'):
if not isinstance(record[key], int):
logging.info('{0} is not an integer: {1}, but {2}'.format(key, record[key], record[key].__class__))
get_logger().info('{0} is not an integer: {1}, but {2}'.format(key, record[key], record[key].__class__))
return False

for key in ('parts'):
if not isinstance(record['parts'], list):
logging.info('{0} is not a list: {1}, but {2}'.format(key, record[key], record[key].__class__))
get_logger().info('{0} is not a list: {1}, but {2}'.format(key, record[key], record[key].__class__))
return False
except KeyError as e:
logging.info('Key not found: {0}'.format(e.args))
get_logger().info('Key not found: {0}'.format(e.args))
return False

return True
@@ -356,7 +355,7 @@ def __is_remote_changed(self, record):
record['etag'] != self.objectInfo.etag)

def __finish_part(self, part):
logging.debug('finishing part: part_number={0}, start={1}, end={2}'.format(part.part_number, part.start, part.end))
get_logger().debug('finishing part: part_number={0}, start={1}, end={2}'.format(part.part_number, part.start, part.end))

with self.__lock:
self.__finished_parts.append(part)
@@ -464,12 +463,12 @@ def __load_record(self):
record = None

if record and self.__file_changed(record):
logging.debug('{0} was changed, clear the record.'.format(self.filename))
get_logger().debug('{0} was changed, clear the record.'.format(self.filename))
self._del_record()
record = None

if record and not self.__upload_exists(record['upload_id']):
logging.debug('{0} upload not exist, clear the record.'.format(record['upload_id']))
get_logger().debug('{0} upload not exist, clear the record.'.format(record['upload_id']))
self._del_record()
record = None

@@ -480,7 +479,7 @@ def __load_record(self):
'abspath': self._abspath, 'bucket': self.bucket.bucket_name, 'key': self.key,
'part_size': part_size}

logging.debug('put new record upload_id={0} part_size={1}'.format(upload_id, part_size))
get_logger().debug('put new record upload_id={0} part_size={1}'.format(upload_id, part_size))
self._put_record(record)

self.__record = record
@@ -546,7 +545,7 @@ def __init__(self, root, dir):
def get(self, key):
pathname = self.__path(key)

logging.debug('get key={0}, pathname={1}'.format(key, pathname))
get_logger().debug('get key={0}, pathname={1}'.format(key, pathname))

if not os.path.exists(pathname):
return None
@@ -569,13 +568,13 @@ def put(self, key, value):
with open(to_unicode(pathname), 'w') as f:
json.dump(value, f)

logging.debug('put key={0}, pathname={1}'.format(key, pathname))
get_logger().debug('put key={0}, pathname={1}'.format(key, pathname))

def delete(self, key):
pathname = self.__path(key)
os.remove(pathname)

logging.debug('del key={0}, pathname={1}'.format(key, pathname))
get_logger().debug('del key={0}, pathname={1}'.format(key, pathname))

def __path(self, key):
return os.path.join(self.dir, key)
@@ -656,23 +655,23 @@ def _is_record_sane(record):
try:
for key in ('upload_id', 'abspath', 'key'):
if not isinstance(record[key], str):
logging.info('{0} is not a string: {1}, but {2}'.format(key, record[key], record[key].__class__))
get_logger().info('{0} is not a string: {1}, but {2}'.format(key, record[key], record[key].__class__))
return False

for key in ('size', 'part_size'):
if not isinstance(record[key], int):
logging.info('{0} is not an integer: {1}'.format(key, record[key]))
get_logger().info('{0} is not an integer: {1}'.format(key, record[key]))
return False

if not isinstance(record['mtime'], int) and not isinstance(record['mtime'], float):
logging.info('mtime is not a float or an integer: {0}'.format(record['mtime']))
get_logger().info('mtime is not a float or an integer: {0}'.format(record['mtime']))
return False

if not isinstance(record['parts'], list):
logging.info('parts is not a list: {0}'.format(record['parts'].__class__.__name__))
get_logger().info('parts is not a list: {0}'.format(record['parts'].__class__.__name__))
return False
except KeyError as e:
logging.info('Key not found: {0}'.format(e.args))
get_logger().info('Key not found: {0}'.format(e.args))
return False

return True
5 changes: 3 additions & 2 deletions oss2/task_queue.py
@@ -2,7 +2,8 @@

import threading
import sys
import logging

from .defaults import get_logger

try:
import Queue as queue
@@ -38,7 +39,7 @@ def run(self):
t.join(1)

if self.__exc_info:
logging.debug('An exception was thrown by producer or consumer, backtrace: {0}'.format(self.__exc_stack))
get_logger().debug('An exception was thrown by producer or consumer, backtrace: {0}'.format(self.__exc_stack))
raise self.__exc_info[1]

def put(self, data):
26 changes: 26 additions & 0 deletions tests/test_utils.py
@@ -12,6 +12,8 @@

from common import *

import logging


is_py2 = (sys.version_info[0] == 2)
is_py3 = (sys.version_info[0] == 3)
@@ -109,6 +111,30 @@ def progress_callback(consumed_bytes, total_bytes):

self.assertEqual(progress_adapter.len, 3)

def test_default_logger_basic(self):
# verify default logger
self.assertEqual(oss2.defaults.get_logger(), logging.getLogger())

# verify custom logger
custom_logger = logging.getLogger('oss2')
oss2.defaults.logger = custom_logger

self.assertEqual(oss2.defaults.get_logger(), custom_logger)

def test_default_logger_put(self):
custom_logger = logging.getLogger('oss2')
oss2.defaults.logger = custom_logger

custom_logger.addHandler(logging.StreamHandler(sys.stdout))
custom_logger.setLevel(logging.DEBUG)

key = self.random_key()

self.bucket.put_object(key, 'abc')
resp = self.bucket.get_object(key).resp

self.assertEqual(b'abc', resp.read())


if __name__ == '__main__':
unittest.main()
