Add support for python3 #20

Merged — 22 commits, Aug 9, 2016

Commits (22)
7156334
add six to dependencies
Changaco Jul 28, 2016
b59710c
`2to3 -x callable -x future -wn tests` + manual changes
Changaco Jul 27, 2016
2fa01de
`2to3 -x callable -x future -wn aspen` + manual changes
Changaco Jul 28, 2016
bfb16e8
drop utils `ascii_dammit` and `unicode_dammit`
Changaco Jul 28, 2016
4349bc4
clean up unused function and constants
Changaco Jul 28, 2016
e4b2e8c
update type information in comments
Changaco Jul 28, 2016
772e652
fix `json_.FriendlyEncoder`
Changaco Jul 29, 2016
430ebea
modify JSON renderer to always return unicode
Changaco Jul 29, 2016
2e75933
work around an issue with filesystem_tree
Changaco Jul 29, 2016
cc9f8bc
create an `Output.text` property and use it in tests
Changaco Jul 29, 2016
d1d52a0
fix `simplate._decode` to work with python3
Changaco Jul 29, 2016
e6c407f
store media types and renderers as text
Changaco Jul 29, 2016
278d665
treat the request path as text from the start
Changaco Jul 29, 2016
a42faf3
store simplate pages as text, not bytestrings
Changaco Jul 29, 2016
612c15d
import `unichr` from `six` in test simplate
Changaco Jul 29, 2016
c82b826
fix JSON test to work with both python 2 and 3
Changaco Jul 29, 2016
c0cfe41
modify test renderer to always return unicode
Changaco Jul 29, 2016
798ac15
replace bytes literals that contain unicode
Changaco Jul 29, 2016
ef9fa25
use tox to test with multiple python versions
Changaco Jul 29, 2016
1fede1b
drop special jython support from `build.py`
Changaco Jul 29, 2016
a3c8b65
change python version to 3.5 on Travis
Changaco Jul 30, 2016
b5b02f9
stop running `pylint` on Travis and AppVeyor
Changaco Jul 30, 2016
1 change: 1 addition & 0 deletions .gitignore
@@ -1,6 +1,7 @@
*.egg-info/
aspen/tests/log
env
.tox/
*.pyc
distribute-*
__pycache__
4 changes: 2 additions & 2 deletions .travis.yml
@@ -4,12 +4,12 @@ branches:
- master
language: python
python:
- 2.7
- 3.5
Contributor comment:

Why not test both? Are we dropping Python 2 support?

Contributor reply:

> Are we dropping Python 2 support?

No.

> Why not test both?

We're testing all three of Python 2.7, 3.4, and 3.5, but we're using Tox instead of Travis's matrix functionality.

https://travis-ci.org/AspenWeb/aspen.py/builds/148520550

before_install:
- pip install --upgrade pip
- pip --version
install: python build.py dev
script: python build.py analyse
script: python build.py test_cov
notifications:
email: false
irc:
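The tox configuration mentioned in the review answer above (added by commit ef9fa25) is not shown in this diff. As a rough sketch of the kind of file it refers to — the envlist and commands here are assumptions, not the project's actual tox.ini:

# Hypothetical tox.ini sketch — illustrative only, not the file added by ef9fa25.
[tox]
envlist = py27, py34, py35

[testenv]
commands = python build.py test_cov
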
2 changes: 1 addition & 1 deletion appveyor.yml
@@ -23,7 +23,7 @@ build: false # Not a C# project, build stuff at the test step instead.
test_script:

# Build the compiled extension and run the project tests
- "python build.py analyse"
- "python build.py test_cov"

branches:
only:
5 changes: 2 additions & 3 deletions aspen/configuration/__init__.py
@@ -6,7 +6,6 @@
import os

from ..exceptions import ConfigurationError
from ..utils import ascii_dammit


def configure(knobs, d, env_prefix, kwargs):
@@ -43,11 +42,11 @@ def parse_conf_var(raw, from_unicode, context, name_in_context):
value = raw
extend = False
try:
if isinstance(value, str):
if isinstance(value, bytes):
value = value.decode('US-ASCII')
return from_unicode(value), extend
except UnicodeDecodeError as error:
value = ascii_dammit(value)
value = value.decode('US-ASCII', 'repr')
error_detail = "Configuration values must be US-ASCII."
except ValueError as error:
error_detail = error.args[0]
8 changes: 4 additions & 4 deletions aspen/configuration/parse.py
@@ -20,17 +20,17 @@ def identity(value):

def media_type(media_type):
# XXX for now. Read a spec
return media_type.encode('US-ASCII')
return media_type.encode('ascii').decode('ascii')

def codec(value):
codecs.lookup(value)
return value

def yes_no(s):
s = s.lower()
if s in [u'yes', u'true', u'1']:
if s in ['yes', 'true', '1']:
return True
if s in [u'no', u'false', u'0']:
if s in ['no', 'false', '0']:
return False
raise ValueError("must be either yes/true/1 or no/false/0")

@@ -47,4 +47,4 @@ def renderer(value):
if value not in RENDERERS:
msg = "not one of {%s}" % (','.join(RENDERERS))
raise ValueError(msg)
return value.encode('US-ASCII')
return value
40 changes: 20 additions & 20 deletions aspen/http/request.py
@@ -5,18 +5,21 @@
from __future__ import print_function
from __future__ import unicode_literals


import cgi
import urllib
from six import PY2, text_type as str
from six.moves.urllib.parse import parse_qs, unquote, unquote_plus

from .mapping import Mapping


def _decode(o):
return o.decode('utf8') if isinstance(o, bytes) else o


def path_decode(bs):
return urllib.unquote(bs).decode('UTF-8')
return _decode(unquote(bs.encode('ascii') if PY2 else bs))


class PathPart(unicode):
class PathPart(str):
"""A string with a mapping for extra data about it."""

__slots__ = ['params']
@@ -40,17 +43,17 @@ def extract_rfc2396_params(path):
* path should be raw so we don't split or operate on a decoded character
* output is decoded
"""
pathsegs = path.lstrip(b'/').split(b'/')
pathsegs = path.lstrip('/').split('/')
segments_with_params = []
for component in pathsegs:
parts = component.split(b';')
parts = component.split(';')
params = Mapping()
segment = path_decode(parts[0])
for p in parts[1:]:
if b'=' in p:
k, v = p.split(b'=', 1)
if '=' in p:
k, v = p.split('=', 1)
else:
k, v = p, b''
k, v = p, ''
params.add(path_decode(k), path_decode(v))
segments_with_params.append(PathPart(segment, params))
return segments_with_params
@@ -59,7 +62,7 @@ def extract_rfc2396_params(path):
def split_path_no_params(path):
"""This splits a path into parts on "/" only (no split on ";" or ",").
"""
return [PathPart(path_decode(s)) for s in path.lstrip(b'/').split(b'/')]
return [PathPart(path_decode(s)) for s in path.lstrip('/').split('/')]


class Path(Mapping):
@@ -79,17 +82,14 @@ class Querystring(Mapping):
def __init__(self, raw):
"""Takes a string of type application/x-www-form-urlencoded.
"""
self.decoded = urllib.unquote_plus(raw).decode('UTF-8')
self.decoded = _decode(unquote_plus(raw))
self.raw = raw

# parse_qs does its own unquote_plus'ing ...
as_dict = cgi.parse_qs( raw
, keep_blank_values = True
, strict_parsing = False
)

# ... but doesn't decode to unicode.
for k, vals in as_dict.items():
as_dict[k.decode('UTF-8')] = [v.decode('UTF-8') for v in vals]
as_dict = parse_qs(raw, keep_blank_values=True, strict_parsing=False)

# ... but doesn't decode to unicode (in older python versions).
for k, vals in list(as_dict.items()):
as_dict[_decode(k)] = [_decode(v) for v in vals]

Mapping.__init__(self, as_dict)
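
As an aside for readers comparing the two versions above, here is a small standalone sketch (not part of the PR) of why `path_decode` branches on `PY2`: Python 2's `unquote` operates on bytestrings, so the path is encoded to ASCII first and the percent-decoded bytes are then decoded as UTF-8, while Python 3's `unquote` takes and returns text.

# Standalone sketch reusing the helpers from the hunk above; the example path is hypothetical.
from six import PY2
from six.moves.urllib.parse import unquote

def _decode(o):
    return o.decode('utf8') if isinstance(o, bytes) else o

def path_decode(bs):
    # Python 2: unquote() wants bytes and returns bytes; Python 3: text in, text out.
    return _decode(unquote(bs.encode('ascii') if PY2 else bs))

assert path_decode('/caf%C3%A9') == u'/caf\xe9'  # "/café" on both 2.7 and 3.x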
4 changes: 4 additions & 0 deletions aspen/output.py
@@ -3,3 +3,7 @@ class Output(object):

def __init__(self, **kw):
self.__dict__.update(kw)

@property
def text(self):
return self.body.decode(self.charset) if self.charset else None
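
A quick usage illustration of the new `text` property (the example values are hypothetical, not taken from the PR's tests): the body stays bytes, and `text` decodes it on demand using the negotiated charset, returning None when no charset is set.

# Hypothetical usage; Output stores whatever keyword arguments it is given.
out = Output(body=b'Greetings, program!', charset='UTF-8')
assert out.text == 'Greetings, program!'

binary = Output(body=b'\x89PNG\r\n', charset=None)  # e.g. an image response
assert binary.text is None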
4 changes: 2 additions & 2 deletions aspen/request_processor/algorithm.py
@@ -53,7 +53,7 @@ def dispatch_path_to_filesystem(request_processor, path, querystring):
, uripath = path.decoded
, startdir = request_processor.www_root
)
for k, v in result.wildcards.iteritems():
for k, v in result.wildcards.items():
path[k] = v
return {'dispatch_result': result}

@@ -74,6 +74,6 @@ def render_resource(state, resource):


def encode_output(output, request_processor):
if isinstance(output.body, unicode):
if not isinstance(output.body, bytes):
output.charset = request_processor.charset_dynamic
output.body = output.body.encode(output.charset)
1 change: 1 addition & 0 deletions aspen/request_processor/dispatcher.py
@@ -10,6 +10,7 @@
import os
import posixpath
from collections import namedtuple
from functools import reduce


class DispatchError(Exception):
2 changes: 1 addition & 1 deletion aspen/request_processor/typecasting.py
@@ -28,7 +28,7 @@ def apply_typecasters(typecasters, path, state):
*without* those extensions attached anymore, but with typecast values.
It also then removes the string-value keys (the ones with the extensions).
"""
for part in path.keys():
for part in list(path.keys()):
pieces = part.rsplit('.',1)
if len(pieces) > 1:
var, ext = pieces
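The switch to `list(path.keys())` above matters because, as the docstring says, the loop removes keys while it iterates. A minimal sketch of the underlying issue (the dict contents and the int typecast are made up for illustration):

# In Python 3, dict.keys() is a live view; deleting entries while iterating over it
# raises "RuntimeError: dictionary changed size during iteration". Snapshotting the
# keys with list() keeps the loop safe on both 2 and 3.
path = {'year.int': '2016', 'name': 'aspen'}
for part in list(path.keys()):
    pieces = part.rsplit('.', 1)
    if len(pieces) > 1:
        var, ext = pieces
        path[var] = int(path.pop(part))  # stand-in for the real typecaster lookup
assert path == {'year': 2016, 'name': 'aspen'}
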
2 changes: 1 addition & 1 deletion aspen/simplates/json_.py
@@ -70,7 +70,7 @@ def default(self, obj):
cls = obj.__class__ # Use this instead of type(obj) because that
# isn't consistent between new- and old-style
# classes, and this is.
encode = encoders.get(cls, _json.JSONEncoder.default)
encode = encoders.get(cls, super(FriendlyEncoder, self).default)
return encode(obj)

def lazy_check():
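The one-line change above is easy to miss. The reading here (an interpretation, not stated in the PR) is that on Python 3 `_json.JSONEncoder.default` looked up on the class is a plain function, so calling it with a single argument fails, whereas `super(...)` yields a bound method on both versions. A minimal sketch with a hypothetical encoder:

# Hypothetical encoder, not aspen's FriendlyEncoder.
import json

class Enc(json.JSONEncoder):
    def default(self, obj):
        # json.JSONEncoder.default is a plain function on Python 3, so calling it as
        # fallback(obj) would fail with "missing 1 required positional argument: 'o'".
        fallback = super(Enc, self).default  # bound method: works on 2 and 3
        return fallback(obj)

json.dumps({'ok': 1}, cls=Enc)   # serializable values never reach default()
# json.dumps(object(), cls=Enc)  # would raise the standard "not JSON serializable" TypeError
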
2 changes: 1 addition & 1 deletion aspen/simplates/pagination.py
@@ -20,7 +20,7 @@ class Page(object):

def __init__(self, content, header='', offset=0):
self.content = content
self.header = header.decode('ascii')
self.header = header
self.offset = offset

@property
6 changes: 3 additions & 3 deletions aspen/simplates/renderers/__init__.py
@@ -105,9 +105,9 @@ def factories(configuration):
try:
capture = {}
python_syntax = 'from aspen.simplates.renderers.%s import Factory'
exec python_syntax % name in capture
exec(python_syntax % name, capture)
make_renderer = capture['Factory'](configuration)
except ImportError, err:
except ImportError as err:
make_renderer = err
err.info = sys.exc_info()
renderer_factories[name] = make_renderer
@@ -135,7 +135,7 @@ def __init__(self, factory, filepath, raw, media_type, offset):
self.raw = raw
self.media_type = media_type
self.offset = offset
self.padded = (b'\n' * offset) + self.raw
self.padded = ('\n' * offset) + self.raw
self.compiled = self.compile(self._filepath, self.padded)

def __call__(self, context):
5 changes: 4 additions & 1 deletion aspen/simplates/renderers/json_dump.py
@@ -12,7 +12,10 @@ def render_content(self, context):
output = context['output']
if not output.media_type:
output.media_type = context['request_processor'].media_type_json
return json_.dumps(eval(self.compiled, globals(), context))
r = json_.dumps(eval(self.compiled, globals(), context))
if isinstance(r, bytes):
r = r.decode('ascii')
return r


class Factory(Factory):
30 changes: 9 additions & 21 deletions aspen/simplates/simplate.py
@@ -12,17 +12,6 @@
renderer_re = re.compile(r'[a-z0-9.-_]+$')
media_type_re = re.compile(r'[A-Za-z0-9.+*-]+/[A-Za-z0-9.+*-]+$')

MIN_PAGES=2
MAX_PAGES=None


def _ordinal(n):
ords = [ 'zero' , 'one' , 'two', 'three', 'four'
, 'five', 'six', 'seven', 'eight', 'nine' ]
if 0 <= n < len(ords):
return ords[n]
return str(n)


def _decode(raw):
"""As per PEP 263, decode raw data according to the encoding specified in
Expand All @@ -31,7 +20,7 @@ def _decode(raw):
"""
assert type(raw) is bytes # sanity check

decl_re = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)')
decl_re = re.compile(br'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)')

def get_declaration(line):
match = decl_re.match(line)
Expand All @@ -51,7 +40,7 @@ def get_declaration(line):
# observed behavior.

encoding = potential
munged = b'# encoding set to {0}\n'.format(encoding)
munged = b'# encoding set to ' + encoding + b'\n'

else:

Expand All @@ -61,14 +50,15 @@ def get_declaration(line):
# object, we'll get a SyntaxError if we have a well-formed
# `coding: # ` line in it.

munged = b'# encoding NOT set to {0}\n'.format(potential)
munged = b'# encoding NOT set to ' + potential + b'\n'

line = line.split(b'#')[0] + munged

fulltext += line
fulltext += sio.read()
sio.close()
return fulltext.decode(encoding or b'ascii')
encoding = encoding.decode('ascii') if encoding else 'ascii'
return fulltext.decode(encoding)


class SimplateDefaults(object):
@@ -101,8 +91,8 @@ def __init__(self, defaults, fs, raw, default_media_type):

self.defaults = defaults # type: SimplateDefaults
self.fs = fs # type: str
self.raw = raw # type: str
self.decoded = _decode(raw) # type: unicode
self.raw = raw # type: bytes
self.decoded = _decode(raw) # type: str
self.default_media_type = default_media_type # type: str

self.renderers = {} # mapping of media type to Renderer objects
@@ -158,7 +148,7 @@ def parse_into_pages(self, decoded):

pages = list(split_and_escape(decoded))
npages = len(pages)
blank = [ Page(b'') ]
blank = [ Page('') ]

if npages == 1:
pages = blank + blank + pages
@@ -271,15 +261,13 @@ def _get_renderer_factory(self, media_type, renderer):
"renderers (might need third-party libs): %s.")
raise SyntaxError(msg % (renderer, renderer_re.pattern, possible))

renderer = renderer.decode('US-ASCII')

make_renderer = factories.get(renderer, None)
if isinstance(make_renderer, ImportError):
raise make_renderer
elif make_renderer is None:
possible = []
legend = ''
for k, v in sorted(factories.iteritems()):
for k, v in sorted(factories.items()):
if isinstance(v, ImportError):
k = '*' + k
legend = " (starred are missing third-party libraries)"
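The PEP 263 handling in `_decode` above is mostly elided by the diff view. A small illustration of the intended behaviour (the input bytes are hypothetical, and the exact munged header line is deliberately not asserted since the middle of the function is not shown here):

# Hypothetical simplate source with an encoding declaration, run through _decode.
raw = b"# coding: latin-1\ntitle = 'caf\xe9'\n"
text = _decode(raw)
assert not isinstance(text, bytes)  # always returns text
assert u'caf\xe9' in text           # the latin-1 byte was decoded to U+00E9
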
6 changes: 5 additions & 1 deletion aspen/testing.py
@@ -66,7 +66,11 @@ def simple(self, contents='Greetings, program!', filepath='index.html.spt', urip
"""A helper to create a file and hit it through our machinery.
"""
if filepath is not None:
self.fs.www.mk((filepath, contents))
if isinstance(contents, tuple):
contents, encoding = contents
else:
encoding = 'utf8'
self.fs.www.mk((filepath, contents, True, encoding))
if request_processor_configuration is not None:
self.hydrate_request_processor(**request_processor_configuration)
