diff --git a/hgdemandimport/py3token.py b/hgdemandimport/py3token.py
--- a/hgdemandimport/py3token.py
+++ b/hgdemandimport/py3token.py
@@ -56,9 +56,12 @@
 # Changes from official Python source code:
 #
 # * _main() and related functionality removed.
+# * absolute_import added.
 
 """Token constants (from "token.h")."""
 
+from __future__ import absolute_import
+
 __all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']
 
 # This file is automatically generated; please don't muck it up!
diff --git a/hgdemandimport/py3tokenize.py b/hgdemandimport/py3tokenize.py
--- a/hgdemandimport/py3tokenize.py
+++ b/hgdemandimport/py3tokenize.py
@@ -59,6 +59,10 @@
 # * Removed generate_tokens().
 # * Removed open().
 # * Removed module docstring.
+# * Adjusted for relative imports.
+# * absolute_import added.
+
+from __future__ import absolute_import
 
 __author__ = 'Ka-Ping Yee <ping@lfw.org>'
 __credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
@@ -69,15 +73,15 @@
 from itertools import chain
 import itertools as _itertools
 import re
-from token import *
+from .py3token import *
 
 cookie_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII)
 blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII)
 
-import token
-__all__ = token.__all__ + ["tokenize", "detect_encoding",
-                           "untokenize", "TokenInfo"]
-del token
+from . import py3token
+__all__ = py3token.__all__ + ["tokenize", "detect_encoding",
+                              "untokenize", "TokenInfo"]
+del py3token
 
 EXACT_TOKEN_TYPES = {
     '(': LPAR,
diff --git a/tests/test-check-module-imports.t b/tests/test-check-module-imports.t
--- a/tests/test-check-module-imports.t
+++ b/tests/test-check-module-imports.t
@@ -25,6 +25,7 @@
   > -X contrib/win32/hgwebdir_wsgi.py \
   > -X doc/gendoc.py \
   > -X doc/hgmanpage.py \
+  > -X hgdemandimport/py3tokenize.py \
   > -X i18n/posplit \
   > -X mercurial/thirdparty \
   > -X tests/hypothesishelpers.py \