
gvsig-scripting/org.gvsig.scripting/trunk/org.gvsig.scripting/org.gvsig.scripting.app/org.gvsig.scripting.app.mainplugin/src/main/resources-plugin/scripting/lib/astroid/brain/brain_six.py @ 745


# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# astroid is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid.  If not, see <http://www.gnu.org/licenses/>.

"""Astroid hooks for six.moves."""

import sys
from textwrap import dedent

from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
from astroid.exceptions import AstroidBuildingException, InferenceError
from astroid import nodes


SIX_ADD_METACLASS = 'six.add_metaclass'


def _indent(text, prefix, predicate=None):
    """Adds 'prefix' to the beginning of selected lines in 'text'.

    If 'predicate' is provided, 'prefix' will only be added to the lines
    where 'predicate(line)' is True. If 'predicate' is not provided,
    it will default to adding 'prefix' to all non-empty lines that do not
    consist solely of whitespace characters.
    """
    if predicate is None:
        predicate = lambda line: line.strip()

    def prefixed_lines():
        for line in text.splitlines(True):
            yield prefix + line if predicate(line) else line
    return ''.join(prefixed_lines())
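
# Illustrative note (this call does not appear anywhere in this module):
# _indent("import os\n\nimport sys\n", "    ") returns
# "    import os\n\n    import sys\n" -- the blank line stays unprefixed
# because the default predicate, line.strip(), is falsy for it.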


if sys.version_info[0] == 2:
    _IMPORTS_2 = """
    import BaseHTTPServer
    import CGIHTTPServer
    import SimpleHTTPServer

    from StringIO import StringIO
    from cStringIO import StringIO as cStringIO
    from UserDict import UserDict
    from UserList import UserList
    from UserString import UserString

    import __builtin__ as builtins
    import thread as _thread
    import dummy_thread as _dummy_thread
    import ConfigParser as configparser
    import copy_reg as copyreg
    from itertools import (imap as map,
                           ifilter as filter,
                           ifilterfalse as filterfalse,
                           izip_longest as zip_longest,
                           izip as zip)
    import htmlentitydefs as html_entities
    import HTMLParser as html_parser
    import httplib as http_client
    import cookielib as http_cookiejar
    import Cookie as http_cookies
    import Queue as queue
    import repr as reprlib
    from pipes import quote as shlex_quote
    import SocketServer as socketserver
    import SimpleXMLRPCServer as xmlrpc_server
    import xmlrpclib as xmlrpc_client
    import _winreg as winreg
    import robotparser as urllib_robotparser
    import Tkinter as tkinter
    import tkFileDialog as tkinter_tkfiledialog

    input = raw_input
    intern = intern
    range = xrange
    xrange = xrange
    reduce = reduce
    reload_module = reload

    class UrllibParse(object):
        import urlparse as _urlparse
        import urllib as _urllib
        ParseResult = _urlparse.ParseResult
        SplitResult = _urlparse.SplitResult
        parse_qs = _urlparse.parse_qs
        parse_qsl = _urlparse.parse_qsl
        urldefrag = _urlparse.urldefrag
        urljoin = _urlparse.urljoin
        urlparse = _urlparse.urlparse
        urlsplit = _urlparse.urlsplit
        urlunparse = _urlparse.urlunparse
        urlunsplit = _urlparse.urlunsplit
        quote = _urllib.quote
        quote_plus = _urllib.quote_plus
        unquote = _urllib.unquote
        unquote_plus = _urllib.unquote_plus
        urlencode = _urllib.urlencode
        splitquery = _urllib.splitquery
        splittag = _urllib.splittag
        splituser = _urllib.splituser
        uses_fragment = _urlparse.uses_fragment
        uses_netloc = _urlparse.uses_netloc
        uses_params = _urlparse.uses_params
        uses_query = _urlparse.uses_query
        uses_relative = _urlparse.uses_relative

    class UrllibError(object):
        import urllib2 as _urllib2
        import urllib as _urllib
        URLError = _urllib2.URLError
        HTTPError = _urllib2.HTTPError
        ContentTooShortError = _urllib.ContentTooShortError

    class DummyModule(object):
        pass

    class UrllibRequest(object):
        import urlparse as _urlparse
        import urllib2 as _urllib2
        import urllib as _urllib
        urlopen = _urllib2.urlopen
        install_opener = _urllib2.install_opener
        build_opener = _urllib2.build_opener
        pathname2url = _urllib.pathname2url
        url2pathname = _urllib.url2pathname
        getproxies = _urllib.getproxies
        Request = _urllib2.Request
        OpenerDirector = _urllib2.OpenerDirector
        HTTPDefaultErrorHandler = _urllib2.HTTPDefaultErrorHandler
        HTTPRedirectHandler = _urllib2.HTTPRedirectHandler
        HTTPCookieProcessor = _urllib2.HTTPCookieProcessor
        ProxyHandler = _urllib2.ProxyHandler
        BaseHandler = _urllib2.BaseHandler
        HTTPPasswordMgr = _urllib2.HTTPPasswordMgr
        HTTPPasswordMgrWithDefaultRealm = _urllib2.HTTPPasswordMgrWithDefaultRealm
        AbstractBasicAuthHandler = _urllib2.AbstractBasicAuthHandler
        HTTPBasicAuthHandler = _urllib2.HTTPBasicAuthHandler
        ProxyBasicAuthHandler = _urllib2.ProxyBasicAuthHandler
        AbstractDigestAuthHandler = _urllib2.AbstractDigestAuthHandler
        HTTPDigestAuthHandler = _urllib2.HTTPDigestAuthHandler
        ProxyDigestAuthHandler = _urllib2.ProxyDigestAuthHandler
        HTTPHandler = _urllib2.HTTPHandler
        HTTPSHandler = _urllib2.HTTPSHandler
        FileHandler = _urllib2.FileHandler
        FTPHandler = _urllib2.FTPHandler
        CacheFTPHandler = _urllib2.CacheFTPHandler
        UnknownHandler = _urllib2.UnknownHandler
        HTTPErrorProcessor = _urllib2.HTTPErrorProcessor
        urlretrieve = _urllib.urlretrieve
        urlcleanup = _urllib.urlcleanup
        proxy_bypass = _urllib.proxy_bypass

    urllib_parse = UrllibParse()
    urllib_error = UrllibError()
    urllib = DummyModule()
    urllib.request = UrllibRequest()
    urllib.parse = UrllibParse()
    urllib.error = UrllibError()
    """
else:
    _IMPORTS_3 = """
    import _io
    cStringIO = _io.StringIO
    filter = filter
    from itertools import filterfalse
    input = input
    from sys import intern
    map = map
    range = range
    from imp import reload as reload_module
    from functools import reduce
    from shlex import quote as shlex_quote
    from io import StringIO
    from collections import UserDict, UserList, UserString
    xrange = range
    zip = zip
    from itertools import zip_longest
    import builtins
    import configparser
    import copyreg
    import _dummy_thread
    import http.cookiejar as http_cookiejar
    import http.cookies as http_cookies
    import html.entities as html_entities
    import html.parser as html_parser
    import http.client as http_client
    import http.server
    BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
    import pickle as cPickle
    import queue
    import reprlib
    import socketserver
    import _thread
    import winreg
    import xmlrpc.server as xmlrpc_server
    import xmlrpc.client as xmlrpc_client
    import urllib.robotparser as urllib_robotparser
    import email.mime.multipart as email_mime_multipart
    import email.mime.nonmultipart as email_mime_nonmultipart
    import email.mime.text as email_mime_text
    import email.mime.base as email_mime_base
    import urllib.parse as urllib_parse
    import urllib.error as urllib_error
    import tkinter
    import tkinter.dialog as tkinter_dialog
    import tkinter.filedialog as tkinter_filedialog
    import tkinter.scrolledtext as tkinter_scrolledtext
    import tkinter.simpledialog as tkinter_simpledialog
    import tkinter.tix as tkinter_tix
    import tkinter.ttk as tkinter_ttk
    import tkinter.constants as tkinter_constants
    import tkinter.dnd as tkinter_dnd
    import tkinter.colorchooser as tkinter_colorchooser
    import tkinter.commondialog as tkinter_commondialog
    import tkinter.filedialog as tkinter_tkfiledialog
    import tkinter.font as tkinter_font
    import tkinter.messagebox as tkinter_messagebox
    import urllib.request
    import urllib.robotparser as urllib_robotparser
    import urllib.parse as urllib_parse
    import urllib.error as urllib_error
    """
if sys.version_info[0] == 2:
    _IMPORTS = dedent(_IMPORTS_2)
else:
    _IMPORTS = dedent(_IMPORTS_3)
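
# _IMPORTS is now a dedented block of plain import/assignment statements that
# mirrors the six.moves mapping for the running Python major version. The
# helpers below hand it to AstroidBuilder so that attributes of six.moves can
# be inferred from real modules instead of six's dynamically generated
# attributes, which static inference cannot follow.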


def six_moves_transform():
    code = dedent('''
    class Moves(object):
    {}
    moves = Moves()
    ''').format(_indent(_IMPORTS, "    "))
    module = AstroidBuilder(MANAGER).string_build(code)
    module.name = 'six.moves'
    return module
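
# For reference, the code string built above expands to (Python 3 branch,
# abridged):
#
#     class Moves(object):
#         import _io
#         cStringIO = _io.StringIO
#         ...
#     moves = Moves()
#
# so the synthetic module exposes a 'moves' object whose attributes astroid
# can infer, standing in for six's dynamically built six.moves module.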


def _six_fail_hook(modname):
    if modname != 'six.moves':
        raise AstroidBuildingException
    module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
    module.name = 'six.moves'
    return module
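
# _six_fail_hook runs when a regular import of 'six.moves' fails inside astroid
# (six builds that submodule dynamically, so there is no source file to parse).
# For that name it returns a module built directly from _IMPORTS; for any other
# name it raises AstroidBuildingException so other hooks may handle it.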

def transform_six_add_metaclass(node):
    """Check if the given class node is decorated with *six.add_metaclass*

    If so, inject its argument as the metaclass of the underlying class.
    """
    if not node.decorators:
        return

    for decorator in node.decorators.nodes:
        if not isinstance(decorator, nodes.Call):
            continue

        try:
            func = next(decorator.func.infer())
        except InferenceError:
            continue
        if func.qname() == SIX_ADD_METACLASS and decorator.args:
            metaclass = decorator.args[0]
            node._metaclass = metaclass
            return node
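
# Hypothetical example of the pattern this transform recognises (not part of
# this file):
#
#     @six.add_metaclass(abc.ABCMeta)
#     class Base(object):
#         pass
#
# After the transform, astroid treats abc.ABCMeta as Base's metaclass, matching
# what six.add_metaclass does at runtime.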
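
# Hook registration: the six module (and the copy vendored inside requests)
# gains the synthetic 'moves' attribute from six_moves_transform, failed
# imports of 'six.moves' fall back to _six_fail_hook, and every ClassDef node
# is passed through transform_six_add_metaclass to pick up six.add_metaclass
# decorators.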
register_module_extender(MANAGER, 'six', six_moves_transform)
register_module_extender(MANAGER, 'requests.packages.urllib3.packages.six',
                         six_moves_transform)
MANAGER.register_failed_import_hook(_six_fail_hook)
MANAGER.register_transform(nodes.ClassDef, transform_six_add_metaclass)