commit a053bfb742e428017620cf24befd62b1b4e2baaf
parent 09ac20dff153d09766fcc87eab99c247ea4a965a
Author: Anders Damsgaard Christensen <adc@geo.au.dk>
Date: Wed, 3 Apr 2013 12:04:30 +0200
added python-mode
Diffstat:
193 files changed, 47767 insertions(+), 0 deletions(-)
diff --git a/.vim/bundle/python-mode/.gitignore b/.vim/bundle/python-mode/.gitignore
@@ -0,0 +1,8 @@
+*.py[cod]
+.vimrc
+*.sw?
+*~
+.DS_Store
+.ropeproject
+tags
+todo.txt
diff --git a/.vim/bundle/python-mode/Changelog.rst b/.vim/bundle/python-mode/Changelog.rst
@@ -0,0 +1,238 @@
+Changelog
+=========
+
+## 2013-04-01 0.6.15
+--------------------
+* Bugfix release
+
+## 2013-03-16 0.6.14
+--------------------
+* Update `PEP8` to version 1.4.5;
+* Update `Pylint` to version 0.27.0;
+* Update `pyflakes` to version 0.6.1;
+* Update `autopep8` to version 0.8.7;
+* Fix breakpoint definition;
+* Update python syntax;
+* Fixed run-time error when output non-ascii in multibyte locale;
+* Move initialization into ftplugin as it is python specific;
+* Pyrex (Cython) files support;
+* Support `raw_input` in run python code;
+
+## 2012-09-07 0.6.10
+--------------------
+* Dont raise an exception when Logger has no message handler (c) nixon
+* Improve performance of white space removal (c) Dave Smith
+* Improve ropemode support (c) s0undt3ch
+* Add `g:pymode_updatetime` option
+* Update autopep8 to version 0.8.1
+
+## 2012-09-07 0.6.9
+-------------------
+* Update autopep8
+* Improve pymode#troubleshooting#Test()
+
+## 2012-09-06 0.6.8
+-------------------
+* Add PEP8 indentation ":help 'pymode_indent'"
+
+## 2012-08-15 0.6.7
+-------------------
+* Fix documentation. Thanks (c) bgrant;
+* Fix pymode "async queue" support.
+
+## 2012-08-02 0.6.6
+-------------------
+* Updated Pep8 to version 1.3.3
+* Updated Pylint to version 0.25.2
+* Fixed virtualenv support for windows users
+* Added pymode modeline ':help PythonModeModeline'
+* Added diagnostic tool ':call pymode#troubleshooting#Test()'
+* Added `PyLintAuto` command ':help PyLintAuto'
+* Code checking is async operation now
+* More, more fast the pymode folding
+* Repaired execution of python code
+
+## 2012-05-24 0.6.4
+-------------------
+* Add 'pymode_paths' option
+* Rope updated to version 0.9.4
+
+## 2012-04-18 0.6.3
+-------------------
+* Fix pydocs integration
+
+## 2012-04-10 0.6.2
+-------------------
+* Fix pymode_run for "unnamed" clipboard
+* Add 'pymode_lint_mccabe_complexity' option
+* Update Pep8 to version 1.0.1
+* Warning! Change 'pymode_rope_goto_def_newwin' option
+ for open "goto definition" in new window, set it to 'new' or 'vnew'
+ for horizontally or vertically split
+ If you use default behaviour (in the same buffer), not changes needed.
+
+## 2012-03-13 0.6.0
+-------------------
+* Add 'pymode_lint_hold' option
+* Improve pymode loading speed
+* Add pep8, mccabe lint checkers
+* Now g:pymode_lint_checker can have many values
+ Ex. "pep8,pyflakes,mccabe"
+* Add 'pymode_lint_ignore' and 'pymode_lint_select' options
+* Fix rope keys
+* Fix python motion in visual mode
+* Add folding 'pymode_folding'
+* Warning: 'pymode_lint_checker' now set to 'pyflakes,pep8,mccabe' by default
+
+## 2012-02-12 0.5.8
+-------------------
+* Fix pylint for Windows users
+* Python documentation search running from Vim (delete g:pydoc option)
+* Python code execution running from Vim (delete g:python option)
+
+## 2012-02-11 0.5.7
+-------------------
+* Fix 'g:pymode_lint_message' mode error
+* Fix breakpoints
+* Fix python paths and virtualenv detection
+
+## 2012-02-06 0.5.6
+-------------------
+* Fix 'g:pymode_syntax' option
+* Show error message in bottom part of screen
+ see 'g:pymode_lint_message'
+* Fix pylint for windows users
+* Fix breakpoint command (Use pdb when idpb not installed)
+
+## 2012-01-17 0.5.5
+-------------------
+* Add a sign for info messages from pylint.
+ (c) Fredrik Henrysson
+* Change motion keys: vic - viC, dam - daM and etc
+* Add 'g:pymode_lint_onfly' option
+
+## 2012-01-09 0.5.3
+-------------------
+* Prevent the configuration from breaking python-mode
+ (c) Dirk Wallenstein
+
+## 2012-01-08 0.5.2
+-------------------
+* Fix ropeomnicompletion
+* Add preview documentation
+
+## 2012-01-06 0.5.1
+-------------------
+* Happy new year!
+* Objects and motion fixes
+
+## 2011-11-30 0.5.0
+-------------------
+* Add python objects and motions (beta)
+ :h pymode_motion
+
+## 2011-11-27 0.4.8
+-------------------
+* Add `PyLintWindowToggle` command
+* Fix some bugs
+
+## 2011-11-23 0.4.6
+-------------------
+* Enable all syntax highlighting
+ For old settings set in your vimrc:
+ let g:pymode_syntax_builtin_objs = 0
+ let g:pymode_syntax_builtin_funcs = 0
+
+* Change namespace of syntax variables
+ See README
+
+## 2011-11-18 0.4.5
+-------------------
+* Add 'g:pymode_syntax' option
+* Highlight 'self' keyword
+
+## 2011-11-16 0.4.4
+-------------------
+* Minor fixes
+
+## 2011-11-11 0.4.3
+-------------------
+* Fix pyflakes
+
+## 2011-11-09 0.4.2
+-------------------
+* Add FAQ
+* Some refactoring and fixes
+
+## 2011-11-08 0.4.0
+-------------------
+* Add alternative code checker "pyflakes"
+ See :h 'pymode_lint_checker'
+* Update install docs
+
+## 2011-10-30 0.3.3
+-------------------
+* Fix RopeShowDoc
+
+## 2011-10-28 0.3.2
+-------------------
+* Add 'g:pymode_options_*' stuff, for ability
+ to disable default pymode options for python buffers
+
+## 2011-10-27 0.3.1
+-------------------
+* Add 'g:pymode_rope_always_show_complete_menu' option
+* Some pylint fixes
+
+## 2011-10-25 0.3.0
+-------------------
+* Add g:pymode_lint_minheight and g:pymode_lint_maxheight
+ options
+* Fix PyLintToggle
+* Fix Rope and PyLint libs loading
+
+## 2011-10-21 0.2.12
+--------------------
+* Auto open cwindow with results
+ on rope find operations
+
+## 2011-10-20 0.2.11
+--------------------
+* Add 'pymode_lint_jump' option
+
+## 2011-10-19 0.2.10
+--------------------
+* Minor fixes (virtualenv loading, buffer commands)
+
+## 2011-10-18 0.2.6
+-------------------
+* Add <C-space> shortcut for macvim users.
+* Add VIRTUALENV support
+
+## 2011-10-17 0.2.4
+-------------------
+* Add current work path to sys.path
+* Add 'g:pymode' option (disable/enable pylint and rope)
+* Fix pylint copyright
+* Hotfix rope autocomplete
+
+## 2011-10-15 0.2.1
+-------------------
+* Change rope variables (ropevim_<name> -> pymode_rope_<name>)
+* Add "pymode_rope_auto_project" option (default: 1)
+* Update and fix docs
+* 'pymode_rope_extended_complete' set by default
+* Auto generate rope project and cache
+* "<C-c>r a" for RopeAutoImport
+
+## 2011-10-12 0.1.4
+-------------------
+* Add default pylint configuration
+
+## 2011-10-12 0.1.3
+-------------------
+* Fix pylint and update docs
+
+## 2011-10-11 0.1.2
+-------------------
+* First public release
diff --git a/.vim/bundle/python-mode/Makefile b/.vim/bundle/python-mode/Makefile
@@ -0,0 +1,3 @@
+.PHONY: clean
+clean:
+ find . -name "*.pyc" -delete
diff --git a/.vim/bundle/python-mode/README.rst b/.vim/bundle/python-mode/README.rst
@@ -0,0 +1,528 @@
+Python-mode, Python in VIM
+##########################
+
+Python-mode is a vim plugin that allows you to use the pylint_, rope_, pydoc_, pyflakes_, pep8_, mccabe_ libraries in vim to provide
+features like checking python code for bugs, refactoring and some other useful things.
+
+This plugin allows you to create python code in vim very easily.
+There is no need to install pylint_, rope_ or any other used python library on your system.
+
+- Python objects and motion (]], 3[[, ]]M, vaC, viM, daC, ciM, ...)
+- Folding of python code
+- Virtualenv support
+- Highlight syntax errors
+- Highlight and auto fix unused imports
+- Many linters (pylint_, pyflakes_, ...) that can be run simultaneously
+- Strong code completion
+- Code refactoring
+- Python documentation
+- Run python code
+- Go to definition
+- Powerful customization
+- And more, more ...
+
+See (very old) screencast here: http://t.co/3b0bzeXA (sorry for quality, this is my first screencast)
+Another old presentation here: http://www.youtube.com/watch?v=YhqsjUUHj6g
+
+
+.. contents::
+
+
+Changelog
+=========
+
+## 2013-03-15 0.6.12
+--------------------
+* Update `PEP8` to version 1.4.5;
+* Update `Pylint` to version 0.27.0;
+* Update `autopep8` to version 0.8.7;
+* Fix breakpoint definition;
+* Update python syntax;
+* Fixed run-time error when output non-ascii in multibyte locale;
+* Move initialization into ftplugin as it is python specific;
+* Pyrex (Cython) files support;
+* Support `raw_input` in run python code;
+
+
+Requirements
+============
+
+- VIM >= 7.0 with python support
+ (also ``--with-features=big`` if you want use g:pymode_lint_signs)
+
+
+
+How to install
+==============
+
+
+Using pathogen_ (recommended)
+-----------------------------
+::
+
+ % cd ~/.vim
+ % mkdir -p bundle && cd bundle
+ % git clone git://github.com/klen/python-mode.git
+
+- Enable pathogen_ in your ``~/.vimrc``: ::
+
+ " Pathogen load
+ filetype off
+
+ call pathogen#infect()
+ call pathogen#helptags()
+
+ filetype plugin indent on
+ syntax on
+
+
+Manually
+--------
+::
+
+ % git clone git://github.com/klen/python-mode.git
+ % cd python-mode
+ % cp -R * ~/.vim
+
+Then rebuild **helptags** in vim::
+
+ :helptags ~/.vim/doc/
+
+
+.. note:: **filetype-plugin** (``:help filetype-plugin-on``) and **filetype-indent** (``:help filetype-indent-on``)
+ must be enabled to use python-mode.
+
+
+Troubleshooting
+===============
+
+If python-mode doesn't work, open any python file and type the command: ::
+
+ :call pymode#troubleshooting#Test()
+
+And fix warnings or copy output and send it to me (ex. with github issue).
+
+
+Settings
+========
+
+.. note:: Also you can see vim help. ``:help PythonModeOptions``
+
+To change this settings, edit your ``~/.vimrc``: ::
+
+ " Disable pylint checking every save
+ let g:pymode_lint_write = 0
+
+ " Set key 'R' for run python code
+ let g:pymode_run_key = 'R'
+
+
+Loading the Plugin
+------------------
+
+Default values: ::
+
+ " Load the whole plugin
+ let g:pymode = 1
+
+
+Show documentation
+------------------
+
+Default values: ::
+
+ " Load show documentation plugin
+ let g:pymode_doc = 1
+
+ " Key for show python documentation
+ let g:pymode_doc_key = 'K'
+
+
+Run python code
+---------------
+
+Default values: ::
+
+ " Load run code plugin
+ let g:pymode_run = 1
+
+ " Key for run python code
+ let g:pymode_run_key = '<leader>r'
+
+
+Code checking
+-------------
+
+Default values: ::
+
+ " Load pylint code plugin
+ let g:pymode_lint = 1
+
+ " Switch pylint, pyflakes, pep8, mccabe code-checkers
+ " Can have multiple values "pep8,pyflakes,mccabe"
+ let g:pymode_lint_checker = "pyflakes,pep8,mccabe"
+
+ " Skip errors and warnings
+ " E.g. "E501,W002", "E2,W" (Skip all Warnings and Errors startswith E2) and etc
+ let g:pymode_lint_ignore = "E501"
+
+ " Select errors and warnings
+ " E.g. "E4,W"
+ let g:pymode_lint_select = ""
+
+ " Run linter on the fly
+ let g:pymode_lint_onfly = 0
+
+ " Pylint configuration file
+ " If file not found use 'pylintrc' from python-mode plugin directory
+ let g:pymode_lint_config = "$HOME/.pylintrc"
+
+ " Check code every save
+ let g:pymode_lint_write = 1
+
+ " Auto open cwindow if errors are found
+ let g:pymode_lint_cwindow = 1
+
+ " Show error message if cursor placed at the error line
+ let g:pymode_lint_message = 1
+
+ " Auto jump on first error
+ let g:pymode_lint_jump = 0
+
+ " Hold cursor in current window
+ " when quickfix is open
+ let g:pymode_lint_hold = 0
+
+ " Place error signs
+ let g:pymode_lint_signs = 1
+
+ " Maximum allowed mccabe complexity
+ let g:pymode_lint_mccabe_complexity = 8
+
+ " Minimal height of pylint error window
+ let g:pymode_lint_minheight = 3
+
+ " Maximal height of pylint error window
+ let g:pymode_lint_maxheight = 6
+
+
+.. note::
+ Pylint options (ex. disable messages) may be defined in ``$HOME/pylint.rc``
+ See pylint documentation: http://pylint-messages.wikidot.com/all-codes
+
+
+Rope refactoring library
+------------------------
+
+Default values: ::
+
+ " Load rope plugin
+ let g:pymode_rope = 1
+
+ " Auto create and open ropeproject
+ let g:pymode_rope_auto_project = 1
+
+ " Enable autoimport
+ let g:pymode_rope_enable_autoimport = 1
+
+ " Auto generate global cache
+ let g:pymode_rope_autoimport_generate = 1
+
+ let g:pymode_rope_autoimport_underlineds = 0
+
+ let g:pymode_rope_codeassist_maxfixes = 10
+
+ let g:pymode_rope_sorted_completions = 1
+
+ let g:pymode_rope_extended_complete = 1
+
+ let g:pymode_rope_autoimport_modules = ["os","shutil","datetime"]
+
+ let g:pymode_rope_confirm_saving = 1
+
+ let g:pymode_rope_global_prefix = "<C-x>p"
+
+ let g:pymode_rope_local_prefix = "<C-c>r"
+
+ let g:pymode_rope_vim_completion = 1
+
+ let g:pymode_rope_guess_project = 1
+
+ let g:pymode_rope_goto_def_newwin = ""
+
+ let g:pymode_rope_always_show_complete_menu = 0
+
+
+Automatically folding of python code
+--------------------------------------
+
+Default values: ::
+
+ " Enable python folding
+ let g:pymode_folding = 1
+
+
+Vim python motions and operators
+--------------------------------
+
+Default values: ::
+
+ " Enable python objects and motion
+ let g:pymode_motion = 1
+
+
+Virtualenv support
+------------------
+
+Default values: ::
+
+ " Auto fix vim python paths if virtualenv enabled
+ let g:pymode_virtualenv = 1
+
+
+Other stuff
+-----------
+
+Default values: ::
+
+ " Additional python paths
+ let g:pymode_paths = []
+
+ " Load breakpoints plugin
+ let g:pymode_breakpoint = 1
+
+ " Key for set/unset breakpoint
+ let g:pymode_breakpoint_key = '<leader>b'
+
+ " Autoremove unused whitespaces
+ let g:pymode_utils_whitespaces = 1
+
+ " Enable pymode indentation
+ let g:pymode_indent = 1
+
+ " Set default pymode python options
+ let g:pymode_options = 1
+
+
+Syntax highlight
+----------------
+
+Default values: ::
+
+ " Enable pymode's custom syntax highlighting
+ let g:pymode_syntax = 1
+
+ " Enable all python highlightings
+ let g:pymode_syntax_all = 1
+
+ " Highlight "print" as function
+ let g:pymode_syntax_print_as_function = 0
+
+ " Highlight indentation errors
+ let g:pymode_syntax_indent_errors = g:pymode_syntax_all
+
+ " Highlight trailing spaces
+ let g:pymode_syntax_space_errors = g:pymode_syntax_all
+
+ " Highlight string formatting
+ let g:pymode_syntax_string_formatting = g:pymode_syntax_all
+
+ " Highlight str.format syntax
+ let g:pymode_syntax_string_format = g:pymode_syntax_all
+
+ " Highlight string.Template syntax
+ let g:pymode_syntax_string_templates = g:pymode_syntax_all
+
+ " Highlight doc-tests
+ let g:pymode_syntax_doctests = g:pymode_syntax_all
+
+ " Highlight builtin objects (__doc__, self, etc)
+ let g:pymode_syntax_builtin_objs = g:pymode_syntax_all
+
+ " Highlight builtin functions
+ let g:pymode_syntax_builtin_funcs = g:pymode_syntax_all
+
+ " Highlight exceptions
+ let g:pymode_syntax_highlight_exceptions = g:pymode_syntax_all
+
+ " For fast machines
+ let g:pymode_syntax_slow_sync = 0
+
+
+Default keys
+============
+
+.. note:: Also you can see vim help ``:help PythonModeKeys``
+
+============== =============
+Keys Command
+============== =============
+**K** Show python docs (g:pymode_doc enabled)
+-------------- -------------
+**<C-Space>** Rope autocomplete (g:pymode_rope enabled)
+-------------- -------------
+**<C-c>g** Rope goto definition (g:pymode_rope enabled)
+-------------- -------------
+**<C-c>d** Rope show documentation (g:pymode_rope enabled)
+-------------- -------------
+**<C-c>f** Rope find occurrences (g:pymode_rope enabled)
+-------------- -------------
+**<Leader>r** Run python (g:pymode_run enabled)
+-------------- -------------
+**<Leader>b** Set, unset breakpoint (g:pymode_breakpoint enabled)
+-------------- -------------
+[[ Jump on previous class or function (normal, visual, operator modes)
+-------------- -------------
+]] Jump on next class or function (normal, visual, operator modes)
+-------------- -------------
+[M Jump on previous class or method (normal, visual, operator modes)
+-------------- -------------
+]M Jump on next class or method (normal, visual, operator modes)
+-------------- -------------
+aC C Select a class. Ex: vaC, daC, dC, yaC, yC, caC, cC (normal, operator modes)
+-------------- -------------
+iC Select inner class. Ex: viC, diC, yiC, ciC (normal, operator modes)
+-------------- -------------
+aM M Select a function or method. Ex: vaM, daM, dM, yaM, yM, caM, cM (normal, operator modes)
+-------------- -------------
+iM Select inner function or method. Ex: viM, diM, yiM, ciM (normal, operator modes)
+============== =============
+
+.. note:: See also ``:help ropevim.txt``
+
+
+Commands
+========
+
+.. note:: Also you can see vim help ``:help PythonModeCommands``
+
+==================== =============
+Command Description
+==================== =============
+:Pydoc <args> Show python documentation
+-------------------- -------------
+PyLintToggle Enable, disable pylint
+-------------------- -------------
+PyLintCheckerToggle Toggle code checker (pylint, pyflakes)
+-------------------- -------------
+PyLint Check current buffer
+-------------------- -------------
+PyLintAuto Automatic fix PEP8 errors
+-------------------- -------------
+Pyrun Run current buffer in python
+==================== =============
+
+.. note:: See also ``:help ropevim.txt``
+
+
+F.A.Q.
+======
+
+Rope completion is very slow
+----------------------------
+
+To work rope_ creates a service directory: ``.ropeproject``.
+If ``g:pymode_rope_guess_project`` is set (the default) and ``.ropeproject`` is not found in the current dir, rope scans for ``.ropeproject`` in every dir on the parent path.
+If rope finds ``.ropeproject`` in a parent dir, rope sets that project for all child dirs, and the scan may be slow with many dirs and files.
+
+Solutions:
+
+- Disable ``g:pymode_rope_guess_project`` to make rope always create ``.ropeproject`` in current dir.
+- Delete ``.ropeproject`` from the parent dir to make rope create ``.ropeproject`` in current dir.
+- Press ``<C-x>po`` or ``:RopeOpenProject`` to make force rope create ``.ropeproject`` in current dir.
+
+
+
+Pylint check is very slow
+-------------------------
+
+In some projects pylint_ may check slowly, because it also scans imported modules if possible.
+Try using pyflakes_, see ``:h 'pymode_lint_checker'``.
+
+.. note:: You may ``set exrc`` and ``set secure`` in your ``vimrc`` to automatically apply custom settings from a ``.vimrc`` in your projects' directories.
+ Example: On Flask projects I automatically set ``g:pymode_lint_checker = "pyflakes"``, on django ``g:pymode_lint_checker = "pylint"``
+
+
+OSX cannot import urandom
+-------------------------
+
+See: https://groups.google.com/forum/?fromgroups=#!topic/vim_dev/2NXKF6kDONo
+
+The sequence of commands that fixed this: ::
+
+ brew unlink python
+ brew unlink macvim
+ brew remove macvim
+ brew install -v --force macvim
+ brew link macvim
+ brew link python
+
+
+Bugtracker
+===========
+
+If you have any suggestions, bug reports or
+annoyances please report them to the issue tracker
+at https://github.com/klen/python-mode/issues
+
+
+Contributing
+============
+
+Development of pylint-mode happens at github: https://github.com/klen/python-mode
+
+
+Copyright
+=========
+
+Copyright (C) 2012 Kirill Klenov (klen_)
+
+ **Rope**
+ Copyright (C) 2006-2010 Ali Gholami Rudi
+
+ Copyright (C) 2009-2010 Anton Gritsay
+
+ **Pylint**
+ Copyright (C) 2003-2011 LOGILAB S.A. (Paris, FRANCE).
+ http://www.logilab.fr/
+
+ **Pyflakes**:
+ Copyright (c) 2005 Divmod, Inc.
+ http://www.divmod.com/
+
+ **PEP8**
+ Copyright (C) 2006 Johann C. Rocholl <johann@rocholl.net>
+ http://github.com/jcrocholl/pep8
+
+ **autopep8**:
+ Copyright (c) 2012 hhatto <hhatto.jp@gmail.com>
+ https://github.com/hhatto/autopep8
+
+ **Python syntax for vim**
+ Copyright (c) 2010 Dmitry Vasiliev
+ http://www.hlabs.spb.ru/vim/python.vim
+
+ **PEP8 VIM indentation**
+ Copyright (c) 2012 Hynek Schlawack <hs@ox.cx>
+ http://github.com/hynek/vim-python-pep8-indent
+
+
+License
+=======
+
+Licensed under a `GNU lesser general public license`_.
+
+If you like this plugin, you can send me postcard :)
+My address is here: "Russia, 143401, Krasnogorsk, Shkolnaya 1-19" to "Kirill Klenov".
+**Thanks for support!**
+
+
+.. _GNU lesser general public license: http://www.gnu.org/copyleft/lesser.html
+.. _klen: http://klen.github.com/
+.. _pylint: http://www.logilab.org/857
+.. _pyflakes: http://pypi.python.org/pypi/pyflakes
+.. _rope: http://rope.sourceforge.net/
+.. _pydoc: http://docs.python.org/library/pydoc.html
+.. _pathogen: https://github.com/tpope/vim-pathogen
+.. _pep8: http://pypi.python.org/pypi/pep8
+.. _mccabe: http://en.wikipedia.org/wiki/Cyclomatic_complexity
diff --git a/.vim/bundle/python-mode/after/ftplugin/pyrex.vim b/.vim/bundle/python-mode/after/ftplugin/pyrex.vim
@@ -0,0 +1 @@
+runtime after/ftplugin/python.vim
diff --git a/.vim/bundle/python-mode/after/ftplugin/python.vim b/.vim/bundle/python-mode/after/ftplugin/python.vim
@@ -0,0 +1,43 @@
+" Fix omnifunc
+if g:pymode && g:pymode_rope && g:pymode_rope_vim_completion
+ setlocal omnifunc=RopeOmni
+endif
+
+" Motion {{{
+
+ if !pymode#Default('g:pymode_motion', 1) || g:pymode_motion
+
+ nnoremap <buffer> ]] :<C-U>call pymode#motion#move('^\(class\\|def\)\s', '')<CR>
+ nnoremap <buffer> [[ :<C-U>call pymode#motion#move('^\(class\\|def\)\s', 'b')<CR>
+ nnoremap <buffer> ]C :<C-U>call pymode#motion#move('^\(class\\|def\)\s', '')<CR>
+ nnoremap <buffer> [C :<C-U>call pymode#motion#move('^\(class\\|def\)\s', 'b')<CR>
+ nnoremap <buffer> ]M :<C-U>call pymode#motion#move('^\s*def\s', '')<CR>
+ nnoremap <buffer> [M :<C-U>call pymode#motion#move('^\s*def\s', 'b')<CR>
+
+ onoremap <buffer> ]] :<C-U>call pymode#motion#move('^\(class\\|def\)\s', '')<CR>
+ onoremap <buffer> [[ :<C-U>call pymode#motion#move('^\(class\\|def\)\s', 'b')<CR>
+ onoremap <buffer> ]C :<C-U>call pymode#motion#move('^\(class\\|def\)\s', '')<CR>
+ onoremap <buffer> [C :<C-U>call pymode#motion#move('^\(class\\|def\)\s', 'b')<CR>
+ onoremap <buffer> ]M :<C-U>call pymode#motion#move('^\s*def\s', '')<CR>
+ onoremap <buffer> [M :<C-U>call pymode#motion#move('^\s*def\s', 'b')<CR>
+
+ vnoremap <buffer> ]] :call pymode#motion#vmove('^\(class\\|def\)\s', '')<CR>
+ vnoremap <buffer> [[ :call pymode#motion#vmove('^\(class\\|def\)\s', 'b')<CR>
+ vnoremap <buffer> ]M :call pymode#motion#vmove('^\s*def\s', '')<CR>
+ vnoremap <buffer> [M :call pymode#motion#vmove('^\s*def\s', 'b')<CR>
+
+ onoremap <buffer> C :<C-U>call pymode#motion#select('^\s*class\s', 0)<CR>
+ onoremap <buffer> aC :<C-U>call pymode#motion#select('^\s*class\s', 0)<CR>
+ onoremap <buffer> iC :<C-U>call pymode#motion#select('^\s*class\s', 1)<CR>
+ vnoremap <buffer> aC :<C-U>call pymode#motion#select('^\s*class\s', 0)<CR>
+ vnoremap <buffer> iC :<C-U>call pymode#motion#select('^\s*class\s', 1)<CR>
+
+ onoremap <buffer> M :<C-U>call pymode#motion#select('^\s*def\s', 0)<CR>
+ onoremap <buffer> aM :<C-U>call pymode#motion#select('^\s*def\s', 0)<CR>
+ onoremap <buffer> iM :<C-U>call pymode#motion#select('^\s*def\s', 1)<CR>
+ vnoremap <buffer> aM :<C-U>call pymode#motion#select('^\s*def\s', 0)<CR>
+ vnoremap <buffer> iM :<C-U>call pymode#motion#select('^\s*def\s', 1)<CR>
+
+ endif
+
+" }}}
diff --git a/.vim/bundle/python-mode/after/indent/pyrex.vim b/.vim/bundle/python-mode/after/indent/pyrex.vim
@@ -0,0 +1 @@
+runtime after/indent/python.vim
diff --git a/.vim/bundle/python-mode/after/indent/python.vim b/.vim/bundle/python-mode/after/indent/python.vim
@@ -0,0 +1,14 @@
+if pymode#Default('b:pymode_indent', 1) || !g:pymode_indent
+ finish
+endif
+
+
+setlocal nolisp
+setlocal tabstop=4
+setlocal softtabstop=4
+setlocal shiftwidth=4
+setlocal shiftround
+setlocal expandtab
+setlocal autoindent
+setlocal indentexpr=pymode#indent#Indent(v:lnum)
+setlocal indentkeys=!^F,o,O,<:>,0),0],0},=elif,=except
diff --git a/.vim/bundle/python-mode/autoload/pymode.vim b/.vim/bundle/python-mode/autoload/pymode.vim
@@ -0,0 +1,196 @@
+" Python-mode base functions
+
+
+fun! pymode#Default(name, default) "{{{
+ " DESC: Set default value if it not exists
+ "
+ if !exists(a:name)
+ let {a:name} = a:default
+ return 0
+ endif
+ return 1
+endfunction "}}}
+
+
+fun! pymode#Option(name) "{{{
+
+ let name = 'b:pymode_' . a:name
+ if exists(name)
+ return eval(name)
+ endif
+
+ let name = 'g:pymode_' . a:name
+ return eval(name)
+
+endfunction "}}}
+
+
+fun! pymode#QuickfixOpen(onlyRecognized, holdCursor, maxHeight, minHeight, jumpError) "{{{
+ " DESC: Open quickfix window
+ "
+ let numErrors = len(filter(getqflist(), 'v:val.valid'))
+ let numOthers = len(getqflist()) - numErrors
+ if numErrors > 0 || (!a:onlyRecognized && numOthers > 0)
+ botright copen
+ exe max([min([line("$"), a:maxHeight]), a:minHeight]) . "wincmd _"
+ if a:jumpError
+ cc
+ elseif !a:holdCursor
+ wincmd p
+ endif
+ else
+ cclose
+ endif
+ redraw
+ if numOthers > 0
+ echo printf('Quickfix: %d(+%d)', numErrors, numOthers)
+ else
+ echo printf('Quickfix: %d', numErrors)
+ endif
+endfunction "}}}
+
+
+fun! pymode#PlaceSigns(bnum) "{{{
+ " DESC: Place error signs
+ "
+ if has('signs')
+ call pymode#Default('b:pymode_signs', [])
+
+ for item in b:pymode_signs
+ execute printf('sign unplace %d buffer=%d', item.lnum, item.bufnr)
+ endfor
+ let b:pymode_signs = []
+
+ if !pymode#Default("g:pymode_lint_signs_always_visible", 0) || g:pymode_lint_signs_always_visible
+ call RopeShowSignsRulerIfNeeded()
+ endif
+
+ for item in filter(getqflist(), 'v:val.bufnr != ""')
+ call add(b:pymode_signs, item)
+ execute printf('sign place %d line=%d name=%s buffer=%d', item.lnum, item.lnum, "Pymode".item.type, item.bufnr)
+ endfor
+
+ endif
+endfunction "}}}
+
+
+fun! pymode#CheckProgram(name, append) "{{{
+ " DESC: Check program is executable or redifined by user.
+ "
+ let name = 'g:' . a:name
+ if pymode#Default(name, a:name)
+ return 1
+ endif
+ if !executable(eval(l:name))
+ echoerr "Can't find '".eval(name)."'. Please set ".name .", or extend $PATH, ".a:append
+ return 0
+ endif
+ return 1
+endfunction "}}}
+
+
+fun! pymode#TempBuffer() "{{{
+ " DESC: Open temp buffer.
+ "
+ pclose | botright 8new
+ setlocal buftype=nofile bufhidden=delete noswapfile nowrap previewwindow
+ redraw
+endfunction "}}}
+
+
+fun! pymode#ShowStr(str) "{{{
+ " DESC: Open temp buffer with `str`.
+ "
+ let g:pymode_curbuf = bufnr("%")
+ call pymode#TempBuffer()
+ put! =a:str
+ wincmd p
+ redraw
+endfunction "}}}
+
+
+fun! pymode#ShowCommand(cmd) "{{{
+ " DESC: Run command and open temp buffer with result
+ "
+ call pymode#TempBuffer()
+ try
+ silent exec 'r!' . a:cmd
+ catch /.*/
+ close
+ echoerr 'Command fail: '.a:cmd
+ endtry
+ redraw
+ normal gg
+ wincmd p
+endfunction "}}}
+
+
+fun! pymode#WideMessage(msg) "{{{
+ " DESC: Show wide message
+
+ let x=&ruler | let y=&showcmd
+ set noruler noshowcmd
+ redraw
+ echohl Debug | echo strpart(a:msg, 0, &columns-1) | echohl none
+ let &ruler=x | let &showcmd=y
+endfunction "}}}
+
+
+fun! pymode#BlockStart(lnum, ...) "{{{
+ let pattern = a:0 ? a:1 : '^\s*\(@\|class\s.*:\|def\s\)'
+ let lnum = a:lnum + 1
+ let indent = 100
+ while lnum
+ let lnum = prevnonblank(lnum - 1)
+ let test = indent(lnum)
+ let line = getline(lnum)
+ if line =~ '^\s*#' " Skip comments
+ continue
+ elseif !test " Zero-level regular line
+ return lnum
+ elseif test >= indent " Skip deeper or equal lines
+ continue
+ " Indent is strictly less at this point: check for def/class
+ elseif line =~ pattern && line !~ '^\s*@'
+ return lnum
+ endif
+ let indent = indent(lnum)
+ endwhile
+ return 0
+endfunction "}}}
+
+
+fun! pymode#BlockEnd(lnum, ...) "{{{
+ let indent = a:0 ? a:1 : indent(a:lnum)
+ let lnum = a:lnum
+ while lnum
+ let lnum = nextnonblank(lnum + 1)
+ if getline(lnum) =~ '^\s*#' | continue
+ elseif lnum && indent(lnum) <= indent
+ return lnum - 1
+ endif
+ endwhile
+ return line('$')
+endfunction "}}}
+
+
+fun! pymode#Modeline() "{{{
+ let modeline = getline(prevnonblank('$'))
+ if modeline =~ '^#\s\+pymode:'
+ for ex in split(modeline, ':')[1:]
+ let [name, value] = split(ex, '=')
+ let {'b:pymode_'.name} = value
+ endfor
+ endif
+ au BufRead <buffer> call pymode#Modeline()
+endfunction "}}}
+
+
+fun! pymode#TrimWhiteSpace() "{{{
+ let cursor_pos = getpos('.')
+ silent! %s/\s\+$//
+ call setpos('.', cursor_pos)
+endfunction "}}}
+
+
+" vim: fdm=marker:fdl=0
diff --git a/.vim/bundle/python-mode/autoload/pymode/breakpoint.vim b/.vim/bundle/python-mode/autoload/pymode/breakpoint.vim
@@ -0,0 +1,14 @@
+fun! pymode#breakpoint#Set(lnum) "{{{
+ let line = getline(a:lnum)
+ if strridx(line, g:pymode_breakpoint_cmd) != -1
+ normal dd
+ else
+ let plnum = prevnonblank(a:lnum)
+ call append(line('.')-1, repeat(' ', indent(plnum)).g:pymode_breakpoint_cmd)
+ normal k
+ endif
+
+ " Save file
+ if &modifiable && &modified | noautocmd write | endif
+
+endfunction "}}}
diff --git a/.vim/bundle/python-mode/autoload/pymode/doc.vim b/.vim/bundle/python-mode/autoload/pymode/doc.vim
@@ -0,0 +1,19 @@
+" Python-mode search by documentation
+
+
+fun! pymode#doc#Show(word) "{{{
+ if a:word == ''
+ echoerr "No name/symbol under cursor!"
+ else
+ py import StringIO
+ py sys.stdout, _ = StringIO.StringIO(), sys.stdout
+ py help(vim.eval('a:word'))
+ py sys.stdout, out = _, sys.stdout.getvalue()
+ call pymode#TempBuffer()
+ py vim.current.buffer.append(str(out).split('\n'), 0)
+ wincmd p
+ endif
+endfunction "}}}
+
+
+" vim: fdm=marker:fdl=0
diff --git a/.vim/bundle/python-mode/autoload/pymode/folding.vim b/.vim/bundle/python-mode/autoload/pymode/folding.vim
@@ -0,0 +1,60 @@
+" Python-mode folding functions
+
+
+let s:blank_regex = '^\s*$'
+let s:def_regex = '^\s*\(class\|def\) \w\+'
+
+
+fun! pymode#folding#text() " {{{
+ let fs = v:foldstart
+ while getline(fs) =~ '^\s*@'
+ let fs = nextnonblank(fs + 1)
+ endwhile
+ let line = getline(fs)
+
+ let nucolwidth = &fdc + &number * &numberwidth
+ let windowwidth = winwidth(0) - nucolwidth - 3
+ let foldedlinecount = v:foldend - v:foldstart
+
+ " expand tabs into spaces
+ let onetab = strpart(' ', 0, &tabstop)
+ let line = substitute(line, '\t', onetab, 'g')
+
+ let line = strpart(line, 0, windowwidth - 2 -len(foldedlinecount))
+ let fillcharcount = windowwidth - len(line) - len(foldedlinecount)
+ return line . '…' . repeat(" ",fillcharcount) . foldedlinecount . '…' . ' '
+endfunction "}}}
+
+
+fun! pymode#folding#expr(lnum) "{{{
+
+ let line = getline(a:lnum)
+ let indent = indent(a:lnum)
+
+ if line =~ s:def_regex
+ return ">".(indent / &shiftwidth + 1)
+ endif
+
+ if line =~ '^\s*@'
+ return -1
+ endif
+
+ if line =~ s:blank_regex
+ let prev_line = getline(a:lnum - 1)
+ if prev_line =~ s:blank_regex
+ return -1
+ else
+ return foldlevel(prevnonblank(a:lnum))
+ endif
+ endif
+
+ if indent == 0
+ return 0
+ endif
+
+ return '='
+
+endfunction "}}}
+
+
+" vim: fdm=marker:fdl=0
diff --git a/.vim/bundle/python-mode/autoload/pymode/indent.vim b/.vim/bundle/python-mode/autoload/pymode/indent.vim
@@ -0,0 +1,184 @@
+" PEP8 compatible Python indent file
+" Language: Python
+" Maintainer: Hynek Schlawack <hs@ox.cx>
+" Prev Maintainer: Eric Mc Sween <em@tomcom.de> (address invalid)
+" Original Author: David Bustos <bustos@caltech.edu> (address invalid)
+" Last Change: 2012-06-21
+" License:         Public Domain
+
+
+function! pymode#indent#Indent(lnum)
+
+ " First line has indent 0
+ if a:lnum == 1
+ return 0
+ endif
+
+ " If we can find an open parenthesis/bracket/brace, line up with it.
+ call cursor(a:lnum, 1)
+ let parlnum = s:SearchParensPair()
+ if parlnum > 0
+ let parcol = col('.')
+ let closing_paren = match(getline(a:lnum), '^\s*[])}]') != -1
+ if match(getline(parlnum), '[([{]\s*$', parcol - 1) != -1
+ if closing_paren
+ return indent(parlnum)
+ else
+ return indent(parlnum) + &shiftwidth
+ endif
+ else
+ return parcol
+ endif
+ endif
+
+ " Examine this line
+ let thisline = getline(a:lnum)
+ let thisindent = indent(a:lnum)
+
+ " If the line starts with 'elif' or 'else', line up with 'if' or 'elif'
+ if thisline =~ '^\s*\(elif\|else\)\>'
+ let bslnum = s:BlockStarter(a:lnum, '^\s*\(if\|elif\)\>')
+ if bslnum > 0
+ return indent(bslnum)
+ else
+ return -1
+ endif
+ endif
+
+ " If the line starts with 'except' or 'finally', line up with 'try'
+ " or 'except'
+ if thisline =~ '^\s*\(except\|finally\)\>'
+ let bslnum = s:BlockStarter(a:lnum, '^\s*\(try\|except\)\>')
+ if bslnum > 0
+ return indent(bslnum)
+ else
+ return -1
+ endif
+ endif
+
+ " Examine previous line
+ let plnum = a:lnum - 1
+ let pline = getline(plnum)
+ let sslnum = s:StatementStart(plnum)
+
+ " If the previous line is blank, keep the same indentation
+ if pline =~ '^\s*$'
+ return -1
+ endif
+
+ " If this line is explicitly joined, try to find an indentation that looks
+ " good.
+ if pline =~ '\\$'
+ let compound_statement = '^\s*\(if\|while\|for\s.*\sin\|except\)\s*'
+ let maybe_indent = matchend(getline(sslnum), compound_statement)
+ if maybe_indent != -1
+ return maybe_indent
+ else
+ return indent(sslnum) + &sw * 2
+ endif
+ endif
+
+ " If the previous line ended with a colon and is not a comment, indent
+ " relative to statement start.
+ if pline =~ ':\s*$' && pline !~ '^\s*#'
+ return indent(sslnum) + &sw
+ endif
+
+ " If the previous line was a stop-execution statement or a pass
+ if getline(sslnum) =~ '^\s*\(break\|continue\|raise\|return\|pass\)\>'
+ " See if the user has already dedented
+ if indent(a:lnum) > indent(sslnum) - &sw
+ " If not, recommend one dedent
+ return indent(sslnum) - &sw
+ endif
+ " Otherwise, trust the user
+ return -1
+ endif
+
+ " In all other cases, line up with the start of the previous statement.
+ return indent(sslnum)
+endfunction
+
+
+" Find backwards the closest open parenthesis/bracket/brace.
+function! s:SearchParensPair()
+ let line = line('.')
+ let col = col('.')
+
+ " Skip strings and comments and don't look too far
+ let skip = "line('.') < " . (line - 50) . " ? dummy :" .
+ \ 'synIDattr(synID(line("."), col("."), 0), "name") =~? ' .
+ \ '"string\\|comment"'
+
+ " Search for parentheses
+ call cursor(line, col)
+ let parlnum = searchpair('(', '', ')', 'bW', skip)
+ let parcol = col('.')
+
+ " Search for brackets
+ call cursor(line, col)
+ let par2lnum = searchpair('\[', '', '\]', 'bW', skip)
+ let par2col = col('.')
+
+ " Search for braces
+ call cursor(line, col)
+ let par3lnum = searchpair('{', '', '}', 'bW', skip)
+ let par3col = col('.')
+
+ " Get the closest match
+ if par2lnum > parlnum || (par2lnum == parlnum && par2col > parcol)
+ let parlnum = par2lnum
+ let parcol = par2col
+ endif
+ if par3lnum > parlnum || (par3lnum == parlnum && par3col > parcol)
+ let parlnum = par3lnum
+ let parcol = par3col
+ endif
+
+ " Put the cursor on the match
+ if parlnum > 0
+ call cursor(parlnum, parcol)
+ endif
+ return parlnum
+endfunction
+
+
+" Find the start of a multi-line statement
+function! s:StatementStart(lnum)
+ let lnum = a:lnum
+ while 1
+ if getline(lnum - 1) =~ '\\$'
+ let lnum = lnum - 1
+ else
+ call cursor(lnum, 1)
+ let maybe_lnum = s:SearchParensPair()
+ if maybe_lnum < 1
+ return lnum
+ else
+ let lnum = maybe_lnum
+ endif
+ endif
+ endwhile
+endfunction
+
+
+" Find the block starter that matches the current line
+function! s:BlockStarter(lnum, block_start_re)
+ let lnum = a:lnum
+ let maxindent = 10000 " whatever
+ while lnum > 1
+ let lnum = prevnonblank(lnum - 1)
+ if indent(lnum) < maxindent
+ if getline(lnum) =~ a:block_start_re
+ return lnum
+ else
+ let maxindent = indent(lnum)
+ " It's not worth going further if we reached the top level
+ if maxindent == 0
+ return -1
+ endif
+ endif
+ endif
+ endwhile
+ return -1
+endfunction
diff --git a/.vim/bundle/python-mode/autoload/pymode/lint.vim b/.vim/bundle/python-mode/autoload/pymode/lint.vim
@@ -0,0 +1,108 @@
+fun! pymode#lint#Check() "{{{
+ " DESC: Run checkers on current file.
+ "
+ if !g:pymode_lint | return | endif
+
+ if &modifiable && &modified
+ try
+ noautocmd write
+ catch /E212/
+ echohl Error | echo "File modified and I can't save it. Cancel code checking." | echohl None
+ return 0
+ endtry
+ endif
+
+ let g:pymode_lint_buffer = bufnr('%')
+
+ py from pymode import lint
+ py lint.check_file()
+
+endfunction " }}}
+
+
+fun! pymode#lint#Parse(bnum)
+ " DESC: Parse result of code checking.
+ "
+ call setqflist(g:qf_list, 'r')
+
+ if g:pymode_lint_signs
+ call pymode#PlaceSigns(a:bnum)
+ endif
+
+ if g:pymode_lint_cwindow
+ call pymode#QuickfixOpen(0, g:pymode_lint_hold, g:pymode_lint_maxheight, g:pymode_lint_minheight, g:pymode_lint_jump)
+ endif
+
+ if !len(g:qf_list)
+ call pymode#WideMessage('Code checking is completed. No errors found.')
+ endif
+
+endfunction
+
+
+fun! pymode#lint#Toggle() "{{{
+ let g:pymode_lint = g:pymode_lint ? 0 : 1
+ call pymode#lint#toggle_win(g:pymode_lint, "Pymode lint")
+endfunction "}}}
+
+
+fun! pymode#lint#ToggleWindow() "{{{
+ let g:pymode_lint_cwindow = g:pymode_lint_cwindow ? 0 : 1
+ call pymode#lint#toggle_win(g:pymode_lint_cwindow, "Pymode lint cwindow")
+endfunction "}}}
+
+
+fun! pymode#lint#ToggleChecker() "{{{
+ let g:pymode_lint_checker = g:pymode_lint_checker == "pylint" ? "pyflakes" : "pylint"
+ echomsg "Pymode lint checker: " . g:pymode_lint_checker
+endfunction "}}}
+
+
+fun! pymode#lint#toggle_win(toggle, msg) "{{{
+ if a:toggle
+ echomsg a:msg." enabled"
+ botright cwindow
+ if &buftype == "quickfix"
+ wincmd p
+ endif
+ else
+ echomsg a:msg." disabled"
+ cclose
+ endif
+endfunction "}}}
+
+
+fun! pymode#lint#show_errormessage() "{{{
+ if g:pymode_lint_buffer != bufnr('%')
+ return 0
+ endif
+ let errors = getqflist()
+ if !len(errors)
+ return
+ endif
+ let [_, line, _, _] = getpos(".")
+ for e in errors
+ if e['lnum'] == line
+ call pymode#WideMessage(e['text'])
+ else
+ echo
+ endif
+ endfor
+endfunction " }}}
+
+
+fun! pymode#lint#Auto() "{{{
+ if &modifiable && &modified
+ try
+ noautocmd write
+ catch /E212/
+ echohl Error | echo "File modified and I can't save it. Cancel operation." | echohl None
+ return 0
+ endtry
+ endif
+ py from pymode import auto
+ py auto.fix_current_file()
+ cclose
+ edit
+ call pymode#WideMessage("AutoPep8 done.")
+endfunction "}}}
diff --git a/.vim/bundle/python-mode/autoload/pymode/motion.vim b/.vim/bundle/python-mode/autoload/pymode/motion.vim
@@ -0,0 +1,61 @@
+" Python-mode motion functions
+
+
+fun! pymode#motion#move(pattern, flags, ...) "{{{
+ let cnt = v:count1 - 1
+ let [line, column] = searchpos(a:pattern, a:flags . 'sW')
+ let indent = indent(line)
+ while cnt && line
+ let [line, column] = searchpos(a:pattern, a:flags . 'W')
+ if indent(line) == indent
+ let cnt = cnt - 1
+ endif
+ endwhile
+ return [line, column]
+endfunction "}}}
+
+
+fun! pymode#motion#vmove(pattern, flags) range "{{{
+ call cursor(a:lastline, 0)
+ let end = pymode#motion#move(a:pattern, a:flags)
+ call cursor(a:firstline, 0)
+ normal! v
+ call cursor(end)
+endfunction "}}}
+
+
+fun! pymode#motion#pos_le(pos1, pos2) "{{{
+ return ((a:pos1[0] < a:pos2[0]) || (a:pos1[0] == a:pos2[0] && a:pos1[1] <= a:pos2[1]))
+endfunction "}}}
+
+
+fun! pymode#motion#select(pattern, inner) "{{{
+ let cnt = v:count1 - 1
+ let orig = getpos('.')[1:2]
+ let snum = pymode#BlockStart(orig[0], a:pattern)
+ if getline(snum) !~ a:pattern
+ return 0
+ endif
+ let enum = pymode#BlockEnd(snum, indent(snum))
+ while cnt
+ let lnum = search(a:pattern, 'nW')
+ if lnum
+ let enum = pymode#BlockEnd(lnum, indent(lnum))
+ call cursor(enum, 1)
+ endif
+ let cnt = cnt - 1
+ endwhile
+ if pymode#motion#pos_le([snum, 0], orig) && pymode#motion#pos_le(orig, [enum, 1])
+ if a:inner
+ let snum = snum + 1
+ let enum = prevnonblank(enum)
+ endif
+
+ call cursor(snum, 1)
+ normal! v
+ call cursor(enum, len(getline(enum)))
+ endif
+endfunction "}}}
+
+
+" vim: fdm=marker:fdl=0
diff --git a/.vim/bundle/python-mode/autoload/pymode/queue.vim b/.vim/bundle/python-mode/autoload/pymode/queue.vim
@@ -0,0 +1,15 @@
+fun! pymode#queue#Poll() "{{{
+
+ " Check current tasks
+ py queue.check_task()
+
+ " Update interval
+ if mode() == 'i'
+ let p = getpos('.')
+ silent exe 'call feedkeys("\<Up>\<Down>", "n")'
+ call setpos('.', p)
+ else
+ call feedkeys("f\e", "n")
+ endif
+
+endfunction "}}}
diff --git a/.vim/bundle/python-mode/autoload/pymode/run.vim b/.vim/bundle/python-mode/autoload/pymode/run.vim
@@ -0,0 +1,45 @@
+" DESC: Save file if it modified and run python code
+fun! pymode#run#Run(line1, line2) "{{{
+ if &modifiable && &modified
+ try
+ noautocmd write
+ catch /E212/
+ echohl Error | echo "File modified and I can't save it. Cancel code checking." | echohl None
+ return 0
+ endtry
+ endif
+ py import StringIO
+ py sys.stdout, stdout_ = StringIO.StringIO(), sys.stdout
+ py sys.stderr, stderr_ = StringIO.StringIO(), sys.stderr
+ py enc = vim.eval('&enc')
+ call setqflist([])
+ call pymode#WideMessage("Code running.")
+ try
+ py context = globals()
+ py context['raw_input'] = context['input'] = lambda s: vim.eval('input("{0}")'.format(s))
+ py execfile(vim.eval('expand("%:p")'), context)
+ py out, err = sys.stdout.getvalue().strip(), sys.stderr.getvalue()
+ py sys.stdout, sys.stderr = stdout_, stderr_
+
+ cexpr ""
+ py for x in err.strip().split('\n'): vim.command('caddexpr "' + x.replace('"', r'\"') + '"')
+ let l:oldefm = &efm
+ set efm=%C\ %.%#,%A\ \ File\ \"%f\"\\,\ line\ %l%.%#,%Z%[%^\ ]%\\@=%m
+ call pymode#QuickfixOpen(0, g:pymode_lint_hold, g:pymode_lint_maxheight, g:pymode_lint_minheight, 0)
+ let &efm = l:oldefm
+
+python << EOF
+if out:
+ vim.command("call pymode#TempBuffer()")
+ vim.current.buffer.append([x.encode(enc) for x in out.split('\n')], 0)
+ vim.command("wincmd p")
+else:
+ vim.command('call pymode#WideMessage("No output.")')
+EOF
+
+ catch /.*/
+
+ echohl Error | echo "Run-time error." | echohl none
+
+ endtry
+endfunction "}}}
diff --git a/.vim/bundle/python-mode/autoload/pymode/troubleshooting.vim b/.vim/bundle/python-mode/autoload/pymode/troubleshooting.vim
@@ -0,0 +1,87 @@
+" DESC: Get debug information about pymode problem
+fun! pymode#troubleshooting#Test() "{{{
+ new
+ setlocal buftype=nofile bufhidden=delete noswapfile nowrap
+
+ let os = "Unknown"
+ if has('win16') || has('win32') || has('win64')
+ let os = "Windows"
+ else
+ let os = substitute(system('uname'), "\n", "", "")
+ endif
+
+ call append('0', ['Pymode diagnostic',
+ \ '===================',
+ \ 'VIM:' . v:version . ', OS: ' . os .', multi_byte:' . has('multi_byte') . ', pymode: ' . g:pymode_version,
+ \ ''])
+
+ let python = 1
+ let output = []
+
+ if !exists('#filetypeplugin')
+ call append('$', ['WARNING: ', 'Python-mode required :filetype plugin indent on', ''])
+ endif
+
+ if !has('python')
+ call append('$', ['WARNING: ', 'Python-mode required vim compiled with +python.',
+ \ '"lint, rope, run, doc, virtualenv" features disabled.', ''])
+ let python = 0
+ endif
+
+ call append('$', 'Pymode variables:')
+ call append('$', '-------------------')
+ call append('$', 'let pymode = ' . string(g:pymode))
+ if g:pymode
+ call append('$', 'let pymode_path = ' . string(g:pymode_path))
+ call append('$', 'let pymode_paths = ' . string(g:pymode_paths))
+
+ call append('$', 'let pymode_doc = ' . string(g:pymode_doc))
+ if g:pymode_doc
+ call append('$', 'let pymode_doc_key = ' . string(g:pymode_doc_key))
+ endif
+
+ call append('$', 'let pymode_run = ' . string(g:pymode_run))
+ if g:pymode_run
+ call append('$', 'let pymode_run_key = ' . string(g:pymode_run_key))
+ endif
+
+ call append('$', 'let pymode_lint = ' . string(g:pymode_lint))
+ if g:pymode_lint
+ call append('$', 'let pymode_lint_checker = ' . string(g:pymode_lint_checker))
+ call append('$', 'let pymode_lint_ignore = ' . string(g:pymode_lint_ignore))
+ call append('$', 'let pymode_lint_select = ' . string(g:pymode_lint_select))
+ call append('$', 'let pymode_lint_onfly = ' . string(g:pymode_lint_onfly))
+ call append('$', 'let pymode_lint_config = ' . string(g:pymode_lint_config))
+ call append('$', 'let pymode_lint_write = ' . string(g:pymode_lint_write))
+ call append('$', 'let pymode_lint_cwindow = ' . string(g:pymode_lint_cwindow))
+ call append('$', 'let pymode_lint_message = ' . string(g:pymode_lint_message))
+ call append('$', 'let pymode_lint_signs = ' . string(g:pymode_lint_signs))
+ call append('$', 'let pymode_lint_jump = ' . string(g:pymode_lint_jump))
+ call append('$', 'let pymode_lint_hold = ' . string(g:pymode_lint_hold))
+ call append('$', 'let pymode_lint_minheight = ' . string(g:pymode_lint_minheight))
+ call append('$', 'let pymode_lint_maxheight = ' . string(g:pymode_lint_maxheight))
+ endif
+
+ call append('$', 'let pymode_rope = ' . string(g:pymode_rope))
+ call append('$', 'let pymode_folding = ' . string(g:pymode_folding))
+ call append('$', 'let pymode_breakpoint = ' . string(g:pymode_breakpoint))
+ call append('$', 'let pymode_syntax = ' . string(g:pymode_syntax))
+ call append('$', 'let pymode_virtualenv = ' . string(g:pymode_virtualenv))
+ if g:pymode_virtualenv
+ call append('$', 'let pymode_virtualenv_enabled = ' . string(g:pymode_virtualenv_enabled))
+ endif
+ call append('$', 'pymode_utils_whitespaces:' . string(g:pymode_utils_whitespaces))
+ call append('$', 'pymode_options:' . string(g:pymode_options))
+ endif
+
+ if python
+ call append('$', 'VIM python paths:')
+ call append('$', '-----------------')
+python << EOF
+vim.command('let l:output = %s' % repr(sys.path))
+EOF
+ call append('$', output)
+ call append('$', '')
+ endif
+
+endfunction "}}}
diff --git a/.vim/bundle/python-mode/autoload/pymode/virtualenv.vim b/.vim/bundle/python-mode/autoload/pymode/virtualenv.vim
@@ -0,0 +1,31 @@
+fun! pymode#virtualenv#Activate() "{{{
+
+ if !exists("$VIRTUAL_ENV")
+ return
+ endif
+
+ for env in g:pymode_virtualenv_enabled
+ if env == $VIRTUAL_ENV
+ return 0
+ endif
+ endfor
+
+ call add(g:pymode_virtualenv_enabled, $VIRTUAL_ENV)
+
+python << EOF
+import sys, vim, os
+
+ve_dir = vim.eval('$VIRTUAL_ENV')
+ve_dir in sys.path or sys.path.insert(0, ve_dir)
+activate_this = os.path.join(os.path.join(ve_dir, 'bin'), 'activate_this.py')
+
+# Fix for windows
+if not os.path.exists(activate_this):
+ activate_this = os.path.join(os.path.join(ve_dir, 'Scripts'), 'activate_this.py')
+
+execfile(activate_this, dict(__file__=activate_this))
+EOF
+
+ call pymode#WideMessage("Activate virtualenv: ".$VIRTUAL_ENV)
+
+endfunction "}}}
diff --git a/.vim/bundle/python-mode/doc/pymode.txt b/.vim/bundle/python-mode/doc/pymode.txt
@@ -0,0 +1,558 @@
+*pymode.txt* *python-mode.txt* Python-mode for vim!
+
+ ____ _ _ ____ _ _ _____ _ _ __ __ _____ ____ ____ ~
+ ( _ \( \/ )(_ _)( )_( )( _ )( \( )___( \/ )( _ )( _ \( ___) ~
+ )___/ \ / )( ) _ ( )(_)( ) ((___)) ( )(_)( )(_) ))__) ~
+ (__) (__) (__) (_) (_)(_____)(_)\_) (_/\/\_)(_____)(____/(____) ~
+
+
+ Version: 0.6.15
+
+==============================================================================
+CONTENTS *Python-mode-contents*
+
+ 1.Intro...................................|PythonMode|
+ 2.Options.................................|PythonModeOptions|
+ 2.1.Customisation details.............|PythonModeOptionsDetails|
+ 2.2.Modeline..........................|PythonModeModeline|
+ 3.Default Keys............................|PythonModeKeys|
+ 4.Commands................................|PythonModeCommands|
+ 5.FAQ.....................................|PythonModeFAQ|
+ 6.Credits.................................|PythonModeCredits|
+ 7.License.................................|PythonModeLicense|
+
+==============================================================================
+1. Intro ~
+ *PythonMode*
+
+Python-mode is a vim plugin that allows you to use the pylint, rope, and pydoc
+libraries in vim to provide features like python code bug checking,
+refactoring, and some other useful things.
+
+This plugin allows you to create python code in vim very easily. There is no need
+to install the pylint or rope libraries on your system.
+
+
+==============================================================================
+2. Options ~
+ *PythonModeOptions*
+
+ Note:
+ Pylint options (ex. disable messages) may be defined in '$HOME/pylint.rc'
+ See pylint documentation.
+
+This script provides the following options that can customise the behaviour of
+PythonMode. These options should be set in your vimrc.
+
+|'pymode'| Turn off the whole plugin
+
+|'pymode_paths'| Additional python paths for pymode
+
+|'pymode_doc'| Turns off the documentation script
+
+|'pymode_doc_key'| Key for show documentation
+
+|'pymode_run'| Turns off the run code script
+
+|'pymode_run_key'| Key for run python code
+
+|'pymode_lint'| Turns off pylint script
+
+|'pymode_lint_checker'| Switch code checkers (pylint, pyflakes, pep8, mccabe)
+
+|'pymode_lint_ignore'| Skip errors and warnings
+
+|'pymode_lint_select'| Select errors and warnings
+
+|'pymode_lint_onfly'| Run linter on the fly
+
+|'pymode_lint_config'| Filepath to pylint configuration
+
+|'pymode_lint_write'| Check code every save
+
+|'pymode_lint_cwindow'| Show cwindow
+
+|'pymode_lint_message'| Show current line errors in bottom
+
+|'pymode_lint_signs'| Place signs
+
+|'pymode_lint_jump'| Auto jump to first error
+
+|'pymode_lint_hold'| Hold cursor in current window
+
+|'pymode_lint_minheight'| Minimal height of pylint error window
+
+|'pymode_lint_mccabe_complexity'| Maximum allowed mccabe complexity
+
+|'pymode_lint_maxheight'| Maximal height of pylint error window
+
+|'pymode_rope'| Turns off rope script
+
+|'pymode_folding'| Turns on/off python folding
+
+|'pymode_breakpoint'| Turns off breakpoint script
+
+|'pymode_breakpoint_key'| Key for breakpoint
+
+|'pymode_virtualenv'| Turns off virtualenv
+
+|'pymode_utils_whitespaces'| Remove unused whitespaces
+
+|'pymode_syntax'| Turns off the custom syntax highlighting
+
+|'pymode_indent'| Enable/Disable pymode PEP8 indentation
+
+|'pymode_options'| Set default pymode options for
+                                        python coding
+
+|'pymode_motion'| Enable pymode motion stuff
+
+ Note:
+ Also see |ropevim.txt|
+
+
+------------------------------------------------------------------------------
+2.1. Customisation details ~
+ *PythonModeOptionsDetails*
+
+To enable any of the options below you should put the given line in your
+'$HOME/.vimrc'. See |vimrc-intro|.
+
+------------------------------------------------------------------------------
+2.2. Modeline ~
+ *PythonModeModeline*
+
+The VIM modeline `:help modeline` feature allows you to change pymode
+options for the current file. Pymode modeline should always be the
+last line in the vimrc file and look like:
+
+>
+ # pymode:lint_ignore=E0202:doc=0:lint_write=0
+<
+
+Examples:
+
+Disable folding on current file:
+>
+ # pymode:folding=0
+<
+
+Set linters and mccabe complexity.
+>
+ # pymode:lint_checker=pep8,mccabe:lint_mccabe_complexity=10
+<
+
+These changes will work only in the current buffer.
+
+------------------------------------------------------------------------------
+ *'pymode'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then the whole plugin is disabled
+
+------------------------------------------------------------------------------
+ *'pymode_paths'*
+Values: List of strings
+Default: [].
+
+This option sets additional python import paths
+
+------------------------------------------------------------------------------
+ *'pymode_doc'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then the doc script is disabled.
+
+------------------------------------------------------------------------------
+ *'pymode_doc_key'*
+Default: 'K'.
+
+Set the key used to show python documentation.
+
+------------------------------------------------------------------------------
+ *'pymode_run'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then the run script is disabled.
+
+------------------------------------------------------------------------------
+ *'pymode_run_key'*
+Default: '<leader>r'.
+
+Set the key for running python code.
+
+------------------------------------------------------------------------------
+ *'pymode_lint'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then the pylint script is disabled.
+
+------------------------------------------------------------------------------
+ *'pymode_lint_checker'*
+Values: "pylint", "pyflakes", "pep8", "mccabe"
+ You can set many checkers. E.g. "pyflakes,pep8,mccabe" ~
+
+Default: "pyflakes,pep8,mccabe".
+
+This option sets code checkers.
+
+------------------------------------------------------------------------------
+ *'pymode_lint_ignore'*
+Values: IDs of errors, separated by commas or empty strings
+ E.g. "E501,W002", "E2,W" (skip all warnings and all errors starting with E2), etc. ~
+
+Default: "".
+
+Skip errors and warnings.
+See also: |'pymode_lint_select'|, |'pymode_lint_config'|
+
+------------------------------------------------------------------------------
+ *'pymode_lint_select'*
+Values: IDs of errors, separated by commas or empty strings
+ E.g. "W002,C" Force W002 and all C-ids ~
+
+Default: "".
+
+Select errors and warnings.
+See also: |'pymode_lint_ignore'|, |'pymode_lint_config'|
+
+------------------------------------------------------------------------------
+ *'pymode_lint_onfly'*
+Values: 0 or 1
+Default: 0
+
+This option enables "on the fly" code checking
+
+------------------------------------------------------------------------------
+ *'pymode_lint_config'*
+Values: 'Path to pylint configuration file'
+Default: "$HOME/.pylintrc"
+
+This option sets the path to the pylint configuration file. If the
+file is not found, use the 'pylintrc' file from python-mode sources.
+
+See also: |'pymode_lint_ignore'|, |'pymode_lint_select'|
+
+------------------------------------------------------------------------------
+ *'pymode_lint_write'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0, then pylint auto-checking on every save is
+disabled.
+
+------------------------------------------------------------------------------
+ *'pymode_lint_cwindow'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then pylint will not show cwindow.
+
+------------------------------------------------------------------------------
+ *'pymode_lint_message'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then pylint will not show errors at bottom.
+
+------------------------------------------------------------------------------
+ *'pymode_lint_signs'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then pylint will not place error signs.
+
+------------------------------------------------------------------------------
+ *'pymode_lint_jump'*
+Values: 0 or 1.
+Default: 0.
+
+If this option is set to 0 then pylint will not jump to the first error.
+
+------------------------------------------------------------------------------
+ *'pymode_lint_hold'*
+Values: 0 or 1.
+Default: 0.
+
+If this option is set to 0 then pylint will switch focus to the quickfix window when
+it opens. Doesn't work when |'pymode_lint_jump'| is enabled.
+
+------------------------------------------------------------------------------
+ *'pymode_lint_minheight'*
+Values: int
+Default: 3.
+
+Set minimal height for the pylint cwindow.
+
+------------------------------------------------------------------------------
+ *'pymode_lint_mccabe_complexity'*
+Values: int
+Default: 8.
+
+Set the maximum allowed complexity for the mccabe linter.
+
+------------------------------------------------------------------------------
+ *'pymode_lint_maxheight'*
+Values: int
+Default: 6.
+
+Set maximal height for the pylint cwindow.
+
+------------------------------------------------------------------------------
+ *'pymode_rope'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then the rope script is disabled.
+
+------------------------------------------------------------------------------
+ *'pymode_breakpoint'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then the breakpoint script is disabled.
+
+------------------------------------------------------------------------------
+ *'pymode_breakpoint_key'*
+Default: '<leader>b'.
+
+Key for setting/unsetting breakpoints.
+
+------------------------------------------------------------------------------
+ *'pymode_virtualenv'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then virtualenv support is disabled.
+
+------------------------------------------------------------------------------
+ *'pymode_utils_whitespaces'*
+Values: 0 or 1.
+Default: 1.
+
+Auto-remove unused whitespaces.
+
+------------------------------------------------------------------------------
+ *'pymode_syntax'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 0 then the custom syntax highlighting will
+not be used.
+
+------------------------------------------------------------------------------
+ *'pymode_indent'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 1, pymode will enable python indentation support.
+
+------------------------------------------------------------------------------
+ *'pymode_folding'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 1, pymode will enable python-folding.
+
+------------------------------------------------------------------------------
+ *'pymode_options'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 1, pymode will enable the following options for python
+buffers: >
+
+ setlocal complete+=t
+ setlocal formatoptions-=t
+ setlocal number
+ setlocal nowrap
+ setlocal textwidth=80
+<
+------------------------------------------------------------------------------
+ *'pymode_motion'*
+Values: 0 or 1.
+Default: 1.
+
+If this option is set to 1, pymode will enable some python motions.
+Pymode-motion is beta.
+
+================ ============================
+Key Command
+================ ============================
+[[ Jump to previous class or function (normal, visual, operator modes)
+]] Jump to next class or function (normal, visual, operator modes)
+[M Jump to previous class or method (normal, visual, operator modes)
+]M Jump to next class or method (normal, visual, operator modes)
+aC Select a class. Ex: vaC, daC, yaC, caC (normal, operator modes)
+iC Select inner class. Ex: viC, diC, yiC, ciC (normal, operator modes)
+aM Select a function or method. Ex: vaM, daM, yaM, caM (normal, operator modes)
+iM Select inner function or method. Ex: viM, diM, yiM, ciM (normal, operator modes)
+================ ============================
+
+
+==============================================================================
+3. Default Keys ~
+ *PythonModeKeys*
+
+To redefine keys, see: |PythonModeOptions|
+
+================ ============================
+Key Command
+================ ============================
+K Show python docs for current word under cursor
+C-Space Rope code assist
+<leader>r Run current buffer
+<leader>b Set breakpoints
+[[ Jump to previous class or function (normal, visual, operator modes)
+]] Jump to next class or function (normal, visual, operator modes)
+[M Jump to previous class or method (normal, visual, operator modes)
+]M Jump to next class or method (normal, visual, operator modes)
+aC C Operation with a class.
+ Ex: vaC, daC, dC, yaC, yC, caC, cC (normal, operator modes)
+iC Operation with inner class.
+ Ex: viC, diC, yiC, ciC (normal, operator modes)
+aM M Operation with function or method.
+ Ex: vaM, daM, dM, yaM, yM, caM, cM (normal, operator modes)
+iM Operation with inner function or method.
+ Ex: viM, diM, yiM, ciM (normal, operator modes)
+================ ============================
+
+ Note:
+ Also see: |RopeShortcuts|
+
+
+==============================================================================
+4. Commands ~
+ *PythonModeCommands*
+
+*:Pydoc* <args> *Pydoc*
+ Show python documentation
+
+*:PyLintToggle* *PyLintToggle*
+ Enable, disable pylint
+
+*:PyLint* *PyLint*
+ Check current buffer
+
+*:PyLintAuto* *PyLintAuto*
+ Automatically fix PEP8 errors in the current buffer
+
+*:Pyrun* *Pyrun*
+ Run current buffer
+
+
+==============================================================================
+5. FAQ ~
+ *PythonModeFAQ*
+
+Python-mode doesn't work
+------------------------
+
+Open any python file and run ":call pymode#troubleshooting#Test()",
+fix the warning or send me the output.
+
+
+Rope completion is very slow
+----------------------------
+
+To work, rope_ creates a service directory: `.ropeproject`. If
+|'pymode_rope_guess_project'| is set on (as it is by default) and
+`.ropeproject` is not found in the current dir, rope will scan for
+`.ropeproject` in every dir in the parent path. If rope finds `.ropeproject`
+in parent dirs, rope sets the project for all child dirs and the scan may be
+slow for many dirs and files.
+
+Solutions:
+
+- Disable |'pymode_rope_guess_project'| to make rope always create
+ `.ropeproject` in the current dir.
+- Delete `.ropeproject` from the parent dir to make rope create `.ropeproject`
+ in the current dir.
+- Press `<C-x>po` or `:RopeOpenProject` to force rope to create `.ropeproject`
+ in the current dir.
+
+
+Pylint check is very slow
+-------------------------
+
+In some projects pylint_ may check slowly, because it also scans imported
+modules if possible. Try using pyflakes: see |'pymode_lint_checker'|.
+
+You may set |exrc| and |secure| in your |vimrc| to auto-set custom settings
+from `.vimrc` from your projects directories.
+>
+ Example: On Flask projects I automatically set
+ 'g:pymode_lint_checker = "pyflakes"'.
+ On Django 'g:pymode_lint_checker = "pylint"'
+<
+
+OSX cannot import urandom
+-------------------------
+
+See: https://groups.google.com/forum/?fromgroups=#!topic/vim_dev/2NXKF6kDONo
+
+The sequence of commands that fixed this:
+>
+ brew unlink python
+ brew unlink macvim
+ brew remove macvim
+ brew install -v --force macvim
+ brew link macvim
+ brew link python
+<
+
+==============================================================================
+6. Credits ~
+ *PythonModeCredits*
+ Kirill Klenov
+ http://klen.github.com/
+ http://github.com/klen/
+
+ Rope
+ Copyright (C) 2006-2010 Ali Gholami Rudi
+ Copyright (C) 2009-2010 Anton Gritsay
+
+ Pylint
+ Copyright (C) 2003-2011 LOGILAB S.A. (Paris, FRANCE).
+ http://www.logilab.fr/
+
+ Pyflakes:
+ Copyright (c) 2005 Divmod, Inc.
+ http://www.divmod.com/
+
+ PEP8:
+ Copyright (c) 2006 Johann C. Rocholl <johann@rocholl.net>
+ http://github.com/jcrocholl/pep8
+
+ autopep8:
+ Copyright (c) 2012 hhatto <hhatto.jp@gmail.com>
+ https://github.com/hhatto/autopep8
+
+ Python syntax for vim:
+ Copyright (c) 2010 Dmitry Vasiliev
+ http://www.hlabs.spb.ru/vim/python.vim
+
+ PEP8 VIM indentation
+ Copyright (c) 2012 Hynek Schlawack <hs@ox.cx>
+ http://github.com/hynek/vim-python-pep8-indent
+
+
+==============================================================================
+7. License ~
+ *PythonModeLicense*
+
+Python-mode is released under the GNU lesser general public license.
+See: http://www.gnu.org/copyleft/lesser.html
+
+If you like this plugin, you can send me a postcard :)
+My address is: "Russia, 143401, Krasnogorsk, Shkolnaya 1-19" to "Kirill Klenov".
+Thanks for your support!
+
+
+------------------------------------------------------------------------------
+
+ vim:tw=78:ts=8:ft=help:norl:
diff --git a/.vim/bundle/python-mode/doc/ropevim.txt b/.vim/bundle/python-mode/doc/ropevim.txt
@@ -0,0 +1,340 @@
+*ropevim.txt* *Ropevim* Rope in VIM
+
+==============================================================================
+CONTENTS *Rope contents*
+
+ 1.Refactoring Dialog......................|RopeRefactoringDialog|
+ 2.Finding Files...........................|RopeFindingFiles|
+ 3.Code Assist.............................|RopeCodeAssist|
+ 4.Enabling Autoimport.....................|RopeEnablingAutoimport|
+ 5.Filtering Resources.....................|RopeFilteringResources|
+ 6.Finding Occurrences.....................|RopeFindOccurrences|
+ 7.Dialog Batchset Command.................|RopeDialogBatchsetCommand|
+ 8.Variables...............................|RopeVariables|
+ 9.Keybindings.............................|RopeKeys|
+
+
+==============================================================================
+1. Refactoring Dialog ~
+ *RopeRefactoringDialog*
+
+Ropevim refactorings use a special kind of dialog. Depending on the
+refactoring, you'll be asked about the essential information a
+refactoring needs to know (like the new name in rename refactoring).
+
+Next you'll see the base prompt of a refactoring dialog that shows
+something like "Choose what to do". By entering the name of a
+refactoring option you can set its value. After setting each option
+you'll be returned back to the base prompt. Finally, you can ask rope
+to perform, preview or cancel the refactoring.
+
+See |RopeKeys| section and try the refactorings yourself.
+
+
+==============================================================================
+2. Finding Files ~
+ *RopeFindingFiles*
+ *:RopeFindFile*
+ *:RopeFindFileOtherWindow*
+
+By using |:RopeFindFile| ("<C-x> p f" by default), you can search for
+files in your project. When you complete the minibuffer you'll see
+all files in the project; files are shown as their reversed paths.
+For instance ``projectroot/docs/todo.txt`` is shown like
+``todo.txt<docs``. This way you can find files faster in your
+project. |:RopeFindFileOtherWindow| ("<C-x> p 4 f") opens the
+file in the other window.
+
+
+==============================================================================
+3. Code Assist ~
+ *RopeCodeAssist*
+ *:RopeCodeAssist*
+ *:RopeLuckyAssist*
+ *'pymode_rope_vim_completion'*
+ *'pymode_rope_extended_complete'*
+
+|:RopeCodeAssist| command (<M-/>) will let you select from a list
+of completions. |:RopeLuckyAssist| command (<M-?>) does not ask
+anything; instead, it inserts the first proposal.
+
+You can tell ropevim to use vim's complete function in insert mode;
+Add: >
+
+ let pymode_rope_vim_completion=1
+<
+to your '~/.vimrc' file.
+
+ Note:
+ That when this variable is set, autoimport completions no longer
+ work since they need to insert an import to the top of the module,
+ too.
+
+By default autocomplete feature will use plain list of proposed completion
+items. You can enable showing extended information about completion
+proposals by setting : >
+
+ let pymode_rope_extended_complete=1
+<
+Completion menu list will show the proposed name itself, one letter which
+shows where this proposal came from (it can be "L" for locals, "G" for
+globals, "B" for builtins, or empty string if such scope definition is not
+applicable), a short object type description (such as "func", "param",
+"meth" and so forth) and a first line of proposed object's docstring (if it
+has one). For function's keyword parameters the last field shows "*" symbol
+if this param is required or "= <default value>" if it is not.
+
+
+==============================================================================
+4. Enabling Autoimport ~
+ *RopeEnablingAutoimport*
+ *:RopevimAutoImport*
+ *:RopeGenerateAutoimportCache*
+
+Rope can propose and automatically import global names in other
+modules. Rope maintains a cache of global names for each project. It
+updates the cache only when modules are changed; if you want to cache
+all your modules at once, use |:RopeGenerateAutoimportCache|. It
+will cache all of the modules inside the project plus those whose
+names are listed in |'pymode_rope_autoimport_modules'| list: >
+
+ " add the name of modules you want to autoimport
+ let g:pymode_rope_autoimport_modules = ["os", "shutil"]
+<
+Now if you are in a buffer that contains: >
+
+ rmtree
+<
+
+and you execute |:RopevimAutoImport| you'll end up with: >
+
+ from shutil import rmtree
+ rmtree
+<
+Also |:RopeCodeAssist| and |:RopeLuckyAssist| propose auto-imported
+names by using "name : module" style. Selecting them will import
+the module automatically.
+
+
+==============================================================================
+5. Filtering Resources ~
+ *RopeFilteringResources*
+
+Some refactorings, restructuring and find occurrences take an option
+called resources. This option can be used to limit the resources on
+which a refactoring should be applied.
+
+It uses a simple format: each line starts with either '+' or '-'.
+Each '+' means include the file (or its children if it's a folder)
+that comes after it. '-' has the same meaning for exclusion. So
+using: >
+
+ +rope
+ +ropetest
+ -rope/contrib
+<
+means include all python files inside ``rope`` and ``ropetest``
+folders and their subfolders, except those that are in ``rope/contrib``.
+Or: >
+
+ -ropetest
+ -setup.py
+<
+means include all python files inside the project except ``setup.py`` and
+those under ``ropetest`` folder.
+
+
+==============================================================================
+6. Finding Occurrences ~
+ *RopeFindOccurrences*
+
+The find occurrences command ("<C-c> f" by default) can be used to
+find the occurrences of a python name. If ``unsure`` option is
+``yes``, it will also show unsure occurrences; unsure occurrences are
+indicated with a ``?`` mark in the end.
+
+ Note:
+ That ropevim uses the quickfix feature of vim for
+ marking occurrence locations.
+
+
+==============================================================================
+7. Dialog Batchset Command ~
+ *RopeDialogBatchsetCommand*
+
+When you use ropevim dialogs there is a command called ``batchset``.
+It can set many options at the same time. After selecting this
+command from dialog base prompt, you are asked to enter a string.
+
+``batchset`` strings can set the value of configs in two ways. The
+single line form is like this: >
+
+ name1 value1
+ name2 value2
+<
+
+That is, the name of the config is followed by its value. For multi-line
+values you can use: >
+
+ name1
+ line1
+ line2
+
+ name2
+ line3
+<
+Each line of the definition should start with a space or a tab.
+ Note:
+ That blank lines before the name of config definitions are ignored.
+
+``batchset`` command is useful when performing refactorings with long
+configs, like restructurings: >
+
+ pattern ${pycore}.create_module(${project}.root, ${name})
+
+ goal generate.create_module(${project}, ${name})
+
+ imports
+ from rope.contrib import generate
+
+ args
+ pycore: type=rope.base.pycore.PyCore
+ project: type=rope.base.project.Project
+<
+.. ignore the two-space indents
+
+This is a valid ``batchset`` string for restructurings.
+
+Just for the sake of completeness, the reverse of the above
+restructuring can be: >
+
+ pattern ${create_module}(${project}, ${name})
+
+ goal ${project}.pycore.create_module(${project}.root, ${name})
+
+ args
+ create_module: name=rope.contrib.generate.create_module
+ project: type=rope.base.project.Project
+<
+
+==============================================================================
+8. Variables ~
+ *RopeVariables*
+
+*'pymode_rope_codeassist_maxfixes'* The maximum number of syntax errors
+ to fix for code assists.
+ The default value is `1`.
+
+*'pymode_rope_local_prefix'* The prefix for ropevim refactorings.
+ Defaults to `<C-c> r`.
+
+*'pymode_rope_global_prefix'* The prefix for ropevim project commands
+ Defaults to `<C-x> p`.
+
+*'pymode_rope_enable_shortcuts'* Shows whether to bind ropevim shortcuts keys.
+ Defaults to `1`.
+
+*'pymode_rope_guess_project'* If non-zero, ropevim tries to guess and
+ open the project that contains the file on which
+ a ropevim command is performed when no project
+ is already open.
+
+*'pymode_rope_enable_autoimport'* Shows whether to enable autoimport.
+
+*'pymode_rope_autoimport_modules'* The name of modules whose global names should
+ be cached. |:RopeGenerateAutoimportCache| reads
+ this list and fills its cache.
+
+*'pymode_rope_autoimport_underlineds'* If set, autoimport will cache names starting
+ with underlines, too.
+
+*'pymode_rope_goto_def_newwin'* If set, ropevim will open a new buffer
+ for "go to definition" result if the definition
+ found is located in another file. By default the
+ file is open in the same buffer.
+ Values: '' -- same buffer, 'new' --
+ horizontally split, 'vnew' --
+ vertically split
+
+*'pymode_rope_always_show_complete_menu'* If set, the rope autocompletion menu
+is always shown.
+
+
+==============================================================================
+9. Keybindings ~
+ *RopeKeys*
+
+Uses almost the same keybinding as ropemacs.
+ Note:
+ That global commands have a `<C-x> p` prefix and local commands
+ have a ``<C-c> r`` prefix.
+ You can change that (see |RopeVariables| section).
+
+
+================ ============================
+Key Command
+================ ============================
+C-x p o |:RopeOpenProject|
+C-x p k |:RopeCloseProject|
+C-x p f |:RopeFindFile|
+C-x p 4 f |:RopeFindFileOtherWindow|
+C-x p u |:RopeUndo|
+C-x p r |:RopeRedo|
+C-x p c |:RopeProjectConfig|
+C-x p n [mpfd] |:RopeCreate|(Module|Package|File|Directory)
+ |:RopeWriteProject|
+
+C-c r r |:RopeRename|
+C-c r l |:RopeExtractVariable|
+C-c r m |:RopeExtractMethod|
+C-c r i |:RopeInline|
+C-c r v |:RopeMove|
+C-c r x |:RopeRestructure|
+C-c r u |:RopeUseFunction|
+C-c r f |:RopeIntroduceFactory|
+C-c r s |:RopeChangeSignature|
+C-c r 1 r |:RopeRenameCurrentModule|
+C-c r 1 v |:RopeMoveCurrentModule|
+C-c r 1 p |:RopeModuleToPackage|
+
+C-c r o |:RopeOrganizeImports|
+C-c r n [vfcmp] |:RopeGenerate|(Variable|Function|Class|Module|Package)
+
+C-c r a / |:RopeCodeAssist|
+C-c r a g |:RopeGotoDefinition|
+C-c r a d |:RopeShowDoc|
+C-c r a f |:RopeFindOccurrences|
+C-c r a ? |:RopeLuckyAssist|
+C-c r a j |:RopeJumpToGlobal|
+C-c r a c |:RopeShowCalltip|
+ |:RopeAnalyzeModule|
+
+ |:RopeAutoImport|
+ |:RopeGenerateAutoimportCache|
+================ ============================
+
+
+==============================================================================
+10. Shortcuts ~
+ *RopeShortcuts*
+
+Some commands are used very frequently; specially the commands in
+code-assist group. You can define your own shortcuts like this: >
+
+ :map <C-c>g :call RopeGotoDefinition()
+
+<
+
+================ ============================
+Key Command
+================ ============================
+<C-Space> |:RopeCodeAssist|
+<C-?> |:RopeLuckyAssist|
+<C-c> g |:RopeGotoDefinition|
+<C-c> d |:RopeShowDoc|
+<C-c> f |:RopeFindOccurrences|
+================ ============================
+
+------------------------------------------------------------------------------
+
+ vim:tw=78:fo=tcq2:isk=!-~,^*,^\|,^\":ts=8:ft=help:norl:
diff --git a/.vim/bundle/python-mode/ftplugin/pyrex.vim b/.vim/bundle/python-mode/ftplugin/pyrex.vim
@@ -0,0 +1 @@
+runtime ftplugin/python/pymode.vim
diff --git a/.vim/bundle/python-mode/ftplugin/python/init-pymode.vim b/.vim/bundle/python-mode/ftplugin/python/init-pymode.vim
@@ -0,0 +1,344 @@
+if exists('did_init_pymode_vim')
+ finish
+endif
+let did_init_pymode_vim = 1
+
+let g:pymode_version = "0.6.15"
+
+com! PymodeVersion echomsg "Current python-mode version: " . g:pymode_version
+
+" OPTION: g:pymode -- bool. Run pymode.
+if pymode#Default('g:pymode', 1) || !g:pymode
+ " DESC: Disable script loading
+ finish
+endif
+
+" DESC: Check python support
+if !has('python')
+ let g:pymode_virtualenv = 0
+ let g:pymode_path = 0
+ let g:pymode_lint = 0
+ let g:pymode_doc = 0
+ let g:pymode_breakpoint = 0
+ let g:pymode_rope = 0
+ let g:pymode_run = 0
+endif
+
+
+" Virtualenv {{{
+
+if !pymode#Default("g:pymode_virtualenv", 1) || g:pymode_virtualenv
+
+ call pymode#Default("g:pymode_virtualenv_enabled", [])
+
+ " Add virtualenv paths
+ call pymode#virtualenv#Activate()
+
+endif
+
+" }}}
+
+
+" DESC: Add pymode's pylibs to sys.path {{{
+if !pymode#Default('g:pymode_path', 1) || g:pymode_path
+
+ call pymode#Default('g:pymode_paths', [])
+
+python << EOF
+import sys, vim, os
+
+curpath = vim.eval("getcwd()")
+libpath = os.path.join(vim.eval("expand('<sfile>:p:h:h:h')"), 'pylibs')
+
+sys.path = [libpath, curpath] + vim.eval("g:pymode_paths") + sys.path
+EOF
+
+endif " }}}
+
+
+" Lint {{{
+
+if !pymode#Default("g:pymode_lint", 1) || g:pymode_lint
+
+ let g:qf_list = []
+ let g:pymode_lint_buffer = 0
+
+ " OPTION: g:pymode_lint_write -- bool. Check code every save.
+ call pymode#Default("g:pymode_lint_write", 1)
+
+    " OPTION: g:pymode_lint_onfly -- bool. Check code on the fly (on InsertLeave).
+ call pymode#Default("g:pymode_lint_onfly", 0)
+
+ " OPTION: g:pymode_lint_message -- bool. Show current line error message
+ call pymode#Default("g:pymode_lint_message", 1)
+
+ " OPTION: g:pymode_lint_checker -- str. Choices are: pylint, pyflakes, pep8, mccabe
+ call pymode#Default("g:pymode_lint_checker", "pyflakes,pep8,mccabe")
+
+ " OPTION: g:pymode_lint_config -- str. Path to pylint config file
+ call pymode#Default("g:pymode_lint_config", $HOME . "/.pylintrc")
+
+    " OPTION: g:pymode_lint_cwindow -- bool. Auto open cwindow if errors are found
+ call pymode#Default("g:pymode_lint_cwindow", 1)
+
+ " OPTION: g:pymode_lint_jump -- int. Jump on first error.
+ call pymode#Default("g:pymode_lint_jump", 0)
+
+ " OPTION: g:pymode_lint_hold -- int. Hold cursor on current window when
+ " quickfix open
+ call pymode#Default("g:pymode_lint_hold", 0)
+
+ " OPTION: g:pymode_lint_minheight -- int. Minimal height of pymode lint window
+ call pymode#Default("g:pymode_lint_minheight", 3)
+
+ " OPTION: g:pymode_lint_maxheight -- int. Maximal height of pymode lint window
+ call pymode#Default("g:pymode_lint_maxheight", 6)
+
+ " OPTION: g:pymode_lint_ignore -- string. Skip errors and warnings (e.g. E4,W)
+ call pymode#Default("g:pymode_lint_ignore", "")
+
+ " OPTION: g:pymode_lint_select -- string. Select errors and warnings (e.g. E4,W)
+ call pymode#Default("g:pymode_lint_select", "")
+
+ " OPTION: g:pymode_lint_mccabe_complexity -- int. Maximum allowed complexity
+ call pymode#Default("g:pymode_lint_mccabe_complexity", 8)
+
+    " OPTION: g:pymode_lint_signs_always_visible -- bool. Always show the
+    " error signs ruler, even if there are no errors.
+ call pymode#Default("g:pymode_lint_signs_always_visible", 0)
+
+ " OPTION: g:pymode_lint_signs -- bool. Place error signs
+ if (!pymode#Default("g:pymode_lint_signs", 1) || g:pymode_lint_signs) && has('signs')
+
+ " DESC: Signs definition
+ sign define PymodeW text=WW texthl=Todo
+ sign define PymodeC text=CC texthl=Comment
+ sign define PymodeR text=RR texthl=Visual
+ sign define PymodeE text=EE texthl=Error
+ sign define PymodeI text=II texthl=Info
+ sign define PymodeF text=FF texthl=Info
+
+ if !pymode#Default("g:pymode_lint_signs_always_visible", 0) || g:pymode_lint_signs_always_visible
+        " Show the sign ruler if asked for, even if there's no error to show
+ sign define __dummy__
+ autocmd BufRead,BufNew * call RopeShowSignsRulerIfNeeded()
+ endif
+
+ endif
+
+ " DESC: Set default pylint configuration
+ if !filereadable(g:pymode_lint_config)
+ let g:pymode_lint_config = expand("<sfile>:p:h:h:h") . "/pylint.ini"
+ endif
+
+ py from pymode import queue
+
+ au VimLeavePre * py queue.stop_queue()
+
+endif
+
+" }}}
+
+
+" Documentation {{{
+
+if !pymode#Default("g:pymode_doc", 1) || g:pymode_doc
+
+    " OPTION: g:pymode_doc_key -- string. Key for showing python documentation.
+ call pymode#Default("g:pymode_doc_key", "K")
+
+endif
+
+" }}}
+
+
+" Breakpoints {{{
+
+if !pymode#Default("g:pymode_breakpoint", 1) || g:pymode_breakpoint
+
+ if !pymode#Default("g:pymode_breakpoint_cmd", "import ipdb; ipdb.set_trace() # XXX BREAKPOINT") && has("python")
+python << EOF
+from imp import find_module
+try:
+ find_module('ipdb')
+except ImportError:
+ vim.command('let g:pymode_breakpoint_cmd = "import pdb; pdb.set_trace() # XXX BREAKPOINT"')
+EOF
+ endif
+
+ " OPTION: g:pymode_breakpoint_key -- string. Key for set/unset breakpoint.
+ call pymode#Default("g:pymode_breakpoint_key", "<leader>b")
+
+endif
+
+" }}}
+
+
+" Execution {{{
+
+if !pymode#Default("g:pymode_run", 1) || g:pymode_run
+
+    " OPTION: g:pymode_run_key -- string. Key for running python code.
+ call pymode#Default("g:pymode_run_key", "<leader>r")
+
+endif
+
+" }}}
+
+
+" Rope {{{
+
+if !pymode#Default("g:pymode_rope", 1) || g:pymode_rope
+
+ " OPTION: g:pymode_rope_auto_project -- bool. Auto create ropeproject
+ call pymode#Default("g:pymode_rope_auto_project", 1)
+
+ " OPTION: g:pymode_rope_auto_project_open -- bool.
+ " Auto open existing projects, ie, if the current directory has a
+ " `.ropeproject` subdirectory.
+ call pymode#Default("g:pymode_rope_auto_project_open", 1)
+
+ " OPTION: g:pymode_rope_auto_session_manage -- bool
+ call pymode#Default("g:pymode_rope_auto_session_manage", 0)
+
+ " OPTION: g:pymode_rope_enable_autoimport -- bool. Enable autoimport
+ call pymode#Default("g:pymode_rope_enable_autoimport", 1)
+
+ " OPTION: g:pymode_rope_autoimport_generate -- bool.
+ call pymode#Default("g:pymode_rope_autoimport_generate", 1)
+
+    " OPTION: g:pymode_rope_autoimport_underlineds -- bool.
+ call pymode#Default("g:pymode_rope_autoimport_underlineds", 0)
+
+    " OPTION: g:pymode_rope_codeassist_maxfixes -- int.
+ call pymode#Default("g:pymode_rope_codeassist_maxfixes", 10)
+
+ " OPTION: g:pymode_rope_sorted_completions -- bool.
+ call pymode#Default("g:pymode_rope_sorted_completions", 1)
+
+ " OPTION: g:pymode_rope_extended_complete -- bool.
+ call pymode#Default("g:pymode_rope_extended_complete", 1)
+
+ " OPTION: g:pymode_rope_autoimport_modules -- array.
+ call pymode#Default("g:pymode_rope_autoimport_modules", ["os","shutil","datetime"])
+
+ " OPTION: g:pymode_rope_confirm_saving -- bool.
+ call pymode#Default("g:pymode_rope_confirm_saving", 1)
+
+ " OPTION: g:pymode_rope_global_prefix -- string.
+ call pymode#Default("g:pymode_rope_global_prefix", "<C-x>p")
+
+ " OPTION: g:pymode_rope_local_prefix -- string.
+ call pymode#Default("g:pymode_rope_local_prefix", "<C-c>r")
+
+ " OPTION: g:pymode_rope_short_prefix -- string.
+ call pymode#Default("g:pymode_rope_short_prefix", "<C-c>")
+
+ " OPTION: g:pymode_rope_map_space -- string.
+ call pymode#Default("g:pymode_rope_map_space", 1)
+
+ " OPTION: g:pymode_rope_vim_completion -- bool.
+ call pymode#Default("g:pymode_rope_vim_completion", 1)
+
+ " OPTION: g:pymode_rope_guess_project -- bool.
+ call pymode#Default("g:pymode_rope_guess_project", 1)
+
+ " OPTION: g:pymode_rope_goto_def_newwin -- str ('new', 'vnew', '').
+ call pymode#Default("g:pymode_rope_goto_def_newwin", "")
+
+ " OPTION: g:pymode_rope_always_show_complete_menu -- bool.
+ call pymode#Default("g:pymode_rope_always_show_complete_menu", 0)
+
+ " DESC: Init Rope
+ py import ropevim
+
+ fun! RopeCodeAssistInsertMode() "{{{
+ call RopeCodeAssist()
+ return ""
+ endfunction "}}}
+
+ fun! RopeOpenExistingProject() "{{{
+ if isdirectory(getcwd() . '/.ropeproject')
+ " In order to pass it the quiet kwarg I need to open the project
+ " using python and not vim, which should be no major issue
+ py ropevim._interface.open_project(quiet=True)
+ return ""
+ endif
+ endfunction "}}}
+
+ fun! RopeLuckyAssistInsertMode() "{{{
+ call RopeLuckyAssist()
+ return ""
+ endfunction "}}}
+
+ fun! RopeOmni(findstart, base) "{{{
+ if a:findstart
+ py ropevim._interface._find_start()
+ return g:pymode_offset
+ else
+ call RopeOmniComplete()
+ return g:pythoncomplete_completions
+ endif
+ endfunction "}}}
+
+ fun! RopeShowSignsRulerIfNeeded() "{{{
+ if &ft == 'python'
+ execute printf('silent! sign place 1 line=1 name=__dummy__ file=%s', expand("%:p"))
+ endif
+ endfunction "}}}
+
+
+ " Rope menu
+ menu <silent> Rope.Autoimport :RopeAutoImport<CR>
+ menu <silent> Rope.ChangeSignature :RopeChangeSignature<CR>
+ menu <silent> Rope.CloseProject :RopeCloseProject<CR>
+ menu <silent> Rope.GenerateAutoImportCache :RopeGenerateAutoimportCache<CR>
+ menu <silent> Rope.ExtractVariable :RopeExtractVariable<CR>
+ menu <silent> Rope.ExtractMethod :RopeExtractMethod<CR>
+ menu <silent> Rope.Inline :RopeInline<CR>
+ menu <silent> Rope.IntroduceFactory :RopeIntroduceFactory<CR>
+ menu <silent> Rope.FindFile :RopeFindFile<CR>
+ menu <silent> Rope.OpenProject :RopeOpenProject<CR>
+ menu <silent> Rope.Move :RopeMove<CR>
+ menu <silent> Rope.MoveCurrentModule :RopeMoveCurrentModule<CR>
+ menu <silent> Rope.ModuleToPackage :RopeModuleToPackage<CR>
+ menu <silent> Rope.Redo :RopeRedo<CR>
+ menu <silent> Rope.Rename :RopeRename<CR>
+ menu <silent> Rope.RenameCurrentModule :RopeRenameCurrentModule<CR>
+ menu <silent> Rope.Restructure :RopeRestructure<CR>
+ menu <silent> Rope.Undo :RopeUndo<CR>
+ menu <silent> Rope.UseFunction :RopeUseFunction<CR>
+
+ if !pymode#Default("g:pymode_rope_auto_project_open", 1) || g:pymode_rope_auto_project_open
+ call RopeOpenExistingProject()
+ endif
+
+ if !pymode#Default("g:pymode_rope_auto_session_manage", 0) || g:pymode_rope_auto_session_manage
+ autocmd VimLeave * call RopeSaveSession()
+ autocmd VimEnter * call RopeRestoreSession()
+ endif
+
+endif
+
+" }}}
+
+
+" OPTION: g:pymode_folding -- bool. Enable python-mode folding for pyfiles.
+call pymode#Default("g:pymode_folding", 1)
+
+" OPTION: g:pymode_syntax -- bool. Enable python-mode syntax for pyfiles.
+call pymode#Default("g:pymode_syntax", 1)
+
+" OPTION: g:pymode_indent -- bool. Enable/Disable pymode PEP8 indentation
+call pymode#Default("g:pymode_indent", 1)
+
+" OPTION: g:pymode_utils_whitespaces -- bool. Remove unused whitespaces on save
+call pymode#Default("g:pymode_utils_whitespaces", 1)
+
+" OPTION: g:pymode_options -- bool. To set some python options.
+call pymode#Default("g:pymode_options", 1)
+
+" OPTION: g:pymode_updatetime -- int. Set updatetime for async pymode's operation
+call pymode#Default("g:pymode_updatetime", 1000)
+
+" vim: fdm=marker:fdl=0
diff --git a/.vim/bundle/python-mode/ftplugin/python/pymode.vim b/.vim/bundle/python-mode/ftplugin/python/pymode.vim
@@ -0,0 +1,156 @@
+runtime ftplugin/python/init-pymode.vim
+
+if pymode#Default('b:pymode', 1)
+ finish
+endif
+
+
+" Parse pymode modeline
+call pymode#Modeline()
+
+
+" Syntax highlight
+if pymode#Option('syntax')
+ let python_highlight_all=1
+endif
+
+
+" Options {{{
+
+" Python other options
+if pymode#Option('options')
+ setlocal complete+=t
+ setlocal formatoptions-=t
+ if v:version > 702 && !&relativenumber
+ setlocal number
+ endif
+ setlocal nowrap
+ setlocal textwidth=79
+endif
+
+" }}}
+
+
+" Documentation {{{
+
+if pymode#Option('doc')
+
+ " DESC: Set commands
+ command! -buffer -nargs=1 Pydoc call pymode#doc#Show("<args>")
+
+ " DESC: Set keys
+ exe "nnoremap <silent> <buffer> " g:pymode_doc_key ":call pymode#doc#Show(expand('<cword>'))<CR>"
+ exe "vnoremap <silent> <buffer> " g:pymode_doc_key ":<C-U>call pymode#doc#Show(@*)<CR>"
+
+endif
+
+" }}}
+
+
+" Lint {{{
+
+if pymode#Option('lint')
+
+ " DESC: Set commands
+ command! -buffer -nargs=0 PyLintToggle :call pymode#lint#Toggle()
+ command! -buffer -nargs=0 PyLintWindowToggle :call pymode#lint#ToggleWindow()
+ command! -buffer -nargs=0 PyLintCheckerToggle :call pymode#lint#ToggleChecker()
+ command! -buffer -nargs=0 PyLint :call pymode#lint#Check()
+ command! -buffer -nargs=0 PyLintAuto :call pymode#lint#Auto()
+
+ " DESC: Set autocommands
+ if pymode#Option('lint_write')
+ au BufWritePost <buffer> PyLint
+ endif
+
+ if pymode#Option('lint_onfly')
+ au InsertLeave <buffer> PyLint
+ endif
+
+ if pymode#Option('lint_message')
+ au CursorHold <buffer> call pymode#lint#show_errormessage()
+ au CursorMoved <buffer> call pymode#lint#show_errormessage()
+ endif
+
+ " DESC: Run queue
+ let &l:updatetime = g:pymode_updatetime
+ au CursorHold <buffer> call pymode#queue#Poll()
+ au BufLeave <buffer> py queue.stop_queue()
+
+endif
+
+" }}}
+
+
+" Rope {{{
+
+if pymode#Option('rope')
+
+ " DESC: Set keys
+ exe "noremap <silent> <buffer> " . g:pymode_rope_short_prefix . "g :RopeGotoDefinition<CR>"
+ exe "noremap <silent> <buffer> " . g:pymode_rope_short_prefix . "d :RopeShowDoc<CR>"
+ exe "noremap <silent> <buffer> " . g:pymode_rope_short_prefix . "f :RopeFindOccurrences<CR>"
+ exe "noremap <silent> <buffer> " . g:pymode_rope_short_prefix . "m :emenu Rope . <TAB>"
+ inoremap <silent> <buffer> <S-TAB> <C-R>=RopeLuckyAssistInsertMode()<CR>
+
+ if g:pymode_rope_map_space
+ let s:prascm = g:pymode_rope_always_show_complete_menu ? "<C-P>" : ""
+ exe "inoremap <silent> <buffer> <Nul> <C-R>=RopeCodeAssistInsertMode()<CR>" . s:prascm
+ exe "inoremap <silent> <buffer> <c-space> <C-R>=RopeCodeAssistInsertMode()<CR>" . s:prascm
+ endif
+
+endif
+
+" }}}
+
+
+" Execution {{{
+
+if pymode#Option('run')
+
+ " DESC: Set commands
+ command! -buffer -nargs=0 -range=% Pyrun call pymode#run#Run(<f-line1>, <f-line2>)
+
+ " DESC: Set keys
+ exe "nnoremap <silent> <buffer> " g:pymode_run_key ":Pyrun<CR>"
+ exe "vnoremap <silent> <buffer> " g:pymode_run_key ":Pyrun<CR>"
+
+endif
+
+" }}}
+
+
+" Breakpoints {{{
+
+if pymode#Option('breakpoint')
+
+ " DESC: Set keys
+ exe "nnoremap <silent> <buffer> " g:pymode_breakpoint_key ":call pymode#breakpoint#Set(line('.'))<CR>"
+
+endif
+
+" }}}
+
+
+" Utils {{{
+
+if pymode#Option('utils_whitespaces')
+ au BufWritePre <buffer> call pymode#TrimWhiteSpace()
+endif
+
+" }}}
+
+
+" Folding {{{
+
+if pymode#Option('folding')
+
+ setlocal foldmethod=expr
+ setlocal foldexpr=pymode#folding#expr(v:lnum)
+ setlocal foldtext=pymode#folding#text()
+
+endif
+
+" }}}
+
+" vim: fdm=marker:fdl=0
diff --git a/.vim/bundle/python-mode/pylibs/__init__.py b/.vim/bundle/python-mode/pylibs/__init__.py
diff --git a/.vim/bundle/python-mode/pylibs/autopep8.py b/.vim/bundle/python-mode/pylibs/autopep8.py
@@ -0,0 +1,2161 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2010-2011 Hideo Hattori
+# Copyright (C) 2011-2013 Hideo Hattori, Steven Myint
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Automatically formats Python code to conform to the PEP 8 style guide."""
+
+from __future__ import print_function
+from __future__ import division
+
+import codecs
+import copy
+import fnmatch
+import inspect
+import os
+import re
+import sys
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import StringIO
+import token
+import tokenize
+from optparse import OptionParser
+import difflib
+import tempfile
+
+from pylama import pep8
+
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+
+__version__ = '0.8.7'
+
+
+CR = '\r'
+LF = '\n'
+CRLF = '\r\n'
+
+
+# For generating line shortening candidates.
+SHORTEN_OPERATOR_GROUPS = frozenset([
+ frozenset([',']),
+ frozenset(['%']),
+ frozenset([',', '(', '[', '{']),
+ frozenset([',', '(', '[', '{', '%', '+', '-', '*', '/', '//']),
+])
+
+
def open_with_encoding(filename, encoding=None, mode='r'):
    """Open *filename* in *mode* with an explicit text encoding.

    When *encoding* is falsy it is guessed from the file contents via
    detect_encoding().  ``newline=''`` disables universal-newline
    translation so the original line endings survive a round trip.
    """
    import io

    chosen = encoding or detect_encoding(filename)
    return io.open(filename, mode=mode, encoding=chosen, newline='')
+
+
def detect_encoding(filename):
    """Guess the text encoding of *filename*.

    Uses lib2to3's PEP 263-aware detector, then verifies the guess by
    decoding the whole file.  Falls back to 'latin-1' — which can decode
    any byte sequence — when detection or decoding fails.
    """
    try:
        from lib2to3.pgen2 import tokenize as lib2to3_tokenize
        with open(filename, 'rb') as input_file:
            encoding = lib2to3_tokenize.detect_encoding(input_file.readline)[0]

        # Make sure the detected codec can actually decode the file.
        with open_with_encoding(filename, encoding) as test_file:
            test_file.read()
        return encoding
    except (SyntaxError, LookupError, UnicodeDecodeError):
        return 'latin-1'
+
+
def read_from_filename(filename, readlines=False):
    """Read *filename*; return a list of lines if *readlines*, else one string."""
    with open_with_encoding(filename) as input_file:
        if readlines:
            return input_file.readlines()
        return input_file.read()
+
+
+class FixPEP8(object):
+
+ """Fix invalid code.
+
+ Fixer methods are prefixed "fix_". The _fix_source() method looks for these
+ automatically.
+
+ The fixer method can take either one or two arguments (in addition to
+ self). The first argument is "result", which is the error information from
+ pep8. The second argument, "logical", is required only for logical-line
+ fixes.
+
+ The fixer method can return the list of modified lines or None. An empty
+ list would mean that no changes were made. None would mean that only the
+ line reported in the pep8 error was modified. Note that the modified line
+ numbers that are returned are indexed at 1. This typically would correspond
+ with the line number reported in the pep8 error information.
+
+ [fixed method list]
+ - e111
+ - e121,e122,e123,e124,e125,e126,e127,e128
+ - e201,e202,e203
+ - e211
+ - e221,e222,e223,e224,e225
+ - e231
+ - e251
+ - e261,e262
+ - e271,e272,e273,e274
+ - e301,e302,e303
+ - e401
+ - e502
+ - e701,e702
+ - e711
+ - e721
+ - w291,w293
+ - w391
+ - w602,w603,w604
+
+ """
+
+    def __init__(self, filename, options, contents=None):
+        # `contents`, when given, takes precedence over reading `filename`.
+        self.filename = filename
+        if contents is None:
+            self.source = read_from_filename(filename, readlines=True)
+        else:
+            sio = StringIO(contents)
+            self.source = sio.readlines()
+        # Remember the file's dominant newline so fixes preserve line endings.
+        self.newline = find_newline(self.source)
+        self.options = options
+        self.indent_word = _get_indentword(unicode().join(self.source))
+        # Logical-line boundaries; computed lazily by find_logical().
+        self.logical_start = None
+        self.logical_end = None
+        # method definition
+        # Several pep8 codes share one fixer implementation; alias them here.
+        self.fix_e111 = self.fix_e101
+        self.fix_e128 = self.fix_e127
+        self.fix_e202 = self.fix_e201
+        self.fix_e203 = self.fix_e201
+        self.fix_e211 = self.fix_e201
+        self.fix_e221 = self.fix_e271
+        self.fix_e222 = self.fix_e271
+        self.fix_e223 = self.fix_e271
+        self.fix_e226 = self.fix_e225
+        self.fix_e227 = self.fix_e225
+        self.fix_e228 = self.fix_e225
+        self.fix_e241 = self.fix_e271
+        self.fix_e242 = self.fix_e224
+        self.fix_e261 = self.fix_e262
+        self.fix_e272 = self.fix_e271
+        self.fix_e273 = self.fix_e271
+        self.fix_e274 = self.fix_e271
+        self.fix_e703 = self.fix_e702
+        self.fix_w191 = self.fix_e101
+
+    def _fix_source(self, results):
+        # Apply fixers in priority order; a physical line is fixed at most
+        # once per pass to keep earlier fixes from being clobbered.
+        completed_lines = set()
+        for result in sorted(results, key=_priority_key):
+            if result['line'] in completed_lines:
+                continue
+
+            fixed_methodname = 'fix_%s' % result['id'].lower()
+            if hasattr(self, fixed_methodname):
+                fix = getattr(self, fixed_methodname)
+
+                # Logical fixers take (result, logical); physical fixers
+                # take only (result).
+                is_logical_fix = len(inspect.getargspec(fix).args) > 2
+                if is_logical_fix:
+                    # Do not run logical fix if any lines have been modified.
+                    if completed_lines:
+                        continue
+
+                    logical = self._get_logical(result)
+                    if not logical:
+                        continue
+
+                    modified_lines = fix(result, logical)
+                else:
+                    modified_lines = fix(result)
+
+                if modified_lines:
+                    completed_lines.update(modified_lines)
+                elif modified_lines == []:  # Empty list means no fix
+                    if self.options.verbose >= 2:
+                        print(
+                            '---> Not fixing {f} on line {l}'.format(
+                                f=result['id'], l=result['line']),
+                            file=sys.stderr)
+                else:  # We assume one-line fix when None
+                    completed_lines.add(result['line'])
+            else:
+                if self.options.verbose >= 3:
+                    print("---> '%s' is not defined." % fixed_methodname,
+                          file=sys.stderr)
+                    info = result['info'].strip()
+                    print('---> %s:%s:%s:%s' % (self.filename,
+                                                result['line'],
+                                                result['column'],
+                                                info),
+                          file=sys.stderr)
+
+    def fix(self):
+        """Return a version of the source code with PEP 8 violations fixed."""
+        pep8_options = {
+            'ignore': self.options.ignore,
+            'select': self.options.select,
+            'max_line_length': self.options.max_line_length,
+        }
+        results = _execute_pep8(pep8_options, self.source)
+
+        if self.options.verbose:
+            progress = {}
+            for r in results:
+                if r['id'] not in progress:
+                    progress[r['id']] = set()
+                progress[r['id']].add(r['line'])
+            print('---> {n} issue(s) to fix {progress}'.format(
+                n=len(results), progress=progress), file=sys.stderr)
+
+        # Drop results that should not be auto-fixed, then dispatch the
+        # rest to their fix_* methods (mutates self.source in place).
+        self._fix_source(filter_results(source=unicode().join(self.source),
+                                        results=results,
+                                        aggressive=self.options.aggressive))
+        return unicode().join(self.source)
+
+    def fix_e101(self, _):
+        """Reindent all lines."""
+        # Whole-file fix: the individual pep8 result is ignored.
+        reindenter = Reindenter(self.source, self.newline)
+        modified_line_numbers = reindenter.run()
+        if modified_line_numbers:
+            self.source = reindenter.fixed_lines()
+            return modified_line_numbers
+        else:
+            return []
+
+    def find_logical(self, force=False):
+        """Record the (row, col) start/end of every logical line.
+
+        Results are cached on self.logical_start / self.logical_end;
+        pass force=True to recompute.
+        """
+        # make a variable which is the index of all the starts of lines
+        if not force and self.logical_start is not None:
+            return
+        logical_start = []
+        logical_end = []
+        last_newline = True
+        sio = StringIO(''.join(self.source))
+        parens = 0
+        for t in tokenize.generate_tokens(sio.readline):
+            # Tokens that never begin or end a logical line.
+            if t[0] in [tokenize.COMMENT, tokenize.DEDENT,
+                        tokenize.INDENT, tokenize.NL,
+                        tokenize.ENDMARKER]:
+                continue
+            # A NEWLINE or semicolon outside brackets ends the logical line.
+            if not parens and t[0] in [
+                tokenize.NEWLINE, tokenize.SEMI
+            ]:
+                last_newline = True
+                logical_end.append((t[3][0] - 1, t[2][1]))
+                continue
+            if last_newline and not parens:
+                logical_start.append((t[2][0] - 1, t[2][1]))
+                last_newline = False
+            # Track bracket depth so continuation lines are not split.
+            if t[0] == tokenize.OP:
+                if t[1] in '([{':
+                    parens += 1
+                elif t[1] in '}])':
+                    parens -= 1
+        self.logical_start = logical_start
+        self.logical_end = logical_end
+
+    def _get_logical(self, result):
+        """Return the logical line corresponding to the result.
+
+        Assumes input is already E702-clean.
+
+        """
+        try:
+            self.find_logical()
+        except (IndentationError, tokenize.TokenError):
+            return None
+
+        row = result['line'] - 1
+        col = result['column'] - 1
+        ls = None
+        le = None
+        # Find the first logical line that ends after (row, col).
+        for i in range(0, len(self.logical_start), 1):
+            x = self.logical_end[i]
+            if x[0] > row or (x[0] == row and x[1] > col):
+                le = x
+                ls = self.logical_start[i]
+                break
+        if ls is None:
+            return None
+        original = self.source[ls[0]:le[0] + 1]
+        # (start position, end position, list of physical lines)
+        return ls, le, original
+
+    def _fix_reindent(self, result, logical, fix_distinct=False):
+        """Fix a badly indented line.
+
+        This is done by adding or removing from its initial indent only.
+
+        """
+        if not logical:
+            return []
+        ls, _, original = logical
+        try:
+            rewrapper = Wrapper(original)
+        except (tokenize.TokenError, IndentationError):
+            return []
+        valid_indents = rewrapper.pep8_expected()
+        if not rewrapper.rel_indent:
+            return []
+        if result['line'] > ls[0]:
+            # got a valid continuation line number from pep8
+            row = result['line'] - ls[0] - 1
+            # always pick the first option for this
+            valid = valid_indents[row]
+            got = rewrapper.rel_indent[row]
+        else:
+            # Line number from pep8 isn't a continuation line. Instead,
+            # compare our own function's result, look for the first mismatch,
+            # and just hope that we take fewer than 100 iterations to finish.
+            for row in range(0, len(original), 1):
+                valid = valid_indents[row]
+                got = rewrapper.rel_indent[row]
+                if valid != got:
+                    break
+        line = ls[0] + row
+        # always pick the expected indent, for now.
+        indent_to = valid[0]
+        # fix_distinct (E125): prefer the second valid indent so the
+        # continuation is distinguishable from the following block.
+        if fix_distinct and indent_to == 4:
+            if len(valid) == 1:
+                return []
+            else:
+                indent_to = valid[1]
+
+        if got != indent_to:
+            orig_line = self.source[line]
+            new_line = ' ' * (indent_to) + orig_line.lstrip()
+            if new_line == orig_line:
+                return []
+            else:
+                self.source[line] = new_line
+                return [line + 1]  # Line indexed at 1
+        else:
+            return []
+
+    def fix_e121(self, result, logical):
+        """Fix indentation to be a multiple of four."""
+        # Fix by adjusting initial indent level.
+        # Shares the generic reindent logic with the other E12x fixers.
+        return self._fix_reindent(result, logical)
+
+    def fix_e122(self, result, logical):
+        """Add absent indentation for hanging indentation."""
+        # Fix by adding an initial indent.
+        # Shares the generic reindent logic with the other E12x fixers.
+        return self._fix_reindent(result, logical)
+
+    def fix_e123(self, result, logical):
+        """Align closing bracket to match opening bracket."""
+        # Fix by deleting whitespace to the correct level.
+        if not logical:
+            return []
+        logical_lines = logical[2]
+        line_index = result['line'] - 1
+        original_line = self.source[line_index]
+
+        # Re-indent the bracket to the logical line's own indentation.
+        fixed_line = (_get_indentation(logical_lines[0]) +
+                      original_line.lstrip())
+        if fixed_line == original_line:
+            # Fall back to slower method.
+            return self._fix_reindent(result, logical)
+        else:
+            self.source[line_index] = fixed_line
+
+    def fix_e124(self, result, logical):
+        """Align closing bracket to match visual indentation."""
+        # Fix by inserting whitespace before the closing bracket.
+        # Shares the generic reindent logic with the other E12x fixers.
+        return self._fix_reindent(result, logical)
+
+    def fix_e125(self, result, logical):
+        """Indent to distinguish line from next logical line."""
+        # Fix by indenting the line in error to the next stop.
+        modified_lines = self._fix_reindent(result, logical, fix_distinct=True)
+        if modified_lines:
+            return modified_lines
+        else:
+            # Fallback
+            # Implicit None return: pep8's reported line counts as fixed.
+            line_index = result['line'] - 1
+            original_line = self.source[line_index]
+            self.source[line_index] = self.indent_word + original_line
+
+    def fix_e126(self, result, logical):
+        """Fix over-indented hanging indentation."""
+        # fix by deleting whitespace to the left
+        if not logical:
+            return []
+        logical_lines = logical[2]
+        line_index = result['line'] - 1
+        original = self.source[line_index]
+
+        # One indent level past the logical line's own indentation.
+        fixed = (_get_indentation(logical_lines[0]) +
+                 self.indent_word + original.lstrip())
+        if fixed == original:
+            # Fall back to slower method.
+            return self._fix_reindent(result, logical)
+        else:
+            self.source[line_index] = fixed
+
+    def fix_e127(self, result, logical):
+        """Fix visual indentation."""
+        # Fix by inserting/deleting whitespace to the correct level.
+        # None (one-line fix) passes through; [] triggers the fallback.
+        modified_lines = self._align_visual_indent(result, logical)
+        if modified_lines != []:
+            return modified_lines
+        else:
+            # Fall back to slower method.
+            return self._fix_reindent(result, logical)
+
+    def _align_visual_indent(self, result, logical):
+        """Correct visual indent.
+
+        This includes over (E127) and under (E128) indented lines.
+
+        """
+        if not logical:
+            return []
+        logical_lines = logical[2]
+        line_index = result['line'] - 1
+        original = self.source[line_index]
+        fixed = original
+
+        if logical_lines[0].rstrip().endswith('\\'):
+            # Backslash continuation: indent one level past the first line.
+            fixed = (_get_indentation(logical_lines[0]) +
+                     self.indent_word + original.lstrip())
+        else:
+            # Align under the first opening bracket of the logical line.
+            for symbol in '([{':
+                if symbol in logical_lines[0]:
+                    fixed = logical_lines[0].find(
+                        symbol) * ' ' + original.lstrip()
+                    break
+
+        if fixed == original:
+            return []
+        else:
+            self.source[line_index] = fixed
+
+    def fix_e201(self, result):
+        """Remove extraneous whitespace."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column'] - 1
+
+        # When multiline strings are involved, pep8 reports the error as
+        # being at the start of the multiline string, which doesn't work
+        # for us.
+        if ('"""' in target or
+            "'''" in target or
+                target.rstrip().endswith('\\')):
+            return []
+
+        # Delete the whitespace run at the reported column.
+        fixed = fix_whitespace(target,
+                               offset=offset,
+                               replacement='')
+
+        if fixed == target:
+            return []
+        else:
+            self.source[line_index] = fixed
+
+    def fix_e224(self, result):
+        """Remove extraneous whitespace around operator."""
+        target = self.source[result['line'] - 1]
+        offset = result['column'] - 1
+        # Tabs from the offset onward become single spaces.
+        fixed = target[:offset] + target[offset:].replace('\t', ' ')
+        self.source[result['line'] - 1] = fixed
+
+    def fix_e225(self, result):
+        """Fix missing whitespace around operator."""
+        target = self.source[result['line'] - 1]
+        offset = result['column'] - 1
+        # Insert a single space at the reported column.
+        fixed = target[:offset] + ' ' + target[offset:]
+
+        # Only proceed if non-whitespace characters match.
+        # And make sure we don't break the indentation.
+        if (fixed.replace(' ', '') == target.replace(' ', '') and
+                _get_indentation(fixed) == _get_indentation(target)):
+            self.source[result['line'] - 1] = fixed
+        else:
+            return []
+
+    def fix_e231(self, result):
+        """Add missing whitespace."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        # Unlike most fixers this uses the raw column: the space goes
+        # after the separator character pep8 points at.
+        offset = result['column']
+        fixed = target[:offset] + ' ' + target[offset:]
+        self.source[line_index] = fixed
+
+    def fix_e251(self, result):
+        """Remove whitespace around parameter '=' sign."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        # This is necessary since pep8 sometimes reports columns that goes
+        # past the end of the physical line. This happens in cases like,
+        # foo(bar\n=None)
+        c = min(result['column'] - 1,
+                len(target) - 1)
+
+        if target[c].strip():
+            fixed = target
+        else:
+            # Join the two halves, dropping the whitespace between them.
+            fixed = target[:c].rstrip() + target[c:].lstrip()
+
+        # There could be an escaped newline
+        #
+        # def foo(a=\
+        #     1)
+        if (fixed.endswith('=\\\n') or
+            fixed.endswith('=\\\r\n') or
+            fixed.endswith('=\\\r')):
+            self.source[line_index] = fixed.rstrip('\n\r \t\\')
+            self.source[line_index + 1] = self.source[line_index + 1].lstrip()
+            return [line_index + 1, line_index + 2]  # Line indexed at 1
+
+        self.source[result['line'] - 1] = fixed
+
+    def fix_e262(self, result):
+        """Fix spacing after comment hash."""
+        target = self.source[result['line'] - 1]
+        offset = result['column']
+
+        code = target[:offset].rstrip(' \t#')
+        comment = target[offset:].lstrip(' \t#')
+
+        # Keep '# ' before real text; drop the hash entirely when empty.
+        fixed = code + (' # ' + comment if comment.strip()
+                        else self.newline)
+
+        self.source[result['line'] - 1] = fixed
+
+    def fix_e271(self, result):
+        """Fix extraneous whitespace around keywords."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column'] - 1
+
+        # When multiline strings are involved, pep8 reports the error as
+        # being at the start of the multiline string, which doesn't work
+        # for us.
+        if ('"""' in target or
+            "'''" in target or
+                target.rstrip().endswith('\\')):
+            return []
+
+        # Collapse the whitespace run at the reported column to one space.
+        fixed = fix_whitespace(target,
+                               offset=offset,
+                               replacement=' ')
+
+        if fixed == target:
+            return []
+        else:
+            self.source[line_index] = fixed
+
+    def fix_e301(self, result):
+        """Add missing blank line."""
+        # Prepend one newline to the offending line (same list slot).
+        cr = self.newline
+        self.source[result['line'] - 1] = cr + self.source[result['line'] - 1]
+
+    def fix_e302(self, result):
+        """Add missing 2 blank lines."""
+        # pep8's message ends with the number of blank lines found.
+        add_linenum = 2 - int(result['info'].split()[-1])
+        cr = self.newline * add_linenum
+        self.source[result['line'] - 1] = cr + self.source[result['line'] - 1]
+
+    def fix_e303(self, result):
+        """Remove extra blank lines."""
+        # pep8's message contains the blank-line count in parentheses.
+        delete_linenum = int(result['info'].split('(')[1].split(')')[0]) - 2
+        delete_linenum = max(1, delete_linenum)
+
+        # We need to count because pep8 reports an offset line number if there
+        # are comments.
+        cnt = 0
+        line = result['line'] - 2
+        modified_lines = []
+        while cnt < delete_linenum:
+            if line < 0:
+                break
+            if not self.source[line].strip():
+                self.source[line] = ''
+                modified_lines.append(1 + line)  # Line indexed at 1
+                cnt += 1
+            line -= 1
+
+        return modified_lines
+
+    def fix_e304(self, result):
+        """Remove blank line following function decorator."""
+        # The blank line is the one just above the reported line.
+        line = result['line'] - 2
+        if not self.source[line].strip():
+            self.source[line] = ''
+
+    def fix_e401(self, result):
+        """Put imports on separate lines."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column'] - 1
+
+        if not target.lstrip().startswith('import'):
+            return []
+
+        # pep8 (1.3.1) reports false positive if there is an import statement
+        # followed by a semicolon and some unrelated statement with commas in
+        # it.
+        if ';' in target:
+            return []
+
+        # Split at the comma pep8 points at, repeating indent + 'import'.
+        indentation = re.split(pattern=r'\bimport\b',
+                               string=target, maxsplit=1)[0]
+        fixed = (target[:offset].rstrip('\t ,') + self.newline +
+                 indentation + 'import ' + target[offset:].lstrip('\t ,'))
+        self.source[line_index] = fixed
+
+    def fix_e501(self, result):
+        """Try to make lines fit within --max-line-length characters."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        if target.lstrip().startswith('#'):
+            # Shorten comment if it is the last comment line.
+            try:
+                if self.source[line_index + 1].lstrip().startswith('#'):
+                    return []
+            except IndexError:
+                pass
+
+            # Wrap commented lines.
+            fixed = shorten_comment(
+                line=target,
+                newline=self.newline,
+                max_line_length=self.options.max_line_length)
+            if fixed == self.source[line_index]:
+                return []
+            else:
+                self.source[line_index] = fixed
+                return
+
+        indent = _get_indentation(target)
+        source = target[len(indent):]
+        assert source.lstrip() == source
+        sio = StringIO(source)
+
+        # Check for multiline string.
+        try:
+            tokens = list(tokenize.generate_tokens(sio.readline))
+        except (tokenize.TokenError, IndentationError):
+            # Tokenizing failed (likely a multiline construct); try the
+            # dedicated multi-line breaker instead.
+            multi_line_candidate = break_multi_line(
+                target, newline=self.newline,
+                indent_word=self.indent_word)
+
+            if multi_line_candidate:
+                self.source[line_index] = multi_line_candidate
+                return
+            else:
+                return []
+
+        candidates = shorten_line(
+            tokens, source, indent,
+            self.indent_word, newline=self.newline,
+            aggressive=self.options.aggressive)
+
+        # Rank candidates; the lowest-ranked acceptable one wins.
+        candidates = list(sorted(
+            set(candidates),
+            key=lambda x: line_shortening_rank(x,
+                                               self.newline,
+                                               self.indent_word)))
+
+        if self.options.verbose >= 4:
+            print(('-' * 79 + '\n').join([''] + candidates + ['']),
+                  file=sys.stderr)
+
+        for _candidate in candidates:
+            if _candidate is None:
+                continue
+
+            if _candidate == target:
+                continue
+
+            # Accept only candidates that are actually shorter.
+            if (get_longest_length(_candidate, self.newline) >=
+                    get_longest_length(target, self.newline)):
+                continue
+
+            self.source[line_index] = _candidate
+            return
+
+        return []
+
+    def fix_e502(self, result):
+        """Remove extraneous escape of newline."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        # Drop the trailing backslash (plus surrounding whitespace).
+        self.source[line_index] = target.rstrip('\n\r \t\\') + self.newline
+
+    def fix_e701(self, result):
+        """Put colon-separated compound statement on separate lines."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        c = result['column']
+
+        # Break after the colon; indent the body one level deeper.
+        fixed_source = (target[:c] + self.newline +
+                        _get_indentation(target) + self.indent_word +
+                        target[c:].lstrip('\n\r \t\\'))
+        self.source[result['line'] - 1] = fixed_source
+
+    def fix_e702(self, result, logical):
+        """Put semicolon-separated compound statement on separate lines."""
+        logical_lines = logical[2]
+
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        if target.rstrip().endswith('\\'):
+            # Normalize '1; \\\n2' into '1; 2'.
+            self.source[line_index] = target.rstrip('\n \r\t\\')
+            self.source[line_index + 1] = self.source[line_index + 1].lstrip()
+            return [line_index + 1, line_index + 2]
+
+        if target.rstrip().endswith(';'):
+            # Trailing semicolon: simply drop it.
+            self.source[line_index] = target.rstrip('\n \r\t;') + self.newline
+            return
+
+        # Split at the semicolon, keeping the logical line's indentation.
+        offset = result['column'] - 1
+        first = target[:offset].rstrip(';').rstrip()
+        second = (_get_indentation(logical_lines[0]) +
+                  target[offset:].lstrip(';').lstrip())
+
+        self.source[line_index] = first + self.newline + second
+
+    def fix_e711(self, result):
+        """Fix comparison with None."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column'] - 1
+
+        # The comparison operator is the two characters at the offset.
+        right_offset = offset + 2
+        if right_offset >= len(target):
+            return []
+
+        left = target[:offset].rstrip()
+        center = target[offset:right_offset]
+        right = target[right_offset:].lstrip()
+
+        if not right.startswith('None'):
+            return []
+
+        if center.strip() == '==':
+            new_center = 'is'
+        elif center.strip() == '!=':
+            new_center = 'is not'
+        else:
+            return []
+
+        self.source[line_index] = ' '.join([left, new_center, right])
+
+    def fix_e712(self, result):
+        """Fix comparison with boolean."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column'] - 1
+
+        # The comparison operator is the two characters at the offset.
+        right_offset = offset + 2
+        if right_offset >= len(target):
+            return []
+
+        left = target[:offset].rstrip()
+        center = target[offset:right_offset]
+        right = target[right_offset:].lstrip()
+
+        # Handle simple cases only.
+        new_right = None
+        if center.strip() == '==':
+            if re.match(r'\bTrue\b', right):
+                new_right = re.sub(r'\bTrue\b *', '', right, count=1)
+        elif center.strip() == '!=':
+            if re.match(r'\bFalse\b', right):
+                new_right = re.sub(r'\bFalse\b *', '', right, count=1)
+
+        if new_right is None:
+            return []
+
+        # Keep a separating space when the remainder starts with a word.
+        if new_right[0].isalnum():
+            new_right = ' ' + new_right
+
+        self.source[line_index] = left + new_right
+
+    def fix_e721(self, _):
+        """Switch to use isinstance()."""
+        # Delegates to lib2to3's 'idioms' fixer.
+        return self.refactor('idioms')
+
+    def fix_w291(self, result):
+        """Remove trailing whitespace."""
+        fixed_line = self.source[result['line'] - 1].rstrip()
+        # Re-append the file's newline style after stripping.
+        self.source[result['line'] - 1] = '%s%s' % (fixed_line, self.newline)
+
+    def fix_w293(self, result):
+        """Remove trailing whitespace on blank line."""
+        # pep8 only reports W293 for whitespace-only lines.
+        assert not self.source[result['line'] - 1].strip()
+        self.source[result['line'] - 1] = self.newline
+
+    def fix_w391(self, _):
+        """Remove trailing blank lines."""
+        # Count blank lines at the end of the file.
+        blank_count = 0
+        for line in reversed(self.source):
+            line = line.rstrip()
+            if line:
+                break
+            else:
+                blank_count += 1
+
+        original_length = len(self.source)
+        self.source = self.source[:original_length - blank_count]
+        # Conservatively report every line as touched.
+        return range(1, 1 + original_length)
+
+    def refactor(self, fixer_name, ignore=None):
+        """Return refactored code using lib2to3.
+
+        Skip if ignore string is produced in the refactored code.
+
+        """
+        from lib2to3 import pgen2
+        try:
+            new_text = refactor_with_2to3(''.join(self.source),
+                                          fixer_name=fixer_name)
+        except (pgen2.parse.ParseError,
+                UnicodeDecodeError, UnicodeEncodeError):
+            return []
+
+        original = unicode().join(self.source).strip()
+        if original == new_text.strip():
+            return []
+        else:
+            if ignore:
+                # e.g. 'raise' fixer: skip if it would add with_traceback.
+                if ignore in new_text and ignore not in ''.join(self.source):
+                    return []
+            original_length = len(self.source)
+            # Whole file becomes one "line"; report all lines as changed.
+            self.source = [new_text]
+            return range(1, 1 + original_length)
+
+    def fix_w601(self, _):
+        """Replace the {}.has_key() form with 'in'."""
+        # Delegates to lib2to3's 'has_key' fixer.
+        return self.refactor('has_key')
+
+    def fix_w602(self, _):
+        """Fix deprecated form of raising exception."""
+        # Skip results where the fixer would introduce with_traceback.
+        return self.refactor('raise',
+                             ignore='with_traceback')
+
+    def fix_w603(self, _):
+        """Replace <> with !=."""
+        # Delegates to lib2to3's 'ne' fixer.
+        return self.refactor('ne')
+
+    def fix_w604(self, _):
+        """Replace backticks with repr()."""
+        # Delegates to lib2to3's 'repr' fixer.
+        return self.refactor('repr')
+
+
def find_newline(source):
    """Return the dominant line terminator in *source* (a list of lines).

    Ties resolve in the order LF, CRLF, CR; a source with no recognizable
    endings defaults to LF.
    """
    counts = {'\r': 0, '\n': 0, '\r\n': 0}
    for line in source:
        if line.endswith('\r\n'):
            counts['\r\n'] += 1
        elif line.endswith('\r'):
            counts['\r'] += 1
        elif line.endswith('\n'):
            counts['\n'] += 1

    winner = max(counts.values())
    if winner == counts['\n']:
        return '\n'
    if winner == counts['\r\n']:
        return '\r\n'
    if winner == counts['\r']:
        return '\r'
    return '\n'
+
+
+def _get_indentword(source):
+ """Return indentation type."""
+ sio = StringIO(source)
+ indent_word = ' ' # Default in case source has no indentation
+ try:
+ for t in tokenize.generate_tokens(sio.readline):
+ if t[0] == token.INDENT:
+ indent_word = t[1]
+ break
+ except (tokenize.TokenError, IndentationError):
+ pass
+ return indent_word
+
+
+def _get_indentation(line):
+ """Return leading whitespace."""
+ if line.strip():
+ non_whitespace_index = len(line) - len(line.lstrip())
+ return line[:non_whitespace_index]
+ else:
+ return ''
+
+
+def _get_difftext(old, new, filename):
+ diff = difflib.unified_diff(
+ old, new,
+ 'original/' + filename,
+ 'fixed/' + filename)
+ return ''.join(diff)
+
+
+def _priority_key(pep8_result):
+ """Key for sorting PEP8 results.
+
+ Global fixes should be done first. This is important for things
+ like indentation.
+
+ """
+ priority = ['e101', 'e111', 'w191', # Global fixes
+ 'e701', # Fix multiline colon-based before semicolon based
+ 'e702', # Break multiline statements early
+ 'e225', 'e231', # things that make lines longer
+ 'e201', # Remove extraneous whitespace before breaking lines
+ 'e501', # before we break lines
+ ]
+ key = pep8_result['id'].lower()
+ if key in priority:
+ return priority.index(key)
+ else:
+ # Lowest priority
+ return len(priority)
+
+
def shorten_line(tokens, source, indentation, indent_word, newline,
                 aggressive=False):
    """Yield candidate reformattings that break *source* at operators.

    Always yields the plain operator-split candidates; in aggressive
    mode additionally tries splitting at every token group listed in
    SHORTEN_OPERATOR_GROUPS.
    """
    for candidate in _shorten_line(tokens=tokens,
                                   source=source,
                                   indentation=indentation,
                                   indent_word=indent_word,
                                   newline=newline,
                                   aggressive=aggressive):
        yield candidate

    if not aggressive:
        return

    for key_token_strings in SHORTEN_OPERATOR_GROUPS:
        shortened = _shorten_line_at_tokens(
            tokens=tokens,
            source=source,
            indentation=indentation,
            indent_word=indent_word,
            newline=newline,
            key_token_strings=key_token_strings,
            aggressive=aggressive)

        # Skip failed or no-op candidates.
        if shortened is not None and shortened != source:
            yield shortened
+
+
+def _shorten_line(tokens, source, indentation, indent_word, newline,
+                  aggressive=False):
+    """Separate line at OPERATOR.
+
+    Multiple candidates will be yielded.
+
+    """
+    for tkn in tokens:
+        # Don't break on '=' after keyword as this violates PEP 8.
+        if token.OP == tkn[0] and tkn[1] != '=':
+            assert tkn[0] != token.INDENT
+
+            # Split right after the operator token.
+            offset = tkn[2][1] + 1
+            first = source[:offset]
+
+            # Choose the continuation indent: one level deeper, or
+            # visually aligned just past the first '('.
+            second_indent = indentation
+            if first.rstrip().endswith('('):
+                second_indent += indent_word
+            elif '(' in first:
+                second_indent += ' ' * (1 + first.find('('))
+            else:
+                second_indent += indent_word
+
+            second = (second_indent + source[offset:].lstrip())
+            if not second.strip():
+                continue
+
+            # Do not begin a line with a comma
+            if second.lstrip().startswith(','):
+                continue
+            # Do end a line with a dot
+            if first.rstrip().endswith('.'):
+                continue
+            # Arithmetic operators need an explicit backslash continuation.
+            if tkn[1] in '+-*/':
+                fixed = first + ' \\' + newline + second
+            else:
+                fixed = first + newline + second
+
+            # Only fix if syntax is okay.
+            if check_syntax(normalize_multiline(fixed)
+                            if aggressive else fixed):
+                yield indentation + fixed
+
+
+def _shorten_line_at_tokens(tokens, source, indentation, indent_word, newline,
+                            key_token_strings, aggressive):
+    """Separate line by breaking at tokens in key_token_strings.
+
+    This will always break the line at the first parenthesis.
+
+    """
+    # Collect the column offsets where the line should be split.
+    offsets = []
+    first_paren = True
+    for tkn in tokens:
+        token_type = tkn[0]
+        token_string = tkn[1]
+        next_offset = tkn[2][1] + 1
+
+        assert token_type != token.INDENT
+
+        if token_string in key_token_strings or (first_paren and
+                                                 token_string == '('):
+            # Don't split right before newline.
+            if next_offset < len(source) - 1:
+                offsets.append(next_offset)
+
+            if token_string == '(':
+                first_paren = False
+
+    # Reassemble the pieces, increasing the indent after each segment
+    # that ends with an opening bracket.
+    current_indent = None
+    fixed = None
+    for line in split_at_offsets(source, offsets):
+        if fixed:
+            fixed += newline + current_indent + line
+
+            for symbol in '([{':
+                if line.endswith(symbol):
+                    current_indent += indent_word
+        else:
+            # First line.
+            fixed = line
+            assert not current_indent
+            current_indent = indent_word
+
+    assert fixed is not None
+
+    if check_syntax(normalize_multiline(fixed)
+                    if aggressive > 1 else fixed):
+        return indentation + fixed
+    else:
+        return None
+
+
def normalize_multiline(line):
    """Rewrite a line fragment so it parses on its own.

    This is for purposes of checking syntax only: a dict-entry fragment
    like ``'key': value`` is wrapped in braces, and a ``def ...:`` header
    is reduced to its call expression, since neither is valid Python in
    isolation.
    """
    for quote in ('\'', '"'):
        entry_re = r'^{q}[^{q}]*{q}\s*:\s*'.format(q=quote)
        if re.match(entry_re, line):
            wrapped = line if line.strip().endswith('}') else line + '}'
            return '{' + wrapped

    is_def_header = line.startswith('def ') and line.rstrip().endswith(':')
    if is_def_header and ':' not in line.split():
        # Keep just the signature expression, e.g. 'def f(x):' -> 'f(x)'.
        return line[len('def'):].strip().rstrip(':')

    return line
+
+
def fix_whitespace(line, offset, replacement):
    """Replace the whitespace run around *offset* with *replacement*.

    Escaped newlines count as whitespace here.  Lines whose right-hand
    side is a comment are returned untouched.
    """
    head = line[:offset].rstrip('\n\r \t\\')
    tail = line[offset:].lstrip('\n\r \t\\')
    # Never merge code into a trailing comment.
    return line if tail.startswith('#') else head + replacement + tail
+
+
+def _execute_pep8(pep8_options, source):
+    """Execute pep8 via python method calls."""
+    class QuietReport(pep8.BaseReport):
+
+        """Version of checker that does not print."""
+
+        def __init__(self, options):
+            super(QuietReport, self).__init__(options)
+            self.__full_error_results = []
+
+        def error(self, line_number, offset, text, _):
+            """Collect errors."""
+            code = super(QuietReport, self).error(line_number, offset, text, _)
+            if code:
+                # pep8 columns are 0-based; ours are 1-based.
+                self.__full_error_results.append(
+                    {'id': code,
+                     'line': line_number,
+                     'column': offset + 1,
+                     'info': text})
+
+        def full_error_results(self):
+            """Return error results in detail.
+
+            Results are in the form of a list of dictionaries. Each dictionary
+            contains 'id', 'line', 'column', and 'info'.
+
+            """
+            return self.__full_error_results
+
+    checker = pep8.Checker('', lines=source,
+                           reporter=QuietReport, **pep8_options)
+    checker.check_all()
+    return checker.report.full_error_results()
+
+
+class Reindenter(object):
+
+ """Reindents badly-indented code to uniformly use four-space indentation.
+
+ Released to the public domain, by Tim Peters, 03 October 2000.
+
+ """
+
+    def __init__(self, input_text, newline):
+        self.newline = newline
+
+        # Raw file lines.
+        self.raw = input_text
+        self.after = None
+
+        # Physical line numbers that sit inside multiline strings;
+        # those lines must never be re-indented.
+        self.string_content_line_numbers = multiline_string_lines(
+            ''.join(self.raw))
+
+        # File lines, rstripped & tab-expanded. Dummy at start is so
+        # that we can use tokenize's 1-based line numbering easily.
+        # Note that a line is all-blank iff it is a newline.
+        self.lines = []
+        for line_number, line in enumerate(self.raw, start=1):
+            # Do not modify if inside a multi-line string.
+            if line_number in self.string_content_line_numbers:
+                self.lines.append(line)
+            else:
+                # Only expand leading tabs.
+                self.lines.append(_get_indentation(line).expandtabs() +
+                                  line.strip() + newline)
+
+        self.lines.insert(0, None)
+        self.index = 1  # index into self.lines of next line
+
+ def run(self):
+ """Fix indentation and return modified line numbers.
+
+ Line numbers are indexed at 1.
+
+ """
+ try:
+ stats = reindent_stats(tokenize.generate_tokens(self.getline))
+ except (tokenize.TokenError, IndentationError):
+ return set()
+ # Remove trailing empty lines.
+ lines = self.lines
+ while lines and lines[-1] == self.newline:
+ lines.pop()
+ # Sentinel.
+ stats.append((len(lines), 0))
+ # Map count of leading spaces to # we want.
+ have2want = {}
+ # Program after transformation.
+ after = self.after = []
+ # Copy over initial empty lines -- there's nothing to do until
+ # we see a line with *something* on it.
+ i = stats[0][0]
+ after.extend(lines[1:i])
+ for i in range(len(stats) - 1):
+ thisstmt, thislevel = stats[i]
+ nextstmt = stats[i + 1][0]
+ have = _leading_space_count(lines[thisstmt])
+ want = thislevel * 4
+ if want < 0:
+ # A comment line.
+ if have:
+ # An indented comment line. If we saw the same
+ # indentation before, reuse what it most recently
+ # mapped to.
+ want = have2want.get(have, - 1)
+ if want < 0:
+ # Then it probably belongs to the next real stmt.
+ for j in range(i + 1, len(stats) - 1):
+ jline, jlevel = stats[j]
+ if jlevel >= 0:
+ if have == _leading_space_count(lines[jline]):
+ want = jlevel * 4
+ break
+ if want < 0: # Maybe it's a hanging
+ # comment like this one,
+ # in which case we should shift it like its base
+ # line got shifted.
+ for j in range(i - 1, -1, -1):
+ jline, jlevel = stats[j]
+ if jlevel >= 0:
+ want = (have + _leading_space_count(
+ after[jline - 1]) -
+ _leading_space_count(lines[jline]))
+ break
+ if want < 0:
+ # Still no luck -- leave it alone.
+ want = have
+ else:
+ want = 0
+ assert want >= 0
+ have2want[have] = want
+ diff = want - have
+ if diff == 0 or have == 0:
+ after.extend(lines[thisstmt:nextstmt])
+ else:
+ for line_number, line in enumerate(lines[thisstmt:nextstmt],
+ start=thisstmt):
+ if line_number in self.string_content_line_numbers:
+ after.append(line)
+ elif diff > 0:
+ if line == self.newline:
+ after.append(line)
+ else:
+ after.append(' ' * diff + line)
+ else:
+ remove = min(_leading_space_count(line), -diff)
+ after.append(line[remove:])
+
+ if self.raw == self.after:
+ return set()
+ else:
+ return (set(range(1, 1 + len(self.raw))) -
+ self.string_content_line_numbers)
+
+ def fixed_lines(self):
+ return self.after
+
+ def getline(self):
+ """Line-getter for tokenize."""
+ if self.index >= len(self.lines):
+ line = ''
+ else:
+ line = self.lines[self.index]
+ self.index += 1
+ return line
+
+
def reindent_stats(tokens):
    """Return list of (lineno, indentlevel) pairs.

    One for each stmt and comment line. indentlevel is -1 for comment lines,
    as a signal that tokenize doesn't know what to do about them; indeed,
    they're our headache!

    """
    looking_for_statement = True  # does the next token begin a fresh stmt?
    indent_level = 0              # current indent level
    pairs = []

    for token_type, _text, (start_line, _col), _end, logical_line in tokens:
        if token_type == tokenize.NEWLINE:
            # A program statement, or ENDMARKER, will eventually follow,
            # after some (possibly empty) run of tokens of the form
            # (NL | COMMENT)* (INDENT | DEDENT+)?
            looking_for_statement = True
        elif token_type == tokenize.INDENT:
            looking_for_statement = True
            indent_level += 1
        elif token_type == tokenize.DEDENT:
            looking_for_statement = True
            indent_level -= 1
        elif token_type == tokenize.COMMENT:
            if looking_for_statement:
                pairs.append((start_line, -1))
                # Still looking for a new stmt, so leave the flag alone.
        elif token_type == tokenize.NL:
            pass
        elif looking_for_statement:
            # This is the first "real token" following a NEWLINE, so it
            # must be the first token of the next program statement, or an
            # ENDMARKER.
            looking_for_statement = False
            if logical_line:  # Not endmarker.
                pairs.append((start_line, indent_level))

    return pairs
+
+
class Wrapper(object):

    """Class for functions relating to continuation lines and line folding.

    Each instance operates on a single logical line.

    """

    # Token types that never contribute text to the logical line.
    SKIP_TOKENS = frozenset([
        tokenize.COMMENT, tokenize.NL, tokenize.INDENT,
        tokenize.DEDENT, tokenize.NEWLINE, tokenize.ENDMARKER
    ])

    def __init__(self, physical_lines):
        # physical_lines: the physical lines making up one logical line.
        self.lines = physical_lines
        self.tokens = []
        self.rel_indent = None
        sio = StringIO(''.join(physical_lines))
        for t in tokenize.generate_tokens(sio.readline):
            # Drop leading trivia; keep everything else except ENDMARKER.
            if not len(self.tokens) and t[0] in self.SKIP_TOKENS:
                continue
            if t[0] != tokenize.ENDMARKER:
                self.tokens.append(t)

        self.logical_line = self.build_tokens_logical(self.tokens)

    def build_tokens_logical(self, tokens):
        """Build a logical line from a list of tokens.

        Return the logical line and a list of (offset, token) tuples. Does
        not mute strings like the version in pep8.py.

        """
        # from pep8.py with minor modifications
        logical = []
        previous = None
        for t in tokens:
            token_type, text = t[0:2]
            if token_type in self.SKIP_TOKENS:
                continue
            if previous:
                end_line, end = previous[3]
                start_line, start = t[2]
                if end_line != start_line:  # different row
                    prev_text = self.lines[end_line - 1][end - 1]
                    if prev_text == ',' or (prev_text not in '{[('
                                            and text not in '}])'):
                        logical.append(' ')
                elif end != start:  # different column
                    fill = self.lines[end_line - 1][end:start]
                    logical.append(fill)
            logical.append(text)
            previous = t
        logical_line = ''.join(logical)
        assert logical_line.lstrip() == logical_line
        assert logical_line.rstrip() == logical_line
        return logical_line

    def pep8_expected(self):
        """Replicate logic in pep8.py, to know what level to indent things to.

        Return a list of lists; each list represents valid indent levels for
        the line in question, relative from the initial indent. However, the
        first entry is the indent level which was expected.

        """
        # What follows is an adjusted version of
        # pep8.py:continuation_line_indentation. All of the comments have been
        # stripped and the 'yield' statements replaced with 'pass'.
        tokens = self.tokens
        if not tokens:
            return

        first_row = tokens[0][2][0]
        nrows = 1 + tokens[-1][2][0] - first_row

        # here are the return values
        # NOTE(review): '[list()] * nrows' aliases ONE list into every slot,
        # so the append below lands in every row that is never reassigned.
        # '[[] for _ in range(nrows)]' is probably intended -- confirm
        # before changing, as callers may rely on the current behavior.
        valid_indents = [list()] * nrows
        indent_level = tokens[0][2][1]
        valid_indents[0].append(indent_level)

        if nrows == 1:
            # bug, really.
            return valid_indents

        indent_next = self.logical_line.endswith(':')

        row = depth = 0
        parens = [0] * nrows
        self.rel_indent = rel_indent = [0] * nrows
        indent = [indent_level]
        indent_chances = {}
        last_indent = (0, 0)
        last_token_multiline = None

        for token_type, text, start, end, _ in self.tokens:
            newline = row < start[0] - first_row
            if newline:
                row = start[0] - first_row
                newline = (not last_token_multiline and
                           token_type not in (tokenize.NL, tokenize.NEWLINE))

            if newline:
                # This is where the differences start. Instead of looking at
                # the line and determining whether the observed indent matches
                # our expectations, we decide which type of indentation is in
                # use at the given indent level, and return the offset. This
                # algorithm is susceptible to "carried errors", but should
                # through repeated runs eventually solve indentation for
                # multi-line expressions less than PEP8_PASSES_MAX lines long.

                if depth:
                    for open_row in range(row - 1, -1, -1):
                        if parens[open_row]:
                            break
                else:
                    open_row = 0

                # That's all we get to work with. This code attempts to
                # "reverse" the below logic, and place into the valid indents
                # list
                vi = []
                add_second_chances = False
                if token_type == tokenize.OP and text in ']})':
                    # this line starts with a closing bracket, so it needs to
                    # be closed at the same indent as the opening one.
                    if indent[depth]:
                        # hanging indent
                        vi.append(indent[depth])
                    else:
                        # visual indent
                        vi.append(indent_level + rel_indent[open_row])
                elif depth and indent[depth]:
                    # visual indent was previously confirmed.
                    vi.append(indent[depth])
                    add_second_chances = True
                elif depth and True in indent_chances.values():
                    # visual indent happened before, so stick to
                    # visual indent this time.
                    if depth > 1 and indent[depth - 1]:
                        vi.append(indent[depth - 1])
                    else:
                        # stupid fallback
                        vi.append(indent_level + 4)
                    add_second_chances = True
                elif not depth:
                    vi.append(indent_level + 4)
                else:
                    # must be in hanging indent
                    hang = rel_indent[open_row] + 4
                    vi.append(indent_level + hang)

                # about the best we can do without look-ahead
                if (indent_next and vi[0] == indent_level + 4 and
                        nrows == row + 1):
                    vi[0] += 4

                if add_second_chances:
                    # visual indenters like to line things up.
                    min_indent = vi[0]
                    for col, what in indent_chances.items():
                        if col > min_indent and (
                            what is True or
                            (what == str and token_type == tokenize.STRING) or
                            (what == text and token_type == tokenize.OP)
                        ):
                            vi.append(col)
                    vi = sorted(vi)

                valid_indents[row] = vi

                # Returning to original continuation_line_indentation() from
                # pep8.
                visual_indent = indent_chances.get(start[1])
                last_indent = start
                rel_indent[row] = start[1] - indent_level
                hang = rel_indent[row] - rel_indent[open_row]

                if token_type == tokenize.OP and text in ']})':
                    pass
                elif visual_indent is True:
                    if not indent[depth]:
                        indent[depth] = start[1]

            # line altered: comments shouldn't define a visual indent
            if parens[row] and not indent[depth] and token_type not in (
                tokenize.NL, tokenize.COMMENT
            ):
                indent[depth] = start[1]
                indent_chances[start[1]] = True
            elif token_type == tokenize.STRING or text in (
                'u', 'ur', 'b', 'br'
            ):
                indent_chances[start[1]] = str

            if token_type == tokenize.OP:
                if text in '([{':
                    depth += 1
                    indent.append(0)
                    parens[row] += 1
                elif text in ')]}' and depth > 0:
                    prev_indent = indent.pop() or last_indent[1]
                    for d in range(depth):
                        if indent[d] > prev_indent:
                            indent[d] = 0
                    for ind in list(indent_chances):
                        if ind >= prev_indent:
                            del indent_chances[ind]
                    depth -= 1
                    if depth and indent[depth]:  # modified
                        indent_chances[indent[depth]] = True
                    for idx in range(row, -1, -1):
                        if parens[idx]:
                            parens[idx] -= 1
                            break
                assert len(indent) == depth + 1
                if start[1] not in indent_chances:
                    indent_chances[start[1]] = text

            last_token_multiline = (start[0] != end[0])

        return valid_indents
+
+
+def _leading_space_count(line):
+ """Return number of leading spaces in line."""
+ i = 0
+ while i < len(line) and line[i] == ' ':
+ i += 1
+ return i
+
+
def refactor_with_2to3(source_text, fixer_name):
    """Use lib2to3 to refactor the source.

    Return the refactored source code.

    """
    from lib2to3 import refactor
    fixers = ['lib2to3.fixes.fix_' + fixer_name]
    tool = refactor.RefactoringTool(
        fixer_names=fixers,
        explicit=fixers)
    # NOTE(review): `unicode` is Python 2 only; this assumes a compatibility
    # alias is defined earlier in the file -- confirm.
    return unicode(tool.refactor_string(source_text, name=''))
+
+
def break_multi_line(source_text, newline, indent_word):
    """Break first line of multi-line code.

    Return None if a break is not possible.

    """
    indentation = _get_indentation(source_text)
    stripped = source_text.lstrip()

    # Handle special case only: the line must end with a comma.
    if not source_text.rstrip().endswith(','):
        return None

    for symbol in '([{':
        # Only valid if symbol is present and not on a line by itself.
        if symbol not in source_text or stripped.startswith(symbol):
            continue

        index = 1 + source_text.find(symbol)

        # Breaking here would not actually shorten anything.
        if index <= len(indent_word) + len(indentation):
            continue

        if is_probably_inside_string_or_comment(source_text, index - 1):
            continue

        return (source_text[:index].rstrip() + newline +
                indentation + indent_word +
                source_text[index:].lstrip())

    return None
+
+
def is_probably_inside_string_or_comment(line, index):
    """Return True if index may be inside a string or comment."""
    # A quote or comment marker at or before ``index`` means the offset may
    # fall inside a string literal or a trailing comment.
    for marker in ('"', "'", '#'):
        position = line.find(marker)
        if 0 <= position <= index:
            return True
    return False
+
+
def check_syntax(code):
    """Return compiled code object if syntax is okay, else False.

    Note: on success the (truthy) code object itself is returned, not
    literally True.
    """
    try:
        return compile(code, '<string>', 'exec')
    except (SyntaxError, TypeError, UnicodeDecodeError):
        return False
+
+
def filter_results(source, results, aggressive=False):
    """Filter out spurious reports from pep8.

    If aggressive is True, we allow possibly unsafe fixes (E711, E712).

    Yields the surviving result dicts.
    """
    # Lines inside multi-line strings, with and without docstrings.
    non_docstring_string_line_numbers = multiline_string_lines(
        source, include_docstrings=False)
    all_string_line_numbers = multiline_string_lines(
        source, include_docstrings=True)

    # Pad with None so results' 1-based line numbers index directly.
    split_source = [None] + source.splitlines()

    for r in results:
        issue_id = r['id'].lower()

        if r['line'] in non_docstring_string_line_numbers:
            # Indentation (E1xx), long-line, and tab reports inside string
            # literals are artifacts of the string's content.
            if issue_id.startswith('e1'):
                continue
            elif issue_id in ['e501', 'w191']:
                continue

        if r['line'] in all_string_line_numbers:
            if issue_id in ['e501']:
                continue

        # Filter out incorrect E101 reports when there are no tabs.
        # pep8 will complain about this even if the tab indentation found
        # elsewhere is in a multi-line string.
        if issue_id == 'e101' and '\t' not in split_source[r['line']]:
            continue

        # Possibly unsafe comparisons-with-None/True fixes are opt-in.
        if issue_id in ['e711', 'e712'] and not aggressive:
            continue

        yield r
+
+
def multiline_string_lines(source, include_docstrings=False):
    """Return line numbers that are within multiline strings.

    The line numbers are indexed at 1.

    Docstrings are ignored unless include_docstrings is True.
    (The old docstring claimed docstrings were always ignored, which
    contradicted the parameter.)
    """
    sio = StringIO(source)
    line_numbers = set()
    previous_token_type = ''
    try:
        for t in tokenize.generate_tokens(sio.readline):
            token_type = t[0]
            # Fixed: these two assignments were duplicated in the original.
            start_row = t[2][0]
            end_row = t[3][0]

            if token_type == tokenize.STRING and start_row != end_row:
                # A string token directly after INDENT is a docstring.
                if (include_docstrings or
                        previous_token_type != tokenize.INDENT):
                    # We increment by one since we want the contents of the
                    # string.
                    line_numbers |= set(range(1 + start_row, 1 + end_row))

            previous_token_type = token_type
    except (IndentationError, tokenize.TokenError):
        pass

    return line_numbers
+
+
def shorten_comment(line, newline, max_line_length):
    """Return trimmed or split long comment line."""
    assert len(line) > max_line_length
    line = line.rstrip()

    # PEP 8 recommends 72 characters for comment text.
    indentation = _get_indentation(line) + '# '
    max_line_length = min(max_line_length,
                          len(indentation) + 72)

    MIN_CHARACTER_REPEAT = 5
    if (len(line) - len(line.rstrip(line[-1])) >= MIN_CHARACTER_REPEAT and
            not line[-1].isalnum()):
        # Trim comments that end with things like ---------
        return line[:max_line_length] + newline
    elif re.match(r'\s*#+\s*\w+', line):
        # A comment with real words: re-wrap it at the (possibly reduced)
        # width, keeping the indentation and '# ' prefix on every line.
        import textwrap
        split_lines = textwrap.wrap(line.lstrip(' \t#'),
                                    initial_indent=indentation,
                                    subsequent_indent=indentation,
                                    width=max_line_length,
                                    break_long_words=False)
        return newline.join(split_lines) + newline
    else:
        # Anything else (e.g. shebang-like or pure punctuation) is left as-is.
        return line + newline
+
+
def format_block_comments(source):
    """Format block comments (insert the space in '#comment')."""
    if '#' not in source:
        # Optimization.
        return source

    # '#' characters inside multi-line strings must not be touched.
    string_line_numbers = multiline_string_lines(source,
                                                 include_docstrings=True)
    fixed_lines = []
    sio = StringIO(source)
    for (line_number, line) in enumerate(sio.readlines(), start=1):
        # Matches comments missing the space after '#', e.g. '#comment'.
        if (re.match(r'\s*#+\w+', line) and
                line_number not in string_line_numbers):
            fixed_lines.append(_get_indentation(line) +
                               '# ' +
                               line.lstrip().lstrip('#'))
        else:
            fixed_lines.append(line)

    return ''.join(fixed_lines)
+
+
def normalize_line_endings(lines):
    """Return fixed line endings.

    All lines will be modified to use the most common line ending.

    """
    newline = find_newline(lines)
    normalized = []
    for line in lines:
        normalized.append(line.rstrip('\n\r') + newline)
    return normalized
+
+
def mutual_startswith(a, b):
    """Return True if either string is a prefix of the other."""
    return any(x.startswith(y) for (x, y) in ((a, b), (b, a)))
+
+
def code_match(code, select, ignore):
    """Return True if code is not ignored and, when select is given, selected.

    An ignore match wins over a select match.
    """
    lowered = code.lower()

    if ignore:
        for ignored_code in (c.strip() for c in ignore):
            if mutual_startswith(lowered, ignored_code.lower()):
                return False

    if select:
        for selected_code in (c.strip() for c in select):
            if mutual_startswith(lowered, selected_code.lower()):
                return True
        return False

    return True
+
+
def fix_string(source, options=None):
    """Return fixed source code for a string of source."""
    if not options:
        # Fall back to autopep8's default command-line options.
        options = parse_args([''])[0]
    return fix_lines(StringIO(source).readlines(), options=options)
+
+
def fix_lines(source_lines, options, filename=''):
    """Return fixed source code."""
    # Normalize to the file's dominant line ending first.
    # NOTE(review): `unicode` is Python 2 only; assumes a compatibility
    # alias exists earlier in this file -- confirm.
    tmp_source = unicode().join(normalize_line_endings(source_lines))

    # Keep a history to break out of cycles.
    previous_hashes = set([hash(tmp_source)])

    fixed_source = tmp_source
    if code_match('e26', select=options.select, ignore=options.ignore):
        fixed_source = format_block_comments(fixed_source)

    # Run pep8 fixes repeatedly until the output stops changing (or starts
    # cycling), up to pep8_passes extra iterations.
    for _ in range(-1, options.pep8_passes):
        tmp_source = copy.copy(fixed_source)

        fix = FixPEP8(filename, options, contents=tmp_source)
        fixed_source = fix.fix()

        if hash(fixed_source) in previous_hashes:
            break
        else:
            previous_hashes.add(hash(fixed_source))

    return fixed_source
+
+
def fix_file(filename, options=None, output=None):
    """Fix one file: write to `output`, rewrite in place, or return text."""
    if not options:
        options = parse_args([filename])[0]

    original_source = read_from_filename(filename, readlines=True)

    fixed_source = original_source

    if options.in_place:
        # Detect the encoding before the file is rewritten below.
        encoding = detect_encoding(filename)

    fixed_source = fix_lines(fixed_source, options, filename=filename)

    if options.diff:
        new = StringIO(fixed_source).readlines()
        diff = _get_difftext(original_source, new, filename)
        if output:
            output.write(diff)
        else:
            # Bug fix: the original did `return output` here, but `output`
            # is always falsy on this branch, so the diff text was silently
            # discarded.  Return the diff itself.
            return diff
    elif options.in_place:
        fp = open_with_encoding(filename, encoding=encoding,
                                mode='w')
        fp.write(fixed_source)
        fp.close()
    else:
        if output:
            output.write(fixed_source)
        else:
            return fixed_source
+
+
def parse_args(args):
    """Parse command-line options.

    Returns the (options, args) pair produced by optparse after validating
    mutually-exclusive and dependent option combinations.
    """
    parser = OptionParser(usage='Usage: autopep8 [options] '
                                '[filename [filename ...]]'
                                '\nUse filename \'-\' for stdin.',
                          version='autopep8: %s' % __version__,
                          description=__doc__,
                          prog='autopep8')
    parser.add_option('-v', '--verbose', action='count', dest='verbose',
                      default=0,
                      help='print verbose messages; '
                           'multiple -v result in more verbose messages')
    parser.add_option('-d', '--diff', action='store_true', dest='diff',
                      help='print the diff for the fixed source')
    parser.add_option('-i', '--in-place', action='store_true',
                      help='make changes to files in place')
    parser.add_option('-r', '--recursive', action='store_true',
                      help='run recursively; must be used with --in-place or '
                           '--diff')
    parser.add_option('-j', '--jobs', type=int, metavar='n', default=1,
                      help='number of parallel jobs; '
                           'match CPU count if value is less than 1')
    parser.add_option('-p', '--pep8-passes', metavar='n',
                      default=100, type=int,
                      help='maximum number of additional pep8 passes'
                           ' (default: %default)')
    parser.add_option('-a', '--aggressive', action='count', default=0,
                      help='enable possibly unsafe changes (E711, E712); '
                           'multiple -a result in more aggressive changes')
    parser.add_option('--exclude', metavar='globs',
                      help='exclude files/directories that match these '
                           'comma-separated globs')
    parser.add_option('--list-fixes', action='store_true',
                      help='list codes for fixes; '
                           'used by --ignore and --select')
    parser.add_option('--ignore', metavar='errors', default='',
                      help='do not fix these errors/warnings '
                           '(default {0})'.format(pep8.DEFAULT_IGNORE))
    parser.add_option('--select', metavar='errors', default='',
                      help='fix only these errors/warnings (e.g. E4,W)')
    parser.add_option('--max-line-length', metavar='n', default=79, type=int,
                      help='set maximum allowed line length '
                           '(default: %default)')
    options, args = parser.parse_args(args)

    # Validate option combinations (each error exits via parser.error()).
    if not len(args) and not options.list_fixes:
        parser.error('incorrect number of arguments')

    if '-' in args and len(args) > 1:
        parser.error('cannot mix stdin and regular files')

    if len(args) > 1 and not (options.in_place or options.diff):
        parser.error('autopep8 only takes one filename as argument '
                     'unless the "--in-place" or "--diff" options are '
                     'used')

    if options.recursive and not (options.in_place or options.diff):
        parser.error('--recursive must be used with --in-place or --diff')

    if options.exclude and not options.recursive:
        parser.error('--exclude is only relevant when used with --recursive')

    if options.in_place and options.diff:
        parser.error('--in-place and --diff are mutually exclusive')

    if options.max_line_length <= 0:
        parser.error('--max-line-length must be greater than 0')

    if args == ['-'] and (options.in_place or options.recursive):
        parser.error('--in-place or --recursive cannot be used with '
                     'standard input')

    # Normalize the comma-separated string options into lists.
    if options.select:
        options.select = options.select.split(',')

    if options.ignore:
        options.ignore = options.ignore.split(',')
    elif not options.select and pep8.DEFAULT_IGNORE:
        options.ignore = pep8.DEFAULT_IGNORE.split(',')

    if options.exclude:
        options.exclude = options.exclude.split(',')
    else:
        options.exclude = []

    if options.jobs < 1:
        # Do not import multiprocessing globally in case it is not supported
        # on the platform.
        import multiprocessing
        options.jobs = multiprocessing.cpu_count()

    if options.jobs > 1 and not options.in_place:
        parser.error('parallel jobs requires --in-place')

    return options, args
+
+
def supported_fixes():
    """Yield pep8 error codes that autopep8 fixes.

    Each item we yield is a tuple of the code followed by its description.

    """
    # Introspect FixPEP8 for fix_e###/fix_w### methods; each method's
    # docstring doubles as the user-visible description.
    instance = FixPEP8(filename=None, options=None, contents='')
    for attribute in dir(instance):
        code = re.match('fix_([ew][0-9][0-9][0-9])', attribute)
        if code:
            yield (code.group(1).upper(),
                   re.sub(r'\s+', ' ',
                          getattr(instance, attribute).__doc__))
+
+
def line_shortening_rank(candidate, newline, indent_word):
    """Return rank of candidate.

    This is for sorting candidates; lower is better.  An empty candidate
    gets a prohibitive rank.
    """
    rank = 0
    if candidate:
        lines = candidate.split(newline)

        offset = 0
        first_line = lines[0].rstrip()
        # Bug fix: the original indexed first_line[-1] unconditionally and
        # raised IndexError when the first line was empty.
        if not first_line or first_line[-1] not in '([{':
            for symbol in '([{':
                offset = max(offset, 1 + lines[0].find(symbol))

        max_length = max([offset + len(x.strip()) for x in lines])
        rank += max_length
        rank += len(lines)

        # Fixed typo: was 'bad_staring_symbol'.
        bad_starting_symbol = {
            '(': ')',
            '[': ']',
            '{': '}'}.get(lines[0][-1] if lines[0] else None, None)

        if len(lines) > 1:
            # Penalize a continuation line that immediately closes the
            # bracket opened at the end of the first line.
            if (bad_starting_symbol and
                    lines[1].lstrip().startswith(bad_starting_symbol)):
                rank += 20
            else:
                rank -= 10

        if lines[0].endswith('(['):
            rank += 10

        for current_line in lines:
            for bad_start in ['.', '%', '+', '-', '/']:
                if current_line.startswith(bad_start):
                    rank += 100

            for ending in '([{':
                # Avoid lonely opening. They result in longer lines.
                if (current_line.endswith(ending) and
                        len(current_line.strip()) <= len(indent_word)):
                    rank += 100

            if current_line.endswith('%'):
                rank -= 20
    else:
        rank = 100000

    return max(0, rank)
+
+
def split_at_offsets(line, offsets):
    """Split line at offsets.

    Return list of strings.

    """
    pieces = []

    start = 0
    last_offset = 0
    for last_offset in sorted(offsets):
        # Only cut at offsets that fall inside the line and make progress.
        if last_offset < len(line) and start != last_offset:
            pieces.append(line[start:last_offset])
            start = last_offset

    # The tail intentionally begins at the last *requested* offset, matching
    # the original behavior even when that offset was past the end.
    pieces.append(line[last_offset:])

    return pieces
+
+
def get_longest_length(text, newline):
    """Return length of longest line."""
    return max(len(line) for line in text.split(newline))
+
+
class LineEndingWrapper(object):

    r"""Replace line endings to work with sys.stdout.

    It seems that sys.stdout expects only '\n' as the line ending, no matter
    the platform. Otherwise, we get repeated line endings.

    """

    def __init__(self, output):
        self.__output = output

    def write(self, s):
        # Normalize CRLF first so no lone CR survives the second pass.
        normalized = s.replace('\r\n', '\n').replace('\r', '\n')
        self.__output.write(normalized)

    def __getattr__(self, key):
        # Delegate every other attribute to the wrapped stream.
        return getattr(self.__output, key)
+
+
def temporary_file():
    """Return a named temporary file opened for text writing."""
    kwargs = {'mode': 'w'}
    try:
        return tempfile.NamedTemporaryFile(encoding='utf-8', **kwargs)
    except TypeError:
        # Python 2's NamedTemporaryFile has no ``encoding`` parameter.
        return tempfile.NamedTemporaryFile(**kwargs)
+
+
def match_file(filename, exclude):
    """Return True if file is okay for modifying/recursing."""
    if not filename.endswith('.py'):
        return False

    if filename.startswith('.'):
        return False

    # Honour user-supplied exclusion globs.
    return not any(fnmatch.fnmatch(filename, pattern)
                   for pattern in exclude)
+
+
def find_files(filenames, recursive, exclude):
    """Yield filenames, expanding directories when recursive is set."""
    while filenames:
        name = filenames.pop(0)
        if recursive and os.path.isdir(name):
            for root, directories, children in os.walk(name):
                filenames += [os.path.join(root, f) for f in children
                              if match_file(f, exclude)]
                # Bug fix: the original removed entries while iterating the
                # same list, which skips the element following each removal,
                # so consecutive hidden directories were not all pruned.
                # Rebinding in place is required for os.walk to honor it.
                directories[:] = [d for d in directories
                                  if not d.startswith('.')]
        else:
            yield name
+
+
def _fix_file(parameters):
    """Helper function for optionally running fix_file() in parallel."""
    name, options = parameters[0], parameters[1]
    if options.verbose:
        print('[file:{0}]'.format(name), file=sys.stderr)
    try:
        fix_file(*parameters)
    except IOError as error:
        print(str(error), file=sys.stderr)
+
+
def fix_multiple_files(filenames, options, output=None):
    """Fix list of files.

    Optionally fix files recursively.

    """
    filenames = find_files(filenames, options.recursive, options.exclude)
    if options.jobs > 1:
        # Parallel mode: workers rewrite files in place (no shared output).
        import multiprocessing
        tasks = [(name, options) for name in filenames]
        multiprocessing.Pool(options.jobs).map(_fix_file, tasks)
    else:
        for name in filenames:
            _fix_file((name, options, output))
+
+
def main():
    """Tool main.

    Returns a process exit status (or None on normal completion).
    """
    if not pep8:
        print('pep8 >= 1.3.2 required', file=sys.stderr)
        return 1

    try:
        options, args = parse_args(sys.argv[1:])

        if options.list_fixes:
            for code, description in supported_fixes():
                print('{code} - {description}'.format(
                    code=code, description=description))
            return 0

        if options.in_place or options.diff:
            filenames = list(set(args))
        else:
            assert len(args) == 1
            assert not options.recursive
            if args == ['-']:
                assert not options.in_place
                # Buffer stdin through a temporary file so it can be
                # processed like a normal filename.
                temp = temporary_file()
                temp.write(sys.stdin.read())
                temp.flush()
                filenames = [temp.name]
            else:
                filenames = args[:1]

        # Wrap stdout so output is UTF-8 on both Python 2 and 3.
        output = codecs.getwriter('utf-8')(sys.stdout.buffer
                                           if sys.version_info[0] >= 3
                                           else sys.stdout)

        output = LineEndingWrapper(output)

        fix_multiple_files(filenames, options, output)
    except KeyboardInterrupt:
        return 1
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/.vim/bundle/python-mode/pylibs/pylama/__init__.py b/.vim/bundle/python-mode/pylibs/pylama/__init__.py
@@ -0,0 +1,8 @@
+" pylama -- Python code audit. "
+
+version_info = (0, 3, 0)
+
+__version__ = version = '.'.join(map(str, version_info))
+__project__ = __name__
+__author__ = "Kirill Klenov <horneds@gmail.com>"
+__license__ = "GNU LGPL"
diff --git a/.vim/bundle/python-mode/pylibs/pylama/hook.py b/.vim/bundle/python-mode/pylibs/pylama/hook.py
@@ -0,0 +1,88 @@
+import sys
+from os import path as op, chmod
+from subprocess import Popen, PIPE
+from .main import logger
+
+
+try:
+ from configparser import ConfigParser # nolint
+except ImportError: # Python 2
+ from ConfigParser import ConfigParser
+
+
def run(command):
    """Run command; return (returncode, stdout lines, stderr lines)."""
    process = Popen(command.split(), stdout=PIPE, stderr=PIPE)
    stdout, stderr = process.communicate()
    out_lines = [line.strip() for line in stdout.splitlines()]
    err_lines = [line.strip() for line in stderr.splitlines()]
    return (process.returncode, out_lines, err_lines)
+
+
def git_hook():
    """Lint Python files staged for commit (git pre-commit hook)."""
    from .main import check_files
    # Paths staged for commit, relative to the repository root.
    _, files_modified, _ = run("git diff-index --cached --name-only HEAD")
    logger.setLevel('WARN')
    # NOTE(review): on Python 3 `run` yields bytes, so map(str, ...) would
    # produce "b'...'" strings that never end with '.py' -- verify this
    # path is Python 2 only.
    check_files([f for f in map(str, files_modified) if f.endswith('.py')])
+
+
def hg_hook(ui, repo, **kwargs):
    """Lint Python files changed since `node` (Mercurial commit hook)."""
    from .main import check_files
    seen = set()
    paths = []
    # NOTE(review): repo[kwargs['node']] is a changectx, not an int;
    # range() over it looks wrong -- repo[kwargs['node']].rev() is
    # probably intended.  Confirm against Mercurial's API.
    for rev in range(repo[kwargs['node']], len(repo)):
        for file_ in repo[rev].files():
            file_ = op.join(repo.root, file_)
            # Skip duplicates and files deleted by a later revision.
            if file_ in seen or not op.exists(file_):
                continue
            seen.add(file_)
            if file_.endswith('.py'):
                paths.append(file_)
    logger.setLevel('WARN')
    check_files(paths)
+
+
def install_git(path):
    """Write an executable pylama pre-commit hook into `path`."""
    hook_path = op.join(path, 'pre-commit')
    script = """#!/usr/bin/env python
import sys
from pylama.hook import git_hook

if __name__ == '__main__':
    sys.exit(git_hook())
"""
    with open(hook_path, 'w+') as fd:
        fd.write(script)
    # 484 == 0o744: owner rwx, group/other read.
    chmod(hook_path, 484)
    return True
+
+
def install_hg(path):
    """Register pylama commit/qrefresh hooks in `path`/hgrc.

    `path` is the repository's .hg directory.  Returns True on success.
    """
    hook = op.join(path, 'hgrc')
    if not op.isfile(hook):
        open(hook, 'w+').close()

    c = ConfigParser()
    # Bug fix: the original read and wrote `path` (the .hg *directory*)
    # instead of the hgrc file, which raised IOError.  Also uses read()
    # instead of the deprecated (removed in Python 3.12) readfp().
    c.read(hook)
    if not c.has_section('hooks'):
        c.add_section('hooks')

    if not c.has_option('hooks', 'commit'):
        c.set('hooks', 'commit', 'python:pylama.hooks.hg_hook')

    if not c.has_option('hooks', 'qrefresh'):
        c.set('hooks', 'qrefresh', 'python:pylama.hooks.hg_hook')

    with open(hook, 'w+') as fd:
        c.write(fd)
    return True
+
+
def install_hook(path):
    """Auto-detect the VCS under `path` and install the matching hook."""
    git = op.join(path, '.git', 'hooks')
    hg = op.join(path, '.hg')
    if op.exists(git):
        install_git(git) and logger.warn('Git hook has been installed.')  # nolint

    elif op.exists(hg):
        # Bug fix: the original called install_hg(git), writing the
        # Mercurial hook configuration to the nonexistent git path.
        install_hg(hg) and logger.warn('Mercurial hook has been installed.')  # nolint

    else:
        logger.error('VCS has not been found. Check your path.')
        sys.exit(1)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/main.py b/.vim/bundle/python-mode/pylibs/pylama/main.py
@@ -0,0 +1,205 @@
+import fnmatch
+import re
+import sys
+from os import getcwd, walk, path as op
+
+import logging
+from argparse import ArgumentParser
+
+from . import utils
+
+
+default_linters = 'pep8', 'pyflakes', 'mccabe'
+default_complexity = 10
+logger = logging.Logger('pylama')
+stream = logging.StreamHandler()
+logger.addHandler(stream)
+
+SKIP_PATTERN = '# nolint'
+
+
def run(path, ignore=None, select=None, linters=default_linters, **meta):  # nolint
    """Lint `path` with every requested linter; return sorted error dicts.

    Each error dict carries 'lnum', 'col', 'type', 'text' and 'filename'.
    """
    errors = []
    ignore = ignore and list(ignore) or []
    select = select and list(select) or []

    for lint in linters:
        try:
            linter = getattr(utils, lint)
        except AttributeError:
            logging.warning("Linter `{0}` not found.".format(lint))
            continue

        try:
            with open(path, "rU") as f:
                code = f.read() + '\n\n'
                params = parse_modeline(code)
                # skip[n] is True when line n ends with '# nolint'
                # (index 0 pads so line numbers start at 1).
                params['skip'] = [False]
                for line in code.split('\n'):
                    params['skip'].append(line.endswith(SKIP_PATTERN))

                if params.get('lint_ignore'):
                    ignore += params.get('lint_ignore').split(',')

                if params.get('lint_select'):
                    select += params.get('lint_select').split(',')

                if params.get('lint'):
                    result = linter(path, code=code, **meta)
                    for e in result:
                        # Normalize the linter's raw error dict.
                        e['col'] = e.get('col') or 0
                        e['lnum'] = e.get('lnum') or 0
                        e['type'] = e.get('type') or 'E'
                        e['text'] = "{0} [{1}]".format((e.get(
                            'text') or '').strip()
                            .replace("'", "\"").split('\n')[0], lint)
                        e['filename'] = path or ''
                        if not params['skip'][e['lnum']]:
                            errors.append(e)

        except IOError as e:
            errors.append(dict(
                lnum=0,
                type='E',
                col=0,
                text=str(e)
            ))
        except SyntaxError as e:
            errors.append(dict(
                lnum=e.lineno or 0,
                type='E',
                col=e.offset or 0,
                text=e.args[0]
            ))
            # A syntax error makes running further linters pointless.
            break

        except Exception as e:
            import traceback
            logging.error(traceback.format_exc())

    # NOTE(review): despite its name, _ignore_error returns True for
    # errors that should be KEPT.
    errors = [e for e in errors if _ignore_error(e, select, ignore)]
    return sorted(errors, key=lambda x: x['lnum'])
+
+
+def _ignore_error(e, select, ignore):
+ for s in select:
+ if e['text'].startswith(s):
+ return True
+ for i in ignore:
+ if e['text'].startswith(i):
+ return False
+ return True
+
+
def shell():
    """Command-line entry point: parse arguments and lint the target path."""
    parser = ArgumentParser(description="Code audit tool for python.")
    parser.add_argument("path", nargs='?', default=getcwd(),
                        help="Path on file or directory.")
    parser.add_argument(
        "--verbose", "-v", action='store_true', help="Verbose mode.")

    # 'a,b,,a' -> ['a', 'b'] (deduplicated, empties dropped).
    split_csp_list = lambda s: list(set(i for i in s.split(',') if i))

    parser.add_argument(
        "--format", "-f", default='pep8', choices=['pep8', 'pylint'],
        help="Error format.")
    parser.add_argument(
        "--select", "-s", default='',
        type=split_csp_list,
        help="Select errors and warnings. (comma-separated)")
    parser.add_argument(
        "--linters", "-l", default=','.join(default_linters),
        type=split_csp_list,
        help="Select linters. (comma-separated)")
    parser.add_argument(
        "--ignore", "-i", default='',
        type=split_csp_list,
        help="Ignore errors and warnings. (comma-separated)")
    parser.add_argument(
        "--skip", default='',
        type=lambda s: [re.compile(fnmatch.translate(p))
                        for p in s.split(',')],
        help="Skip files by masks (comma-separated, Ex. */messages.py)")
    parser.add_argument("--complexity", "-c", default=default_complexity,
                        type=int, help="Set mccabe complexity.")
    parser.add_argument("--report", "-r", help="Filename for report.")
    parser.add_argument("--hook", action="store_true",
                        help="Install Git (Mercurial) hook.")
    args = parser.parse_args()

    # Setup logger
    logger.setLevel(logging.INFO if args.verbose else logging.WARN)
    if args.report:
        # Redirect all output to the report file instead of stderr.
        logger.removeHandler(stream)
        logger.addHandler(logging.FileHandler(args.report, mode='w'))

    if args.hook:
        from .hook import install_hook
        return install_hook(args.path)

    paths = [args.path]

    if op.isdir(args.path):
        # Expand a directory argument to all .py files beneath it.
        paths = []
        for root, _, files in walk(args.path):
            paths += [op.join(root, f) for f in files if f.endswith('.py')]

    check_files(
        paths,
        rootpath=args.path,
        skip=args.skip,
        frmt=args.format,
        ignore=args.ignore,
        select=args.select,
        linters=args.linters,
        complexity=args.complexity,
    )
+
+
def check_files(paths, rootpath=None, skip=None, frmt="pep8",
                select=None, ignore=None, linters=default_linters,
                complexity=default_complexity):
    """Check each path, log the errors, and exit 1 if ANY path had errors.

    The original rebound `errors` on every loop iteration, so the process
    exit status reflected only the last file checked.
    """
    rootpath = rootpath or getcwd()
    pattern = "%(rel)s:%(lnum)s:%(col)s: %(text)s"
    if frmt == 'pylint':
        pattern = "%(rel)s:%(lnum)s: [%(type)s] %(text)s"

    any_errors = False
    for path in skip_paths(skip, paths):
        logger.info("Parse file: %s" % path)
        errors = run(path, ignore=ignore, select=select,
                     linters=linters, complexity=complexity)
        any_errors = any_errors or bool(errors)
        for error in errors:
            try:
                error['rel'] = op.relpath(
                    error['filename'], op.dirname(rootpath))
                error['col'] = error.get('col', 1)
                # logging interpolates %(key)s from the dict argument.
                logger.warning(pattern, error)
            except KeyError:
                continue

    sys.exit(int(any_errors))
+
+
+MODERE = re.compile(
+ r'^\s*#\s+(?:pymode\:)?((?:lint[\w_]*=[^:\n\s]+:?)+)', re.I | re.M)
+
+
def skip_paths(skip, paths):
    """Yield paths that do not match any compiled skip pattern."""
    if not skip:
        for path in paths:
            yield path
        return
    for path in paths:
        if not any(pattern.match(path) for pattern in skip):
            yield path
+
+
def parse_modeline(code):
    """Parse a pymode modeline in `code` and return its lint parameters.

    Always returns a dict with an int 'lint' key (default 1).
    """
    match = MODERE.search(code)
    params = {'lint': 1}
    if match:
        params = dict(option.split('=')
                      for option in match.group(1).split(':'))
    params['lint'] = int(params.get('lint', 1))
    return params
+
+
+if __name__ == '__main__':
+ shell()
diff --git a/.vim/bundle/python-mode/pylibs/pylama/mccabe.py b/.vim/bundle/python-mode/pylibs/pylama/mccabe.py
@@ -0,0 +1,312 @@
+""" Meager code path measurement tool.
+ Ned Batchelder
+ http://nedbatchelder.com/blog/200803/python_code_complexity_microtool.html
+ MIT License.
+"""
+from __future__ import with_statement
+
+import sys
+
+import ast
+import optparse
+from ast import iter_child_nodes
+from collections import defaultdict
+
+
+__version__ = '0.2'
+
+
class ASTVisitor(object):
    """Performs a depth-first walk of the AST."""

    def __init__(self):
        self.node = None
        # Memoizes node class -> handler method lookups.
        self._cache = {}

    def default(self, node, *args):
        # Fallback handler: simply recurse into every child node.
        for child in iter_child_nodes(node):
            self.dispatch(child, *args)

    def dispatch(self, node, *args):
        """Invoke the visitor's handler for this node (cached by class)."""
        self.node = node
        klass = node.__class__
        handler = self._cache.get(klass)
        if handler is None:
            handler = getattr(
                self.visitor, 'visit' + klass.__name__, self.default)
            self._cache[klass] = handler
        return handler(node, *args)

    def preorder(self, tree, visitor, *args):
        """Do preorder walk of tree using visitor"""
        self.visitor = visitor
        visitor.visit = self.dispatch
        self.dispatch(tree, *args)  # XXX *args make sense?
+
+
class PathNode(object):
    """A node of the control-flow graph, printable as a graphviz shape."""

    def __init__(self, name, look="circle"):
        self.name = name
        self.look = look

    def dot_id(self):
        # The object's identity doubles as a unique graphviz node id.
        return id(self)

    def to_dot(self):
        """Print this node as a graphviz node statement."""
        print('node [shape=%s,label="%s"] %d;' % (
            self.look, self.name, self.dot_id()))
+
+
class PathGraph(object):
    """Adjacency-list control-flow graph for a single code entity."""

    def __init__(self, name, entity, lineno):
        self.name = name
        self.entity = entity
        self.lineno = lineno
        # Maps a node to the list of nodes it connects to.
        self.nodes = defaultdict(list)

    def connect(self, n1, n2):
        """Record an edge from n1 to n2."""
        self.nodes[n1].append(n2)

    def to_dot(self):
        """Print the graph as a graphviz subgraph."""
        print('subgraph {')
        for node in self.nodes:
            node.to_dot()
        for node, neighbours in self.nodes.items():
            for neighbour in neighbours:
                print('%s -- %s;' % (node.dot_id(), neighbour.dot_id()))
        print('}')

    def complexity(self):
        """Return the McCabe complexity for the graph: E - V + 2.

        Note: V counts only nodes that appear as edge *sources* in the
        adjacency map, matching the original implementation.
        """
        edge_count = sum(len(targets) for targets in self.nodes.values())
        node_count = len(self.nodes)
        return edge_count - node_count + 2
+
+
class PathGraphingAstVisitor(ASTVisitor):
    """ A visitor for a parsed Abstract Syntax Tree which finds executable
    statements.
    """

    def __init__(self):
        super(PathGraphingAstVisitor, self).__init__()
        # Dotted prefix ("Outer.Inner.") accumulated while inside classes.
        self.classname = ""
        # Maps "<classname><entity name>" -> completed PathGraph.
        self.graphs = {}
        self.reset()

    def reset(self):
        # Clear per-entity state: the graph being built and its last node.
        self.graph = None
        self.tail = None

    def dispatch_list(self, node_list):
        # Visit each statement of a body in source order.
        for node in node_list:
            self.dispatch(node)

    def visitFunctionDef(self, node):

        if self.classname:
            entity = '%s%s' % (self.classname, node.name)
        else:
            entity = node.name

        name = '%d:1: %r' % (node.lineno, entity)

        if self.graph is not None:
            # closure
            # A nested function: fold its body into the enclosing graph
            # and join both branches at a synthetic "point" node.
            pathnode = self.appendPathNode(name)
            self.tail = pathnode
            self.dispatch_list(node.body)
            bottom = PathNode("", look='point')
            self.graph.connect(self.tail, bottom)
            self.graph.connect(pathnode, bottom)
            self.tail = bottom
        else:
            # A top-level function: start a fresh graph, walk the body,
            # store the finished graph, then reset for the next entity.
            self.graph = PathGraph(name, entity, node.lineno)
            pathnode = PathNode(name)
            self.tail = pathnode
            self.dispatch_list(node.body)
            self.graphs["%s%s" % (self.classname, node.name)] = self.graph
            self.reset()

    def visitClassDef(self, node):
        # Classes contribute no graph of their own; they only extend the
        # dotted name prefix while their body is visited.
        old_classname = self.classname
        self.classname += node.name + "."
        self.dispatch_list(node.body)
        self.classname = old_classname

    def appendPathNode(self, name):
        # Append a node after the current tail; returns None when there is
        # no active tail (e.g. statements outside any function).
        if not self.tail:
            return
        pathnode = PathNode(name)
        self.graph.connect(self.tail, pathnode)
        self.tail = pathnode
        return pathnode

    def visitSimpleStatement(self, node):
        # Any non-branching statement becomes a single path node.
        if node.lineno is None:
            lineno = 0
        else:
            lineno = node.lineno
        name = "Stmt %d" % lineno
        self.appendPathNode(name)

    visitAssert = visitAssign = visitAugAssign = visitDelete = visitPrint = \
        visitRaise = visitYield = visitImport = visitCall = visitSubscript = \
        visitPass = visitContinue = visitBreak = visitGlobal = visitReturn = \
        visitSimpleStatement

    def visitLoop(self, node):
        name = "Loop %d" % node.lineno

        if self.graph is None:
            # global loop
            # A module-level loop gets its own graph, like a function.
            self.graph = PathGraph(name, name, node.lineno)
            pathnode = PathNode(name)
            self.tail = pathnode
            self.dispatch_list(node.body)
            self.graphs["%s%s" % (self.classname, name)] = self.graph
            self.reset()
        else:
            # Loop inside a function: body path and loop-skip path both
            # rejoin at a synthetic "point" node.
            pathnode = self.appendPathNode(name)
            self.tail = pathnode
            self.dispatch_list(node.body)
            bottom = PathNode("", look='point')
            self.graph.connect(self.tail, bottom)
            self.graph.connect(pathnode, bottom)
            self.tail = bottom

        # TODO: else clause in node.orelse

    visitFor = visitWhile = visitLoop

    def visitIf(self, node):
        # Each branch runs from the If node; all branch tails reconnect at
        # a shared bottom node.
        name = "If %d" % node.lineno
        pathnode = self.appendPathNode(name)
        loose_ends = []
        self.dispatch_list(node.body)
        loose_ends.append(self.tail)
        if node.orelse:
            self.tail = pathnode
            self.dispatch_list(node.orelse)
            loose_ends.append(self.tail)
        else:
            loose_ends.append(pathnode)
        if pathnode:
            bottom = PathNode("", look='point')
            for le in loose_ends:
                self.graph.connect(le, bottom)
            self.tail = bottom

    def visitTryExcept(self, node):
        # The try body and every handler body branch from the same node
        # and rejoin at a shared bottom node.
        name = "TryExcept %d" % node.lineno
        pathnode = self.appendPathNode(name)
        loose_ends = []
        self.dispatch_list(node.body)
        loose_ends.append(self.tail)
        for handler in node.handlers:
            self.tail = pathnode
            self.dispatch_list(handler.body)
            loose_ends.append(self.tail)
        if pathnode:
            bottom = PathNode("", look='point')
            for le in loose_ends:
                self.graph.connect(le, bottom)
            self.tail = bottom

    def visitWith(self, node):
        # `with` adds one node and then continues linearly into its body
        # (it does not branch).
        name = "With %d" % node.lineno
        self.appendPathNode(name)
        self.dispatch_list(node.body)
+
+
class McCabeChecker(object):
    """McCabe cyclomatic complexity checker (flake8-style plugin)."""
    name = 'mccabe'
    version = __version__
    _code = 'C901'
    _error_tmpl = "C901 %r is too complex (%d)"
    max_complexity = 0

    def __init__(self, tree, filename):
        # `filename` is accepted for plugin-API compatibility but unused.
        self.tree = tree

    @classmethod
    def add_options(cls, parser):
        """Register the --max-complexity option on the parser."""
        parser.add_option('--max-complexity', default=-1, action='store',
                          type='int', help="McCabe complexity threshold")
        parser.config_options.append('max-complexity')

    @classmethod
    def parse_options(cls, options):
        """Store the configured threshold on the class."""
        cls.max_complexity = options.max_complexity

    def run(self):
        """Yield (lineno, col, text, type) for each too-complex graph."""
        threshold = self.max_complexity
        if threshold < 0:
            # Checking is disabled by default (threshold -1).
            return
        visitor = PathGraphingAstVisitor()
        visitor.preorder(self.tree, visitor)
        for graph in visitor.graphs.values():
            score = graph.complexity()
            if score >= threshold:
                text = self._error_tmpl % (graph.entity, score)
                yield graph.lineno, 0, text, type(self)
+
+
def get_code_complexity(code, threshold=7, filename='stdin'):
    """Check `code` and return a list of C901 error dicts.

    :param code: Python source text.
    :param threshold: complexity value at which entities are reported.
    :param filename: name used in parse-error messages.
    :return: list of dicts with 'type', 'lnum' and 'text' keys, or 0
        (falsy) when the source cannot be parsed.
    """
    try:
        tree = compile(code, filename, "exec", ast.PyCF_ONLY_AST)
    except SyntaxError:
        # sys.exc_info() keeps compatibility with very old Pythons that
        # lack the `except ... as e` syntax.
        err = sys.exc_info()[1]
        sys.stderr.write("Unable to parse %s: %s\n" % (filename, err))
        return 0

    McCabeChecker.max_complexity = threshold
    results = []
    for lineno, offset, text, _ in McCabeChecker(tree, filename).run():
        results.append(dict(
            type=McCabeChecker._code,
            lnum=lineno,
            text=text,
        ))

    return results
+
+
def get_module_complexity(module_path, threshold=7):
    """Returns the complexity of a module.

    :param module_path: path to the Python file to check.
    :param threshold: complexity value at which entities are reported.
    :return: list of error dicts, or 0 when the file fails to parse.
    """
    # BUGFIX: mode "rU" was deprecated in Python 3 and removed in 3.11;
    # plain "r" already gives universal-newline behaviour on Python 3.
    # Keep "rU" on Python 2 to preserve newline translation there.
    open_mode = "r" if sys.version_info[0] >= 3 else "rU"
    with open(module_path, open_mode) as mod:
        code = mod.read()
    return get_code_complexity(code, threshold, filename=module_path)
+
+
def main(argv):
    """Command-line entry point: report complexity for one source file.

    :param argv: argument vector (excluding the program name); the first
        positional argument is the file to analyse.
    """
    opar = optparse.OptionParser()
    opar.add_option("-d", "--dot", dest="dot",
                    help="output a graphviz dot file", action="store_true")
    opar.add_option("-m", "--min", dest="threshold",
                    help="minimum complexity for output", type="int",
                    default=2)

    options, args = opar.parse_args(argv)

    # BUGFIX: mode "rU" was deprecated in Python 3 and removed in 3.11;
    # "r" gives universal newlines on Python 3. Keep "rU" on Python 2.
    open_mode = "r" if sys.version_info[0] >= 3 else "rU"
    with open(args[0], open_mode) as mod:
        code = mod.read()
    tree = compile(code, args[0], "exec", ast.PyCF_ONLY_AST)
    visitor = PathGraphingAstVisitor()
    visitor.preorder(tree, visitor)

    if options.dot:
        # Emit the graphs in graphviz "dot" format.
        print('graph {')
        for graph in visitor.graphs.values():
            if graph.complexity() >= options.threshold:
                graph.to_dot()
        print('}')
    else:
        # Plain listing: entity name and its complexity score.
        for graph in visitor.graphs.values():
            if graph.complexity() >= options.threshold:
                print(graph.name, graph.complexity())
+
+
# Allow running this module as a stand-alone script.
if __name__ == '__main__':
    main(sys.argv[1:])
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pep8.py b/.vim/bundle/python-mode/pylibs/pylama/pep8.py
@@ -0,0 +1,1852 @@
+#!/usr/bin/env python
+# pep8.py - Check Python source code formatting, according to PEP 8
+# Copyright (C) 2006-2009 Johann C. Rocholl <johann@rocholl.net>
+# Copyright (C) 2009-2013 Florent Xicluna <florent.xicluna@gmail.com>
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation files
+# (the "Software"), to deal in the Software without restriction,
+# including without limitation the rights to use, copy, modify, merge,
+# publish, distribute, sublicense, and/or sell copies of the Software,
+# and to permit persons to whom the Software is furnished to do so,
+# subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+r"""
+Check Python source code formatting, according to PEP 8:
+http://www.python.org/dev/peps/pep-0008/
+
+For usage and a list of options, try this:
+$ python pep8.py -h
+
+This program and its regression test suite live here:
+http://github.com/jcrocholl/pep8
+
+Groups of errors and warnings:
+E errors
+W warnings
+100 indentation
+200 whitespace
+300 blank lines
+400 imports
+500 line length
+600 deprecation
+700 statements
+900 syntax error
+"""
+__version__ = '1.4.6a0'
+
+import os
+import sys
+import re
+import time
+import inspect
+import keyword
+import tokenize
+from optparse import OptionParser
+from fnmatch import fnmatch
+try:
+ from configparser import RawConfigParser
+ from io import TextIOWrapper
+except ImportError:
+ from ConfigParser import RawConfigParser
+
# Directory names excluded from recursive checking by default.
DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__'
# Error codes ignored by default.
DEFAULT_IGNORE = 'E226,E24'
# Per-user config file location (Windows vs. XDG layout).
if sys.platform == 'win32':
    DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8')
else:
    DEFAULT_CONFIG = os.path.join(os.getenv('XDG_CONFIG_HOME') or
                                  os.path.expanduser('~/.config'), 'pep8')
# File names searched for project-level configuration.
PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep8')
TESTSUITE_PATH = os.path.join(os.path.dirname(__file__), 'testsuite')
MAX_LINE_LENGTH = 79
# Report templates keyed by output format name.
REPORT_FORMAT = {
    'default': '%(path)s:%(row)d:%(col)d: %(code)s %(text)s',
    'pylint': '%(path)s:%(row)d: [%(code)s] %(text)s',
}

# Value of the compile() flag that yields an AST instead of executing.
PyCF_ONLY_AST = 1024
SINGLETONS = frozenset(['False', 'None', 'True'])
# All keywords except the singletons; 'print' included for Python 2.
KEYWORDS = frozenset(keyword.kwlist + ['print']) - SINGLETONS
UNARY_OPERATORS = frozenset(['>>', '**', '*', '+', '-'])
ARITHMETIC_OP = frozenset(['**', '*', '/', '//', '+', '-'])
# Operators around which surrounding whitespace is optional / required.
WS_OPTIONAL_OPERATORS = ARITHMETIC_OP.union(['^', '&', '|', '<<', '>>', '%'])
WS_NEEDED_OPERATORS = frozenset([
    '**=', '*=', '/=', '//=', '+=', '-=', '!=', '<>', '<', '>',
    '%=', '^=', '&=', '|=', '==', '<=', '>=', '<<=', '>>=', '='])
WHITESPACE = frozenset(' \t')
SKIP_TOKENS = frozenset([tokenize.COMMENT, tokenize.NL, tokenize.NEWLINE,
                         tokenize.INDENT, tokenize.DEDENT])
BENCHMARK_KEYS = ['directories', 'files', 'logical lines', 'physical lines']

# Precompiled patterns used by the individual check functions below.
INDENT_REGEX = re.compile(r'([ \t]*)')
RAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,')
RERAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,\s*\w+\s*,\s*\w+')
ERRORCODE_REGEX = re.compile(r'\b[A-Z]\d{3}\b')
DOCSTRING_REGEX = re.compile(r'u?r?["\']')
EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]')
WHITESPACE_AFTER_COMMA_REGEX = re.compile(r'[,;:]\s*(?: |\t)')
COMPARE_SINGLETON_REGEX = re.compile(r'([=!]=)\s*(None|False|True)')
COMPARE_TYPE_REGEX = re.compile(r'(?:[=!]=|is(?:\s+not)?)\s*type(?:s.\w+Type'
                                r'|\s*\(\s*([^)]*[^ )])\s*\))')
KEYWORD_REGEX = re.compile(r'(\s*)\b(?:%s)\b(\s*)' % r'|'.join(KEYWORDS))
OPERATOR_REGEX = re.compile(r'(?:[^,\s])(\s*)(?:[-+*/|!<=>%&^]+)(\s*)')
LAMBDA_REGEX = re.compile(r'\blambda\b')
HUNK_REGEX = re.compile(r'^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@.*$')

# Work around Python < 2.6 behaviour, which does not generate NL after
# a comment which is on a line by itself.
COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n'
+
+
+##############################################################################
+# Plugins (check functions) for physical lines
+##############################################################################
+
+
def tabs_or_spaces(physical_line, indent_char):
    r"""
    Never mix tabs and spaces.

    The most popular way of indenting Python is with spaces only. The
    second-most popular way is with tabs only. Code indented with a mixture
    of tabs and spaces should be converted to using spaces exclusively. When
    invoking the Python command line interpreter with the -t option, it issues
    warnings about code that illegally mixes tabs and spaces. When using -tt
    these warnings become errors. These options are highly recommended!

    Okay: if a == 0:\n        a = 1\n        b = 1
    E101: if a == 0:\n        a = 1\n\tb = 1
    """
    indent = INDENT_REGEX.match(physical_line).group(1)
    # Report the first indentation character that differs from the file's
    # dominant indent character.
    offset = 0
    for char in indent:
        if char != indent_char:
            return offset, "E101 indentation contains mixed spaces and tabs"
        offset += 1
+
+
def tabs_obsolete(physical_line):
    r"""
    For new projects, spaces-only are strongly recommended over tabs. Most
    editors have features that make this easy to do.

    Okay: if True:\n    return
    W191: if True:\n\treturn
    """
    indent = INDENT_REGEX.match(physical_line).group(1)
    # Flag the position of the first tab in the indentation, if any.
    tab_offset = indent.find('\t')
    if tab_offset != -1:
        return tab_offset, "W191 indentation contains tabs"
+
+
def trailing_whitespace(physical_line):
    r"""
    JCR: Trailing whitespace is superfluous.
    FBM: Except when it occurs as part of a blank line (i.e. the line is
    nothing but whitespace). According to Python docs[1] a line with only
    whitespace is considered a blank line, and is to be ignored. However,
    matching a blank line to its indentation level avoids mistakenly
    terminating a multi-line statement (e.g. class declaration) when
    pasting code into the standard Python interpreter.

    [1] http://docs.python.org/reference/lexical_analysis.html#blank-lines

    The warning returned varies on whether the line itself is blank, for easier
    filtering for those who want to indent their blank lines.

    Okay: spam(1)\n#
    W291: spam(1) \n#
    W293: class Foo(object):\n    \n    bang = 12
    """
    # Strip the line terminators first: newline, carriage return and
    # form feed, in that order (mirrors chr(10), chr(13), chr(12)).
    for terminator in ('\n', '\r', '\x0c'):
        physical_line = physical_line.rstrip(terminator)
    stripped = physical_line.rstrip(' \t\v')
    if physical_line == stripped:
        return None
    if stripped:
        return len(stripped), "W291 trailing whitespace"
    return 0, "W293 blank line contains whitespace"
+
+
def trailing_blank_lines(physical_line, lines, line_number):
    r"""
    JCR: Trailing blank lines are superfluous.

    Okay: spam(1)
    W391: spam(1)\n
    """
    # Only the very last physical line of the file can trigger W391.
    is_last_line = (line_number == len(lines))
    if is_last_line and not physical_line.rstrip():
        return 0, "W391 blank line at end of file"
+
+
def missing_newline(physical_line):
    """
    JCR: The last line should have a newline.

    Reports warning W292.
    """
    # A line that rstrip() leaves unchanged has no trailing newline.
    if physical_line == physical_line.rstrip():
        return len(physical_line), "W292 no newline at end of file"
+
+
def maximum_line_length(physical_line, max_line_length):
    """
    Limit all lines to a maximum of 79 characters.

    There are still many devices around that are limited to 80 character
    lines; plus, limiting windows to 80 characters makes it possible to have
    several windows side-by-side. The default wrapping on such devices looks
    ugly. Therefore, please limit all lines to a maximum of 79 characters.
    For flowing long blocks of text (docstrings or comments), limiting the
    length to 72 characters is recommended.

    Reports error E501.
    """
    line = physical_line.rstrip()
    length = len(line)
    # Short lines (and "noqa"-marked lines) never trigger E501; noqa() is
    # only consulted when the raw length exceeds the limit.
    if length <= max_line_length or noqa(line):
        return None
    if hasattr(line, 'decode'):   # Python 2
        # The line could contain multi-byte characters
        try:
            length = len(line.decode('utf-8'))
        except UnicodeError:
            pass
    if length > max_line_length:
        return (max_line_length, "E501 line too long "
                "(%d > %d characters)" % (length, max_line_length))
+
+
+##############################################################################
+# Plugins (check functions) for logical lines
+##############################################################################
+
+
def blank_lines(logical_line, blank_lines, indent_level, line_number,
                previous_logical, previous_indent_level):
    r"""
    Separate top-level function and class definitions with two blank lines.

    Method definitions inside a class are separated by a single blank line.

    Extra blank lines may be used (sparingly) to separate groups of related
    functions. Blank lines may be omitted between a bunch of related
    one-liners (e.g. a set of dummy implementations).

    Use blank lines in functions, sparingly, to indicate logical sections.

    Okay: def a():\n    pass\n\n\ndef b():\n    pass
    Okay: def a():\n    pass\n\n\n# Foo\n# Bar\n\ndef b():\n    pass

    E301: class Foo:\n    b = 0\n    def bar():\n        pass
    E302: def a():\n    pass\n\ndef b(n):\n    pass
    E303: def a():\n    pass\n\n\n\ndef b(n):\n    pass
    E303: def a():\n\n\n\n    pass
    E304: @decorator\n\ndef a():\n    pass
    """
    if line_number < 3 and not previous_logical:
        return  # Don't expect blank lines before the first line
    if previous_logical.startswith('@'):
        # A decorator must be directly followed by what it decorates.
        if blank_lines:
            yield 0, "E304 blank lines found after function decorator"
    elif blank_lines > 2 or (indent_level and blank_lines == 2):
        yield 0, "E303 too many blank lines (%d)" % blank_lines
    elif logical_line.startswith(('def ', 'class ', '@')):
        if not indent_level:
            # Top level: exactly two blank lines are expected.
            if blank_lines != 2:
                yield 0, "E302 expected 2 blank lines, found %d" % blank_lines
        else:
            # Inside a class: one blank line, unless this follows the
            # class header or a docstring.
            separated = (blank_lines or
                         previous_indent_level < indent_level or
                         DOCSTRING_REGEX.match(previous_logical))
            if not separated:
                yield 0, "E301 expected 1 blank line, found 0"
+
+
def extraneous_whitespace(logical_line):
    """
    Avoid extraneous whitespace in the following situations:

    - Immediately inside parentheses, brackets or braces.

    - Immediately before a comma, semicolon, or colon.

    Okay: spam(ham[1], {eggs: 2})
    E201: spam( ham[1], {eggs: 2})
    E201: spam(ham[ 1], {eggs: 2})
    E201: spam(ham[1], { eggs: 2})
    E202: spam(ham[1], {eggs: 2} )
    E202: spam(ham[1 ], {eggs: 2})
    E202: spam(ham[1], {eggs: 2 })

    E203: if x == 4: print x, y; x, y = y , x
    E203: if x == 4: print x, y ; x, y = y, x
    E203: if x == 4 : print x, y; x, y = y, x
    """
    for match in EXTRANEOUS_WHITESPACE_REGEX.finditer(logical_line):
        text = match.group()
        char = text.strip()
        found = match.start()
        if text.endswith(' '):
            # An opening bracket followed by a space.
            yield found + 1, "E201 whitespace after '%s'" % char
        elif logical_line[found - 1] != ',':
            # A space before a closer or punctuation, unless the space is
            # preceded by a comma.
            code = 'E202' if char in '}])' else 'E203'
            yield found, "%s whitespace before '%s'" % (code, char)
+
+
def whitespace_around_keywords(logical_line):
    r"""
    Avoid extraneous whitespace around keywords.

    Okay: True and False
    E271: True and  False
    E272: True  and False
    E273: True and\tFalse
    E274: True\tand False
    """
    for match in KEYWORD_REGEX.finditer(logical_line):
        before, after = match.groups()
        # Check the gap on each side of the keyword: tabs are reported
        # first, then runs of more than one space.
        checks = (
            (1, before, "E274 tab before keyword",
             "E272 multiple spaces before keyword"),
            (2, after, "E273 tab after keyword",
             "E271 multiple spaces after keyword"),
        )
        for group_index, gap, tab_msg, spaces_msg in checks:
            if '\t' in gap:
                yield match.start(group_index), tab_msg
            elif len(gap) > 1:
                yield match.start(group_index), spaces_msg
+
+
def missing_whitespace(logical_line):
    """
    JCR: Each comma, semicolon or colon should be followed by whitespace.

    Okay: [a, b]
    Okay: (3,)
    Okay: a[1:4]
    Okay: a[:4]
    Okay: a[1:]
    Okay: a[1:4:2]
    E231: ['a','b']
    E231: foo(bar,baz)
    E231: [{'a':'b'}]
    """
    line = logical_line
    for index, char in enumerate(line[:-1]):
        if char not in ',;:':
            continue
        next_char = line[index + 1]
        if next_char in WHITESPACE:
            continue
        if char == ':':
            before = line[:index]
            if (before.count('[') > before.count(']') and
                    before.rfind('{') < before.rfind('[')):
                continue  # Slice syntax, no space required
        if char == ',' and next_char == ')':
            continue  # Allow tuple with only one element: (3,)
        yield index, "E231 missing whitespace after '%s'" % char
+
+
def indentation(logical_line, previous_logical, indent_char,
                indent_level, previous_indent_level):
    r"""
    Use 4 spaces per indentation level.

    For really old code that you don't want to mess up, you can continue to
    use 8-space tabs.

    Okay: a = 1
    Okay: if a == 0:\n    a = 1
    E111:   a = 1

    Okay: for item in items:\n    pass
    E112: for item in items:\npass

    Okay: a = 1\nb = 2
    E113: a = 1\n    b = 2
    """
    # A previous line ending in ':' announces an indented block.
    expecting_indent = previous_logical.endswith(':')
    got_deeper = indent_level > previous_indent_level
    if indent_char == ' ' and indent_level % 4:
        yield 0, "E111 indentation is not a multiple of four"
    if expecting_indent and not got_deeper:
        yield 0, "E112 expected an indented block"
    if got_deeper and not expecting_indent:
        yield 0, "E113 unexpected indentation"
+
+
def continued_indentation(logical_line, tokens, indent_level, noqa, verbose):
    r"""
    Continuation lines should align wrapped elements either vertically using
    Python's implicit line joining inside parentheses, brackets and braces, or
    using a hanging indent.

    When using a hanging indent the following considerations should be applied:

    - there should be no arguments on the first line, and

    - further indentation should be used to clearly distinguish itself as a
      continuation line.

    Okay: a = (\n)
    E123: a = (\n    )

    Okay: a = (\n    42)
    E121: a = (\n   42)
    E122: a = (\n42)
    E123: a = (\n    42\n    )
    E124: a = (24,\n     42\n)
    E125: if (a or\n    b):\n    pass
    E126: a = (\n        42)
    E127: a = (24,\n      42)
    E128: a = (24,\n    42)
    """
    first_row = tokens[0][2][0]
    nrows = 1 + tokens[-1][2][0] - first_row
    # Single-row logical lines have no continuation to check.
    if noqa or nrows == 1:
        return

    # indent_next tells us whether the next block is indented; assuming
    # that it is indented by 4 spaces, then we should not allow 4-space
    # indents on the final continuation line; in turn, some other
    # indents are allowed to have an extra 4 spaces.
    indent_next = logical_line.endswith(':')

    row = depth = 0
    # remember how many brackets were opened on each line
    parens = [0] * nrows
    # relative indents of physical lines
    rel_indent = [0] * nrows
    # visual indents
    # maps column -> True (verified visual indent), str (string/comment
    # alignment candidate), or a token text to line up with.
    indent_chances = {}
    last_indent = tokens[0][2]
    # indent[depth] is the verified visual-indent column at each bracket
    # depth (0 = not yet established).
    indent = [last_indent[1]]
    if verbose >= 3:
        print(">>> " + tokens[0][4].rstrip())

    # NOTE(review): last_token_multiline is first read inside the loop but
    # only assigned at the bottom of each iteration; the first read can
    # only happen once `row` has advanced, i.e. after at least one full
    # iteration, so it is defined by then.
    for token_type, text, start, end, line in tokens:

        newline = row < start[0] - first_row
        if newline:
            row = start[0] - first_row
            newline = (not last_token_multiline and
                       token_type not in (tokenize.NL, tokenize.NEWLINE))

        if newline:
            # this is the beginning of a continuation line.
            last_indent = start
            if verbose >= 3:
                print("... " + line.rstrip())

            # record the initial indent.
            rel_indent[row] = expand_indent(line) - indent_level

            if depth:
                # a bracket expression in a continuation line.
                # find the line that it was opened on
                for open_row in range(row - 1, -1, -1):
                    if parens[open_row]:
                        break
                else:
                    # an unbracketed continuation line (ie, backslash)
                    open_row = 0
                # hang is the extra indentation relative to the line the
                # bracket was opened on.
                hang = rel_indent[row] - rel_indent[open_row]
                visual_indent = indent_chances.get(start[1])

                if token_type == tokenize.OP and text in ']})':
                    # this line starts with a closing bracket
                    if indent[depth]:
                        if start[1] != indent[depth]:
                            yield (start, "E124 closing bracket does not match "
                                   "visual indentation")
                    elif hang:
                        yield (start, "E123 closing bracket does not match "
                               "indentation of opening bracket's line")
                elif visual_indent is True:
                    # visual indent is verified
                    if not indent[depth]:
                        indent[depth] = start[1]
                elif visual_indent in (text, str):
                    # ignore token lined up with matching one from a previous line
                    pass
                elif indent[depth] and start[1] < indent[depth]:
                    # visual indent is broken
                    yield (start, "E128 continuation line "
                           "under-indented for visual indent")
                elif hang == 4 or (indent_next and rel_indent[row] == 8):
                    # hanging indent is verified
                    pass
                else:
                    # indent is broken
                    if hang <= 0:
                        error = "E122", "missing indentation or outdented"
                    elif indent[depth]:
                        error = "E127", "over-indented for visual indent"
                    elif hang % 4:
                        error = "E121", "indentation is not a multiple of four"
                    else:
                        error = "E126", "over-indented for hanging indent"
                    yield start, "%s continuation line %s" % error

        # look for visual indenting
        if (parens[row] and token_type not in (tokenize.NL, tokenize.COMMENT)
                and not indent[depth]):
            indent[depth] = start[1]
            indent_chances[start[1]] = True
            if verbose >= 4:
                print("bracket depth %s indent to %s" % (depth, start[1]))
        # deal with implicit string concatenation
        elif (token_type in (tokenize.STRING, tokenize.COMMENT) or
              text in ('u', 'ur', 'b', 'br')):
            indent_chances[start[1]] = str
        # special case for the "if" statement because len("if (") == 4
        elif not indent_chances and not row and not depth and text == 'if':
            indent_chances[end[1] + 1] = True

        # keep track of bracket depth
        if token_type == tokenize.OP:
            if text in '([{':
                depth += 1
                indent.append(0)
                parens[row] += 1
                if verbose >= 4:
                    print("bracket depth %s seen, col %s, visual min = %s" %
                          (depth, start[1], indent[depth]))
            elif text in ')]}' and depth > 0:
                # parent indents should not be more than this one
                prev_indent = indent.pop() or last_indent[1]
                for d in range(depth):
                    if indent[d] > prev_indent:
                        indent[d] = 0
                # discard indent-chance columns at or beyond the closed
                # bracket's indent.
                for ind in list(indent_chances):
                    if ind >= prev_indent:
                        del indent_chances[ind]
                depth -= 1
                if depth:
                    indent_chances[indent[depth]] = True
                # decrement the open-bracket count of the row that still
                # has one open.
                for idx in range(row, -1, -1):
                    if parens[idx]:
                        parens[idx] -= 1
                        break
            assert len(indent) == depth + 1
            if start[1] not in indent_chances:
                # allow to line up tokens
                indent_chances[start[1]] = text

        last_token_multiline = (start[0] != end[0])

    if indent_next and rel_indent[-1] == 4:
        yield (last_indent, "E125 continuation line does not distinguish "
               "itself from next logical line")
+
+
def whitespace_before_parameters(logical_line, tokens):
    """
    Avoid extraneous whitespace in the following situations:

    - Immediately before the open parenthesis that starts the argument
      list of a function call.

    - Immediately before the open parenthesis that starts an indexing or
      slicing.

    Okay: spam(1)
    E211: spam (1)

    Okay: dict['key'] = list[index]
    E211: dict ['key'] = list[index]
    E211: dict['key'] = list [index]
    """
    prev_type, prev_text, __, prev_end, __ = tokens[0]
    for index in range(1, len(tokens)):
        token_type, text, start, end, __ = tokens[index]
        opens_call = (token_type == tokenize.OP and text in '([')
        follows_value = (prev_type == tokenize.NAME or prev_text in '}])')
        if (opens_call and
                start != prev_end and
                follows_value and
                # Syntax "class A (B):" is allowed, but avoid it
                (index < 2 or tokens[index - 2][1] != 'class') and
                # Allow "return (a.foo for a in range(5))"
                not keyword.iskeyword(prev_text)):
            yield prev_end, "E211 whitespace before '%s'" % text
        prev_type = token_type
        prev_text = text
        prev_end = end
+
+
def whitespace_around_operator(logical_line):
    r"""
    Avoid extraneous whitespace in the following situations:

    - More than one space around an assignment (or other) operator to
      align it with another.

    Okay: a = 12 + 3
    E221: a = 4  + 5
    E222: a = 4 +  5
    E223: a = 4\t+ 5
    E224: a = 4 +\t5
    """
    for match in OPERATOR_REGEX.finditer(logical_line):
        before, after = match.groups()
        # Inspect the gap on each side of the operator: tabs first, then
        # runs of more than one space.
        checks = (
            (1, before, "E223 tab before operator",
             "E221 multiple spaces before operator"),
            (2, after, "E224 tab after operator",
             "E222 multiple spaces after operator"),
        )
        for group_index, gap, tab_msg, spaces_msg in checks:
            if '\t' in gap:
                yield match.start(group_index), tab_msg
            elif len(gap) > 1:
                yield match.start(group_index), spaces_msg
+
+
def missing_whitespace_around_operator(logical_line, tokens):
    r"""
    - Always surround these binary operators with a single space on
      either side: assignment (=), augmented assignment (+=, -= etc.),
      comparisons (==, <, >, !=, <>, <=, >=, in, not in, is, is not),
      Booleans (and, or, not).

    - Use spaces around arithmetic operators.

    Okay: i = i + 1
    Okay: submitted += 1
    Okay: x = x * 2 - 1
    Okay: hypot2 = x * x + y * y
    Okay: c = (a + b) * (a - b)
    Okay: foo(bar, key='word', *args, **kwargs)
    Okay: alpha[:-i]

    E225: i=i+1
    E225: submitted +=1
    E225: x = x /2 - 1
    E225: z = x **y
    E226: c = (a+b) * (a-b)
    E226: hypot2 = x*x + y*y
    E227: c = a|b
    E228: msg = fmt%(errno, errmsg)
    """
    # need_space is a tri-state accumulator:
    #   False            -> no operator pending
    #   True             -> a mandatory-space operator was just seen
    #   (pos, had_space) -> an optional-space operator was seen at `pos`
    #                       with `had_space` telling if a space preceded it
    parens = 0
    need_space = False
    prev_type = tokenize.OP
    prev_text = prev_end = None
    for token_type, text, start, end, line in tokens:
        if token_type in (tokenize.NL, tokenize.NEWLINE, tokenize.ERRORTOKEN):
            # ERRORTOKEN is triggered by backticks in Python 3
            continue
        if text in ('(', 'lambda'):
            # Inside parentheses (or a lambda parameter list) '=' marks a
            # keyword argument / default value and is exempt.
            parens += 1
        elif text == ')':
            parens -= 1
        if need_space:
            if start != prev_end:
                # Found a (probably) needed space
                if need_space is not True and not need_space[1]:
                    yield (need_space[0],
                           "E225 missing whitespace around operator")
                need_space = False
            elif text == '>' and prev_text in ('<', '-'):
                # Tolerate the ">" operator, even if running Python 3
                # Deal with Python 3's annotated return value "->"
                pass
            else:
                if need_space is True or need_space[1]:
                    # A needed trailing space was not found
                    yield prev_end, "E225 missing whitespace around operator"
                else:
                    # Optional-space operator with no space on either side:
                    # pick the code by operator category.
                    code, optype = 'E226', 'arithmetic'
                    if prev_text == '%':
                        code, optype = 'E228', 'modulo'
                    elif prev_text not in ARITHMETIC_OP:
                        code, optype = 'E227', 'bitwise or shift'
                    yield (need_space[0], "%s missing whitespace "
                           "around %s operator" % (code, optype))
                need_space = False
        elif token_type == tokenize.OP and prev_end is not None:
            if text == '=' and parens:
                # Allow keyword args or defaults: foo(bar=None).
                pass
            elif text in WS_NEEDED_OPERATORS:
                need_space = True
            elif text in UNARY_OPERATORS:
                # Check if the operator is being used as a binary operator
                # Allow unary operators: -123, -x, +1.
                # Allow argument unpacking: foo(*args, **kwargs).
                if prev_type == tokenize.OP:
                    binary_usage = (prev_text in '}])')
                elif prev_type == tokenize.NAME:
                    binary_usage = (prev_text not in KEYWORDS)
                else:
                    binary_usage = (prev_type not in SKIP_TOKENS)

                if binary_usage:
                    need_space = None
            elif text in WS_OPTIONAL_OPERATORS:
                need_space = None

            if need_space is None:
                # Surrounding space is optional, but ensure that
                # trailing space matches opening space
                need_space = (prev_end, start != prev_end)
            elif need_space and start == prev_end:
                # A needed opening space was not found
                yield prev_end, "E225 missing whitespace around operator"
                need_space = False
        prev_type = token_type
        prev_text = text
        prev_end = end
+
+
def whitespace_around_comma(logical_line):
    r"""
    Avoid extraneous whitespace in the following situations:

    - More than one space around an assignment (or other) operator to
      align it with another.

    Note: these checks are disabled by default

    Okay: a = (1, 2)
    E241: a = (1,  2)
    E242: a = (1,\t2)
    """
    for m in WHITESPACE_AFTER_COMMA_REGEX.finditer(logical_line):
        found = m.start() + 1
        punctuation = m.group()[0]
        if '\t' in m.group():
            yield found, "E242 tab after '%s'" % punctuation
        else:
            yield found, "E241 multiple spaces after '%s'" % punctuation
+
+
def whitespace_around_named_parameter_equals(logical_line, tokens):
    """
    Don't use spaces around the '=' sign when used to indicate a
    keyword argument or a default parameter value.

    Okay: def complex(real, imag=0.0):
    Okay: return magic(r=real, i=imag)
    Okay: boolean(a == b)
    Okay: boolean(a != b)
    Okay: boolean(a <= b)
    Okay: boolean(a >= b)

    E251: def complex(real, imag = 0.0):
    E251: return magic(r = real, i = imag)
    """
    message = "E251 unexpected spaces around keyword / parameter equals"
    paren_depth = 0
    expect_no_gap = False
    prev_end = None
    for token_type, text, start, end, line in tokens:
        if expect_no_gap:
            # The token right after a parenthesised '=' must be adjacent.
            expect_no_gap = False
            if start != prev_end:
                yield (prev_end, message)
        elif token_type == tokenize.OP:
            if text == '(':
                paren_depth += 1
            elif text == ')':
                paren_depth -= 1
            elif paren_depth and text == '=':
                # '=' inside parentheses: no gap allowed on either side.
                expect_no_gap = True
                if start != prev_end:
                    yield (prev_end, message)
        prev_end = end
+
+
+def whitespace_before_inline_comment(logical_line, tokens):
+ """
+ Separate inline comments by at least two spaces.
+
+ An inline comment is a comment on the same line as a statement. Inline
+ comments should be separated by at least two spaces from the statement.
+ They should start with a # and a single space.
+
+ Okay: x = x + 1 # Increment x
+ Okay: x = x + 1 # Increment x
+ E261: x = x + 1 # Increment x
+ E262: x = x + 1 #Increment x
+ E262: x = x + 1 # Increment x
+ """
+ prev_end = (0, 0)
+ for token_type, text, start, end, line in tokens:
+ if token_type == tokenize.COMMENT:
+ if not line[:start[1]].strip():
+ continue
+ if prev_end[0] == start[0] and start[1] < prev_end[1] + 2:
+ yield (prev_end,
+ "E261 at least two spaces before inline comment")
+ symbol, sp, comment = text.partition(' ')
+ if symbol not in ('#', '#:') or comment[:1].isspace():
+ yield start, "E262 inline comment should start with '# '"
+ elif token_type != tokenize.NL:
+ prev_end = end
+
+
+def imports_on_separate_lines(logical_line):
+ r"""
+ Imports should usually be on separate lines.
+
+ Okay: import os\nimport sys
+ E401: import sys, os
+
+ Okay: from subprocess import Popen, PIPE
+ Okay: from myclas import MyClass
+ Okay: from foo.bar.yourclass import YourClass
+ Okay: import myclass
+ Okay: import foo.bar.yourclass
+ """
+ line = logical_line
+ if line.startswith('import '):
+ found = line.find(',')
+ if -1 < found and ';' not in line[:found]:
+ yield found, "E401 multiple imports on one line"
+
+
+def compound_statements(logical_line):
+ r"""
+ Compound statements (multiple statements on the same line) are
+ generally discouraged.
+
+ While sometimes it's okay to put an if/for/while with a small body
+ on the same line, never do this for multi-clause statements. Also
+ avoid folding such long lines!
+
+ Okay: if foo == 'blah':\n do_blah_thing()
+ Okay: do_one()
+ Okay: do_two()
+ Okay: do_three()
+
+ E701: if foo == 'blah': do_blah_thing()
+ E701: for x in lst: total += x
+ E701: while t < 10: t = delay()
+ E701: if foo == 'blah': do_blah_thing()
+ E701: else: do_non_blah_thing()
+ E701: try: something()
+ E701: finally: cleanup()
+ E701: if foo == 'blah': one(); two(); three()
+
+ E702: do_one(); do_two(); do_three()
+ E703: do_four(); # useless semicolon
+ """
+ line = logical_line
+ last_char = len(line) - 1
+ found = line.find(':')
+ if -1 < found < last_char:
+ before = line[:found]
+ if (before.count('{') <= before.count('}') and # {'a': 1} (dict)
+ before.count('[') <= before.count(']') and # [1:2] (slice)
+ before.count('(') <= before.count(')') and # (Python 3 annotation)
+ not LAMBDA_REGEX.search(before)): # lambda x: x
+ yield found, "E701 multiple statements on one line (colon)"
+ found = line.find(';')
+ if -1 < found:
+ if found < last_char:
+ yield found, "E702 multiple statements on one line (semicolon)"
+ else:
+ yield found, "E703 statement ends with a semicolon"
+
+
+def explicit_line_join(logical_line, tokens):
+ r"""
+ Avoid explicit line join between brackets.
+
+ The preferred way of wrapping long lines is by using Python's implied line
+ continuation inside parentheses, brackets and braces. Long lines can be
+ broken over multiple lines by wrapping expressions in parentheses. These
+ should be used in preference to using a backslash for line continuation.
+
+ E502: aaa = [123, \\n 123]
+ E502: aaa = ("bbb " \\n "ccc")
+
+ Okay: aaa = [123,\n 123]
+ Okay: aaa = ("bbb "\n "ccc")
+ Okay: aaa = "bbb " \\n "ccc"
+ """
+ prev_start = prev_end = parens = 0
+ for token_type, text, start, end, line in tokens:
+ if start[0] != prev_start and parens and backslash:
+ yield backslash, "E502 the backslash is redundant between brackets"
+ if end[0] != prev_end:
+ if line.rstrip('\r\n').endswith('\\'):
+ backslash = (end[0], len(line.splitlines()[-1]) - 1)
+ else:
+ backslash = None
+ prev_start = prev_end = end[0]
+ else:
+ prev_start = start[0]
+ if token_type == tokenize.OP:
+ if text in '([{':
+ parens += 1
+ elif text in ')]}':
+ parens -= 1
+
+
+def comparison_to_singleton(logical_line, noqa):
+ """
+ Comparisons to singletons like None should always be done
+ with "is" or "is not", never the equality operators.
+
+ Okay: if arg is not None:
+ E711: if arg != None:
+ E712: if arg == True:
+
+ Also, beware of writing if x when you really mean if x is not None --
+ e.g. when testing whether a variable or argument that defaults to None was
+ set to some other value. The other value might have a type (such as a
+ container) that could be false in a boolean context!
+ """
+ match = not noqa and COMPARE_SINGLETON_REGEX.search(logical_line)
+ if match:
+ same = (match.group(1) == '==')
+ singleton = match.group(2)
+ msg = "'if cond is %s:'" % (('' if same else 'not ') + singleton)
+ if singleton in ('None',):
+ code = 'E711'
+ else:
+ code = 'E712'
+ nonzero = ((singleton == 'True' and same) or
+ (singleton == 'False' and not same))
+ msg += " or 'if %scond:'" % ('' if nonzero else 'not ')
+ yield match.start(1), ("%s comparison to %s should be %s" %
+ (code, singleton, msg))
+
+
+def comparison_type(logical_line):
+ """
+ Object type comparisons should always use isinstance() instead of
+ comparing types directly.
+
+ Okay: if isinstance(obj, int):
+ E721: if type(obj) is type(1):
+
+ When checking if an object is a string, keep in mind that it might be a
+ unicode string too! In Python 2.3, str and unicode have a common base
+ class, basestring, so you can do:
+
+ Okay: if isinstance(obj, basestring):
+ Okay: if type(a1) is type(b1):
+ """
+ match = COMPARE_TYPE_REGEX.search(logical_line)
+ if match:
+ inst = match.group(1)
+ if inst and isidentifier(inst) and inst not in SINGLETONS:
+ return # Allow comparison for types which are not obvious
+ yield match.start(), "E721 do not compare types, use 'isinstance()'"
+
+
+def python_3000_has_key(logical_line):
+ r"""
+ The {}.has_key() method is removed in the Python 3.
+ Use the 'in' operation instead.
+
+ Okay: if "alph" in d:\n print d["alph"]
+ W601: assert d.has_key('alph')
+ """
+ pos = logical_line.find('.has_key(')
+ if pos > -1:
+ yield pos, "W601 .has_key() is deprecated, use 'in'"
+
+
+def python_3000_raise_comma(logical_line):
+ """
+ When raising an exception, use "raise ValueError('message')"
+ instead of the older form "raise ValueError, 'message'".
+
+ The paren-using form is preferred because when the exception arguments
+ are long or include string formatting, you don't need to use line
+ continuation characters thanks to the containing parentheses. The older
+ form is removed in Python 3.
+
+ Okay: raise DummyError("Message")
+ W602: raise DummyError, "Message"
+ """
+ match = RAISE_COMMA_REGEX.match(logical_line)
+ if match and not RERAISE_COMMA_REGEX.match(logical_line):
+ yield match.end() - 1, "W602 deprecated form of raising exception"
+
+
+def python_3000_not_equal(logical_line):
+ """
+ != can also be written <>, but this is an obsolete usage kept for
+ backwards compatibility only. New code should always use !=.
+ The older syntax is removed in Python 3.
+
+ Okay: if a != 'no':
+ W603: if a <> 'no':
+ """
+ pos = logical_line.find('<>')
+ if pos > -1:
+ yield pos, "W603 '<>' is deprecated, use '!='"
+
+
+def python_3000_backticks(logical_line):
+ """
+ Backticks are removed in Python 3.
+ Use repr() instead.
+
+ Okay: val = repr(1 + 2)
+ W604: val = `1 + 2`
+ """
+ pos = logical_line.find('`')
+ if pos > -1:
+ yield pos, "W604 backticks are deprecated, use 'repr()'"
+
+
+##############################################################################
+# Helper functions
+##############################################################################
+
+
+if '' == ''.encode():
+ # Python 2: implicit encoding.
+ def readlines(filename):
+ f = open(filename)
+ try:
+ return f.readlines()
+ finally:
+ f.close()
+
+ isidentifier = re.compile(r'[a-zA-Z_]\w*').match
+ stdin_get_value = sys.stdin.read
+else:
+ # Python 3
+ def readlines(filename):
+ f = open(filename, 'rb')
+ try:
+ coding, lines = tokenize.detect_encoding(f.readline)
+ f = TextIOWrapper(f, coding, line_buffering=True)
+ return [l.decode(coding) for l in lines] + f.readlines()
+ except (LookupError, SyntaxError, UnicodeError):
+ f.close()
+ # Fall back if files are improperly declared
+ f = open(filename, encoding='latin-1')
+ return f.readlines()
+ finally:
+ f.close()
+
+ isidentifier = str.isidentifier
+
+ def stdin_get_value():
+ return TextIOWrapper(sys.stdin.buffer, errors='ignore').read()
+readlines.__doc__ = " Read the source code."
+noqa = re.compile(r'# no(?:qa|pep8)\b', re.I).search
+
+
+def expand_indent(line):
+ r"""
+ Return the amount of indentation.
+ Tabs are expanded to the next multiple of 8.
+
+ >>> expand_indent(' ')
+ 4
+ >>> expand_indent('\t')
+ 8
+ >>> expand_indent(' \t')
+ 8
+ >>> expand_indent(' \t')
+ 8
+ >>> expand_indent(' \t')
+ 16
+ """
+ if '\t' not in line:
+ return len(line) - len(line.lstrip())
+ result = 0
+ for char in line:
+ if char == '\t':
+ result = result // 8 * 8 + 8
+ elif char == ' ':
+ result += 1
+ else:
+ break
+ return result
+
+
+def mute_string(text):
+ """
+ Replace contents with 'xxx' to prevent syntax matching.
+
+ >>> mute_string('"abc"')
+ '"xxx"'
+ >>> mute_string("'''abc'''")
+ "'''xxx'''"
+ >>> mute_string("r'abc'")
+ "r'xxx'"
+ """
+ # String modifiers (e.g. u or r)
+ start = text.index(text[-1]) + 1
+ end = len(text) - 1
+ # Triple quotes
+ if text[-3:] in ('"""', "'''"):
+ start += 2
+ end -= 2
+ return text[:start] + 'x' * (end - start) + text[end:]
+
+
+def parse_udiff(diff, patterns=None, parent='.'):
+ """Return a dictionary of matching lines."""
+ # For each file of the diff, the entry key is the filename,
+ # and the value is a set of row numbers to consider.
+ rv = {}
+ path = nrows = None
+ for line in diff.splitlines():
+ if nrows:
+ if line[:1] != '-':
+ nrows -= 1
+ continue
+ if line[:3] == '@@ ':
+ hunk_match = HUNK_REGEX.match(line)
+ row, nrows = [int(g or '1') for g in hunk_match.groups()]
+ rv[path].update(range(row, row + nrows))
+ elif line[:3] == '+++':
+ path = line[4:].split('\t', 1)[0]
+ if path[:2] == 'b/':
+ path = path[2:]
+ rv[path] = set()
+ return dict([(os.path.join(parent, path), rows)
+ for (path, rows) in rv.items()
+ if rows and filename_match(path, patterns)])
+
+
+def filename_match(filename, patterns, default=True):
+ """
+ Check if patterns contains a pattern that matches filename.
+ If patterns is unspecified, this always returns True.
+ """
+ if not patterns:
+ return default
+ return any(fnmatch(filename, pattern) for pattern in patterns)
+
+
+##############################################################################
+# Framework to run all checks
+##############################################################################
+
+
+_checks = {'physical_line': {}, 'logical_line': {}, 'tree': {}}
+
+
+def register_check(check, codes=None):
+ """
+ Register a new check object.
+ """
+ def _add_check(check, kind, codes, args):
+ if check in _checks[kind]:
+ _checks[kind][check][0].extend(codes or [])
+ else:
+ _checks[kind][check] = (codes or [''], args)
+ if inspect.isfunction(check):
+ args = inspect.getargspec(check)[0]
+ if args and args[0] in ('physical_line', 'logical_line'):
+ if codes is None:
+ codes = ERRORCODE_REGEX.findall(check.__doc__ or '')
+ _add_check(check, args[0], codes, args)
+ elif inspect.isclass(check):
+ if inspect.getargspec(check.__init__)[0][:2] == ['self', 'tree']:
+ _add_check(check, 'tree', codes, None)
+
+
+def init_checks_registry():
+ """
+ Register all globally visible functions where the first argument name
+ is 'physical_line' or 'logical_line'.
+ """
+ mod = inspect.getmodule(register_check)
+ for (name, function) in inspect.getmembers(mod, inspect.isfunction):
+ register_check(function)
+init_checks_registry()
+
+
+class Checker(object):
+ """
+ Load a Python source file, tokenize it, check coding style.
+ """
+
+ def __init__(self, filename=None, lines=None,
+ options=None, report=None, **kwargs):
+ if options is None:
+ options = StyleGuide(kwargs).options
+ else:
+ assert not kwargs
+ self._io_error = None
+ self._physical_checks = options.physical_checks
+ self._logical_checks = options.logical_checks
+ self._ast_checks = options.ast_checks
+ self.max_line_length = options.max_line_length
+ self.verbose = options.verbose
+ self.filename = filename
+ if filename is None:
+ self.filename = 'stdin'
+ self.lines = lines or []
+ elif filename == '-':
+ self.filename = 'stdin'
+ self.lines = stdin_get_value().splitlines(True)
+ elif lines is None:
+ try:
+ self.lines = readlines(filename)
+ except IOError:
+ exc_type, exc = sys.exc_info()[:2]
+ self._io_error = '%s: %s' % (exc_type.__name__, exc)
+ self.lines = []
+ else:
+ self.lines = lines
+ self.report = report or options.report
+ self.report_error = self.report.error
+
+ def report_invalid_syntax(self):
+ exc_type, exc = sys.exc_info()[:2]
+ offset = exc.args[1]
+ if len(offset) > 2:
+ offset = offset[1:3]
+ self.report_error(offset[0], offset[1] or 0,
+ 'E901 %s: %s' % (exc_type.__name__, exc.args[0]),
+ self.report_invalid_syntax)
+ report_invalid_syntax.__doc__ = " Check if the syntax is valid."
+
+ def readline(self):
+ """
+ Get the next line from the input buffer.
+ """
+ self.line_number += 1
+ if self.line_number > len(self.lines):
+ return ''
+ return self.lines[self.line_number - 1]
+
+ def readline_check_physical(self):
+ """
+ Check and return the next physical line. This method can be
+ used to feed tokenize.generate_tokens.
+ """
+ line = self.readline()
+ if line:
+ self.check_physical(line)
+ return line
+
+ def run_check(self, check, argument_names):
+ """
+ Run a check plugin.
+ """
+ arguments = []
+ for name in argument_names:
+ arguments.append(getattr(self, name))
+ return check(*arguments)
+
+ def check_physical(self, line):
+ """
+ Run all physical checks on a raw input line.
+ """
+ self.physical_line = line
+ if self.indent_char is None and line[:1] in WHITESPACE:
+ self.indent_char = line[0]
+ for name, check, argument_names in self._physical_checks:
+ result = self.run_check(check, argument_names)
+ if result is not None:
+ offset, text = result
+ self.report_error(self.line_number, offset, text, check)
+
+ def build_tokens_line(self):
+ """
+ Build a logical line from tokens.
+ """
+ self.mapping = []
+ logical = []
+ comments = []
+ length = 0
+ previous = None
+ for token in self.tokens:
+ token_type, text = token[0:2]
+ if token_type == tokenize.COMMENT:
+ comments.append(text)
+ continue
+ if token_type in SKIP_TOKENS:
+ continue
+ if token_type == tokenize.STRING:
+ text = mute_string(text)
+ if previous:
+ end_row, end = previous[3]
+ start_row, start = token[2]
+ if end_row != start_row: # different row
+ prev_text = self.lines[end_row - 1][end - 1]
+ if prev_text == ',' or (prev_text not in '{[('
+ and text not in '}])'):
+ logical.append(' ')
+ length += 1
+ elif end != start: # different column
+ fill = self.lines[end_row - 1][end:start]
+ logical.append(fill)
+ length += len(fill)
+ self.mapping.append((length, token))
+ logical.append(text)
+ length += len(text)
+ previous = token
+ self.logical_line = ''.join(logical)
+ self.noqa = comments and noqa(''.join(comments))
+ # With Python 2, if the line ends with '\r\r\n' the assertion fails
+ # assert self.logical_line.strip() == self.logical_line
+
+ def check_logical(self):
+ """
+ Build a line from tokens and run all logical checks on it.
+ """
+ self.build_tokens_line()
+ self.report.increment_logical_line()
+ first_line = self.lines[self.mapping[0][1][2][0] - 1]
+ indent = first_line[:self.mapping[0][1][2][1]]
+ self.previous_indent_level = self.indent_level
+ self.indent_level = expand_indent(indent)
+ if self.verbose >= 2:
+ print(self.logical_line[:80].rstrip())
+ for name, check, argument_names in self._logical_checks:
+ if self.verbose >= 4:
+ print(' ' + name)
+ for result in self.run_check(check, argument_names):
+ offset, text = result
+ if isinstance(offset, tuple):
+ orig_number, orig_offset = offset
+ else:
+ for token_offset, token in self.mapping:
+ if offset >= token_offset:
+ orig_number = token[2][0]
+ orig_offset = (token[2][1] + offset - token_offset)
+ self.report_error(orig_number, orig_offset, text, check)
+ self.previous_logical = self.logical_line
+
+ def check_ast(self):
+ try:
+ tree = compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST)
+ except SyntaxError:
+ return self.report_invalid_syntax()
+ for name, cls, _ in self._ast_checks:
+ checker = cls(tree, self.filename)
+ for lineno, offset, text, check in checker.run():
+ if not noqa(self.lines[lineno - 1]):
+ self.report_error(lineno, offset, text, check)
+
+ def generate_tokens(self):
+ if self._io_error:
+ self.report_error(1, 0, 'E902 %s' % self._io_error, readlines)
+ tokengen = tokenize.generate_tokens(self.readline_check_physical)
+ try:
+ for token in tokengen:
+ yield token
+ except (SyntaxError, tokenize.TokenError):
+ self.report_invalid_syntax()
+
+ def check_all(self, expected=None, line_offset=0):
+ """
+ Run all checks on the input file.
+ """
+ self.report.init_file(self.filename, self.lines, expected, line_offset)
+ if self._ast_checks:
+ self.check_ast()
+ self.line_number = 0
+ self.indent_char = None
+ self.indent_level = 0
+ self.previous_logical = ''
+ self.tokens = []
+ self.blank_lines = blank_lines_before_comment = 0
+ parens = 0
+ for token in self.generate_tokens():
+ self.tokens.append(token)
+ token_type, text = token[0:2]
+ if self.verbose >= 3:
+ if token[2][0] == token[3][0]:
+ pos = '[%s:%s]' % (token[2][1] or '', token[3][1])
+ else:
+ pos = 'l.%s' % token[3][0]
+ print('l.%s\t%s\t%s\t%r' %
+ (token[2][0], pos, tokenize.tok_name[token[0]], text))
+ if token_type == tokenize.OP:
+ if text in '([{':
+ parens += 1
+ elif text in '}])':
+ parens -= 1
+ elif not parens:
+ if token_type == tokenize.NEWLINE:
+ if self.blank_lines < blank_lines_before_comment:
+ self.blank_lines = blank_lines_before_comment
+ self.check_logical()
+ self.tokens = []
+ self.blank_lines = blank_lines_before_comment = 0
+ elif token_type == tokenize.NL:
+ if len(self.tokens) == 1:
+ # The physical line contains only this token.
+ self.blank_lines += 1
+ self.tokens = []
+ elif token_type == tokenize.COMMENT and len(self.tokens) == 1:
+ if blank_lines_before_comment < self.blank_lines:
+ blank_lines_before_comment = self.blank_lines
+ self.blank_lines = 0
+ if COMMENT_WITH_NL:
+ # The comment also ends a physical line
+ self.tokens = []
+ return self.report.get_file_results()
+
+
+class BaseReport(object):
+ """Collect the results of the checks."""
+ print_filename = False
+
+ def __init__(self, options):
+ self._benchmark_keys = options.benchmark_keys
+ self._ignore_code = options.ignore_code
+ # Results
+ self.elapsed = 0
+ self.total_errors = 0
+ self.counters = dict.fromkeys(self._benchmark_keys, 0)
+ self.messages = {}
+
+ def start(self):
+ """Start the timer."""
+ self._start_time = time.time()
+
+ def stop(self):
+ """Stop the timer."""
+ self.elapsed = time.time() - self._start_time
+
+ def init_file(self, filename, lines, expected, line_offset):
+ """Signal a new file."""
+ self.filename = filename
+ self.lines = lines
+ self.expected = expected or ()
+ self.line_offset = line_offset
+ self.file_errors = 0
+ self.counters['files'] += 1
+ self.counters['physical lines'] += len(lines)
+
+ def increment_logical_line(self):
+ """Signal a new logical line."""
+ self.counters['logical lines'] += 1
+
+ def error(self, line_number, offset, text, check):
+ """Report an error, according to options."""
+ code = text[:4]
+ if self._ignore_code(code):
+ return
+ if code in self.counters:
+ self.counters[code] += 1
+ else:
+ self.counters[code] = 1
+ self.messages[code] = text[5:]
+ # Don't care about expected errors or warnings
+ if code in self.expected:
+ return
+ if self.print_filename and not self.file_errors:
+ print(self.filename)
+ self.file_errors += 1
+ self.total_errors += 1
+ return code
+
+ def get_file_results(self):
+ """Return the count of errors and warnings for this file."""
+ return self.file_errors
+
+ def get_count(self, prefix=''):
+ """Return the total count of errors and warnings."""
+ return sum([self.counters[key]
+ for key in self.messages if key.startswith(prefix)])
+
+ def get_statistics(self, prefix=''):
+ """
+ Get statistics for message codes that start with the prefix.
+
+ prefix='' matches all errors and warnings
+ prefix='E' matches all errors
+ prefix='W' matches all warnings
+ prefix='E4' matches all errors that have to do with imports
+ """
+ return ['%-7s %s %s' % (self.counters[key], key, self.messages[key])
+ for key in sorted(self.messages) if key.startswith(prefix)]
+
+ def print_statistics(self, prefix=''):
+ """Print overall statistics (number of errors and warnings)."""
+ for line in self.get_statistics(prefix):
+ print(line)
+
+ def print_benchmark(self):
+ """Print benchmark numbers."""
+ print('%-7.2f %s' % (self.elapsed, 'seconds elapsed'))
+ if self.elapsed:
+ for key in self._benchmark_keys:
+ print('%-7d %s per second (%d total)' %
+ (self.counters[key] / self.elapsed, key,
+ self.counters[key]))
+
+
+class FileReport(BaseReport):
+ """Collect the results of the checks and print only the filenames."""
+ print_filename = True
+
+
+class StandardReport(BaseReport):
+ """Collect and print the results of the checks."""
+
+ def __init__(self, options):
+ super(StandardReport, self).__init__(options)
+ self._fmt = REPORT_FORMAT.get(options.format.lower(),
+ options.format)
+ self._repeat = options.repeat
+ self._show_source = options.show_source
+ self._show_pep8 = options.show_pep8
+
+ def init_file(self, filename, lines, expected, line_offset):
+ """Signal a new file."""
+ self._deferred_print = []
+ return super(StandardReport, self).init_file(
+ filename, lines, expected, line_offset)
+
+ def error(self, line_number, offset, text, check):
+ """Report an error, according to options."""
+ code = super(StandardReport, self).error(line_number, offset,
+ text, check)
+ if code and (self.counters[code] == 1 or self._repeat):
+ self._deferred_print.append(
+ (line_number, offset, code, text[5:], check.__doc__))
+ return code
+
+ def get_file_results(self):
+ """Print the result and return the overall count for this file."""
+ self._deferred_print.sort()
+ for line_number, offset, code, text, doc in self._deferred_print:
+ print(self._fmt % {
+ 'path': self.filename,
+ 'row': self.line_offset + line_number, 'col': offset + 1,
+ 'code': code, 'text': text,
+ })
+ if self._show_source:
+ if line_number > len(self.lines):
+ line = ''
+ else:
+ line = self.lines[line_number - 1]
+ print(line.rstrip())
+ print(' ' * offset + '^')
+ if self._show_pep8 and doc:
+ print(doc.lstrip('\n').rstrip())
+ return self.file_errors
+
+
+class DiffReport(StandardReport):
+ """Collect and print the results for the changed lines only."""
+
+ def __init__(self, options):
+ super(DiffReport, self).__init__(options)
+ self._selected = options.selected_lines
+
+ def error(self, line_number, offset, text, check):
+ if line_number not in self._selected[self.filename]:
+ return
+ return super(DiffReport, self).error(line_number, offset, text, check)
+
+
+class StyleGuide(object):
+ """Initialize a PEP-8 instance with few options."""
+
+ def __init__(self, *args, **kwargs):
+ # build options from the command line
+ self.checker_class = kwargs.pop('checker_class', Checker)
+ parse_argv = kwargs.pop('parse_argv', False)
+ config_file = kwargs.pop('config_file', None)
+ parser = kwargs.pop('parser', None)
+ options, self.paths = process_options(
+ parse_argv=parse_argv, config_file=config_file, parser=parser)
+ if args or kwargs:
+ # build options from dict
+ options_dict = dict(*args, **kwargs)
+ options.__dict__.update(options_dict)
+ if 'paths' in options_dict:
+ self.paths = options_dict['paths']
+
+ self.runner = self.input_file
+ self.options = options
+
+ if not options.reporter:
+ options.reporter = BaseReport if options.quiet else StandardReport
+
+ for index, value in enumerate(options.exclude):
+ options.exclude[index] = value.rstrip('/')
+ options.select = tuple(options.select or ())
+ if not (options.select or options.ignore or
+ options.testsuite or options.doctest) and DEFAULT_IGNORE:
+ # The default choice: ignore controversial checks
+ options.ignore = tuple(DEFAULT_IGNORE.split(','))
+ else:
+ # Ignore all checks which are not explicitly selected
+ options.ignore = tuple(options.ignore or options.select and ('',))
+ options.benchmark_keys = BENCHMARK_KEYS[:]
+ options.ignore_code = self.ignore_code
+ options.physical_checks = self.get_checks('physical_line')
+ options.logical_checks = self.get_checks('logical_line')
+ options.ast_checks = self.get_checks('tree')
+ self.init_report()
+
+ def init_report(self, reporter=None):
+ """Initialize the report instance."""
+ self.options.report = (reporter or self.options.reporter)(self.options)
+ return self.options.report
+
+ def check_files(self, paths=None):
+ """Run all checks on the paths."""
+ if paths is None:
+ paths = self.paths
+ report = self.options.report
+ runner = self.runner
+ report.start()
+ try:
+ for path in paths:
+ if os.path.isdir(path):
+ self.input_dir(path)
+ elif not self.excluded(path):
+ runner(path)
+ except KeyboardInterrupt:
+ print('... stopped')
+ report.stop()
+ return report
+
+ def input_file(self, filename, lines=None, expected=None, line_offset=0):
+ """Run all checks on a Python source file."""
+ if self.options.verbose:
+ print('checking %s' % filename)
+ fchecker = self.checker_class(
+ filename, lines=lines, options=self.options)
+ return fchecker.check_all(expected=expected, line_offset=line_offset)
+
+ def input_dir(self, dirname):
+ """Check all files in this directory and all subdirectories."""
+ dirname = dirname.rstrip('/')
+ if self.excluded(dirname):
+ return 0
+ counters = self.options.report.counters
+ verbose = self.options.verbose
+ filepatterns = self.options.filename
+ runner = self.runner
+ for root, dirs, files in os.walk(dirname):
+ if verbose:
+ print('directory ' + root)
+ counters['directories'] += 1
+ for subdir in sorted(dirs):
+ if self.excluded(os.path.join(root, subdir)):
+ dirs.remove(subdir)
+ for filename in sorted(files):
+ # contain a pattern that matches?
+ if ((filename_match(filename, filepatterns) and
+ not self.excluded(filename))):
+ runner(os.path.join(root, filename))
+
+ def excluded(self, filename):
+ """
+ Check if options.exclude contains a pattern that matches filename.
+ """
+ basename = os.path.basename(filename)
+ return any((filename_match(filename, self.options.exclude,
+ default=False),
+ filename_match(basename, self.options.exclude,
+ default=False)))
+
+ def ignore_code(self, code):
+ """
+ Check if the error code should be ignored.
+
+ If 'options.select' contains a prefix of the error code,
+ return False. Else, if 'options.ignore' contains a prefix of
+ the error code, return True.
+ """
+ return (code.startswith(self.options.ignore) and
+ not code.startswith(self.options.select))
+
+ def get_checks(self, argument_name):
+ """
+ Find all globally visible functions where the first argument name
+ starts with argument_name and which contain selected tests.
+ """
+ checks = []
+ for check, attrs in _checks[argument_name].items():
+ (codes, args) = attrs
+ if any(not (code and self.ignore_code(code)) for code in codes):
+ checks.append((check.__name__, check, args))
+ return sorted(checks)
+
+
+def get_parser(prog='pep8', version=__version__):
+ parser = OptionParser(prog=prog, version=version,
+ usage="%prog [options] input ...")
+ parser.config_options = [
+ 'exclude', 'filename', 'select', 'ignore', 'max-line-length', 'count',
+ 'format', 'quiet', 'show-pep8', 'show-source', 'statistics', 'verbose']
+ parser.add_option('-v', '--verbose', default=0, action='count',
+ help="print status messages, or debug with -vv")
+ parser.add_option('-q', '--quiet', default=0, action='count',
+ help="report only file names, or nothing with -qq")
+ parser.add_option('-r', '--repeat', default=True, action='store_true',
+ help="(obsolete) show all occurrences of the same error")
+ parser.add_option('--first', action='store_false', dest='repeat',
+ help="show first occurrence of each error")
+ parser.add_option('--exclude', metavar='patterns', default=DEFAULT_EXCLUDE,
+ help="exclude files or directories which match these "
+ "comma separated patterns (default: %default)")
+ parser.add_option('--filename', metavar='patterns', default='*.py',
+ help="when parsing directories, only check filenames "
+ "matching these comma separated patterns "
+ "(default: %default)")
+ parser.add_option('--select', metavar='errors', default='',
+ help="select errors and warnings (e.g. E,W6)")
+ parser.add_option('--ignore', metavar='errors', default='',
+ help="skip errors and warnings (e.g. E4,W)")
+ parser.add_option('--show-source', action='store_true',
+ help="show source code for each error")
+ parser.add_option('--show-pep8', action='store_true',
+ help="show text of PEP 8 for each error "
+ "(implies --first)")
+ parser.add_option('--statistics', action='store_true',
+ help="count errors and warnings")
+ parser.add_option('--count', action='store_true',
+ help="print total number of errors and warnings "
+ "to standard error and set exit code to 1 if "
+ "total is not null")
+ parser.add_option('--max-line-length', type='int', metavar='n',
+ default=MAX_LINE_LENGTH,
+ help="set maximum allowed line length "
+ "(default: %default)")
+ parser.add_option('--format', metavar='format', default='default',
+ help="set the error format [default|pylint|<custom>]")
+ parser.add_option('--diff', action='store_true',
+ help="report only lines changed according to the "
+ "unified diff received on STDIN")
+ group = parser.add_option_group("Testing Options")
+ if os.path.exists(TESTSUITE_PATH):
+ group.add_option('--testsuite', metavar='dir',
+ help="run regression tests from dir")
+ group.add_option('--doctest', action='store_true',
+ help="run doctest on myself")
+ group.add_option('--benchmark', action='store_true',
+ help="measure processing speed")
+ return parser
+
+
+def read_config(options, args, arglist, parser):
+ """Read both user configuration and local configuration."""
+ config = RawConfigParser()
+
+ user_conf = options.config
+ if user_conf and os.path.isfile(user_conf):
+ if options.verbose:
+ print('user configuration: %s' % user_conf)
+ config.read(user_conf)
+
+ parent = tail = args and os.path.abspath(os.path.commonprefix(args))
+ while tail:
+ if config.read([os.path.join(parent, fn) for fn in PROJECT_CONFIG]):
+ if options.verbose:
+ print('local configuration: in %s' % parent)
+ break
+ parent, tail = os.path.split(parent)
+
+ pep8_section = parser.prog
+ if config.has_section(pep8_section):
+ option_list = dict([(o.dest, o.type or o.action)
+ for o in parser.option_list])
+
+ # First, read the default values
+ new_options, _ = parser.parse_args([])
+
+ # Second, parse the configuration
+ for opt in config.options(pep8_section):
+ if options.verbose > 1:
+ print(" %s = %s" % (opt, config.get(pep8_section, opt)))
+ if opt.replace('_', '-') not in parser.config_options:
+ print("Unknown option: '%s'\n not in [%s]" %
+ (opt, ' '.join(parser.config_options)))
+ sys.exit(1)
+ normalized_opt = opt.replace('-', '_')
+ opt_type = option_list[normalized_opt]
+ if opt_type in ('int', 'count'):
+ value = config.getint(pep8_section, opt)
+ elif opt_type == 'string':
+ value = config.get(pep8_section, opt)
+ else:
+ assert opt_type in ('store_true', 'store_false')
+ value = config.getboolean(pep8_section, opt)
+ setattr(new_options, normalized_opt, value)
+
+ # Third, overwrite with the command-line options
+ options, _ = parser.parse_args(arglist, values=new_options)
+ options.doctest = options.testsuite = False
+ return options
+
+
+def process_options(arglist=None, parse_argv=False, config_file=None,
+ parser=None):
+ """Process options passed either via arglist or via command line args."""
+ if not arglist and not parse_argv:
+ # Don't read the command line if the module is used as a library.
+ arglist = []
+ if not parser:
+ parser = get_parser()
+ if not parser.has_option('--config'):
+ if config_file is True:
+ config_file = DEFAULT_CONFIG
+ group = parser.add_option_group("Configuration", description=(
+ "The project options are read from the [%s] section of the "
+ "tox.ini file or the setup.cfg file located in any parent folder "
+ "of the path(s) being processed. Allowed options are: %s." %
+ (parser.prog, ', '.join(parser.config_options))))
+ group.add_option('--config', metavar='path', default=config_file,
+ help="user config file location (default: %default)")
+ options, args = parser.parse_args(arglist)
+ options.reporter = None
+
+ if options.ensure_value('testsuite', False):
+ args.append(options.testsuite)
+ elif not options.ensure_value('doctest', False):
+ if parse_argv and not args:
+ if options.diff or any(os.path.exists(name)
+ for name in PROJECT_CONFIG):
+ args = ['.']
+ else:
+ parser.error('input not specified')
+ options = read_config(options, args, arglist, parser)
+ options.reporter = parse_argv and options.quiet == 1 and FileReport
+
+ options.filename = options.filename and options.filename.split(',')
+ options.exclude = options.exclude.split(',')
+ options.select = options.select and options.select.split(',')
+ options.ignore = options.ignore and options.ignore.split(',')
+
+ if options.diff:
+ options.reporter = DiffReport
+ stdin = stdin_get_value()
+ options.selected_lines = parse_udiff(stdin, options.filename, args[0])
+ args = sorted(options.selected_lines)
+
+ return options, args
+
+
+def _main():
+ """Parse options and run checks on Python source."""
+ pep8style = StyleGuide(parse_argv=True, config_file=True)
+ options = pep8style.options
+ if options.doctest or options.testsuite:
+ from testsuite.support import run_tests
+ report = run_tests(pep8style)
+ else:
+ report = pep8style.check_files()
+ if options.statistics:
+ report.print_statistics()
+ if options.benchmark:
+ report.print_benchmark()
+ if options.testsuite and not options.quiet:
+ report.print_results()
+ if report.total_errors:
+ if options.count:
+ sys.stderr.write(str(report.total_errors) + '\n')
+ sys.exit(1)
+
+if __name__ == '__main__':
+ _main()
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pyflakes/__init__.py b/.vim/bundle/python-mode/pylibs/pylama/pyflakes/__init__.py
@@ -0,0 +1,2 @@
+
+__version__ = '0.6.1'
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pyflakes/checker.py b/.vim/bundle/python-mode/pylibs/pylama/pyflakes/checker.py
@@ -0,0 +1,738 @@
+# -*- test-case-name: pyflakes -*-
+# (c) 2005-2010 Divmod, Inc.
+# See LICENSE file for details
+
+import os.path
+try:
+ import builtins
+ PY2 = False
+except ImportError:
+ import __builtin__ as builtins
+ PY2 = True
+
+try:
+ import ast
+ iter_child_nodes = ast.iter_child_nodes
+except ImportError: # Python 2.5
+ import _ast as ast
+
+ if 'decorator_list' not in ast.ClassDef._fields:
+ # Patch the missing attribute 'decorator_list'
+ ast.ClassDef.decorator_list = ()
+ ast.FunctionDef.decorator_list = property(lambda s: s.decorators)
+
+ def iter_child_nodes(node):
+ """
+ Yield all direct child nodes of *node*, that is, all fields that
+ are nodes and all items of fields that are lists of nodes.
+ """
+ for name in node._fields:
+ field = getattr(node, name, None)
+ if isinstance(field, ast.AST):
+ yield field
+ elif isinstance(field, list):
+ for item in field:
+ yield item
+# Python >= 3.3 uses ast.Try instead of (ast.TryExcept + ast.TryFinally)
+if hasattr(ast, 'Try'):
+ ast_TryExcept = ast.Try
+ ast_TryFinally = ()
+else:
+ ast_TryExcept = ast.TryExcept
+ ast_TryFinally = ast.TryFinally
+
+from . import messages
+
+
+class Binding(object):
+ """
+ Represents the binding of a value to a name.
+
+ The checker uses this to keep track of which names have been bound and
+ which names have not. See L{Assignment} for a special type of binding that
+ is checked with stricter rules.
+
+ @ivar used: pair of (L{Scope}, line-number) indicating the scope and
+ line number that this binding was last used
+ """
+
+ def __init__(self, name, source):
+ self.name = name
+ self.source = source
+ self.used = False
+
+ def __str__(self):
+ return self.name
+
+ def __repr__(self):
+ return '<%s object %r from line %r at 0x%x>' % (self.__class__.__name__,
+ self.name,
+ self.source.lineno,
+ id(self))
+
+
+class UnBinding(Binding):
+ """Created by the 'del' operator."""
+
+
+class Importation(Binding):
+ """
+ A binding created by an import statement.
+
+ @ivar fullName: The complete name given to the import statement,
+ possibly including multiple dotted components.
+ @type fullName: C{str}
+ """
+ def __init__(self, name, source):
+ self.fullName = name
+ name = name.split('.')[0]
+ super(Importation, self).__init__(name, source)
+
+
+class Argument(Binding):
+ """
+ Represents binding a name as an argument.
+ """
+
+
+class Definition(Binding):
+ """
+ A binding that defines a function or a class.
+ """
+
+
+class Assignment(Binding):
+ """
+ Represents binding a name with an explicit assignment.
+
+ The checker will raise warnings for any Assignment that isn't used. Also,
+ the checker does not consider assignments in tuple/list unpacking to be
+ Assignments, rather it treats them as simple Bindings.
+ """
+
+
+class FunctionDefinition(Definition):
+ pass
+
+
+class ClassDefinition(Definition):
+ pass
+
+
+class ExportBinding(Binding):
+ """
+ A binding created by an C{__all__} assignment. If the names in the list
+ can be determined statically, they will be treated as names for export and
+ additional checking applied to them.
+
+ The only C{__all__} assignment that can be recognized is one which takes
+ the value of a literal list containing literal strings. For example::
+
+ __all__ = ["foo", "bar"]
+
+ Names which are imported and not otherwise used but appear in the value of
+ C{__all__} will not have an unused import warning reported for them.
+ """
+ def names(self):
+ """
+ Return a list of the names referenced by this binding.
+ """
+ names = []
+ if isinstance(self.source, ast.List):
+ for node in self.source.elts:
+ if isinstance(node, ast.Str):
+ names.append(node.s)
+ return names
+
+
+class Scope(dict):
+ importStarred = False # set to True when import * is found
+ usesLocals = False
+
+ def __repr__(self):
+ return '<%s at 0x%x %s>' % (self.__class__.__name__, id(self), dict.__repr__(self))
+
+
+class ClassScope(Scope):
+ pass
+
+
+class FunctionScope(Scope):
+ """
+ I represent a name scope for a function.
+
+ @ivar globals: Names declared 'global' in this function.
+ """
+ def __init__(self):
+ super(FunctionScope, self).__init__()
+ self.globals = {}
+
+
+class ModuleScope(Scope):
+ pass
+
+
+# Globally defined names which are not attributes of the builtins module, or
+# are only present on some platforms.
+_MAGIC_GLOBALS = ['__file__', '__builtins__', 'WindowsError']
+
+
+def getNodeName(node):
+ # Returns node.id, or node.name, or None
+ if hasattr(node, 'id'): # One of the many nodes with an id
+ return node.id
+ if hasattr(node, 'name'): # a ExceptHandler node
+ return node.name
+
+
+class Checker(object):
+ """
+ I check the cleanliness and sanity of Python code.
+
+ @ivar _deferredFunctions: Tracking list used by L{deferFunction}. Elements
+ of the list are two-tuples. The first element is the callable passed
+ to L{deferFunction}. The second element is a copy of the scope stack
+ at the time L{deferFunction} was called.
+
+ @ivar _deferredAssignments: Similar to C{_deferredFunctions}, but for
+ callables which are deferred assignment checks.
+ """
+
+ nodeDepth = 0
+ traceTree = False
+ builtIns = set(dir(builtins)) | set(_MAGIC_GLOBALS)
+
+ def __init__(self, tree, filename='(none)', builtins=None):
+ self._deferredFunctions = []
+ self._deferredAssignments = []
+ self.deadScopes = []
+ self.messages = []
+ self.filename = filename
+ if builtins:
+ self.builtIns = self.builtIns.union(builtins)
+ self.scopeStack = [ModuleScope()]
+ self.futuresAllowed = True
+ self.root = tree
+ self.handleChildren(tree)
+ self.runDeferred(self._deferredFunctions)
+ # Set _deferredFunctions to None so that deferFunction will fail
+ # noisily if called after we've run through the deferred functions.
+ self._deferredFunctions = None
+ self.runDeferred(self._deferredAssignments)
+ # Set _deferredAssignments to None so that deferAssignment will fail
+ # noisily if called after we've run through the deferred assignments.
+ self._deferredAssignments = None
+ del self.scopeStack[1:]
+ self.popScope()
+ self.checkDeadScopes()
+
+ def deferFunction(self, callable):
+ """
+ Schedule a function handler to be called just before completion.
+
+ This is used for handling function bodies, which must be deferred
+ because code later in the file might modify the global scope. When
+ `callable` is called, the scope at the time this is called will be
+ restored, however it will contain any new bindings added to it.
+ """
+ self._deferredFunctions.append((callable, self.scopeStack[:]))
+
+ def deferAssignment(self, callable):
+ """
+ Schedule an assignment handler to be called just after deferred
+ function handlers.
+ """
+ self._deferredAssignments.append((callable, self.scopeStack[:]))
+
+ def runDeferred(self, deferred):
+ """
+ Run the callables in C{deferred} using their associated scope stack.
+ """
+ for handler, scope in deferred:
+ self.scopeStack = scope
+ handler()
+
+ @property
+ def scope(self):
+ return self.scopeStack[-1]
+
+ def popScope(self):
+ self.deadScopes.append(self.scopeStack.pop())
+
+ def checkDeadScopes(self):
+ """
+ Look at scopes which have been fully examined and report names in them
+ which were imported but unused.
+ """
+ for scope in self.deadScopes:
+ export = isinstance(scope.get('__all__'), ExportBinding)
+ if export:
+ all = scope['__all__'].names()
+ if not scope.importStarred and os.path.basename(self.filename) != '__init__.py':
+ # Look for possible mistakes in the export list
+ undefined = set(all) - set(scope)
+ for name in undefined:
+ self.report(messages.UndefinedExport,
+ scope['__all__'].source.lineno, name)
+ else:
+ all = []
+
+ # Look for imported names that aren't used.
+ for importation in scope.values():
+ if isinstance(importation, Importation):
+ if not importation.used and importation.name not in all:
+ self.report(messages.UnusedImport,
+ importation.source.lineno, importation.name)
+
+ def pushFunctionScope(self):
+ self.scopeStack.append(FunctionScope())
+
+ def pushClassScope(self):
+ self.scopeStack.append(ClassScope())
+
+ def report(self, messageClass, *args, **kwargs):
+ self.messages.append(messageClass(self.filename, *args, **kwargs))
+
+ def hasParent(self, node, kind):
+ while hasattr(node, 'parent'):
+ node = node.parent
+ if isinstance(node, kind):
+ return True
+
+ def getCommonAncestor(self, lnode, rnode, stop=None):
+ if not stop:
+ stop = self.root
+ if lnode is rnode:
+ return lnode
+ if stop in (lnode, rnode):
+ return stop
+
+ if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'):
+ return
+ if (lnode.level > rnode.level):
+ return self.getCommonAncestor(lnode.parent, rnode, stop)
+ if (rnode.level > lnode.level):
+ return self.getCommonAncestor(lnode, rnode.parent, stop)
+ return self.getCommonAncestor(lnode.parent, rnode.parent, stop)
+
+ def descendantOf(self, node, ancestors, stop=None):
+ for a in ancestors:
+ if self.getCommonAncestor(node, a, stop) not in (stop, None):
+ return True
+ return False
+
+ def onFork(self, parent, lnode, rnode, items):
+ return (self.descendantOf(lnode, items, parent) ^
+ self.descendantOf(rnode, items, parent))
+
+ def differentForks(self, lnode, rnode):
+ """True, if lnode and rnode are located on different forks of IF/TRY"""
+ ancestor = self.getCommonAncestor(lnode, rnode)
+ if isinstance(ancestor, ast.If):
+ for fork in (ancestor.body, ancestor.orelse):
+ if self.onFork(ancestor, lnode, rnode, fork):
+ return True
+ elif isinstance(ancestor, ast_TryExcept):
+ body = ancestor.body + ancestor.orelse
+ for fork in [body] + [[hdl] for hdl in ancestor.handlers]:
+ if self.onFork(ancestor, lnode, rnode, fork):
+ return True
+ elif isinstance(ancestor, ast_TryFinally):
+ if self.onFork(ancestor, lnode, rnode, ancestor.body):
+ return True
+ return False
+
+ def addBinding(self, node, value, reportRedef=True):
+ """
+ Called when a binding is altered.
+
+ - `node` is the statement responsible for the change
+ - `value` is the optional new value, a Binding instance, associated
+ with the binding; if None, the binding is deleted if it exists.
+ - if `reportRedef` is True (default), rebinding while unused will be
+ reported.
+ """
+ redefinedWhileUnused = False
+ if not isinstance(self.scope, ClassScope):
+ for scope in self.scopeStack[::-1]:
+ existing = scope.get(value.name)
+ if (isinstance(existing, Importation)
+ and not existing.used
+ and (not isinstance(value, Importation) or value.fullName == existing.fullName)
+ and reportRedef
+ and not self.differentForks(node, existing.source)):
+ redefinedWhileUnused = True
+ self.report(messages.RedefinedWhileUnused,
+ node.lineno, value.name, existing.source.lineno)
+
+ existing = self.scope.get(value.name)
+ if not redefinedWhileUnused and self.hasParent(value.source, ast.ListComp):
+ if (existing and reportRedef
+ and not self.hasParent(existing.source, (ast.For, ast.ListComp))
+ and not self.differentForks(node, existing.source)):
+ self.report(messages.RedefinedInListComp,
+ node.lineno, value.name, existing.source.lineno)
+
+ if isinstance(value, UnBinding):
+ try:
+ del self.scope[value.name]
+ except KeyError:
+ self.report(messages.UndefinedName, node.lineno, value.name)
+ elif (isinstance(existing, Definition)
+ and not existing.used
+ and not self.differentForks(node, existing.source)):
+ self.report(messages.RedefinedWhileUnused,
+ node.lineno, value.name, existing.source.lineno)
+ else:
+ self.scope[value.name] = value
+
+ def handleNodeLoad(self, node):
+ name = getNodeName(node)
+ if not name:
+ return
+ # try local scope
+ importStarred = self.scope.importStarred
+ try:
+ self.scope[name].used = (self.scope, node.lineno)
+ except KeyError:
+ pass
+ else:
+ return
+
+ # try enclosing function scopes
+ for scope in self.scopeStack[-2:0:-1]:
+ importStarred = importStarred or scope.importStarred
+ if not isinstance(scope, FunctionScope):
+ continue
+ try:
+ scope[name].used = (self.scope, node.lineno)
+ except KeyError:
+ pass
+ else:
+ return
+
+ # try global scope
+ importStarred = importStarred or self.scopeStack[0].importStarred
+ try:
+ self.scopeStack[0][name].used = (self.scope, node.lineno)
+ except KeyError:
+ if not importStarred and name not in self.builtIns:
+ if (os.path.basename(self.filename) == '__init__.py' and name == '__path__'):
+ # the special name __path__ is valid only in packages
+ pass
+ else:
+ self.report(messages.UndefinedName, node.lineno, name)
+
+ def handleNodeStore(self, node):
+ name = getNodeName(node)
+ if not name:
+ return
+ # if the name hasn't already been defined in the current scope
+ if isinstance(self.scope, FunctionScope) and name not in self.scope:
+ # for each function or module scope above us
+ for scope in self.scopeStack[:-1]:
+ if not isinstance(scope, (FunctionScope, ModuleScope)):
+ continue
+ # if the name was defined in that scope, and the name has
+ # been accessed already in the current scope, and hasn't
+ # been declared global
+ if (name in scope and scope[name].used and scope[name].used[0] is self.scope
+ and name not in self.scope.globals):
+ # then it's probably a mistake
+ self.report(messages.UndefinedLocal,
+ scope[name].used[1], name, scope[name].source.lineno)
+ break
+
+ parent = getattr(node, 'parent', None)
+ if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)):
+ binding = Binding(name, node)
+ elif parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope):
+ binding = ExportBinding(name, parent.value)
+ else:
+ binding = Assignment(name, node)
+ if name in self.scope:
+ binding.used = self.scope[name].used
+ self.addBinding(node, binding)
+
+ def handleNodeDelete(self, node):
+ name = getNodeName(node)
+ if not name:
+ return
+ if isinstance(self.scope, FunctionScope) and name in self.scope.globals:
+ del self.scope.globals[name]
+ else:
+ self.addBinding(node, UnBinding(name, node))
+
+ def handleChildren(self, tree):
+ for node in iter_child_nodes(tree):
+ self.handleNode(node, tree)
+
+ def isDocstring(self, node):
+ """
+ Determine if the given node is a docstring, as long as it is at the
+ correct place in the node tree.
+ """
+ return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and
+ isinstance(node.value, ast.Str))
+
+ def handleNode(self, node, parent):
+ if node is None:
+ return
+ node.parent = parent
+ if self.traceTree:
+ print(' ' * self.nodeDepth + node.__class__.__name__)
+ self.nodeDepth += 1
+ if self.futuresAllowed and not (isinstance(node, ast.ImportFrom) or
+ self.isDocstring(node)):
+ self.futuresAllowed = False
+ nodeType = node.__class__.__name__.upper()
+ node.level = self.nodeDepth
+ try:
+ handler = getattr(self, nodeType)
+ handler(node)
+ finally:
+ self.nodeDepth -= 1
+ if self.traceTree:
+ print(' ' * self.nodeDepth + 'end ' + node.__class__.__name__)
+
+ def ignore(self, node):
+ pass
+
+ # "stmt" type nodes
+ RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = \
+ TRYEXCEPT = TRYFINALLY = TRY = ASSERT = EXEC = EXPR = handleChildren
+
+ CONTINUE = BREAK = PASS = ignore
+
+ # "expr" type nodes
+ BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = \
+ COMPARE = CALL = REPR = ATTRIBUTE = SUBSCRIPT = LIST = TUPLE = \
+ STARRED = handleChildren
+
+ NUM = STR = BYTES = ELLIPSIS = ignore
+
+ # "slice" type nodes
+ SLICE = EXTSLICE = INDEX = handleChildren
+
+ # expression contexts are node instances too, though being constants
+ LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore
+
+ # same for operators
+ AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = \
+ BITOR = BITXOR = BITAND = FLOORDIV = INVERT = NOT = UADD = USUB = \
+ EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore
+
+ # additional node types
+ COMPREHENSION = KEYWORD = handleChildren
+
+ def GLOBAL(self, node):
+ """
+ Keep track of globals declarations.
+ """
+ if isinstance(self.scope, FunctionScope):
+ self.scope.globals.update(dict.fromkeys(node.names))
+
+ NONLOCAL = GLOBAL
+
+ def LISTCOMP(self, node):
+ # handle generators before element
+ for gen in node.generators:
+ self.handleNode(gen, node)
+ self.handleNode(node.elt, node)
+
+ def GENERATOREXP(self, node):
+ self.pushFunctionScope()
+ # handle generators before element
+ for gen in node.generators:
+ self.handleNode(gen, node)
+ self.handleNode(node.elt, node)
+ self.popScope()
+
+ SETCOMP = GENERATOREXP
+
+ def DICTCOMP(self, node):
+ self.pushFunctionScope()
+ for gen in node.generators:
+ self.handleNode(gen, node)
+ self.handleNode(node.key, node)
+ self.handleNode(node.value, node)
+ self.popScope()
+
+ def FOR(self, node):
+ """
+ Process bindings for loop variables.
+ """
+ vars = []
+
+ def collectLoopVars(n):
+ if isinstance(n, ast.Name):
+ vars.append(n.id)
+ elif isinstance(n, ast.expr_context):
+ return
+ else:
+ for c in iter_child_nodes(n):
+ collectLoopVars(c)
+
+ collectLoopVars(node.target)
+ for varn in vars:
+ if (isinstance(self.scope.get(varn), Importation)
+ # unused ones will get an unused import warning
+ and self.scope[varn].used):
+ self.report(messages.ImportShadowedByLoopVar,
+ node.lineno, varn, self.scope[varn].source.lineno)
+
+ self.handleChildren(node)
+
+ def NAME(self, node):
+ """
+ Handle occurrence of Name (which can be a load/store/delete access.)
+ """
+ if node.id == 'locals' and isinstance(node.parent, ast.Call):
+ # we are doing locals() call in current scope
+ self.scope.usesLocals = True
+ # Locate the name in locals / function / globals scopes.
+ if isinstance(node.ctx, (ast.Load, ast.AugLoad)):
+ self.handleNodeLoad(node)
+ elif isinstance(node.ctx, (ast.Store, ast.AugStore)):
+ self.handleNodeStore(node)
+ elif isinstance(node.ctx, ast.Del):
+ self.handleNodeDelete(node)
+ else:
+ # must be a Param context -- this only happens for names in function
+ # arguments, but these aren't dispatched through here
+ raise RuntimeError("Got impossible expression context: %r" % (node.ctx,))
+
+ def FUNCTIONDEF(self, node):
+ for deco in node.decorator_list:
+ self.handleNode(deco, node)
+ self.addBinding(node, FunctionDefinition(node.name, node))
+ self.LAMBDA(node)
+
+ def LAMBDA(self, node):
+ args = []
+
+ if PY2:
+ def addArgs(arglist):
+ for arg in arglist:
+ if isinstance(arg, ast.Tuple):
+ addArgs(arg.elts)
+ else:
+ if arg.id in args:
+ self.report(messages.DuplicateArgument,
+ node.lineno, arg.id)
+ args.append(arg.id)
+ addArgs(node.args.args)
+ defaults = node.args.defaults
+ else:
+ for arg in node.args.args + node.args.kwonlyargs:
+ if arg.arg in args:
+ self.report(messages.DuplicateArgument,
+ node.lineno, arg.arg)
+ args.append(arg.arg)
+ self.handleNode(arg.annotation, node)
+ if hasattr(node, 'returns'): # Only for FunctionDefs
+ for annotation in (node.args.varargannotation,
+ node.args.kwargannotation, node.returns):
+ self.handleNode(annotation, node)
+ defaults = node.args.defaults + node.args.kw_defaults
+
+ # vararg/kwarg identifiers are not Name nodes
+ for wildcard in (node.args.vararg, node.args.kwarg):
+ if not wildcard:
+ continue
+ if wildcard in args:
+ self.report(messages.DuplicateArgument, node.lineno, wildcard)
+ args.append(wildcard)
+ for default in defaults:
+ self.handleNode(default, node)
+
+ def runFunction():
+
+ self.pushFunctionScope()
+ for name in args:
+ self.addBinding(node, Argument(name, node), reportRedef=False)
+ if isinstance(node.body, list):
+ # case for FunctionDefs
+ for stmt in node.body:
+ self.handleNode(stmt, node)
+ else:
+ # case for Lambdas
+ self.handleNode(node.body, node)
+
+ def checkUnusedAssignments():
+ """
+ Check to see if any assignments have not been used.
+ """
+ for name, binding in self.scope.items():
+ if (not binding.used and name not in self.scope.globals
+ and not self.scope.usesLocals
+ and isinstance(binding, Assignment)):
+ self.report(messages.UnusedVariable,
+ binding.source.lineno, name)
+ self.deferAssignment(checkUnusedAssignments)
+ self.popScope()
+
+ self.deferFunction(runFunction)
+
+ def CLASSDEF(self, node):
+ """
+ Check names used in a class definition, including its decorators, base
+ classes, and the body of its definition. Additionally, add its name to
+ the current scope.
+ """
+ for deco in node.decorator_list:
+ self.handleNode(deco, node)
+ for baseNode in node.bases:
+ self.handleNode(baseNode, node)
+ if not PY2:
+ for keywordNode in node.keywords:
+ self.handleNode(keywordNode, node)
+ self.pushClassScope()
+ for stmt in node.body:
+ self.handleNode(stmt, node)
+ self.popScope()
+ self.addBinding(node, ClassDefinition(node.name, node))
+
+ def ASSIGN(self, node):
+ self.handleNode(node.value, node)
+ for target in node.targets:
+ self.handleNode(target, node)
+
+ def AUGASSIGN(self, node):
+ self.handleNodeLoad(node.target)
+ self.handleNode(node.value, node)
+ self.handleNode(node.target, node)
+
+ def IMPORT(self, node):
+ for alias in node.names:
+ name = alias.asname or alias.name
+ importation = Importation(name, node)
+ self.addBinding(node, importation)
+
+ def IMPORTFROM(self, node):
+ if node.module == '__future__':
+ if not self.futuresAllowed:
+ self.report(messages.LateFutureImport,
+ node.lineno, [n.name for n in node.names])
+ else:
+ self.futuresAllowed = False
+
+ for alias in node.names:
+ if alias.name == '*':
+ self.scope.importStarred = True
+ self.report(messages.ImportStarUsed, node.lineno, node.module)
+ continue
+ name = alias.asname or alias.name
+ importation = Importation(name, node)
+ if node.module == '__future__':
+ importation.used = (self.scope, node.lineno)
+ self.addBinding(node, importation)
+
+ def EXCEPTHANDLER(self, node):
+ # 3.x: in addition to handling children, we must handle the name of
+ # the exception, which is not a Name node, but a simple string.
+ if isinstance(node.name, str):
+ self.handleNodeStore(node)
+ self.handleChildren(node)
+
+# pymode:lint=0
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pyflakes/messages.py b/.vim/bundle/python-mode/pylibs/pylama/pyflakes/messages.py
@@ -0,0 +1,113 @@
+# (c) 2005 Divmod, Inc. See LICENSE file for details
+
+
+class Message(object):
+ message = ''
+ message_args = ()
+
+ def __init__(self, filename, lineno):
+ self.filename = filename
+ self.lineno = lineno
+
+ def __str__(self):
+ return '%s:%s: %s' % (self.filename, self.lineno, self.message % self.message_args)
+
+
+class UnusedImport(Message):
+ message = 'W402 %r imported but unused'
+
+ def __init__(self, filename, lineno, name):
+ Message.__init__(self, filename, lineno)
+ self.message_args = (name,)
+
+
+class RedefinedWhileUnused(Message):
+ message = 'W801 redefinition of unused %r from line %r'
+
+ def __init__(self, filename, lineno, name, orig_lineno):
+ Message.__init__(self, filename, lineno)
+ self.message_args = (name, orig_lineno)
+
+
+class RedefinedInListComp(Message):
+    message = 'W801 list comprehension redefines %r from line %r'
+
+    def __init__(self, filename, lineno, name, orig_lineno):
+        Message.__init__(self, filename, lineno)
+        self.message_args = (name, orig_lineno)
+
+
+class ImportShadowedByLoopVar(Message):
+    message = 'W403 import %r from line %r shadowed by loop variable'
+
+    def __init__(self, filename, lineno, name, orig_lineno):
+        Message.__init__(self, filename, lineno)
+        self.message_args = (name, orig_lineno)
+
+
+class ImportStarUsed(Message):
+    message = "W404 'from %s import *' used; unable to detect undefined names"
+
+    def __init__(self, filename, lineno, modname):
+        Message.__init__(self, filename, lineno)
+        self.message_args = (modname,)
+
+
+class UndefinedName(Message):
+ message = 'W802 undefined name %r'
+
+ def __init__(self, filename, lineno, name):
+ Message.__init__(self, filename, lineno)
+ self.message_args = (name,)
+
+
+class UndefinedExport(Message):
+ message = 'W803 undefined name %r in __all__'
+
+ def __init__(self, filename, lineno, name):
+ Message.__init__(self, filename, lineno)
+ self.message_args = (name,)
+
+
+class UndefinedLocal(Message):
+ message = "W804 local variable %r (defined in enclosing scope on line %r) referenced before assignment"
+
+ def __init__(self, filename, lineno, name, orig_lineno):
+ Message.__init__(self, filename, lineno)
+ self.message_args = (name, orig_lineno)
+
+
+class DuplicateArgument(Message):
+ message = 'W805 duplicate argument %r in function definition'
+
+ def __init__(self, filename, lineno, name):
+ Message.__init__(self, filename, lineno)
+ self.message_args = (name,)
+
+
+class Redefined(Message):
+ message = 'W806 redefinition of %r from line %r'
+
+ def __init__(self, filename, lineno, name, orig_lineno):
+ Message.__init__(self, filename, lineno)
+ self.message_args = (name, orig_lineno)
+
+
+class LateFutureImport(Message):
+ message = 'W405 future import(s) %r after other statements'
+
+ def __init__(self, filename, lineno, names):
+ Message.__init__(self, filename, lineno)
+ self.message_args = (names,)
+
+
+class UnusedVariable(Message):
+ """
+ Indicates that a variable has been explicity assigned to but not actually
+ used.
+ """
+ message = 'W806 local variable %r is assigned to but never used'
+
+ def __init__(self, filename, lineno, names):
+ Message.__init__(self, filename, lineno)
+ self.message_args = (names,)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint.rc b/.vim/bundle/python-mode/pylibs/pylama/pylint.rc
@@ -0,0 +1,25 @@
+[MESSAGES CONTROL]
+# Disable the message(s) with the given id(s).
+# http://pylint-messages.wikidot.com/all-codes
+#
+# C0103: Invalid name "%s" (should match %s)
+# C0111: Missing docstring
+# E1101: %s %r has no %r member
+# R0901: Too many ancestors (%s/%s)
+# R0902: Too many instance attributes (%s/%s)
+# R0903: Too few public methods (%s/%s)
+# R0904: Too many public methods (%s/%s)
+# R0913: Too many arguments (%s/%s)
+# R0915: Too many statements (%s/%s)
+# W0141: Used builtin function %r
+# W0142: Used * or ** magic
+# W0221: Arguments number differs from %s method
+# W0232: Class has no __init__ method
+# W0401: Wildcard import %s
+# W0613: Unused argument %r
+# W0631: Using possibly undefined loop variable %r
+#
+disable = C0103,C0111,E1101,R0901,R0902,R0903,R0904,R0913,R0915,W0141,W0142,W0221,W0232,W0401,W0613,W0631
+
+[TYPECHECK]
+generated-members = REQUEST,acl_users,aq_parent,objects,DoesNotExist,_meta,status_code,content,context
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/__init__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/__init__.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+import sys
+
+def run_pylint():
+ """run pylint"""
+ from .lint import Run
+ Run(sys.argv[1:])
+
+def run_pylint_gui():
+ """run pylint-gui"""
+ try:
+ from pylint.gui import Run
+ Run(sys.argv[1:])
+ except ImportError:
+ sys.exit('tkinter is not available')
+
+def run_epylint():
+ """run pylint"""
+ from pylint.epylint import Run
+ Run()
+
+def run_pyreverse():
+ """run pyreverse"""
+ from pylint.pyreverse.main import Run
+ Run(sys.argv[1:])
+
+def run_symilar():
+ """run symilar"""
+ from .checkers.similar import Run
+ Run(sys.argv[1:])
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/__pkginfo__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/__pkginfo__.py
@@ -0,0 +1,70 @@
+# pylint: disable=W0622,C0103
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""pylint packaging information"""
+
+modname = distname = 'pylint'
+
+numversion = (0, 27, 0)
+version = '.'.join([str(num) for num in numversion])
+
+install_requires = ['logilab-common >= 0.53.0', 'logilab-astng >= 0.21.1']
+
+license = 'GPL'
+copyright = 'Logilab S.A.'
+description = "python code static checker"
+web = "http://www.logilab.org/project/%s" % distname
+ftp = "ftp://ftp.logilab.org/pub/%s" % modname
+mailinglist = "mailto://python-projects@lists.logilab.org"
+author = 'Logilab'
+author_email = 'python-projects@lists.logilab.org'
+
+classifiers = ['Development Status :: 4 - Beta',
+ 'Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: GNU General Public License (GPL)',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3',
+ 'Topic :: Software Development :: Debuggers',
+ 'Topic :: Software Development :: Quality Assurance',
+ 'Topic :: Software Development :: Testing',
+ ]
+
+
+long_desc = """\
+ Pylint is a Python source code analyzer which looks for programming
+ errors, helps enforcing a coding standard and sniffs for some code
+ smells (as defined in Martin Fowler's Refactoring book)
+ .
+ Pylint can be seen as another PyChecker since nearly all tests you
+ can do with PyChecker can also be done with Pylint. However, Pylint
+ offers some more features, like checking length of lines of code,
+ checking if variable names are well-formed according to your coding
+ standard, or checking if declared interfaces are truly implemented,
+ and much more.
+ .
+ Additionally, it is possible to write plugins to add your own checks.
+ .
+ Pylint is shipped with "pylint-gui", "pyreverse" (UML diagram generator)
+ and "symilar" (an independent similarities checker)."""
+
+from os.path import join
+scripts = [join('bin', filename)
+ for filename in ('pylint', 'pylint-gui', "symilar", "epylint",
+ "pyreverse")]
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/__init__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/__init__.py
@@ -0,0 +1,163 @@
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""utilities methods and classes for checkers
+
+Base id of standard checkers (used in msg and report ids):
+01: base
+02: classes
+03: format
+04: import
+05: misc
+06: variables
+07: exceptions
+08: similar
+09: design_analysis
+10: newstyle
+11: typecheck
+12: logging
+13: string_format
+14: string_constant
+14-50: not yet used: reserved for future internal checkers.
+51-99: perhaps used: reserved for external checkers
+
+The raw_metrics checker has no number associated since it doesn't emit any
+messages nor reports. XXX not true, emit a 07 report !
+
+"""
+
+import tokenize
+from os import listdir
+from os.path import dirname, join, isdir, splitext
+
+from ..logilab.astng.utils import ASTWalker
+from ..logilab.common.modutils import load_module_from_file
+from ..logilab.common.configuration import OptionsProviderMixIn
+
+from ..reporters import diff_string, EmptyReport
+
+def table_lines_from_stats(stats, old_stats, columns):
+ """get values listed in <columns> from <stats> and <old_stats>,
+ and return a formated list of values, designed to be given to a
+ ureport.Table object
+ """
+ lines = []
+ for m_type in columns:
+ new = stats[m_type]
+ format = str
+ if isinstance(new, float):
+ format = lambda num: '%.3f' % num
+ old = old_stats.get(m_type)
+ if old is not None:
+ diff_str = diff_string(old, new)
+ old = format(old)
+ else:
+ old, diff_str = 'NC', 'NC'
+ lines += (m_type.replace('_', ' '), format(new), old, diff_str)
+ return lines
+
+
+class BaseChecker(OptionsProviderMixIn, ASTWalker):
+ """base class for checkers"""
+ # checker name (you may reuse an existing one)
+ name = None
+ # options level (0 will be displaying in --help, 1 in --long-help)
+ level = 1
+ # ordered list of options to control the ckecker behaviour
+ options = ()
+ # messages issued by this checker
+ msgs = {}
+ # reports issued by this checker
+ reports = ()
+
+ def __init__(self, linter=None):
+ """checker instances should have the linter as argument
+
+ linter is an object implementing ILinter
+ """
+ ASTWalker.__init__(self, self)
+ self.name = self.name.lower()
+ OptionsProviderMixIn.__init__(self)
+ self.linter = linter
+ # messages that are active for the current check
+ self.active_msgs = set()
+
+ def add_message(self, msg_id, line=None, node=None, args=None):
+ """add a message of a given type"""
+ self.linter.add_message(msg_id, line, node, args)
+
+ def package_dir(self):
+ """return the base directory for the analysed package"""
+ return dirname(self.linter.base_file)
+
+ # dummy methods implementing the IChecker interface
+
+ def open(self):
+ """called before visiting project (i.e set of modules)"""
+
+ def close(self):
+ """called after visiting project (i.e set of modules)"""
+
+
+class BaseRawChecker(BaseChecker):
+ """base class for raw checkers"""
+
+ def process_module(self, node):
+ """process a module
+
+ the module's content is accessible via the stream object
+
+ stream must implement the readline method
+ """
+ stream = node.file_stream
+ stream.seek(0) # XXX may be removed with astng > 0.23
+ self.process_tokens(tokenize.generate_tokens(stream.readline))
+
+ def process_tokens(self, tokens):
+ """should be overridden by subclasses"""
+ raise NotImplementedError()
+
+
+PY_EXTS = ('.py', '.pyc', '.pyo', '.pyw', '.so', '.dll')
+
+def initialize(linter):
+ """initialize linter with checkers in this package """
+ package_load(linter, __path__[0])
+
+def package_load(linter, directory):
+ """load all module and package in the given directory, looking for a
+ 'register' function in each one, used to register pylint checkers
+ """
+ imported = {}
+ for filename in listdir(directory):
+ basename, extension = splitext(filename)
+ if basename in imported or basename == '__pycache__':
+ continue
+ if extension in PY_EXTS and basename != '__init__' or (
+ not extension and isdir(join(directory, basename))):
+ try:
+ module = load_module_from_file(join(directory, filename))
+ except ValueError:
+ # empty module name (usually emacs auto-save files)
+ continue
+ except ImportError, exc:
+ import sys
+ print >> sys.stderr, "Problem importing module %s: %s" % (filename, exc)
+ else:
+ if hasattr(module, 'register'):
+ module.register(linter)
+ imported[basename] = 1
+
+__all__ = ('BaseChecker', 'initialize', 'package_load')
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/base.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/base.py
@@ -0,0 +1,851 @@
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# Copyright (c) 2009-2010 Arista Networks, Inc.
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""basic checker for Python code"""
+
+
+from ..logilab import astng
+from ..logilab.common.ureports import Table
+from ..logilab.astng import are_exclusive
+
+from ..interfaces import IASTNGChecker
+from ..reporters import diff_string
+from ..checkers import BaseChecker, EmptyReport
+from ..checkers.utils import (
+ check_messages,
+ clobber_in_except,
+ is_inside_except,
+ safe_infer,
+ )
+
+
+import re
+
+# regex for class/function/variable/constant name
+CLASS_NAME_RGX = re.compile('[A-Z_][a-zA-Z0-9]+$')
+MOD_NAME_RGX = re.compile('(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$')
+CONST_NAME_RGX = re.compile('(([A-Z_][A-Z0-9_]*)|(__.*__))$')
+COMP_VAR_RGX = re.compile('[A-Za-z_][A-Za-z0-9_]*$')
+DEFAULT_NAME_RGX = re.compile('[a-z_][a-z0-9_]{2,30}$')
+# do not require a doc string on system methods
+NO_REQUIRED_DOC_RGX = re.compile('__.*__')
+
+del re
+
+def in_loop(node):
+ """return True if the node is inside a kind of for loop"""
+ parent = node.parent
+ while parent is not None:
+ if isinstance(parent, (astng.For, astng.ListComp, astng.SetComp,
+ astng.DictComp, astng.GenExpr)):
+ return True
+ parent = parent.parent
+ return False
+
+def in_nested_list(nested_list, obj):
+ """return true if the object is an element of <nested_list> or of a nested
+ list
+ """
+ for elmt in nested_list:
+ if isinstance(elmt, (list, tuple)):
+ if in_nested_list(elmt, obj):
+ return True
+ elif elmt == obj:
+ return True
+ return False
+
+def report_by_type_stats(sect, stats, old_stats):
+ """make a report of
+
+ * percentage of different types documented
+ * percentage of different types with a bad name
+ """
+ # percentage of different types documented and/or with a bad name
+ nice_stats = {}
+ for node_type in ('module', 'class', 'method', 'function'):
+ try:
+ total = stats[node_type]
+ except KeyError:
+ raise EmptyReport()
+ nice_stats[node_type] = {}
+ if total != 0:
+ try:
+ documented = total - stats['undocumented_'+node_type]
+ percent = (documented * 100.) / total
+ nice_stats[node_type]['percent_documented'] = '%.2f' % percent
+ except KeyError:
+ nice_stats[node_type]['percent_documented'] = 'NC'
+ try:
+ percent = (stats['badname_'+node_type] * 100.) / total
+ nice_stats[node_type]['percent_badname'] = '%.2f' % percent
+ except KeyError:
+ nice_stats[node_type]['percent_badname'] = 'NC'
+ lines = ('type', 'number', 'old number', 'difference',
+ '%documented', '%badname')
+ for node_type in ('module', 'class', 'method', 'function'):
+ new = stats[node_type]
+ old = old_stats.get(node_type, None)
+ if old is not None:
+ diff_str = diff_string(old, new)
+ else:
+ old, diff_str = 'NC', 'NC'
+ lines += (node_type, str(new), str(old), diff_str,
+ nice_stats[node_type].get('percent_documented', '0'),
+ nice_stats[node_type].get('percent_badname', '0'))
+ sect.append(Table(children=lines, cols=6, rheaders=1))
+
+def redefined_by_decorator(node):
+ """return True if the object is a method redefined via decorator.
+
+ For example:
+ @property
+ def x(self): return self._x
+ @x.setter
+ def x(self, value): self._x = value
+ """
+ if node.decorators:
+ for decorator in node.decorators.nodes:
+ if (isinstance(decorator, astng.Getattr) and
+ getattr(decorator.expr, 'name', None) == node.name):
+ return True
+ return False
+
+class _BasicChecker(BaseChecker):
+ __implements__ = IASTNGChecker
+ name = 'basic'
+
+class BasicErrorChecker(_BasicChecker):
+ msgs = {
+ 'E0100': ('__init__ method is a generator',
+ 'init-is-generator',
+ 'Used when the special class method __init__ is turned into a '
+ 'generator by a yield in its body.'),
+ 'E0101': ('Explicit return in __init__',
+ 'return-in-init',
+ 'Used when the special class method __init__ has an explicit \
+ return value.'),
+ 'E0102': ('%s already defined line %s',
+ 'function-redefined',
+ 'Used when a function / class / method is redefined.'),
+ 'E0103': ('%r not properly in loop',
+ 'not-in-loop',
+ 'Used when break or continue keywords are used outside a loop.'),
+
+ 'E0104': ('Return outside function',
+ 'return-outside-function',
+ 'Used when a "return" statement is found outside a function or '
+ 'method.'),
+ 'E0105': ('Yield outside function',
+ 'yield-outside-function',
+ 'Used when a "yield" statement is found outside a function or '
+ 'method.'),
+ 'E0106': ('Return with argument inside generator',
+ 'return-arg-in-generator',
+ 'Used when a "return" statement with an argument is found '
+ 'outside in a generator function or method (e.g. with some '
+ '"yield" statements).'),
+ 'E0107': ("Use of the non-existent %s operator",
+ 'nonexistent-operator',
+ "Used when you attempt to use the C-style pre-increment or"
+ "pre-decrement operator -- and ++, which doesn't exist in Python."),
+ }
+
+ def __init__(self, linter):
+ _BasicChecker.__init__(self, linter)
+
+ @check_messages('E0102')
+ def visit_class(self, node):
+ self._check_redefinition('class', node)
+
+ @check_messages('E0100', 'E0101', 'E0102', 'E0106')
+ def visit_function(self, node):
+ if not redefined_by_decorator(node):
+ self._check_redefinition(node.is_method() and 'method' or 'function', node)
+ # checks for max returns, branch, return in __init__
+ returns = node.nodes_of_class(astng.Return,
+ skip_klass=(astng.Function, astng.Class))
+ if node.is_method() and node.name == '__init__':
+ if node.is_generator():
+ self.add_message('E0100', node=node)
+ else:
+ values = [r.value for r in returns]
+ if [v for v in values if not (v is None or
+ (isinstance(v, astng.Const) and v.value is None)
+ or (isinstance(v, astng.Name) and v.name == 'None'))]:
+ self.add_message('E0101', node=node)
+ elif node.is_generator():
+ # make sure we don't mix non-None returns and yields
+ for retnode in returns:
+ if isinstance(retnode.value, astng.Const) and \
+ retnode.value.value is not None:
+ self.add_message('E0106', node=node,
+ line=retnode.fromlineno)
+
+ @check_messages('E0104')
+ def visit_return(self, node):
+ if not isinstance(node.frame(), astng.Function):
+ self.add_message('E0104', node=node)
+
+ @check_messages('E0105')
+ def visit_yield(self, node):
+ if not isinstance(node.frame(), astng.Function):
+ self.add_message('E0105', node=node)
+
+ @check_messages('E0103')
+ def visit_continue(self, node):
+ self._check_in_loop(node, 'continue')
+
+ @check_messages('E0103')
+ def visit_break(self, node):
+ self._check_in_loop(node, 'break')
+
+ @check_messages('E0107')
+ def visit_unaryop(self, node):
+ """check use of the non-existent ++ adn -- operator operator"""
+ if ((node.op in '+-') and
+ isinstance(node.operand, astng.UnaryOp) and
+ (node.operand.op == node.op)):
+ self.add_message('E0107', node=node, args=node.op*2)
+
+ def _check_in_loop(self, node, node_name):
+ """check that a node is inside a for or while loop"""
+ _node = node.parent
+ while _node:
+ if isinstance(_node, (astng.For, astng.While)):
+ break
+ _node = _node.parent
+ else:
+ self.add_message('E0103', node=node, args=node_name)
+
+ def _check_redefinition(self, redeftype, node):
+ """check for redefinition of a function / method / class name"""
+ defined_self = node.parent.frame()[node.name]
+ if defined_self is not node and not are_exclusive(node, defined_self):
+ self.add_message('E0102', node=node,
+ args=(redeftype, defined_self.fromlineno))
+
+
+
+class BasicChecker(_BasicChecker):
+ """checks for :
+ * doc strings
+ * modules / classes / functions / methods / arguments / variables name
+ * number of arguments, local variables, branches, returns and statements in
+functions, methods
+ * required module attributes
+ * dangerous default values as arguments
+ * redefinition of function / method / class
+ * uses of the global statement
+ """
+
+ __implements__ = IASTNGChecker
+
+ name = 'basic'
+ msgs = {
+ 'W0101': ('Unreachable code',
+ 'unreachable',
+ 'Used when there is some code behind a "return" or "raise" \
+ statement, which will never be accessed.'),
+ 'W0102': ('Dangerous default value %s as argument',
+ 'dangerous-default-value',
+ 'Used when a mutable value as list or dictionary is detected in \
+ a default value for an argument.'),
+ 'W0104': ('Statement seems to have no effect',
+ 'pointless-statement',
+ 'Used when a statement doesn\'t have (or at least seems to) \
+ any effect.'),
+ 'W0105': ('String statement has no effect',
+ 'pointless-string-statement',
+ 'Used when a string is used as a statement (which of course \
+ has no effect). This is a particular case of W0104 with its \
+ own message so you can easily disable it if you\'re using \
+ those strings as documentation, instead of comments.'),
+ 'W0106': ('Expression "%s" is assigned to nothing',
+ 'expression-not-assigned',
+ 'Used when an expression that is not a function call is assigned\
+ to nothing. Probably something else was intended.'),
+ 'W0108': ('Lambda may not be necessary',
+ 'unnecessary-lambda',
+ 'Used when the body of a lambda expression is a function call \
+ on the same argument list as the lambda itself; such lambda \
+ expressions are in all but a few cases replaceable with the \
+ function being called in the body of the lambda.'),
+ 'W0109': ("Duplicate key %r in dictionary",
+ 'duplicate-key',
+ "Used when a dictionary expression binds the same key multiple \
+ times."),
+ 'W0122': ('Use of the exec statement',
+ 'exec-statement',
+ 'Used when you use the "exec" statement, to discourage its \
+ usage. That doesn\'t mean you can not use it !'),
+
+ 'W0141': ('Used builtin function %r',
+ 'bad-builtin',
+ 'Used when a black listed builtin function is used (see the '
+ 'bad-function option). Usual black listed functions are the ones '
+ 'like map, or filter , where Python offers now some cleaner '
+ 'alternative like list comprehension.'),
+ 'W0142': ('Used * or ** magic',
+ 'star-args',
+ 'Used when a function or method is called using `*args` or '
+ '`**kwargs` to dispatch arguments. This doesn\'t improve '
+ 'readability and should be used with care.'),
+ 'W0150': ("%s statement in finally block may swallow exception",
+ 'lost-exception',
+ "Used when a break or a return statement is found inside the \
+ finally clause of a try...finally block: the exceptions raised \
+ in the try clause will be silently swallowed instead of being \
+ re-raised."),
+ 'W0199': ('Assert called on a 2-uple. Did you mean \'assert x,y\'?',
+ 'assert-on-tuple',
+ 'A call of assert on a tuple will always evaluate to true if '
+ 'the tuple is not empty, and will always evaluate to false if '
+ 'it is.'),
+
+ 'C0121': ('Missing required attribute "%s"', # W0103
+ 'missing-module-attribute',
+ 'Used when an attribute required for modules is missing.'),
+
+ }
+
+ options = (('required-attributes',
+ {'default' : (), 'type' : 'csv',
+ 'metavar' : '<attributes>',
+ 'help' : 'Required attributes for module, separated by a '
+ 'comma'}
+ ),
+ ('bad-functions',
+ {'default' : ('map', 'filter', 'apply', 'input'),
+ 'type' :'csv', 'metavar' : '<builtin function names>',
+ 'help' : 'List of builtins function names that should not be '
+ 'used, separated by a comma'}
+ ),
+ )
+ reports = ( ('RP0101', 'Statistics by type', report_by_type_stats), )
+
+ def __init__(self, linter):
+ _BasicChecker.__init__(self, linter)
+ self.stats = None
+ self._tryfinallys = None
+
+ def open(self):
+ """initialize visit variables and statistics
+ """
+ self._tryfinallys = []
+ self.stats = self.linter.add_stats(module=0, function=0,
+ method=0, class_=0)
+
+ def visit_module(self, node):
+ """check module name, docstring and required arguments
+ """
+ self.stats['module'] += 1
+ for attr in self.config.required_attributes:
+ if attr not in node:
+ self.add_message('C0121', node=node, args=attr)
+
+ def visit_class(self, node):
+ """check module name, docstring and redefinition
+ increment branch counter
+ """
+ self.stats['class'] += 1
+
+ @check_messages('W0104', 'W0105')
+ def visit_discard(self, node):
+ """check for various kind of statements without effect"""
+ expr = node.value
+ if isinstance(expr, astng.Const) and isinstance(expr.value,
+ basestring):
+ # treat string statement in a separated message
+ self.add_message('W0105', node=node)
+ return
+ # ignore if this is :
+ # * a direct function call
+ # * the unique child of a try/except body
+ # * a yield (which are wrapped by a discard node in _ast XXX)
+ # warn W0106 if we have any underlying function call (we can't predict
+ # side effects), else W0104
+ if (isinstance(expr, (astng.Yield, astng.CallFunc)) or
+ (isinstance(node.parent, astng.TryExcept) and
+ node.parent.body == [node])):
+ return
+ if any(expr.nodes_of_class(astng.CallFunc)):
+ self.add_message('W0106', node=node, args=expr.as_string())
+ else:
+ self.add_message('W0104', node=node)
+
+ @check_messages('W0108')
+ def visit_lambda(self, node):
+ """check whether or not the lambda is suspicious
+ """
+ # if the body of the lambda is a call expression with the same
+ # argument list as the lambda itself, then the lambda is
+ # possibly unnecessary and at least suspicious.
+ if node.args.defaults:
+ # If the arguments of the lambda include defaults, then a
+ # judgment cannot be made because there is no way to check
+ # that the defaults defined by the lambda are the same as
+ # the defaults defined by the function called in the body
+ # of the lambda.
+ return
+ call = node.body
+ if not isinstance(call, astng.CallFunc):
+ # The body of the lambda must be a function call expression
+ # for the lambda to be unnecessary.
+ return
+ # XXX are lambda still different with astng >= 0.18 ?
+ # *args and **kwargs need to be treated specially, since they
+ # are structured differently between the lambda and the function
+ # call (in the lambda they appear in the args.args list and are
+ # indicated as * and ** by two bits in the lambda's flags, but
+ # in the function call they are omitted from the args list and
+ # are indicated by separate attributes on the function call node).
+ ordinary_args = list(node.args.args)
+ if node.args.kwarg:
+ if (not call.kwargs
+ or not isinstance(call.kwargs, astng.Name)
+ or node.args.kwarg != call.kwargs.name):
+ return
+ elif call.kwargs:
+ return
+ if node.args.vararg:
+ if (not call.starargs
+ or not isinstance(call.starargs, astng.Name)
+ or node.args.vararg != call.starargs.name):
+ return
+ elif call.starargs:
+ return
+ # The "ordinary" arguments must be in a correspondence such that:
+ # ordinary_args[i].name == call.args[i].name.
+ if len(ordinary_args) != len(call.args):
+ return
+ for i in xrange(len(ordinary_args)):
+ if not isinstance(call.args[i], astng.Name):
+ return
+ if node.args.args[i].name != call.args[i].name:
+ return
+ self.add_message('W0108', line=node.fromlineno, node=node)
+
+ def visit_function(self, node):
+ """check function name, docstring, arguments, redefinition,
+ variable names, max locals
+ """
+ self.stats[node.is_method() and 'method' or 'function'] += 1
+ # check for dangerous default values as arguments
+ for default in node.args.defaults:
+ try:
+ value = default.infer().next()
+ except astng.InferenceError:
+ continue
+ if isinstance(value, (astng.Dict, astng.List)):
+ if value is default:
+ msg = default.as_string()
+ else:
+ msg = '%s (%s)' % (default.as_string(), value.as_string())
+ self.add_message('W0102', node=node, args=(msg,))
+ if value.qname() == '__builtin__.set':
+ if isinstance(default, astng.CallFunc):
+ msg = default.as_string()
+ else:
+ msg = '%s (%s)' % (default.as_string(), value.qname())
+ self.add_message('W0102', node=node, args=(msg,))
+
+ @check_messages('W0101', 'W0150')
+ def visit_return(self, node):
+ """1 - check is the node has a right sibling (if so, that's some
+ unreachable code)
+ 2 - check is the node is inside the finally clause of a try...finally
+ block
+ """
+ self._check_unreachable(node)
+ # Is it inside final body of a try...finally bloc ?
+ self._check_not_in_finally(node, 'return', (astng.Function,))
+
+ @check_messages('W0101')
+ def visit_continue(self, node):
+ """check is the node has a right sibling (if so, that's some unreachable
+ code)
+ """
+ self._check_unreachable(node)
+
+ @check_messages('W0101', 'W0150')
+ def visit_break(self, node):
+ """1 - check is the node has a right sibling (if so, that's some
+ unreachable code)
+ 2 - check is the node is inside the finally clause of a try...finally
+ block
+ """
+ # 1 - Is it right sibling ?
+ self._check_unreachable(node)
+ # 2 - Is it inside final body of a try...finally bloc ?
+ self._check_not_in_finally(node, 'break', (astng.For, astng.While,))
+
+ @check_messages('W0101')
+ def visit_raise(self, node):
+ """check is the node has a right sibling (if so, that's some unreachable
+ code)
+ """
+ self._check_unreachable(node)
+
+ @check_messages('W0122')
+ def visit_exec(self, node):
+ """just print a warning on exec statements"""
+ self.add_message('W0122', node=node)
+
+ @check_messages('W0141', 'W0142')
+ def visit_callfunc(self, node):
+ """visit a CallFunc node -> check if this is not a blacklisted builtin
+ call and check for * or ** use
+ """
+ if isinstance(node.func, astng.Name):
+ name = node.func.name
+ # ignore the name if it's not a builtin (i.e. not defined in the
+ # locals nor globals scope)
+ if not (name in node.frame() or
+ name in node.root()):
+ if name in self.config.bad_functions:
+ self.add_message('W0141', node=node, args=name)
+ if node.starargs or node.kwargs:
+ scope = node.scope()
+ if isinstance(scope, astng.Function):
+ toprocess = [(n, vn) for (n, vn) in ((node.starargs, scope.args.vararg),
+ (node.kwargs, scope.args.kwarg)) if n]
+ if toprocess:
+ for cfnode, fargname in toprocess[:]:
+ if getattr(cfnode, 'name', None) == fargname:
+ toprocess.remove((cfnode, fargname))
+ if not toprocess:
+ return # W0142 can be skipped
+ self.add_message('W0142', node=node.func)
+
+ @check_messages('W0199')
+ def visit_assert(self, node):
+ """check the use of an assert statement on a tuple."""
+ if node.fail is None and isinstance(node.test, astng.Tuple) and \
+ len(node.test.elts) == 2:
+ self.add_message('W0199', line=node.fromlineno, node=node)
+
+ @check_messages('W0109')
+ def visit_dict(self, node):
+ """check duplicate key in dictionary"""
+ keys = set()
+ for k, _ in node.items:
+ if isinstance(k, astng.Const):
+ key = k.value
+ if key in keys:
+ self.add_message('W0109', node=node, args=key)
+ keys.add(key)
+
+ def visit_tryfinally(self, node):
+ """update try...finally flag"""
+ self._tryfinallys.append(node)
+
+ def leave_tryfinally(self, node):
+ """update try...finally flag"""
+ self._tryfinallys.pop()
+
+ def _check_unreachable(self, node):
+ """check unreachable code"""
+ unreach_stmt = node.next_sibling()
+ if unreach_stmt is not None:
+ self.add_message('W0101', node=unreach_stmt)
+
+ def _check_not_in_finally(self, node, node_name, breaker_classes=()):
+ """check that a node is not inside a finally clause of a
+ try...finally statement.
+ If we found before a try...finally bloc a parent which its type is
+ in breaker_classes, we skip the whole check."""
+ # if self._tryfinallys is empty, we're not a in try...finally bloc
+ if not self._tryfinallys:
+ return
+ # the node could be a grand-grand...-children of the try...finally
+ _parent = node.parent
+ _node = node
+ while _parent and not isinstance(_parent, breaker_classes):
+ if hasattr(_parent, 'finalbody') and _node in _parent.finalbody:
+ self.add_message('W0150', node=node, args=node_name)
+ return
+ _node = _parent
+ _parent = _node.parent
+
+
+
+class NameChecker(_BasicChecker):
+ msgs = {
+ 'C0102': ('Black listed name "%s"',
+ 'blacklisted-name',
+ 'Used when the name is listed in the black list (unauthorized \
+ names).'),
+ 'C0103': ('Invalid name "%s" for type %s (should match %s)',
+ 'invalid-name',
+ 'Used when the name doesn\'t match the regular expression \
+ associated to its type (constant, variable, class...).'),
+
+ }
+ options = (('module-rgx',
+ {'default' : MOD_NAME_RGX,
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Regular expression which should only match correct '
+ 'module names'}
+ ),
+ ('const-rgx',
+ {'default' : CONST_NAME_RGX,
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Regular expression which should only match correct '
+ 'module level names'}
+ ),
+ ('class-rgx',
+ {'default' : CLASS_NAME_RGX,
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Regular expression which should only match correct '
+ 'class names'}
+ ),
+ ('function-rgx',
+ {'default' : DEFAULT_NAME_RGX,
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Regular expression which should only match correct '
+ 'function names'}
+ ),
+ ('method-rgx',
+ {'default' : DEFAULT_NAME_RGX,
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Regular expression which should only match correct '
+ 'method names'}
+ ),
+ ('attr-rgx',
+ {'default' : DEFAULT_NAME_RGX,
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Regular expression which should only match correct '
+ 'instance attribute names'}
+ ),
+ ('argument-rgx',
+ {'default' : DEFAULT_NAME_RGX,
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Regular expression which should only match correct '
+ 'argument names'}),
+ ('variable-rgx',
+ {'default' : DEFAULT_NAME_RGX,
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Regular expression which should only match correct '
+ 'variable names'}
+ ),
+ ('inlinevar-rgx',
+ {'default' : COMP_VAR_RGX,
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Regular expression which should only match correct '
+ 'list comprehension / generator expression variable \
+ names'}
+ ),
+ # XXX use set
+ ('good-names',
+ {'default' : ('i', 'j', 'k', 'ex', 'Run', '_'),
+ 'type' :'csv', 'metavar' : '<names>',
+ 'help' : 'Good variable names which should always be accepted,'
+ ' separated by a comma'}
+ ),
+ ('bad-names',
+ {'default' : ('foo', 'bar', 'baz', 'toto', 'tutu', 'tata'),
+ 'type' :'csv', 'metavar' : '<names>',
+ 'help' : 'Bad variable names which should always be refused, '
+ 'separated by a comma'}
+ ),
+ )
+
+ def open(self):
+ self.stats = self.linter.add_stats(badname_module=0,
+ badname_class=0, badname_function=0,
+ badname_method=0, badname_attr=0,
+ badname_const=0,
+ badname_variable=0,
+ badname_inlinevar=0,
+ badname_argument=0)
+
+ @check_messages('C0102', 'C0103')
+ def visit_module(self, node):
+ self._check_name('module', node.name.split('.')[-1], node)
+
+ @check_messages('C0102', 'C0103')
+ def visit_class(self, node):
+ self._check_name('class', node.name, node)
+ for attr, anodes in node.instance_attrs.iteritems():
+ self._check_name('attr', attr, anodes[0])
+
+ @check_messages('C0102', 'C0103')
+ def visit_function(self, node):
+ self._check_name(node.is_method() and 'method' or 'function',
+ node.name, node)
+ # check arguments name
+ args = node.args.args
+ if args is not None:
+ self._recursive_check_names(args, node)
+
+ @check_messages('C0102', 'C0103')
+ def visit_assname(self, node):
+ """check module level assigned names"""
+ frame = node.frame()
+ ass_type = node.ass_type()
+ if isinstance(ass_type, (astng.Comprehension, astng.Comprehension)):
+ self._check_name('inlinevar', node.name, node)
+ elif isinstance(frame, astng.Module):
+ if isinstance(ass_type, astng.Assign) and not in_loop(ass_type):
+ self._check_name('const', node.name, node)
+ elif isinstance(ass_type, astng.ExceptHandler):
+ self._check_name('variable', node.name, node)
+ elif isinstance(frame, astng.Function):
+ # global introduced variable aren't in the function locals
+ if node.name in frame:
+ self._check_name('variable', node.name, node)
+
+ def _recursive_check_names(self, args, node):
+ """check names in a possibly recursive list <arg>"""
+ for arg in args:
+ if isinstance(arg, astng.AssName):
+ self._check_name('argument', arg.name, node)
+ else:
+ self._recursive_check_names(arg.elts, node)
+
+ def _check_name(self, node_type, name, node):
+ """check for a name using the type's regexp"""
+ if is_inside_except(node):
+ clobbering, _ = clobber_in_except(node)
+ if clobbering:
+ return
+ if name in self.config.good_names:
+ return
+ if name in self.config.bad_names:
+ self.stats['badname_' + node_type] += 1
+ self.add_message('C0102', node=node, args=name)
+ return
+ regexp = getattr(self.config, node_type + '_rgx')
+ if regexp.match(name) is None:
+ type_label = {'inlinedvar': 'inlined variable',
+ 'const': 'constant',
+ 'attr': 'attribute',
+ }.get(node_type, node_type)
+ self.add_message('C0103', node=node, args=(name, type_label, regexp.pattern))
+ self.stats['badname_' + node_type] += 1
+
+
+class DocStringChecker(_BasicChecker):
+ msgs = {
+ 'C0111': ('Missing docstring', # W0131
+ 'missing-docstring',
+ 'Used when a module, function, class or method has no docstring.\
+ Some special methods like __init__ doesn\'t necessary require a \
+ docstring.'),
+ 'C0112': ('Empty docstring', # W0132
+ 'empty-docstring',
+ 'Used when a module, function, class or method has an empty \
+ docstring (it would be too easy ;).'),
+ }
+ options = (('no-docstring-rgx',
+ {'default' : NO_REQUIRED_DOC_RGX,
+ 'type' : 'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Regular expression which should only match '
+ 'functions or classes name which do not require a '
+ 'docstring'}
+ ),
+ )
+
+ def open(self):
+ self.stats = self.linter.add_stats(undocumented_module=0,
+ undocumented_function=0,
+ undocumented_method=0,
+ undocumented_class=0)
+
+ def visit_module(self, node):
+ self._check_docstring('module', node)
+
+ def visit_class(self, node):
+ if self.config.no_docstring_rgx.match(node.name) is None:
+ self._check_docstring('class', node)
+
+ def visit_function(self, node):
+ if self.config.no_docstring_rgx.match(node.name) is None:
+ ftype = node.is_method() and 'method' or 'function'
+ if isinstance(node.parent.frame(), astng.Class):
+ overridden = False
+ # check if node is from a method overridden by its ancestor
+ for ancestor in node.parent.frame().ancestors():
+ if node.name in ancestor and \
+ isinstance(ancestor[node.name], astng.Function):
+ overridden = True
+ break
+ if not overridden:
+ self._check_docstring(ftype, node)
+ else:
+ self._check_docstring(ftype, node)
+
+ def _check_docstring(self, node_type, node):
+ """check the node has a non empty docstring"""
+ docstring = node.doc
+ if docstring is None:
+ self.stats['undocumented_'+node_type] += 1
+ self.add_message('C0111', node=node)
+ elif not docstring.strip():
+ self.stats['undocumented_'+node_type] += 1
+ self.add_message('C0112', node=node)
+
+
+class PassChecker(_BasicChecker):
+ """check is the pass statement is really necessary"""
+ msgs = {'W0107': ('Unnecessary pass statement',
+ 'unnecessary-pass',
+ 'Used when a "pass" statement that can be avoided is '
+ 'encountered.'),
+ }
+
+ def visit_pass(self, node):
+ if len(node.parent.child_sequence(node)) > 1:
+ self.add_message('W0107', node=node)
+
+
+class LambdaForComprehensionChecker(_BasicChecker):
+ """check for using a lambda where a comprehension would do.
+
+ See <http://www.artima.com/weblogs/viewpost.jsp?thread=98196>
+ where GvR says comprehensions would be clearer.
+ """
+
+ msgs = {'W0110': ('map/filter on lambda could be replaced by comprehension',
+ 'deprecated-lambda',
+ 'Used when a lambda is the first argument to "map" or '
+ '"filter". It could be clearer as a list '
+ 'comprehension or generator expression.'),
+ }
+
+ @check_messages('W0110')
+ def visit_callfunc(self, node):
+ """visit a CallFunc node, check if map or filter are called with a
+ lambda
+ """
+ if not node.args:
+ return
+ if not isinstance(node.args[0], astng.Lambda):
+ return
+ infered = safe_infer(node.func)
+ if (infered
+ and infered.parent.name == '__builtin__'
+ and infered.name in ['map', 'filter']):
+ self.add_message('W0110', node=node)
+
+
+def register(linter):
+ """required method to auto register this checker"""
+ linter.register_checker(BasicErrorChecker(linter))
+ linter.register_checker(BasicChecker(linter))
+ linter.register_checker(NameChecker(linter))
+ linter.register_checker(DocStringChecker(linter))
+ linter.register_checker(PassChecker(linter))
+ linter.register_checker(LambdaForComprehensionChecker(linter))
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/classes.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/classes.py
@@ -0,0 +1,662 @@
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""classes checker for Python code
+"""
+from __future__ import generators
+
+from ..logilab import astng
+from ..logilab.astng import YES, Instance, are_exclusive, AssAttr
+
+from ..interfaces import IASTNGChecker
+from ..checkers import BaseChecker
+from ..checkers.utils import (PYMETHODS, overrides_a_method,
+ check_messages, is_attr_private, is_attr_protected, node_frame_class)
+
def class_is_abstract(node):
    """return true if the given class node should be considered as an abstract
    class
    """
    # A class is abstract when one of the methods defined in its own body
    # (not inherited ones) is abstract.
    return any(method.is_abstract(pass_is_abstract=False)
               for method in node.methods()
               if method.parent.frame() is node)
+
+
# Message table installed as ClassChecker.msgs.  Ids group roughly by theme:
# method signatures (E021x/C020x/W021x/W022x), attribute definition and
# access (E0202/E0203/W0201/W0212), interface implementation (E022x/F0220)
# and __init__ chaining (W023x).
MSGS = {
    'F0202': ('Unable to check methods signature (%s / %s)',
              'method-check-failed',
              'Used when PyLint has been unable to check methods signature \
              compatibility for an unexpected reason. Please report this kind \
              if you don\'t make sense of it.'),

    'E0202': ('An attribute affected in %s line %s hide this method',
              'method-hidden',
              'Used when a class defines a method which is hidden by an '
              'instance attribute from an ancestor class or set by some '
              'client code.'),
    'E0203': ('Access to member %r before its definition line %s',
              'access-member-before-definition',
              'Used when an instance member is accessed before it\'s actually\
              assigned.'),
    'W0201': ('Attribute %r defined outside __init__',
              'attribute-defined-outside-init',
              'Used when an instance attribute is defined outside the __init__\
              method.'),

    'W0212': ('Access to a protected member %s of a client class', # E0214
              'protected-access',
              'Used when a protected member (i.e. class member with a name \
              beginning with an underscore) is access outside the class or a \
              descendant of the class where it\'s defined.'),

    'E0211': ('Method has no argument',
              'no-method-argument',
              'Used when a method which should have the bound instance as \
              first argument has no argument defined.'),
    'E0213': ('Method should have "self" as first argument',
              'no-self-argument',
              'Used when a method has an attribute different the "self" as\
              first argument. This is considered as an error since this is\
              a so common convention that you shouldn\'t break it!'),
    'C0202': ('Class method %s should have %s as first argument', # E0212
              'bad-classmethod-argument',
              'Used when a class method has a first argument named differently '
              'than the value specified in valid-classmethod-first-arg option '
              '(default to "cls"), recommended to easily differentiate them '
              'from regular instance methods.'),
    'C0203': ('Metaclass method %s should have %s as first argument', # E0214
              'bad-mcs-method-argument',
              'Used when a metaclass method has a first agument named '
              'differently than the value specified in valid-classmethod-first'
              '-arg option (default to "cls"), recommended to easily '
              'differentiate them from regular instance methods.'),
    'C0204': ('Metaclass class method %s should have %s as first argument',
              'bad-mcs-classmethod-argument',
              'Used when a metaclass class method has a first argument named '
              'differently than the value specified in valid-metaclass-'
              'classmethod-first-arg option (default to "mcs"), recommended to '
              'easily differentiate them from regular instance methods.'),

    'W0211': ('Static method with %r as first argument',
              'bad-staticmethod-argument',
              'Used when a static method has "self" or a value specified in '
              'valid-classmethod-first-arg option or '
              'valid-metaclass-classmethod-first-arg option as first argument.'
              ),
    'R0201': ('Method could be a function',
              'no-self-use',
              'Used when a method doesn\'t use its bound instance, and so could\
              be written as a function.'
              ),

    'E0221': ('Interface resolved to %s is not a class',
              'interface-is-not-class',
              'Used when a class claims to implement an interface which is not \
              a class.'),
    'E0222': ('Missing method %r from %s interface',
              'missing-interface-method',
              'Used when a method declared in an interface is missing from a \
              class implementing this interface'),
    'W0221': ('Arguments number differs from %s method',
              'arguments-differ',
              'Used when a method has a different number of arguments than in \
              the implemented interface or in an overridden method.'),
    'W0222': ('Signature differs from %s method',
              'signature-differs',
              'Used when a method signature is different than in the \
              implemented interface or in an overridden method.'),
    'W0223': ('Method %r is abstract in class %r but is not overridden',
              'abstract-method',
              'Used when an abstract method (i.e. raise NotImplementedError) is \
              not overridden in concrete class.'
              ),
    'F0220': ('failed to resolve interfaces implemented by %s (%s)', # W0224
              'unresolved-interface',
              'Used when a PyLint as failed to find interfaces implemented by \
              a class'),


    'W0231': ('__init__ method from base class %r is not called',
              'super-init-not-called',
              'Used when an ancestor class method has an __init__ method \
              which is not called by a derived class.'),
    'W0232': ('Class has no __init__ method',
              'no-init',
              'Used when a class has no __init__ method, neither its parent \
              classes.'),
    'W0233': ('__init__ method from a non direct base class %r is called',
              'non-parent-init-called',
              'Used when an __init__ method is called on a class which is not \
              in the direct ancestors for the analysed class.'),

    }
+
+
class ClassChecker(BaseChecker):
    """checks for :
    * methods without self as first argument
    * overridden methods signature
    * access only to existent members via self
    * attributes not defined in the __init__ method
    * supported interfaces implementation
    * unreachable code
    """

    __implements__ = (IASTNGChecker,)

    # configuration section name
    name = 'classes'
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = (('ignore-iface-methods',
                {'default' : (#zope interface
        'isImplementedBy', 'deferred', 'extends', 'names',
        'namesAndDescriptions', 'queryDescriptionFor', 'getBases',
        'getDescriptionFor', 'getDoc', 'getName', 'getTaggedValue',
        'getTaggedValueTags', 'isEqualOrExtendedBy', 'setTaggedValue',
        'isImplementedByInstancesOf',
        # twisted
        'adaptWith',
        # logilab.common interface
        'is_implemented_by'),
                 'type' : 'csv',
                 'metavar' : '<method names>',
                 'help' : 'List of interface methods to ignore, \
separated by a comma. This is used for instance to not check methods defines \
in Zope\'s Interface base class.'}
                ),

               ('defining-attr-methods',
                {'default' : ('__init__', '__new__', 'setUp'),
                 'type' : 'csv',
                 'metavar' : '<method names>',
                 'help' : 'List of method names used to declare (i.e. assign) \
instance attributes.'}
                ),
               ('valid-classmethod-first-arg',
                {'default' : ('cls',),
                 'type' : 'csv',
                 'metavar' : '<argument names>',
                 'help' : 'List of valid names for the first argument in \
a class method.'}
                ),
               ('valid-metaclass-classmethod-first-arg',
                {'default' : ('mcs',),
                 'type' : 'csv',
                 'metavar' : '<argument names>',
                 'help' : 'List of valid names for the first argument in \
a metaclass class method.'}
                ),

               )

    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # stack of {attribute name: [access nodes]} dicts, one entry per
        # class scope currently being visited (pushed in visit_class,
        # popped in leave_class)
        self._accessed = []
        # stack of first-argument names of enclosing methods; an entry is
        # reset to None once a staticmethod's first argument was checked
        self._first_attrs = []
        self._meth_could_be_func = None

    def visit_class(self, node):
        """init visit variable _accessed and check interfaces
        """
        self._accessed.append({})
        self._check_bases_classes(node)
        self._check_interfaces(node)
        # if not an interface, exception, metaclass
        if node.type == 'class':
            try:
                node.local_attr('__init__')
            except astng.NotFoundError:
                self.add_message('W0232', args=node, node=node)

    @check_messages('E0203', 'W0201')
    def leave_class(self, cnode):
        """close a class node:
        check that instance attributes are defined in __init__ and check
        access to existent members
        """
        # check access to existent members on non metaclass classes
        accessed = self._accessed.pop()
        if cnode.type != 'metaclass':
            self._check_accessed_members(cnode, accessed)
        # checks attributes are defined in an allowed method such as __init__
        if 'W0201' not in self.active_msgs:
            return
        defining_methods = self.config.defining_attr_methods
        for attr, nodes in cnode.instance_attrs.iteritems():
            # skip Delete/AugAssign statements: they do not *define* the
            # attribute (an AugAssign on an undefined attr is a type error)
            nodes = [n for n in nodes if not
                     isinstance(n.statement(), (astng.Delete, astng.AugAssign))]
            if not nodes:
                continue # error detected by typechecking
            attr_defined = False
            # check if any method attr is defined in is a defining method
            for node in nodes:
                if node.frame().name in defining_methods:
                    attr_defined = True
            if not attr_defined:
                # check attribute is defined in a parent's __init__
                for parent in cnode.instance_attr_ancestors(attr):
                    attr_defined = False
                    # check if any parent method attr is defined in is a defining method
                    for node in parent.instance_attrs[attr]:
                        if node.frame().name in defining_methods:
                            attr_defined = True
                    if attr_defined:
                        # we're done :)
                        break
                else:
                    # check attribute is defined as a class attribute
                    try:
                        cnode.local_attr(attr)
                    except astng.NotFoundError:
                        self.add_message('W0201', args=attr, node=node)

    def visit_function(self, node):
        """check method arguments, overriding"""
        # ignore actual functions
        if not node.is_method():
            return
        klass = node.parent.frame()
        self._meth_could_be_func = True
        # check first argument is self if this is actually a method
        self._check_first_arg_for_type(node, klass.type == 'metaclass')
        if node.name == '__init__':
            self._check_init(node)
            return
        # check signature if the method overloads inherited method
        for overridden in klass.local_attr_ancestors(node.name):
            # get astng for the searched method
            try:
                meth_node = overridden[node.name]
            except KeyError:
                # we have found the method but it's not in the local
                # dictionary.
                # This may happen with astng build from living objects
                continue
            if not isinstance(meth_node, astng.Function):
                continue
            self._check_signature(node, meth_node, 'overridden')
            break
        if node.decorators:
            for decorator in node.decorators.nodes:
                if isinstance(decorator, astng.Getattr) and \
                        decorator.attrname in ('getter', 'setter', 'deleter'):
                    # attribute affectation will call this method, not hiding it
                    return
                if isinstance(decorator, astng.Name) and decorator.name == 'property':
                    # attribute affectation will either call a setter or raise
                    # an attribute error, anyway not hiding the function
                    return
        # check if the method is hidden by an attribute
        try:
            overridden = klass.instance_attr(node.name)[0] # XXX
            args = (overridden.root().name, overridden.fromlineno)
            self.add_message('E0202', args=args, node=node)
        except astng.NotFoundError:
            pass

    def leave_function(self, node):
        """on method node, check if this method couldn't be a function

        ignore class, static and abstract methods, initializer,
        methods overridden from a parent class and any
        kind of method defined in an interface for this warning
        """
        if node.is_method():
            if node.args.args is not None:
                # pop the first-arg name pushed by _check_first_arg_for_type
                self._first_attrs.pop()
            if 'R0201' not in self.active_msgs:
                return
            class_node = node.parent.frame()
            if (self._meth_could_be_func and node.type == 'method'
                and not node.name in PYMETHODS
                and not (node.is_abstract() or
                         overrides_a_method(class_node, node.name))
                and class_node.type != 'interface'):
                self.add_message('R0201', node=node)

    def visit_getattr(self, node):
        """check if the getattr is an access to a class member
        if so, register it. Also check for access to protected
        class member from outside its class (but ignore __special__
        methods)
        """
        attrname = node.attrname
        # Check self
        if self.is_first_attr(node):
            self._accessed[-1].setdefault(attrname, []).append(node)
            return
        if 'W0212' not in self.active_msgs:
            return

        # not an access through self/cls: may be a protected-member violation
        self._check_protected_attribute_access(node)

    def visit_assign(self, assign_node):
        if 'W0212' not in self.active_msgs:
            return

        # only the first assignment target is inspected here
        node = assign_node.targets[0]
        if not isinstance(node, AssAttr):
            return

        if self.is_first_attr(node):
            return

        self._check_protected_attribute_access(node)

    def _check_protected_attribute_access(self, node):
        '''Given an attribute access node (set or get), check if attribute
        access is legitimate. Call _check_first_attr with node before calling
        this method. Valid cases are:
        * self._attr in a method or cls._attr in a classmethod. Checked by
        _check_first_attr.
        * Klass._attr inside "Klass" class.
        * Klass2._attr inside "Klass" class when Klass2 is a base class of
        Klass.
        '''
        attrname = node.attrname

        if is_attr_protected(attrname):

            klass = node_frame_class(node)

            # XXX infer to be more safe and less dirty ??
            # in classes, check we are not getting a parent method
            # through the class object or through super
            callee = node.expr.as_string()

            # We are not in a class, no remaining valid case
            if klass is None:
                self.add_message('W0212', node=node, args=attrname)
                return

            # If the expression begins with a call to super, that's ok.
            if isinstance(node.expr, astng.CallFunc) and \
               isinstance(node.expr.func, astng.Name) and \
               node.expr.func.name == 'super':
                return

            # We are in a class, one remaining valid cases, Klass._attr inside
            # Klass
            if not (callee == klass.name or callee in klass.basenames):
                self.add_message('W0212', node=node, args=attrname)

    def visit_name(self, node):
        """check if the name handle an access to a class member
        if so, register it
        """
        # the name is the method's bound first argument (e.g. self), so the
        # method does use its instance: R0201 (no-self-use) no longer applies
        if self._first_attrs and (node.name == self._first_attrs[-1] or
                                  not self._first_attrs[-1]):
            self._meth_could_be_func = False

    def _check_accessed_members(self, node, accessed):
        """check that accessed members are defined"""
        # XXX refactor, probably much simpler now that E0201 is in type checker
        for attr, nodes in accessed.iteritems():
            # deactivate "except doesn't do anything", that's expected
            # pylint: disable=W0704
            # is it a class attribute ?
            try:
                node.local_attr(attr)
                # yes, stop here
                continue
            except astng.NotFoundError:
                pass
            # is it an instance attribute of a parent class ?
            try:
                node.instance_attr_ancestors(attr).next()
                # yes, stop here
                continue
            except StopIteration:
                pass
            # is it an instance attribute ?
            try:
                defstmts = node.instance_attr(attr)
            except astng.NotFoundError:
                pass
            else:
                if len(defstmts) == 1:
                    defstmt = defstmts[0]
                    # check that if the node is accessed in the same method as
                    # it's defined, it's accessed after the initial assignment
                    frame = defstmt.frame()
                    lno = defstmt.fromlineno
                    for _node in nodes:
                        if _node.frame() is frame and _node.fromlineno < lno \
                           and not are_exclusive(_node.statement(), defstmt, ('AttributeError', 'Exception', 'BaseException')):
                            self.add_message('E0203', node=_node,
                                             args=(attr, lno))

    def _check_first_arg_for_type(self, node, metaclass=0):
        """check the name of first argument, expect:

        * 'self' for a regular method
        * 'cls' for a class method or a metaclass regular method (actually
          valid-classmethod-first-arg value)
        * 'mcs' for a metaclass class method (actually
          valid-metaclass-classmethod-first-arg)
        * not one of the above for a static method
        """
        # don't care about functions with unknown argument (builtins)
        if node.args.args is None:
            return
        first_arg = node.args.args and node.argnames()[0]
        self._first_attrs.append(first_arg)
        first = self._first_attrs[-1]
        # static method
        if node.type == 'staticmethod':
            if (first_arg == 'self' or
                first_arg in self.config.valid_classmethod_first_arg or
                first_arg in self.config.valid_metaclass_classmethod_first_arg):
                self.add_message('W0211', args=first, node=node)
                return
            self._first_attrs[-1] = None
        # class / regular method with no args
        elif not node.args.args:
            self.add_message('E0211', node=node)
        # metaclass
        elif metaclass:
            # metaclass __new__ or classmethod
            if node.type == 'classmethod':
                self._check_first_arg_config(first,
                    self.config.valid_metaclass_classmethod_first_arg, node,
                    'C0204', node.name)
            # metaclass regular method
            else:
                self._check_first_arg_config(first,
                    self.config.valid_classmethod_first_arg, node, 'C0203',
                    node.name)
        # regular class
        else:
            # class method
            if node.type == 'classmethod':
                self._check_first_arg_config(first,
                    self.config.valid_classmethod_first_arg, node, 'C0202',
                    node.name)
            # regular method without self as argument
            elif first != 'self':
                self.add_message('E0213', node=node)

    def _check_first_arg_config(self, first, config, node, message,
                                method_name):
        # emit `message` when `first` is not one of the configured valid
        # names, listing the accepted names in the message arguments
        if first not in config:
            if len(config) == 1:
                valid = repr(config[0])
            else:
                valid = ', '.join(
                  repr(v)
                  for v in config[:-1])
                valid = '%s or %r' % (
                    valid, config[-1])
            self.add_message(message, args=(method_name, valid), node=node)

    def _check_bases_classes(self, node):
        """check that the given class node implements abstract methods from
        base classes
        """
        # check if this class abstract
        if class_is_abstract(node):
            return
        for method in node.methods():
            owner = method.parent.frame()
            if owner is node:
                continue
            # owner is not this class, it must be a parent class
            # check that the ancestor's method is not abstract
            if method.is_abstract(pass_is_abstract=False):
                self.add_message('W0223', node=node,
                                 args=(method.name, owner.name))

    def _check_interfaces(self, node):
        """check that the given class node really implements declared
        interfaces
        """
        e0221_hack = [False]
        def iface_handler(obj):
            """filter interface objects, it should be classes"""
            if not isinstance(obj, astng.Class):
                e0221_hack[0] = True
                self.add_message('E0221', node=node,
                                 args=(obj.as_string(),))
                return False
            return True
        ignore_iface_methods = self.config.ignore_iface_methods
        try:
            for iface in node.interfaces(handler_func=iface_handler):
                for imethod in iface.methods():
                    name = imethod.name
                    if name.startswith('_') or name in ignore_iface_methods:
                        # don't check method beginning with an underscore,
                        # usually belonging to the interface implementation
                        continue
                    # get class method astng
                    try:
                        method = node_method(node, name)
                    except astng.NotFoundError:
                        self.add_message('E0222', args=(name, iface.name),
                                         node=node)
                        continue
                    # ignore inherited methods
                    if method.parent.frame() is not node:
                        continue
                    # check signature
                    self._check_signature(method, imethod,
                                         '%s interface' % iface.name)
        except astng.InferenceError:
            if e0221_hack[0]:
                return
            implements = Instance(node).getattr('__implements__')[0]
            assignment = implements.parent
            assert isinstance(assignment, astng.Assign)
            # assignment.expr can be a Name or a Tuple or whatever.
            # Use as_string() for the message
            # FIXME: in case of multiple interfaces, find which one could not
            # be resolved
            self.add_message('F0220', node=implements,
                             args=(node.name, assignment.value.as_string()))

    def _check_init(self, node):
        """check that the __init__ method call super or ancestors'__init__
        method
        """
        if not set(('W0231', 'W0233')) & self.active_msgs:
            return
        klass_node = node.parent.frame()
        to_call = _ancestors_to_call(klass_node)
        not_called_yet = dict(to_call)
        for stmt in node.nodes_of_class(astng.CallFunc):
            expr = stmt.func
            if not isinstance(expr, astng.Getattr) \
                   or expr.attrname != '__init__':
                continue
            # skip the test if using super
            if isinstance(expr.expr, astng.CallFunc) and \
                   isinstance(expr.expr.func, astng.Name) and \
               expr.expr.func.name == 'super':
                return
            try:
                klass = expr.expr.infer().next()
                if klass is YES:
                    continue
                try:
                    del not_called_yet[klass]
                except KeyError:
                    if klass not in to_call:
                        self.add_message('W0233', node=expr, args=klass.name)
            except astng.InferenceError:
                continue
        for klass, method in not_called_yet.iteritems():
            if klass.name == 'object' or method.parent.name == 'object':
                continue
            self.add_message('W0231', args=klass.name, node=node)

    def _check_signature(self, method1, refmethod, class_type):
        """check that the signature of the two given methods match

        class_type is in 'class', 'interface'
        """
        if not (isinstance(method1, astng.Function)
                and isinstance(refmethod, astng.Function)):
            self.add_message('F0202', args=(method1, refmethod), node=method1)
            return
        # don't care about functions with unknown argument (builtins)
        if method1.args.args is None or refmethod.args.args is None:
            return
        # if we use *args, **kwargs, skip the below checks
        if method1.args.vararg or method1.args.kwarg:
            return
        if is_attr_private(method1.name):
            return
        if len(method1.args.args) != len(refmethod.args.args):
            self.add_message('W0221', args=class_type, node=method1)
        elif len(method1.args.defaults) < len(refmethod.args.defaults):
            self.add_message('W0222', args=class_type, node=method1)

    def is_first_attr(self, node):
        """Check that attribute lookup name use first attribute variable name
        (self for method, cls for classmethod and mcs for metaclass).
        """
        return self._first_attrs and isinstance(node.expr, astng.Name) and \
                   node.expr.name == self._first_attrs[-1]
+
+def _ancestors_to_call(klass_node, method='__init__'):
+ """return a dictionary where keys are the list of base classes providing
+ the queried method, and so that should/may be called from the method node
+ """
+ to_call = {}
+ for base_node in klass_node.ancestors(recurs=False):
+ try:
+ to_call[base_node] = base_node.igetattr(method).next()
+ except astng.InferenceError:
+ continue
+ return to_call
+
+
def node_method(node, method_name):
    """get astng for <method_name> on the given class node, ensuring it
    is a Function node
    """
    # the class may hold non-function members under this name (e.g. an
    # attribute assignment); return only an actual Function definition
    for candidate in node.local_attr(method_name):
        if isinstance(candidate, astng.Function):
            return candidate
    raise astng.NotFoundError(method_name)
+
def register(linter):
    """required method to auto register this checker """
    checker = ClassChecker(linter)
    linter.register_checker(checker)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/design_analysis.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/design_analysis.py
@@ -0,0 +1,407 @@
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""check for signs of poor design
+
+
+ see http://intranet.logilab.fr/jpl/view?rql=Any%20X%20where%20X%20eid%201243
+ FIXME: missing 13, 15, 16
+"""
+
+from ..logilab.astng import Function, If, InferenceError
+
+from ..interfaces import IASTNGChecker
+from ..checkers import BaseChecker
+
+import re
+
+# regexp for ignored argument name
IGNORED_ARGUMENT_NAMES = re.compile('_.*')

# (protocol name, set of required __dunder__ methods) pairs consumed by
# SpecialMethodChecker: implementing only part of a set triggers R0924
SPECIAL_METHODS = [('Context manager', set(('__enter__',
                                            '__exit__',))),
                   ('Container', set(('__len__',
                                      '__getitem__',
                                      '__setitem__',
                                      '__delitem__',))),
                   ('Callable', set(('__call__',))),
                   ]
+
class SpecialMethodChecker(object):
    """A functor that checks for consistency of a set of special methods"""
    def __init__(self, methods_found, on_error):
        """Stores the set of __x__ method names that were found in the
        class and a callable that will be called with args to R0924 if
        the check fails
        """
        self.methods_found = methods_found
        self.on_error = on_error

    def __call__(self, methods_required, protocol):
        """Checks the set of method names given to __init__ against the set
        required.

        If they are all present, returns true.
        If they are all absent, returns false.
        If some are present, reports the error and returns false.
        """
        present = methods_required & self.methods_found
        if present == methods_required:
            # protocol fully implemented
            return True
        if present:
            # partial implementation: report which methods are missing
            missing = methods_required - self.methods_found
            self.on_error((protocol,
                           ', '.join(sorted(present)),
                           ', '.join(sorted(missing))))
        return False
+
+
def class_is_abstract(klass):
    """return true if the given class node should be considered as an abstract
    class
    """
    # abstract as soon as any function member of the class is abstract
    return any(attr.is_abstract(pass_is_abstract=False)
               for attr in klass.values()
               if isinstance(attr, Function))
+
+
# Message table installed as MisdesignChecker.msgs: R090x flag size problems
# on classes, R091x flag size/complexity problems on functions or methods,
# R092x flag abstract-class / interface usage and incomplete special-method
# protocols.
MSGS = {
    'R0901': ('Too many ancestors (%s/%s)',
              'too-many-ancestors',
              'Used when class has too many parent classes, try to reduce \
              this to get a more simple (and so easier to use) class.'),
    'R0902': ('Too many instance attributes (%s/%s)',
              'too-many-instance-attributes',
              'Used when class has too many instance attributes, try to reduce \
              this to get a more simple (and so easier to use) class.'),
    'R0903': ('Too few public methods (%s/%s)',
              'too-few-public-methods',
              'Used when class has too few public methods, so be sure it\'s \
              really worth it.'),
    'R0904': ('Too many public methods (%s/%s)',
              'too-many-public-methods',
              'Used when class has too many public methods, try to reduce \
              this to get a more simple (and so easier to use) class.'),

    'R0911': ('Too many return statements (%s/%s)',
              'too-many-return-statements',
              'Used when a function or method has too many return statement, \
              making it hard to follow.'),
    'R0912': ('Too many branches (%s/%s)',
              'too-many-branches',
              'Used when a function or method has too many branches, \
              making it hard to follow.'),
    'R0913': ('Too many arguments (%s/%s)',
              'too-many-arguments',
              'Used when a function or method takes too many arguments.'),
    'R0914': ('Too many local variables (%s/%s)',
              'too-many-locals',
              'Used when a function or method has too many local variables.'),
    'R0915': ('Too many statements (%s/%s)',
              'too-many-statements',
              'Used when a function or method has too many statements. You \
              should then split it in smaller functions / methods.'),

    'R0921': ('Abstract class not referenced',
              'abstract-class-not-used',
              'Used when an abstract class is not used as ancestor anywhere.'),
    'R0922': ('Abstract class is only referenced %s times',
              'abstract-class-little-used',
              'Used when an abstract class is used less than X times as \
              ancestor.'),
    'R0923': ('Interface not implemented',
              'interface-not-implemented',
              'Used when an interface class is not implemented anywhere.'),
    'R0924': ('Badly implemented %s, implements %s but not %s',
              'incomplete-protocol',
              'A class implements some of the special methods for a particular \
              protocol, but not all of them')
    }
+
+
+class MisdesignChecker(BaseChecker):
+ """checks for sign of poor/misdesign:
+ * number of methods, attributes, local variables...
+ * size, complexity of functions, methods
+ """
+
+ __implements__ = (IASTNGChecker,)
+
+ # configuration section name
+ name = 'design'
+ # messages
+ msgs = MSGS
+ priority = -2
+ # configuration options
+ options = (('max-args',
+ {'default' : 5, 'type' : 'int', 'metavar' : '<int>',
+ 'help': 'Maximum number of arguments for function / method'}
+ ),
+ ('ignored-argument-names',
+ {'default' : IGNORED_ARGUMENT_NAMES,
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'Argument names that match this expression will be '
+ 'ignored. Default to name with leading underscore'}
+ ),
+ ('max-locals',
+ {'default' : 15, 'type' : 'int', 'metavar' : '<int>',
+ 'help': 'Maximum number of locals for function / method body'}
+ ),
+ ('max-returns',
+ {'default' : 6, 'type' : 'int', 'metavar' : '<int>',
+ 'help': 'Maximum number of return / yield for function / '
+ 'method body'}
+ ),
+ ('max-branchs',
+ {'default' : 12, 'type' : 'int', 'metavar' : '<int>',
+ 'help': 'Maximum number of branch for function / method body'}
+ ),
+ ('max-statements',
+ {'default' : 50, 'type' : 'int', 'metavar' : '<int>',
+ 'help': 'Maximum number of statements in function / method '
+ 'body'}
+ ),
+ ('max-parents',
+ {'default' : 7,
+ 'type' : 'int',
+ 'metavar' : '<num>',
+ 'help' : 'Maximum number of parents for a class (see R0901).'}
+ ),
+ ('max-attributes',
+ {'default' : 7,
+ 'type' : 'int',
+ 'metavar' : '<num>',
+ 'help' : 'Maximum number of attributes for a class \
+(see R0902).'}
+ ),
+ ('min-public-methods',
+ {'default' : 2,
+ 'type' : 'int',
+ 'metavar' : '<num>',
+ 'help' : 'Minimum number of public methods for a class \
+(see R0903).'}
+ ),
+ ('max-public-methods',
+ {'default' : 20,
+ 'type' : 'int',
+ 'metavar' : '<num>',
+ 'help' : 'Maximum number of public methods for a class \
+(see R0904).'}
+ ),
+ )
+
+ def __init__(self, linter=None):
+ BaseChecker.__init__(self, linter)
+ self.stats = None
+ self._returns = None
+ self._branchs = None
+ self._used_abstracts = None
+ self._used_ifaces = None
+ self._abstracts = None
+ self._ifaces = None
+ self._stmts = 0
+
+ def open(self):
+ """initialize visit variables"""
+ self.stats = self.linter.add_stats()
+ self._returns = []
+ self._branchs = []
+ self._used_abstracts = {}
+ self._used_ifaces = {}
+ self._abstracts = []
+ self._ifaces = []
+
+ def close(self):
+ """check that abstract/interface classes are used"""
+ for abstract in self._abstracts:
+ if not abstract in self._used_abstracts:
+ self.add_message('R0921', node=abstract)
+ elif self._used_abstracts[abstract] < 2:
+ self.add_message('R0922', node=abstract,
+ args=self._used_abstracts[abstract])
+ for iface in self._ifaces:
+ if not iface in self._used_ifaces:
+ self.add_message('R0923', node=iface)
+
+ def visit_class(self, node):
+ """check size of inheritance hierarchy and number of instance attributes
+ """
+ self._inc_branch()
+ # Is the total inheritance hierarchy is 7 or less?
+ nb_parents = len(list(node.ancestors()))
+ if nb_parents > self.config.max_parents:
+ self.add_message('R0901', node=node,
+ args=(nb_parents, self.config.max_parents))
+ # Does the class contain less than 20 attributes for
+ # non-GUI classes (40 for GUI)?
+ # FIXME detect gui classes
+ if len(node.instance_attrs) > self.config.max_attributes:
+ self.add_message('R0902', node=node,
+ args=(len(node.instance_attrs),
+ self.config.max_attributes))
+ # update abstract / interface classes structures
+ if class_is_abstract(node):
+ self._abstracts.append(node)
+ elif node.type == 'interface' and node.name != 'Interface':
+ self._ifaces.append(node)
+ for parent in node.ancestors(False):
+ if parent.name == 'Interface':
+ continue
+ self._used_ifaces[parent] = 1
+ try:
+ for iface in node.interfaces():
+ self._used_ifaces[iface] = 1
+ except InferenceError:
+ # XXX log ?
+ pass
+ for parent in node.ancestors():
+ try:
+ self._used_abstracts[parent] += 1
+ except KeyError:
+ self._used_abstracts[parent] = 1
+
+    def leave_class(self, node):
+        """check number of public methods"""
+        nb_public_methods = 0
+        special_methods = set()
+        for method in node.methods():
+            # names without a leading underscore are public API
+            if not method.name.startswith('_'):
+                nb_public_methods += 1
+            if method.name.startswith("__"):
+                special_methods.add(method.name)
+        # R0904: does the class expose more than max_public_methods?
+        if nb_public_methods > self.config.max_public_methods:
+            self.add_message('R0904', node=node,
+                             args=(nb_public_methods,
+                                   self.config.max_public_methods))
+        # stop here for exception, metaclass and interface classes
+        if node.type != 'class':
+            return
+        # Does the class implement special methods consistently?
+        # If so, don't enforce minimum public methods.
+        check_special = SpecialMethodChecker(
+            special_methods, lambda args: self.add_message('R0924', node=node, args=args))
+        protocols = [check_special(pmethods, pname) for pname, pmethods in SPECIAL_METHODS]
+        if True in protocols:
+            # at least one full protocol implemented: R0903 doesn't apply
+            return
+        # R0903: too few public methods suggests the class may not be needed
+        if nb_public_methods < self.config.min_public_methods:
+            self.add_message('R0903', node=node,
+                             args=(nb_public_methods,
+                                   self.config.min_public_methods))
+
+    def visit_function(self, node):
+        """check function name, docstring, arguments, redefinition,
+        variable names, max locals
+        """
+        self._inc_branch()
+        # init branch and returns counters for this (possibly nested) function
+        self._returns.append(0)
+        self._branchs.append(0)
+        # R0913: check number of arguments, ignoring names matching the
+        # ignored-argument-names regexp (e.g. unused callback args)
+        args = node.args.args
+        if args is not None:
+            ignored_args_num = len(
+                [arg for arg in args
+                 if self.config.ignored_argument_names.match(arg.name)])
+            argnum = len(args) - ignored_args_num
+            if argnum > self.config.max_args:
+                # NOTE(review): the message reports len(args) (all arguments)
+                # while the threshold compares argnum (ignored names
+                # excluded) -- confirm this mismatch is intended
+                self.add_message('R0913', node=node,
+                                 args=(len(args), self.config.max_args))
+        else:
+            ignored_args_num = 0
+        # R0914: check number of local variables (arguments live in
+        # node.locals too, so ignored args are subtracted)
+        locnum = len(node.locals) - ignored_args_num
+        if locnum > self.config.max_locals:
+            self.add_message('R0914', node=node,
+                             args=(locnum, self.config.max_locals))
+        # init statements counter (the def line itself counts as one)
+        self._stmts = 1
+
+    def leave_function(self, node):
+        """most of the work is done here on close:
+        checks for max returns, branch, return in __init__
+        """
+        # R0911: too many return statements
+        returns = self._returns.pop()
+        if returns > self.config.max_returns:
+            self.add_message('R0911', node=node,
+                             args=(returns, self.config.max_returns))
+        # R0912: too many branches
+        branchs = self._branchs.pop()
+        if branchs > self.config.max_branchs:
+            self.add_message('R0912', node=node,
+                             args=(branchs, self.config.max_branchs))
+        # R0915: too many statements
+        # NOTE(review): _stmts is a single counter, not a stack like
+        # _returns/_branchs -- nested functions share it; confirm intended
+        if self._stmts > self.config.max_statements:
+            self.add_message('R0915', node=node,
+                             args=(self._stmts, self.config.max_statements))
+
+    def visit_return(self, _):
+        """count number of returns in the innermost function being visited"""
+        if not self._returns:
+            return # return outside function, reported by the base checker
+        self._returns[-1] += 1
+
+    def visit_default(self, node):
+        """default visit method -> increments the statements counter if
+        necessary
+        """
+        # only count nodes that are actual statements, not expressions
+        if node.is_statement:
+            self._stmts += 1
+
+    def visit_tryexcept(self, node):
+        """increments the branchs counter"""
+        # one branch per except handler, plus one for an else clause
+        branchs = len(node.handlers)
+        if node.orelse:
+            branchs += 1
+        self._inc_branch(branchs)
+        self._stmts += branchs
+
+    def visit_tryfinally(self, _):
+        """increments the branchs counter"""
+        # try/finally counts as two branches (try body and finally body)
+        self._inc_branch(2)
+        self._stmts += 2
+
+    def visit_if(self, node):
+        """increments the branchs counter"""
+        branchs = 1
+        # don't double count If nodes coming from some 'elif': an elif is
+        # represented as a single If node inside orelse
+        if node.orelse and (len(node.orelse)>1 or
+                            not isinstance(node.orelse[0], If)):
+            branchs += 1
+        self._inc_branch(branchs)
+        self._stmts += branchs
+
+    def visit_while(self, node):
+        """increments the branchs counter"""
+        branchs = 1
+        # a while/for 'else' clause is an extra branch
+        if node.orelse:
+            branchs += 1
+        self._inc_branch(branchs)
+        # NOTE(review): unlike visit_if/visit_tryexcept, _stmts is not
+        # incremented here -- confirm whether that asymmetry is intended
+
+    # for loops are counted exactly like while loops
+    visit_for = visit_while
+
+    def _inc_branch(self, branchsnum=1):
+        """increments the branchs counter of every function currently on
+        the visit stack (a branch inside a nested function counts for all
+        enclosing functions as well)
+        """
+        branchs = self._branchs
+        for i in xrange(len(branchs)):
+            branchs[i] += branchsnum
+
+ # FIXME: make a nice report...
+
+def register(linter):
+    """required method to auto register this checker """
+    linter.register_checker(MisdesignChecker(linter))
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/exceptions.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/exceptions.py
@@ -0,0 +1,213 @@
+# Copyright (c) 2003-2007 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""exceptions handling (raising, catching, exceptions classes) checker
+"""
+import sys
+
+from ..logilab.common.compat import builtins
+BUILTINS_NAME = builtins.__name__
+from ..logilab import astng
+from ..logilab.astng import YES, Instance, unpack_infer
+
+from ..checkers import BaseChecker
+from ..checkers.utils import is_empty, is_raising
+from ..interfaces import IASTNGChecker
+
+
+OVERGENERAL_EXCEPTIONS = ('Exception',)
+
+MSGS = {
+ 'E0701': ('Bad except clauses order (%s)',
+ 'bad-except-order',
+ 'Used when except clauses are not in the correct order (from the '
+ 'more specific to the more generic). If you don\'t fix the order, '
+ 'some exceptions may not be catched by the most specific handler.'),
+ 'E0702': ('Raising %s while only classes, instances or string are allowed',
+ 'raising-bad-type',
+ 'Used when something which is neither a class, an instance or a \
+ string is raised (i.e. a `TypeError` will be raised).'),
+ 'E0710': ('Raising a new style class which doesn\'t inherit from BaseException',
+ 'raising-non-exception',
+ 'Used when a new style class which doesn\'t inherit from \
+ BaseException is raised.'),
+ 'E0711': ('NotImplemented raised - should raise NotImplementedError',
+ 'notimplemented-raised',
+ 'Used when NotImplemented is raised instead of \
+ NotImplementedError'),
+
+ 'W0701': ('Raising a string exception',
+ 'raising-string',
+ 'Used when a string exception is raised.'),
+ 'W0702': ('No exception type(s) specified',
+ 'bare-except',
+ 'Used when an except clause doesn\'t specify exceptions type to \
+ catch.'),
+ 'W0703': ('Catching too general exception %s',
+ 'broad-except',
+ 'Used when an except catches a too general exception, \
+ possibly burying unrelated errors.'),
+ 'W0704': ('Except doesn\'t do anything',
+ 'pointless-except',
+ 'Used when an except clause does nothing but "pass" and there is\
+ no "else" clause.'),
+ 'W0710': ('Exception doesn\'t inherit from standard "Exception" class',
+ 'nonstandard-exception',
+ 'Used when a custom exception class is raised but doesn\'t \
+ inherit from the builtin "Exception" class.'),
+ 'W0711': ('Exception to catch is the result of a binary "%s" operation',
+ 'binary-op-exception',
+ 'Used when the exception to catch is of the form \
+ "except A or B:". If intending to catch multiple, \
+ rewrite as "except (A, B):"'),
+ }
+
+
+if sys.version_info < (3, 0):
+ EXCEPTIONS_MODULE = "exceptions"
+else:
+ EXCEPTIONS_MODULE = "builtins"
+
+class ExceptionsChecker(BaseChecker):
+    """checks for
+    * excepts without exception filter
+    * type of raise argument : string, Exceptions, other values
+    """
+
+    __implements__ = IASTNGChecker
+
+    # checker name, used as configuration section name
+    name = 'exceptions'
+    msgs = MSGS
+    priority = -4
+    options = (('overgeneral-exceptions',
+                {'default' : OVERGENERAL_EXCEPTIONS,
+                 'type' :'csv', 'metavar' : '<comma-separated class names>',
+                 'help' : 'Exceptions that will emit a warning '
+                 'when being caught. Defaults to "%s"' % (
+                 ', '.join(OVERGENERAL_EXCEPTIONS),)}
+                ),
+               )
+
+    def visit_raise(self, node):
+        """visit raise possibly inferring value"""
+        # ignore empty raise (re-raise of the current exception)
+        if node.exc is None:
+            return
+        expr = node.exc
+        # first check the raw expression; if inconclusive, try inference
+        if self._check_raise_value(node, expr):
+            return
+        else:
+            try:
+                # NOTE(review): .next() is Python 2 only; on py3 this would
+                # need next(...) -- consistent with the py2 vendoring here
+                value = unpack_infer(expr).next()
+            except astng.InferenceError:
+                return
+            self._check_raise_value(node, value)
+
+    def _check_raise_value(self, node, expr):
+        """check for bad values, string exception and class inheritance
+
+        return True if a conclusive value was found, False otherwise (the
+        caller then retries with an inferred value)
+        """
+        value_found = True
+        if isinstance(expr, astng.Const):
+            value = expr.value
+            if isinstance(value, str):
+                # W0701: raising a plain string
+                self.add_message('W0701', node=node)
+            else:
+                # raising any other constant is a TypeError at runtime
+                self.add_message('E0702', node=node,
+                                 args=value.__class__.__name__)
+        elif (isinstance(expr, astng.Name) and \
+                 expr.name in ('None', 'True', 'False')) or \
+                isinstance(expr, (astng.List, astng.Dict, astng.Tuple,
+                                  astng.Module, astng.Function)):
+            # E0702: raising something that is neither class nor instance
+            self.add_message('E0702', node=node, args=expr.name)
+        elif ( (isinstance(expr, astng.Name) and expr.name == 'NotImplemented')
+               or (isinstance(expr, astng.CallFunc) and
+                   isinstance(expr.func, astng.Name) and
+                   expr.func.name == 'NotImplemented') ):
+            # E0711: NotImplemented raised instead of NotImplementedError
+            self.add_message('E0711', node=node)
+        elif isinstance(expr, astng.BinOp) and expr.op == '%':
+            # "raise 'message %s' % arg" is still a string exception
+            self.add_message('W0701', node=node)
+        elif isinstance(expr, (Instance, astng.Class)):
+            if isinstance(expr, Instance):
+                # check the instance's class, not the instance itself
+                expr = expr._proxied
+            if (isinstance(expr, astng.Class) and
+                    not inherit_from_std_ex(expr) and
+                    expr.root().name != BUILTINS_NAME):
+                if expr.newstyle:
+                    # E0710: new-style class not deriving from BaseException
+                    self.add_message('E0710', node=node)
+                else:
+                    # W0710: old-style class not deriving from Exception
+                    self.add_message('W0710', node=node)
+            else:
+                value_found = False
+        else:
+            value_found = False
+        return value_found
+
+
+    def visit_tryexcept(self, node):
+        """check for empty except"""
+        exceptions_classes = []
+        nb_handlers = len(node.handlers)
+        for index, handler in enumerate(node.handlers):
+            # W0704: single except doing nothing but "pass" without else clause
+            if nb_handlers == 1 and is_empty(handler.body) and not node.orelse:
+                self.add_message('W0704', node=handler.type or handler.body[0])
+            if handler.type is None:
+                # bare "except:" clause
+                if nb_handlers == 1 and not is_raising(handler.body):
+                    self.add_message('W0702', node=handler)
+                # check if a "except:" is followed by some other
+                # except
+                elif index < (nb_handlers - 1):
+                    msg = 'empty except clause should always appear last'
+                    self.add_message('E0701', node=node, args=msg)
+
+            elif isinstance(handler.type, astng.BoolOp):
+                # W0711: "except A or B:" catches only A, not A or B
+                self.add_message('W0711', node=handler, args=handler.type.op)
+            else:
+                try:
+                    excs = list(unpack_infer(handler.type))
+                except astng.InferenceError:
+                    continue
+                for exc in excs:
+                    # XXX skip other non class nodes
+                    if exc is YES or not isinstance(exc, astng.Class):
+                        continue
+                    exc_ancestors = [anc for anc in exc.ancestors()
+                                     if isinstance(anc, astng.Class)]
+                    # E0701: a broader exception caught earlier shadows this one
+                    for previous_exc in exceptions_classes:
+                        if previous_exc in exc_ancestors:
+                            msg = '%s is an ancestor class of %s' % (
+                                previous_exc.name, exc.name)
+                            self.add_message('E0701', node=handler.type, args=msg)
+                    # W0703: catching an overgeneral exception (e.g. Exception)
+                    # in a sole, non-raising handler
+                    if (exc.name in self.config.overgeneral_exceptions
+                        and exc.root().name == EXCEPTIONS_MODULE
+                        and nb_handlers == 1 and not is_raising(handler.body)):
+                        self.add_message('W0703', args=exc.name, node=handler.type)
+                exceptions_classes += excs
+
+
+def inherit_from_std_ex(node):
+    """return true if the given class node is subclass of
+    exceptions.Exception
+    """
+    # direct hit: the node itself is the standard Exception/BaseException
+    if node.name in ('Exception', 'BaseException') \
+            and node.root().name == EXCEPTIONS_MODULE:
+        return True
+    # otherwise recurse over direct parents only (recurs=False), so each
+    # level of the hierarchy is examined exactly once
+    for parent in node.ancestors(recurs=False):
+        if inherit_from_std_ex(parent):
+            return True
+    return False
+
+def register(linter):
+    """required method to auto register this checker"""
+    linter.register_checker(ExceptionsChecker(linter))
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/format.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/format.py
@@ -0,0 +1,476 @@
+# Copyright (c) 2003-2010 Sylvain Thenault (thenault@gmail.com).
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# Copyright 2012 Google Inc.
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""Python code format's checker.
+
+By default try to follow Guido's style guide :
+
+http://www.python.org/doc/essays/styleguide.html
+
+Some parts of the process_token method is based from The Tab Nanny std module.
+"""
+
+import re, sys
+import tokenize
+if not hasattr(tokenize, 'NL'):
+ raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
+
+from ..logilab.common.textutils import pretty_match
+from ..logilab.astng import nodes
+
+from ..interfaces import IRawChecker, IASTNGChecker
+from ..checkers import BaseRawChecker
+from ..checkers.utils import check_messages
+
+MSGS = {
+ 'C0301': ('Line too long (%s/%s)',
+ 'line-too-long',
+ 'Used when a line is longer than a given number of characters.'),
+ 'C0302': ('Too many lines in module (%s)', # was W0302
+ 'too-many-lines',
+ 'Used when a module has too much lines, reducing its readability.'
+ ),
+
+ 'W0311': ('Bad indentation. Found %s %s, expected %s',
+ 'bad-indentation',
+ 'Used when an unexpected number of indentation\'s tabulations or '
+ 'spaces has been found.'),
+ 'W0312': ('Found indentation with %ss instead of %ss',
+ 'mixed-indentation',
+ 'Used when there are some mixed tabs and spaces in a module.'),
+ 'W0301': ('Unnecessary semicolon', # was W0106
+ 'unnecessary-semicolon',
+ 'Used when a statement is ended by a semi-colon (";"), which \
+ isn\'t necessary (that\'s python, not C ;).'),
+ 'C0321': ('More than one statement on a single line',
+ 'multiple-statements',
+ 'Used when more than on statement are found on the same line.'),
+ 'C0322': ('Operator not preceded by a space\n%s',
+ 'no-space-before-operator',
+ 'Used when one of the following operator (!= | <= | == | >= | < '
+ '| > | = | \\+= | -= | \\*= | /= | %) is not preceded by a space.'),
+ 'C0323': ('Operator not followed by a space\n%s',
+ 'no-space-after-operator',
+ 'Used when one of the following operator (!= | <= | == | >= | < '
+ '| > | = | \\+= | -= | \\*= | /= | %) is not followed by a space.'),
+ 'C0324': ('Comma not followed by a space\n%s',
+ 'no-space-after-comma',
+ 'Used when a comma (",") is not followed by a space.'),
+ }
+
+if sys.version_info < (3, 0):
+
+ MSGS.update({
+ 'W0331': ('Use of the <> operator',
+ 'old-ne-operator',
+ 'Used when the deprecated "<>" operator is used instead \
+ of "!=".'),
+ 'W0332': ('Use of "l" as long integer identifier',
+ 'lowercase-l-suffix',
+ 'Used when a lower case "l" is used to mark a long integer. You '
+ 'should use a upper case "L" since the letter "l" looks too much '
+ 'like the digit "1"'),
+ 'W0333': ('Use of the `` operator',
+ 'backtick',
+ 'Used when the deprecated "``" (backtick) operator is used '
+ 'instead of the str() function.'),
+ })
+
+# simple quoted string rgx
+SQSTRING_RGX = r'"([^"\\]|\\.)*?"'
+# simple apostrophed rgx
+SASTRING_RGX = r"'([^'\\]|\\.)*?'"
+# triple quoted string rgx
+TQSTRING_RGX = r'"""([^"]|("(?!"")))*?(""")'
+# triple apostrophe'd string rgx
+TASTRING_RGX = r"'''([^']|('(?!'')))*?(''')"
+
+# finally, the string regular expression
+STRING_RGX = re.compile('(%s)|(%s)|(%s)|(%s)' % (TQSTRING_RGX, TASTRING_RGX,
+ SQSTRING_RGX, SASTRING_RGX),
+ re.MULTILINE|re.DOTALL)
+
+COMMENT_RGX = re.compile("#.*$", re.M)
+
+OPERATORS = r'!=|<=|==|>=|<|>|=|\+=|-=|\*=|/=|%'
+
+OP_RGX_MATCH_1 = r'[^(]*(?<!\s|\^|<|>|=|\+|-|\*|/|!|%%|&|\|)(%s).*' % OPERATORS
+OP_RGX_SEARCH_1 = r'(?<!\s|\^|<|>|=|\+|-|\*|/|!|%%|&|\|)(%s)' % OPERATORS
+
+OP_RGX_MATCH_2 = r'[^(]*(%s)(?!\s|=|>|<).*' % OPERATORS
+OP_RGX_SEARCH_2 = r'(%s)(?!\s|=|>)' % OPERATORS
+
+BAD_CONSTRUCT_RGXS = (
+
+ (re.compile(OP_RGX_MATCH_1, re.M),
+ re.compile(OP_RGX_SEARCH_1, re.M),
+ 'C0322'),
+
+ (re.compile(OP_RGX_MATCH_2, re.M),
+ re.compile(OP_RGX_SEARCH_2, re.M),
+ 'C0323'),
+
+ (re.compile(r'.*,[^(\s|\]|}|\))].*', re.M),
+ re.compile(r',[^\s)]', re.M),
+ 'C0324'),
+ )
+
+_PY3K = sys.version_info >= (3, 0)
+
+def get_string_coords(line):
+    """return a list of string positions (tuple (start, end)) in the line,
+    found with STRING_RGX (covers single/triple, quote/apostrophe strings)
+    """
+    result = []
+    for match in re.finditer(STRING_RGX, line):
+        result.append( (match.start(), match.end()) )
+    return result
+
+def in_coords(match, string_coords):
+    """return true if the match is in the string coord
+
+    only the match's start position is tested against each (start, end) span
+    """
+    mstart = match.start()
+    for start, end in string_coords:
+        if mstart >= start and mstart < end:
+            return True
+    return False
+
+def check_line(line):
+    """check a line for a bad construction
+    if it founds one, return a message describing the problem
+    else return None (implicitly, by falling off the end)
+    """
+    # strip strings then comments so operators inside them are not matched
+    cleanstr = COMMENT_RGX.sub('', STRING_RGX.sub('', line))
+    for rgx_match, rgx_search, msg_id in BAD_CONSTRUCT_RGXS:
+        if rgx_match.match(cleanstr):
+            # re-search on the original line, skipping matches that fall
+            # inside string literals
+            string_positions = get_string_coords(line)
+            for match in re.finditer(rgx_search, line):
+                if not in_coords(match, string_positions):
+                    return msg_id, pretty_match(match, line.rstrip())
+
+
+class FormatChecker(BaseRawChecker):
+ """checks for :
+ * unauthorized constructions
+ * strict indentation
+ * line length
+ * use of <> instead of !=
+ """
+
+ __implements__ = (IRawChecker, IASTNGChecker)
+
+ # configuration section name
+ name = 'format'
+ # messages
+ msgs = MSGS
+ # configuration options
+ # for available dict keys/values see the optik parser 'add_option' method
+ options = (('max-line-length',
+ {'default' : 80, 'type' : "int", 'metavar' : '<int>',
+ 'help' : 'Maximum number of characters on a single line.'}),
+ ('max-module-lines',
+ {'default' : 1000, 'type' : 'int', 'metavar' : '<int>',
+ 'help': 'Maximum number of lines in a module'}
+ ),
+ ('indent-string',
+ {'default' : ' ', 'type' : "string", 'metavar' : '<string>',
+ 'help' : 'String used as indentation unit. This is usually \
+" " (4 spaces) or "\\t" (1 tab).'}),
+ )
+ def __init__(self, linter=None):
+ BaseRawChecker.__init__(self, linter)
+ self._lines = None
+ self._visited_lines = None
+
+ def process_module(self, node):
+ """extracts encoding from the stream and decodes each line, so that
+ international text's length is properly calculated.
+ """
+ stream = node.file_stream
+ stream.seek(0) # XXX may be removed with astng > 0.23
+ readline = stream.readline
+ if sys.version_info < (3, 0):
+ if node.file_encoding is not None:
+ readline = lambda: stream.readline().decode(node.file_encoding, 'replace')
+ self.process_tokens(tokenize.generate_tokens(readline))
+
+ def new_line(self, tok_type, line, line_num, junk):
+ """a new line has been encountered, process it if necessary"""
+ if not tok_type in junk:
+ self._lines[line_num] = line.split('\n')[0]
+ self.check_lines(line, line_num)
+
+ def process_tokens(self, tokens):
+ """process tokens and search for :
+
+ _ non strict indentation (i.e. not always using the <indent> parameter as
+ indent unit)
+ _ too long lines (i.e. longer than <max_chars>)
+ _ optionally bad construct (if given, bad_construct must be a compiled
+ regular expression).
+ """
+ indent = tokenize.INDENT
+ dedent = tokenize.DEDENT
+ newline = tokenize.NEWLINE
+ junk = (tokenize.COMMENT, tokenize.NL)
+ indents = [0]
+ check_equal = 0
+ line_num = 0
+ previous = None
+ self._lines = {}
+ self._visited_lines = {}
+ for (tok_type, token, start, _, line) in tokens:
+ if start[0] != line_num:
+ if previous is not None and previous[0] == tokenize.OP and previous[1] == ';':
+ self.add_message('W0301', line=previous[2])
+ previous = None
+ line_num = start[0]
+ self.new_line(tok_type, line, line_num, junk)
+ if tok_type not in (indent, dedent, newline) + junk:
+ previous = tok_type, token, start[0]
+
+ if tok_type == tokenize.OP:
+ if token == '<>':
+ self.add_message('W0331', line=line_num)
+ elif tok_type == tokenize.NUMBER:
+ if token.endswith('l'):
+ self.add_message('W0332', line=line_num)
+
+ elif tok_type == newline:
+ # a program statement, or ENDMARKER, will eventually follow,
+ # after some (possibly empty) run of tokens of the form
+ # (NL | COMMENT)* (INDENT | DEDENT+)?
+ # If an INDENT appears, setting check_equal is wrong, and will
+ # be undone when we see the INDENT.
+ check_equal = 1
+
+ elif tok_type == indent:
+ check_equal = 0
+ self.check_indent_level(token, indents[-1]+1, line_num)
+ indents.append(indents[-1]+1)
+
+ elif tok_type == dedent:
+ # there's nothing we need to check here! what's important is
+ # that when the run of DEDENTs ends, the indentation of the
+ # program statement (or ENDMARKER) that triggered the run is
+ # equal to what's left at the top of the indents stack
+ check_equal = 1
+ if len(indents) > 1:
+ del indents[-1]
+
+ elif check_equal and tok_type not in junk:
+ # this is the first "real token" following a NEWLINE, so it
+ # must be the first token of the next program statement, or an
+ # ENDMARKER; the "line" argument exposes the leading whitespace
+ # for this statement; in the case of ENDMARKER, line is an empty
+ # string, so will properly match the empty string with which the
+ # "indents" stack was seeded
+ check_equal = 0
+ self.check_indent_level(line, indents[-1], line_num)
+
+ line_num -= 1 # to be ok with "wc -l"
+ if line_num > self.config.max_module_lines:
+ self.add_message('C0302', args=line_num, line=1)
+
+ @check_messages('C0321' ,'C03232', 'C0323', 'C0324')
+ def visit_default(self, node):
+ """check the node line number and check it if not yet done"""
+ if not node.is_statement:
+ return
+ if not node.root().pure_python:
+ return # XXX block visit of child nodes
+ prev_sibl = node.previous_sibling()
+ if prev_sibl is not None:
+ prev_line = prev_sibl.fromlineno
+ else:
+ prev_line = node.parent.statement().fromlineno
+ line = node.fromlineno
+ assert line, node
+ if prev_line == line and self._visited_lines.get(line) != 2:
+ # py2.5 try: except: finally:
+ if not (isinstance(node, nodes.TryExcept)
+ and isinstance(node.parent, nodes.TryFinally)
+ and node.fromlineno == node.parent.fromlineno):
+ self.add_message('C0321', node=node)
+ self._visited_lines[line] = 2
+ return
+ if line in self._visited_lines:
+ return
+ try:
+ tolineno = node.blockstart_tolineno
+ except AttributeError:
+ tolineno = node.tolineno
+ assert tolineno, node
+ lines = []
+ for line in xrange(line, tolineno + 1):
+ self._visited_lines[line] = 1
+ try:
+ lines.append(self._lines[line].rstrip())
+ except KeyError:
+ lines.append('')
+ try:
+ msg_def = check_line('\n'.join(lines))
+ if msg_def:
+ self.add_message(msg_def[0], node=node, args=msg_def[1])
+ except KeyError:
+ # FIXME: internal error !
+ pass
+
+ @check_messages('W0333')
+ def visit_backquote(self, node):
+ self.add_message('W0333', node=node)
+
+ def check_lines(self, lines, i):
+ """check lines have less than a maximum number of characters
+ """
+ max_chars = self.config.max_line_length
+ for line in lines.splitlines():
+ if len(line) > max_chars:
+ self.add_message('C0301', line=i, args=(len(line), max_chars))
+ i += 1
+
+ def check_indent_level(self, string, expected, line_num):
+ """return the indent level of the string
+ """
+ indent = self.config.indent_string
+ if indent == '\\t': # \t is not interpreted in the configuration file
+ indent = '\t'
+ level = 0
+ unit_size = len(indent)
+ while string[:unit_size] == indent:
+ string = string[unit_size:]
+ level += 1
+ suppl = ''
+ while string and string[0] in ' \t':
+ if string[0] != indent[0]:
+ if string[0] == '\t':
+ args = ('tab', 'space')
+ else:
+ args = ('space', 'tab')
+ self.add_message('W0312', args=args, line=line_num)
+ return level
+ suppl += string[0]
+ string = string [1:]
+ if level != expected or suppl:
+ i_type = 'spaces'
+ if indent[0] == '\t':
+ i_type = 'tabs'
+ self.add_message('W0311', line=line_num,
+ args=(level * unit_size + len(suppl), i_type,
+ expected * unit_size))
+
+
+class StringConstantChecker(BaseRawChecker):
+    """Check string literals"""
+
+    msgs = {
+        'W1401': ('Anomalous backslash in string: \'%s\'. '
+                  'String constant might be missing an r prefix.',
+                  'anomalous-backslash-in-string',
+                  'Used when a backslash is in a literal string but not as an '
+                  'escape.'),
+        'W1402': ('Anomalous Unicode escape in byte string: \'%s\'. '
+                  'String constant might be missing an r or u prefix.',
+                  'anomalous-unicode-escape-in-string',
+                  'Used when an escape like \\u is encountered in a byte '
+                  'string where it has no effect.'),
+        }
+    name = 'string_constant'
+    __implements__ = (IRawChecker, IASTNGChecker)
+
+    # Characters that have a special meaning after a backslash in either
+    # Unicode or byte strings.
+    ESCAPE_CHARACTERS = 'abfnrtvx\n\r\t\\\'\"01234567'
+
+    # TODO(mbp): Octal characters are quite an edge case today; people may
+    # prefer a separate warning where they occur.  \0 should be allowed.
+
+    # Characters that have a special meaning after a backslash but only in
+    # Unicode strings.
+    UNICODE_ESCAPE_CHARACTERS = 'uUN'
+
+    def process_tokens(self, tokens):
+        """filter the token stream down to STRING tokens and check each one"""
+        for (tok_type, token, (start_row, start_col), _, _) in tokens:
+            if tok_type == tokenize.STRING:
+                # 'token' is the whole un-parsed token; we can look at the start
+                # of it to see whether it's a raw or unicode string etc.
+                self.process_string_token(token, start_row, start_col)
+
+    def process_string_token(self, token, start_row, start_col):
+        """split a STRING token into prefix, quote style and body, then
+        delegate escape checking for non-raw strings
+        """
+        # find the first quote character; everything before it is the prefix
+        # (the loop variable i is reused after the break, so a STRING token
+        # is assumed to always contain a quote character)
+        for i, c in enumerate(token):
+            if c in '\'\"':
+                quote_char = c
+                break
+        prefix = token[:i].lower() #  markers like u, b, r.
+        after_prefix = token[i:]
+        if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
+            # triple-quoted string: strip three quotes from each end
+            string_body = after_prefix[3:-3]
+        else:
+            string_body = after_prefix[1:-1]  # Chop off quotes
+        # No special checks on raw strings at the moment.
+        if 'r' not in prefix:
+            self.process_non_raw_string_token(prefix, string_body,
+                                              start_row, start_col)
+
+    def process_non_raw_string_token(self, prefix, string_body, start_row,
+                                     start_col):
+        """check for bad escapes in a non-raw string.
+
+        prefix: lowercase string of eg 'ur' string prefix markers.
+        string_body: the un-parsed body of the string, not including the quote
+        marks.
+        start_row: integer line number in the source.
+        start_col: integer column number in the source.
+        """
+        # Walk through the string; if we see a backslash then escape the next
+        # character, and skip over it.  If we see a non-escaped character,
+        # alert, and continue.
+        #
+        # Accept a backslash when it escapes a backslash, or a quote, or
+        # end-of-line, or one of the letters that introduce a special escape
+        # sequence <http://docs.python.org/reference/lexical_analysis.html>
+        #
+        # TODO(mbp): Maybe give a separate warning about the rarely-used
+        # \a \b \v \f?
+        #
+        # TODO(mbp): We could give the column of the problem character, but
+        # add_message doesn't seem to have a way to pass it through at present.
+        i = 0
+        while True:
+            i = string_body.find('\\', i)
+            if i == -1:
+                break
+            # There must be a next character; having a backslash at the end
+            # of the string would be a SyntaxError.
+            next_char = string_body[i+1]
+            match = string_body[i:i+2]
+            if next_char in self.UNICODE_ESCAPE_CHARACTERS:
+                if 'u' in prefix:
+                    # \u is meaningful in an explicit unicode literal
+                    pass
+                elif _PY3K and 'b' not in prefix:
+                    pass  # unicode by default
+                else:
+                    # W1402: \u in a byte string has no effect
+                    self.add_message('W1402', line=start_row, args=(match, ))
+            elif next_char not in self.ESCAPE_CHARACTERS:
+                # W1401: backslash that isn't a recognized escape
+                self.add_message('W1401', line=start_row, args=(match, ))
+            # Whether it was a valid escape or not, backslash followed by
+            # another character can always be consumed whole: the second
+            # character can never be the start of a new backslash escape.
+            i += 2
+
+
+def register(linter):
+    """required method to auto register this checker """
+    linter.register_checker(FormatChecker(linter))
+    linter.register_checker(StringConstantChecker(linter))
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/imports.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/imports.py
@@ -0,0 +1,392 @@
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""imports checkers for Python code"""
+
+from ..logilab.common.graph import get_cycles, DotBackend
+from ..logilab.common.modutils import is_standard_module
+from ..logilab.common.ureports import VerbatimText, Paragraph
+
+from ..logilab import astng
+from ..logilab.astng import are_exclusive
+
+from ..interfaces import IASTNGChecker
+from ..checkers import BaseChecker, EmptyReport
+
+
+def get_first_import(node, context, name, base, level):
+    """return the node where [base.]<name> is imported or None if not found
+
+    node: the Import/From statement being checked for re-importing
+    context: the scope (e.g. module) whose body is scanned for an earlier import
+    name: imported name; base: optional package prefix; level: relative-import
+    level (only compared for From nodes)
+    """
+    fullname = '%s.%s' % (base, name) if base else name
+
+    first = None
+    found = False
+    for first in context.body:
+        # the statement under scrutiny is not its own "first import"
+        if first is node:
+            continue
+        # ignore statements that appear after `node` in the same scope
+        if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
+            continue
+        if isinstance(first, astng.Import):
+            if any(fullname == iname[0] for iname in first.names):
+                found = True
+                break
+        elif isinstance(first, astng.From):
+            # for `from X import Y`, compare the dotted X.Y form and require
+            # the same relative-import level
+            if level == first.level and any(
+                fullname == '%s.%s' % (first.modname, iname[0]) for iname in first.names):
+                found = True
+                break
+    # only a real re-import if both statements can execute in the same run
+    # (i.e. they do not sit on mutually exclusive branches)
+    if found and not are_exclusive(first, node):
+        return first
+
+# utilities to represents import dependencies as tree and dot graph ###########
+
+def filter_dependencies_info(dep_info, package_dir, mode='external'):
+    """filter external or internal dependencies from dep_info (return a
+    new dictionary containing the filtered modules only)
+
+    dep_info maps an imported module name to the modules that import it;
+    package_dir is the analyzed package's directory, used by
+    is_standard_module to decide whether an importee is package-internal.
+    """
+    if mode == 'external':
+        filter_func = lambda x: not is_standard_module(x, (package_dir,))
+    else:
+        assert mode == 'internal'
+        filter_func = lambda x: is_standard_module(x, (package_dir,))
+    result = {}
+    # Python 2 dict iteration
+    for importee, importers in dep_info.iteritems():
+        if filter_func(importee):
+            result[importee] = importers
+    return result
+
+def make_tree_defs(mod_files_list):
+    """get a list of 2-uple (module, list_of_files_which_import_this_module),
+    it will return a dictionary to represent this as a tree
+
+    Each tree node is a [children_dict, files_list] pair keyed by one dotted
+    name component, e.g. 'a.b' -> {'a': [{'b': [{}, files]}, []]}.
+    """
+    tree_defs = {}
+    for mod, files in mod_files_list:
+        node = (tree_defs, ())
+        # walk/create one nested node per dotted-name component
+        for prefix in mod.split('.'):
+            node = node[0].setdefault(prefix, [{}, []])
+        # attach the importing files to the deepest (leaf) component
+        node[1] += files
+    return tree_defs
+
+def repr_tree_defs(data, indent_str=None):
+    """return a string which represents imports as a tree
+
+    data: tree as built by make_tree_defs; indent_str is None at the top
+    level and carries the ASCII-art prefix for nested levels.
+    """
+    lines = []
+    nodes = data.items()
+    for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])):
+        if not files:
+            files = ''
+        else:
+            files = '(%s)' % ','.join(files)
+        if indent_str is None:
+            # top level: no tree decoration
+            lines.append('%s %s' % (mod, files))
+            sub_indent_str = '  '
+        else:
+            lines.append(r'%s\-%s %s' % (indent_str, mod, files))
+            # last sibling gets a blank continuation, others a '|' rail
+            if i == len(nodes)-1:
+                sub_indent_str = '%s  ' % indent_str
+            else:
+                sub_indent_str = '%s| ' % indent_str
+        if sub:
+            lines.append(repr_tree_defs(sub, sub_indent_str))
+    return '\n'.join(lines)
+
+
+def dependencies_graph(filename, dep_info):
+    """write dependencies as a dot (graphviz) file
+
+    dep_info maps a module name to the modules importing it.
+    """
+    done = {}
+    # graph name: filename minus a 4-character extension
+    # NOTE(review): assumes filename ends in e.g. '.dot' — confirm callers
+    printer = DotBackend(filename[:-4], rankdir = "LR")
+    printer.emit('URL="." node[shape="box"]')
+    # first pass: declare every node exactly once
+    for modname, dependencies in sorted(dep_info.iteritems()):
+        done[modname] = 1
+        printer.emit_node(modname)
+        for modname in dependencies:
+            if modname not in done:
+                done[modname] = 1
+                printer.emit_node(modname)
+    # second pass: emit importer -> importee edges
+    for depmodname, dependencies in sorted(dep_info.iteritems()):
+        for modname in dependencies:
+            printer.emit_edge(modname, depmodname)
+    printer.generate(filename)
+
+
+def make_graph(filename, dep_info, sect, gtype):
+    """generate a dependencies graph and add some information about it in the
+    report's section
+
+    gtype is a label prefix for the report line ('', 'external ', 'internal ').
+    """
+    dependencies_graph(filename, dep_info)
+    sect.append(Paragraph('%simports graph has been written to %s'
+                          % (gtype, filename)))
+
+
+# the import checker itself ###################################################
+
+MSGS = {
+ 'F0401': ('Unable to import %s',
+ 'import-error',
+ 'Used when pylint has been unable to import a module.'),
+ 'R0401': ('Cyclic import (%s)',
+ 'cyclic-import',
+ 'Used when a cyclic import between two or more modules is \
+ detected.'),
+
+ 'W0401': ('Wildcard import %s',
+ 'wildcard-import',
+ 'Used when `from module import *` is detected.'),
+ 'W0402': ('Uses of a deprecated module %r',
+ 'deprecated-module',
+ 'Used a module marked as deprecated is imported.'),
+ 'W0403': ('Relative import %r, should be %r',
+ 'relative-import',
+ 'Used when an import relative to the package directory is \
+ detected.'),
+ 'W0404': ('Reimport %r (imported line %s)',
+ 'reimported',
+ 'Used when a module is reimported multiple times.'),
+ 'W0406': ('Module import itself',
+ 'import-self',
+ 'Used when a module is importing itself.'),
+
+ 'W0410': ('__future__ import is not the first non docstring statement',
+ 'misplaced-future',
+ 'Python 2.5 and greater require __future__ import to be the \
+ first non docstring statement in the module.'),
+ }
+
+class ImportsChecker(BaseChecker):
+    """checks for
+    * external modules dependencies
+    * relative / wildcard imports
+    * cyclic imports
+    * uses of deprecated modules
+    """
+
+    __implements__ = IASTNGChecker
+
+    name = 'imports'
+    msgs = MSGS
+    priority = -2
+
+    options = (('deprecated-modules',
+                {'default' : ('regsub', 'string', 'TERMIOS',
+                              'Bastion', 'rexec'),
+                 'type' : 'csv',
+                 'metavar' : '<modules>',
+                 'help' : 'Deprecated modules which should not be used, \
+separated by a comma'}
+                ),
+               ('import-graph',
+                {'default' : '',
+                 'type' : 'string',
+                 'metavar' : '<file.dot>',
+                 'help' : 'Create a graph of every (i.e. internal and \
+external) dependencies in the given file (report RP0402 must not be disabled)'}
+                ),
+               ('ext-import-graph',
+                {'default' : '',
+                 'type' : 'string',
+                 'metavar' : '<file.dot>',
+                 'help' : 'Create a graph of external dependencies in the \
+given file (report RP0402 must not be disabled)'}
+                ),
+               ('int-import-graph',
+                {'default' : '',
+                 'type' : 'string',
+                 'metavar' : '<file.dot>',
+                 'help' : 'Create a graph of internal dependencies in the \
+given file (report RP0402 must not be disabled)'}
+                ),
+
+               )
+
+    def __init__(self, linter=None):
+        BaseChecker.__init__(self, linter)
+        self.stats = None
+        self.import_graph = None
+        # lazily-built caches for the dependency reports (see
+        # _external_dependencies_info / _internal_dependencies_info)
+        self.__int_dep_info = self.__ext_dep_info = None
+        self.reports = (('RP0401', 'External dependencies',
+                         self.report_external_dependencies),
+                        ('RP0402', 'Modules dependencies graph',
+                         self.report_dependencies_graph),
+                       )
+
+    def open(self):
+        """called before visiting project (i.e set of modules)"""
+        self.linter.add_stats(dependencies={})
+        self.linter.add_stats(cycles=[])
+        self.stats = self.linter.stats
+        self.import_graph = {}
+
+    def close(self):
+        """called when done visiting the project (i.e set of modules)"""
+        # don't try to compute cycles if the associated message is disabled
+        if self.linter.is_message_enabled('R0401'):
+            for cycle in get_cycles(self.import_graph):
+                self.add_message('R0401', args=' -> '.join(cycle))
+
+    def visit_import(self, node):
+        """triggered when an import statement is seen"""
+        modnode = node.root()
+        for name, _ in node.names:
+            importedmodnode = self.get_imported_module(modnode, node, name)
+            if importedmodnode is None:
+                # import failed: F0401 already emitted by get_imported_module
+                continue
+            self._check_relative_import(modnode, node, importedmodnode, name)
+            self._add_imported_module(node, importedmodnode.name)
+            self._check_deprecated_module(node, name)
+            self._check_reimport(node, name)
+
+
+    def visit_from(self, node):
+        """triggered when a from statement is seen"""
+        basename = node.modname
+        if basename == '__future__':
+            # check if this is the first non-docstring statement in the module
+            prev = node.previous_sibling()
+            if prev:
+                # consecutive future statements are possible
+                if not (isinstance(prev, astng.From)
+                        and prev.modname == '__future__'):
+                    self.add_message('W0410', node=node)
+            return
+        modnode = node.root()
+        importedmodnode = self.get_imported_module(modnode, node, basename)
+        if importedmodnode is None:
+            return
+        self._check_relative_import(modnode, node, importedmodnode, basename)
+        self._check_deprecated_module(node, basename)
+        for name, _ in node.names:
+            if name == '*':
+                self.add_message('W0401', args=basename, node=node)
+                continue
+            self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name))
+            self._check_reimport(node, name, basename, node.level)
+
+    def get_imported_module(self, modnode, importnode, modname):
+        """return the astng module for <modname>, or None (after emitting
+        F0401) when it cannot be imported/inferred
+        """
+        try:
+            return importnode.do_import_module(modname)
+        except astng.InferenceError, ex:
+            # Python 2 exception syntax
+            if str(ex) != modname:
+                args = '%r (%s)' % (modname, ex)
+            else:
+                args = repr(modname)
+            self.add_message("F0401", args=args, node=importnode)
+
+    def _check_relative_import(self, modnode, importnode, importedmodnode,
+                               importedasname):
+        """check relative import. node is either an Import or From node, modname
+        the imported module name.
+        """
+        # 'active_msgs' comes from the pylint base classes; skip all the
+        # work when W0403 is disabled
+        if 'W0403' not in self.active_msgs:
+            return
+        if importedmodnode.file is None:
+            return False # built-in module
+        if modnode is importedmodnode:
+            return False # module importing itself
+        if modnode.absolute_import_activated() or getattr(importnode, 'level', None):
+            return False
+        if importedmodnode.name != importedasname:
+            # this must be a relative import...
+            self.add_message('W0403', args=(importedasname, importedmodnode.name),
+                             node=importnode)
+
+    def _add_imported_module(self, node, importedmodname):
+        """notify an imported module, used to analyze dependencies"""
+        context_name = node.root().name
+        if context_name == importedmodname:
+            # module importing itself !
+            self.add_message('W0406', node=node)
+        elif not is_standard_module(importedmodname):
+            # handle dependencies
+            importedmodnames = self.stats['dependencies'].setdefault(
+                importedmodname, set())
+            if not context_name in importedmodnames:
+                importedmodnames.add(context_name)
+            # NOTE(review): with package_dir passed in, is_standard_module
+            # here selects modules resolvable from the analyzed package —
+            # confirm intent before changing
+            if is_standard_module( importedmodname, (self.package_dir(),) ):
+                # update import graph
+                mgraph = self.import_graph.setdefault(context_name, set())
+                if not importedmodname in mgraph:
+                    mgraph.add(importedmodname)
+
+    def _check_deprecated_module(self, node, mod_path):
+        """check if the module is deprecated"""
+        for mod_name in self.config.deprecated_modules:
+            # match the module itself or any of its submodules
+            if mod_path == mod_name or mod_path.startswith(mod_name + '.'):
+                self.add_message('W0402', node=node, args=mod_path)
+
+    def _check_reimport(self, node, name, basename=None, level=None):
+        """check if the import is necessary (i.e. not already done)"""
+        if 'W0404' not in self.active_msgs:
+            return
+        frame = node.frame()
+        root = node.root()
+        # look for an earlier import in the local frame first, then (if
+        # different) at module level
+        contexts = [(frame, level)]
+        if root is not frame:
+            contexts.append((root, None))
+        for context, level in contexts:
+            first = get_first_import(node, context, name, basename, level)
+            if first is not None:
+                self.add_message('W0404', node=node,
+                                 args=(name, first.fromlineno))
+
+
+    def report_external_dependencies(self, sect, _, dummy):
+        """return a verbatim layout for displaying dependencies"""
+        dep_info = make_tree_defs(self._external_dependencies_info().iteritems())
+        if not dep_info:
+            raise EmptyReport()
+        tree_str = repr_tree_defs(dep_info)
+        sect.append(VerbatimText(tree_str))
+
+    def report_dependencies_graph(self, sect, _, dummy):
+        """write dependencies as a dot (graphviz) file"""
+        dep_info = self.stats['dependencies']
+        if not dep_info or not (self.config.import_graph
+                                or self.config.ext_import_graph
+                                or self.config.int_import_graph):
+            raise EmptyReport()
+        filename = self.config.import_graph
+        if filename:
+            make_graph(filename, dep_info, sect, '')
+        filename = self.config.ext_import_graph
+        if filename:
+            make_graph(filename, self._external_dependencies_info(),
+                       sect, 'external ')
+        filename = self.config.int_import_graph
+        if filename:
+            make_graph(filename, self._internal_dependencies_info(),
+                       sect, 'internal ')
+
+    def _external_dependencies_info(self):
+        """return cached external dependencies information or build and
+        cache them
+        """
+        if self.__ext_dep_info is None:
+            self.__ext_dep_info = filter_dependencies_info(
+                self.stats['dependencies'], self.package_dir(), 'external')
+        return self.__ext_dep_info
+
+    def _internal_dependencies_info(self):
+        """return cached internal dependencies information or build and
+        cache them
+        """
+        if self.__int_dep_info is None:
+            self.__int_dep_info = filter_dependencies_info(
+                self.stats['dependencies'], self.package_dir(), 'internal')
+        return self.__int_dep_info
+
+
+# Entry point used by pylint's checker auto-loading machinery.
+def register(linter):
+    """required method to auto register this checker """
+    linter.register_checker(ImportsChecker(linter))
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/logging.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/logging.py
@@ -0,0 +1,179 @@
+# Copyright (c) 2009-2010 Google, Inc.
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""checker for use of Python logging
+"""
+
+from ..logilab import astng
+from .. import checkers
+from .. import interfaces
+from ..checkers import utils
+
+
+MSGS = {
+ 'W1201': ('Specify string format arguments as logging function parameters',
+ 'logging-not-lazy',
+ 'Used when a logging statement has a call form of '
+ '"logging.<logging method>(format_string % (format_args...))". '
+ 'Such calls should leave string interpolation to the logging '
+ 'method itself and be written '
+ '"logging.<logging method>(format_string, format_args...)" '
+ 'so that the program may avoid incurring the cost of the '
+ 'interpolation in those cases in which no message will be '
+ 'logged. For more, see '
+ 'http://www.python.org/dev/peps/pep-0282/.'),
+ 'E1200': ('Unsupported logging format character %r (%#02x) at index %d',
+ 'logging-unsupported-format',
+ 'Used when an unsupported format character is used in a logging\
+ statement format string.'),
+ 'E1201': ('Logging format string ends in middle of conversion specifier',
+ 'logging-format-truncated',
+ 'Used when a logging statement format string terminates before\
+ the end of a conversion specifier.'),
+ 'E1205': ('Too many arguments for logging format string',
+ 'logging-too-many-args',
+ 'Used when a logging format string is given too few arguments.'),
+ 'E1206': ('Not enough arguments for logging format string',
+ 'logging-too-few-args',
+ 'Used when a logging format string is given too many arguments'),
+ }
+
+
+# logging convenience methods whose first positional argument is the format
+# string (logging.log takes the format as its second argument instead)
+CHECKED_CONVENIENCE_FUNCTIONS = set([
+    'critical', 'debug', 'error', 'exception', 'fatal', 'info', 'warn',
+    'warning'])
+
+
+class LoggingChecker(checkers.BaseChecker):
+    """Checks use of the logging module."""
+
+    __implements__ = interfaces.IASTNGChecker
+    name = 'logging'
+    msgs = MSGS
+
+    def visit_module(self, unused_node):
+        """Clears any state left in this checker from last module checked."""
+        # The code being checked can just as easily "import logging as foo",
+        # so it is necessary to process the imports and store in this field
+        # what name the logging module is actually given.
+        self._logging_name = None
+
+    def visit_import(self, node):
+        """Checks to see if this module uses Python's built-in logging."""
+        for module, as_name in node.names:
+            if module == 'logging':
+                if as_name:
+                    self._logging_name = as_name
+                else:
+                    self._logging_name = 'logging'
+
+    def visit_callfunc(self, node):
+        """Checks calls to (simple forms of) logging methods."""
+        # Only attribute calls on a plain name, e.g. logging.warn(...) or
+        # logger.warn(...).
+        if (not isinstance(node.func, astng.Getattr)
+            or not isinstance(node.func.expr, astng.Name)):
+            return
+        try:
+            # Also accept calls on inferred instances of logging.Logger
+            # subclasses, not just the logging module itself.
+            logger_class = [inferred for inferred in node.func.expr.infer() if (
+                isinstance(inferred, astng.Instance)
+                and [ancestor for ancestor in inferred._proxied.ancestors() if (
+                    ancestor.name == 'Logger'
+                    and ancestor.parent.name == 'logging')])]
+        except astng.exceptions.InferenceError:
+            return
+        if (node.func.expr.name != self._logging_name and not logger_class):
+            return
+        self._check_convenience_methods(node)
+        self._check_log_methods(node)
+
+    def _check_convenience_methods(self, node):
+        """Checks calls to logging convenience methods (like logging.warn)."""
+        if node.func.attrname not in CHECKED_CONVENIENCE_FUNCTIONS:
+            return
+        if node.starargs or node.kwargs or not node.args:
+            # Either no args, star args, or double-star args. Beyond the
+            # scope of this checker.
+            return
+        if isinstance(node.args[0], astng.BinOp) and node.args[0].op == '%':
+            # Eager interpolation: logging.warn(format % args).
+            self.add_message('W1201', node=node)
+        elif isinstance(node.args[0], astng.Const):
+            self._check_format_string(node, 0)
+
+    def _check_log_methods(self, node):
+        """Checks calls to logging.log(level, format, *format_args)."""
+        if node.func.attrname != 'log':
+            return
+        if node.starargs or node.kwargs or len(node.args) < 2:
+            # Either a malformed call, star args, or double-star args. Beyond
+            # the scope of this checker.
+            return
+        if isinstance(node.args[1], astng.BinOp) and node.args[1].op == '%':
+            self.add_message('W1201', node=node)
+        elif isinstance(node.args[1], astng.Const):
+            self._check_format_string(node, 1)
+
+    def _check_format_string(self, node, format_arg):
+        """Checks that format string tokens match the supplied arguments.
+
+        Args:
+          node: AST node to be checked.
+          format_arg: Index of the format string in the node arguments.
+        """
+        num_args = self._count_supplied_tokens(node.args[format_arg + 1:])
+        if not num_args:
+            # If no args were supplied, then all format strings are valid -
+            # don't check any further.
+            return
+        format_string = node.args[format_arg].value
+        # Python 2: basestring covers both str and unicode constants.
+        if not isinstance(format_string, basestring):
+            # If the log format is constant non-string (e.g. logging.debug(5)),
+            # ensure there are no arguments.
+            required_num_args = 0
+        else:
+            try:
+                keyword_args, required_num_args = \
+                    utils.parse_format_string(format_string)
+                if keyword_args:
+                    # Keyword checking on logging strings is complicated by
+                    # special keywords - out of scope.
+                    return
+            except utils.UnsupportedFormatCharacter, e:
+                c = format_string[e.index]
+                self.add_message('E1200', node=node, args=(c, ord(c), e.index))
+                return
+            except utils.IncompleteFormatString:
+                self.add_message('E1201', node=node)
+                return
+        if num_args > required_num_args:
+            self.add_message('E1205', node=node)
+        elif num_args < required_num_args:
+            self.add_message('E1206', node=node)
+
+    def _count_supplied_tokens(self, args):
+        """Counts the number of tokens in an args list.
+
+        The Python log functions allow for special keyword arguments: func,
+        exc_info and extra. To handle these cases correctly, we only count
+        arguments that aren't keywords.
+
+        Args:
+          args: List of AST nodes that are arguments for a log format string.
+
+        Returns:
+          Number of AST nodes that aren't keywords.
+        """
+        return sum(1 for arg in args if not isinstance(arg, astng.Keyword))
+
+
+# Entry point used by pylint's checker auto-loading machinery.
+def register(linter):
+    """Required method to auto-register this checker."""
+    linter.register_checker(LoggingChecker(linter))
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/misc.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/misc.py
@@ -0,0 +1,78 @@
+# pylint: disable=W0511
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+""" Copyright (c) 2000-2010 LOGILAB S.A. (Paris, FRANCE).
+ http://www.logilab.fr/ -- mailto:contact@logilab.fr
+
+Check source code is ascii only or has an encoding declaration (PEP 263)
+"""
+
+import re
+
+from ..interfaces import IRawChecker
+from ..checkers import BaseChecker
+
+
+# message id -> (message template, symbolic name, description); the template
+# is filled with the matched note line itself (see EncodingChecker)
+MSGS = {
+    'W0511': ('%s',
+              'fixme',
+              'Used when a warning note as FIXME or XXX is detected.'),
+    }
+
+class EncodingChecker(BaseChecker):
+    """checks for:
+    * warning notes in the code like FIXME, XXX
+    * PEP 263: source code with non ascii character but no encoding declaration
+
+    NOTE(review): only the warning-notes check is implemented below; no
+    encoding check is performed in this version.
+    """
+    __implements__ = IRawChecker
+
+    # configuration section name
+    name = 'miscellaneous'
+    msgs = MSGS
+
+    options = (('notes',
+                {'type' : 'csv', 'metavar' : '<comma separated values>',
+                 'default' : ('FIXME', 'XXX', 'TODO'),
+                 'help' : 'List of note tags to take in consideration, \
+separated by a comma.'
+                }),
+               )
+
+    def __init__(self, linter=None):
+        BaseChecker.__init__(self, linter)
+
+    def process_module(self, node):
+        """inspect the source file to find fixme-like warning notes
+        """
+        stream = node.file_stream
+        stream.seek(0) # XXX may be removed with astng > 0.23
+        # warning notes in the code
+        notes = []
+        for note in self.config.notes:
+            notes.append(re.compile(note))
+        linenum = 1
+        for line in stream.readlines():
+            for note in notes:
+                match = note.search(line)
+                if match:
+                    # report from the matched tag up to (not including) the
+                    # line's last character, i.e. the trailing newline
+                    self.add_message('W0511', args=line[match.start():-1],
+                                     line=linenum)
+                    break
+            linenum += 1
+
+
+
+# Entry point used by pylint's checker auto-loading machinery.
+def register(linter):
+    """required method to auto register this checker"""
+    linter.register_checker(EncodingChecker(linter))
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/newstyle.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/newstyle.py
@@ -0,0 +1,112 @@
+# Copyright (c) 2005-2006 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""check for new / old style related problems
+"""
+
+from ..logilab import astng
+
+from ..interfaces import IASTNGChecker
+from ..checkers import BaseChecker
+from ..checkers.utils import check_messages
+
+# message id -> (message template, symbolic name, description)
+MSGS = {
+    'E1001': ('Use of __slots__ on an old style class',
+              'slots-on-old-class',
+              'Used when an old style class uses the __slots__ attribute.'),
+    'E1002': ('Use of super on an old style class',
+              'super-on-old-class',
+              'Used when an old style class uses the super builtin.'),
+    'E1003': ('Bad first argument %r given to super class',
+              'bad-super-call',
+              'Used when another argument than the current class is given as \
+              first argument of the super builtin.'),
+    'W1001': ('Use of "property" on an old style class',
+              'property-on-old-class',
+              'Used when PyLint detect the use of the builtin "property" \
+              on an old style class while this is relying on new style \
+              classes features'),
+    }
+
+
+class NewStyleConflictChecker(BaseChecker):
+    """checks for usage of new style capabilities on old style classes and
+    other new/old styles conflicts problems
+    * use of property, __slots__, super
+    * "super" usage
+    """
+
+    __implements__ = (IASTNGChecker,)
+
+    # configuration section name
+    name = 'newstyle'
+    # messages
+    msgs = MSGS
+    priority = -2
+    # configuration options
+    options = ()
+
+    @check_messages('E1001')
+    def visit_class(self, node):
+        """check __slots__ usage
+        """
+        # astng Class nodes support `in` for locals/attribute lookup
+        if '__slots__' in node and not node.newstyle:
+            self.add_message('E1001', node=node)
+
+    @check_messages('W1001')
+    def visit_callfunc(self, node):
+        """check property usage"""
+        # only flag `property(...)` called directly in an old-style class body
+        parent = node.parent.frame()
+        if (isinstance(parent, astng.Class) and
+            not parent.newstyle and
+            isinstance(node.func, astng.Name)):
+            name = node.func.name
+            if name == 'property':
+                self.add_message('W1001', node=node)
+
+    @check_messages('E1002', 'E1003')
+    def visit_function(self, node):
+        """check use of super"""
+        # ignore actual functions or method within a new style class
+        if not node.is_method():
+            return
+        klass = node.parent.frame()
+        # scan every call of the form <expr>.<attr>(...) in the method body
+        for stmt in node.nodes_of_class(astng.CallFunc):
+            expr = stmt.func
+            if not isinstance(expr, astng.Getattr):
+                continue
+            call = expr.expr
+            # skip the test if using super
+            if isinstance(call, astng.CallFunc) and \
+               isinstance(call.func, astng.Name) and \
+               call.func.name == 'super':
+                if not klass.newstyle:
+                    # super should not be used on an old style class
+                    self.add_message('E1002', node=node)
+                else:
+                    # super first arg should be the class
+                    try:
+                        # Python 2 iterator protocol (.next())
+                        supcls = (call.args and call.args[0].infer().next()
+                                  or None)
+                    except astng.InferenceError:
+                        continue
+                    if klass is not supcls:
+                        supcls = getattr(supcls, 'name', supcls)
+                        self.add_message('E1003', node=node, args=supcls)
+
+
+# Entry point used by pylint's checker auto-loading machinery.
+def register(linter):
+    """required method to auto register this checker """
+    linter.register_checker(NewStyleConflictChecker(linter))
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/raw_metrics.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/raw_metrics.py
@@ -0,0 +1,125 @@
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+""" Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
+ http://www.logilab.fr/ -- mailto:contact@logilab.fr
+
+Raw metrics checker
+"""
+
+import tokenize
+
+# pylint now requires python >= 2.2, so this check is no longer necessary
+#if not hasattr(tokenize, 'NL'):
+# raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
+
+from ..logilab.common.ureports import Table
+
+from ..interfaces import IRawChecker
+from ..checkers import BaseRawChecker, EmptyReport
+from ..reporters import diff_string
+
+def report_raw_stats(sect, stats, old_stats):
+    """calculate percentage of code / doc / comment / empty
+
+    Builds a 5-column table (type, number, %, previous, difference) and
+    appends it to the report section; raises EmptyReport if no line was
+    analyzed.
+    """
+    total_lines = stats['total_lines']
+    if not total_lines:
+        raise EmptyReport()
+    sect.description = '%s lines have been analyzed' % total_lines
+    # `lines` accumulates the table cells row-major, 5 cells per row
+    lines = ('type', 'number', '%', 'previous', 'difference')
+    for node_type in ('code', 'docstring', 'comment', 'empty'):
+        key = node_type + '_lines'
+        total = stats[key]
+        percent = float(total * 100) / total_lines
+        old = old_stats.get(key, None)
+        if old is not None:
+            diff_str = diff_string(old, total)
+        else:
+            # no previous run to compare against
+            old, diff_str = 'NC', 'NC'
+        lines += (node_type, str(total), '%.2f' % percent,
+                  str(old), diff_str)
+    sect.append(Table(children=lines, cols=5, rheaders=1))
+
+
+class RawMetricsChecker(BaseRawChecker):
+    """does not check anything but gives some raw metrics :
+    * total number of lines
+    * total number of code lines
+    * total number of docstring lines
+    * total number of comments lines
+    * total number of empty lines
+    """
+
+    __implements__ = (IRawChecker,)
+
+    # configuration section name
+    name = 'metrics'
+    # configuration options
+    options = ( )
+    # messages
+    msgs = {}
+    # reports
+    reports = ( ('RP0701', 'Raw metrics', report_raw_stats), )
+
+    def __init__(self, linter):
+        BaseRawChecker.__init__(self, linter)
+        # statistics dict shared with the linter, created in open()
+        self.stats = None
+
+    def open(self):
+        """init statistics"""
+        self.stats = self.linter.add_stats(total_lines=0, code_lines=0,
+                                           empty_lines=0, docstring_lines=0,
+                                           comment_lines=0)
+
+    def process_tokens(self, tokens):
+        """update stats"""
+        i = 0
+        tokens = list(tokens)
+        while i < len(tokens):
+            # get_type consumes all tokens of one physical line at a time
+            i, lines_number, line_type = get_type(tokens, i)
+            self.stats['total_lines'] += lines_number
+            self.stats[line_type] += lines_number
+
+
+# token types that never decide a line's classification by themselves
+JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)
+
+def get_type(tokens, start_index):
+    """return the line type : docstring, comment, code, empty
+
+    Returns (next_index, number_of_physical_lines, stats_key) for the group
+    of tokens starting on the same row as tokens[start_index].
+    """
+    i = start_index
+    tok_type = tokens[i][0]
+    start = tokens[i][2]
+    pos = start
+    line_type = None
+    # consume every token that starts on the same physical row
+    while i < len(tokens) and tokens[i][2][0] == start[0]:
+        tok_type = tokens[i][0]
+        pos = tokens[i][3]
+        # the first non-JUNK token decides the line's classification
+        if line_type is None:
+            if tok_type == tokenize.STRING:
+                line_type = 'docstring_lines'
+            elif tok_type == tokenize.COMMENT:
+                line_type = 'comment_lines'
+            elif tok_type in JUNK:
+                pass
+            else:
+                line_type = 'code_lines'
+        i += 1
+    if line_type is None:
+        line_type = 'empty_lines'
+    elif i < len(tokens) and tok_type == tokenize.NEWLINE:
+        # include the trailing NEWLINE token in this group
+        i += 1
+    # a multi-line token (e.g. docstring) spans pos[0] - start[0] + 1 rows
+    return i, pos[0] - start[0] + 1, line_type
+
+
+# Entry point used by pylint's checker auto-loading machinery.
+def register(linter):
+    """ required method to auto register this checker """
+    linter.register_checker(RawMetricsChecker(linter))
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/similar.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/similar.py
@@ -0,0 +1,357 @@
+# pylint: disable=W0622
+# Copyright (c) 2004-2012 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""a similarities / code duplication command line tool and pylint checker
+"""
+import sys
+from itertools import izip
+
+from ..logilab.common.ureports import Table
+
+from ..interfaces import IRawChecker
+from ..checkers import BaseChecker, table_lines_from_stats
+
+
class Similar:
    """finds copy-pasted lines of code in a project"""

    def __init__(self, min_lines=4, ignore_comments=False,
                 ignore_docstrings=False, ignore_imports=False):
        # minimal number of successive similar lines to report a duplication
        self.min_lines = min_lines
        self.ignore_comments = ignore_comments
        self.ignore_docstrings = ignore_docstrings
        self.ignore_imports = ignore_imports
        # one LineSet per appended stream/file
        self.linesets = []

    def append_stream(self, streamid, stream):
        """append a file to search for similarities"""
        stream.seek(0) # XXX may be removed with astng > 0.23
        self.linesets.append(LineSet(streamid,
                                     stream.readlines(),
                                     self.ignore_comments,
                                     self.ignore_docstrings,
                                     self.ignore_imports))

    def run(self):
        """start looking for similarities and display results on stdout"""
        self._display_sims(self._compute_sims())

    def _compute_sims(self):
        """compute similarities in appended files"""
        # maps a duplication length to a list of sets of (lineset, index)
        # couples; each set groups occurrences of one duplicated chunk
        no_duplicates = {}
        for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
            duplicate = no_duplicates.setdefault(num, [])
            for couples in duplicate:
                # merge into an existing group if either end already belongs
                if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
                    couples.add( (lineset1, idx1) )
                    couples.add( (lineset2, idx2) )
                    break
            else:
                duplicate.append( set([(lineset1, idx1), (lineset2, idx2)]) )
        sims = []
        for num, ensembles in no_duplicates.iteritems():
            for couples in ensembles:
                sims.append( (num, couples) )
        # report the longest duplications first
        sims.sort()
        sims.reverse()
        return sims

    def _display_sims(self, sims):
        """display computed similarities on stdout"""
        nb_lignes_dupliquees = 0
        for num, couples in sims:
            print
            print num, "similar lines in", len(couples), "files"
            couples = sorted(couples)
            for lineset, idx in couples:
                print "==%s:%s" % (lineset.name, idx)
            # pylint: disable=W0631
            # NOTE(review): lineset/idx leak from the loop above, so only the
            # last couple's code is printed — apparently intentional since
            # all couples hold identical lines
            for line in lineset._real_lines[idx:idx+num]:
                print "  ", line,
            nb_lignes_dupliquees += num * (len(couples)-1)
        nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
        print "TOTAL lines=%s duplicates=%s percent=%.2f" \
            % (nb_total_lignes, nb_lignes_dupliquees,
               nb_lignes_dupliquees*100. / nb_total_lignes)

    def _find_common(self, lineset1, lineset2):
        """find similarities in the two given linesets"""
        lines1 = lineset1.enumerate_stripped
        lines2 = lineset2.enumerate_stripped
        find = lineset2.find
        index1 = 0
        min_lines = self.min_lines
        # for each line of the first set, look it up in the second set's
        # index, then extend the match forward as far as lines keep equal
        while index1 < len(lineset1):
            skip = 1
            num = 0
            for index2 in find( lineset1[index1] ):
                non_blank = 0
                for num, ((_, line1), (_, line2)) in enumerate(
                    izip(lines1(index1), lines2(index2))):
                    if line1 != line2:
                        # only report when enough non-blank lines matched
                        if non_blank > min_lines:
                            yield num, lineset1, index1, lineset2, index2
                            skip = max(skip, num)
                        break
                    if line1:
                        non_blank += 1
                else:
                    # we may have reach the end
                    num += 1
                    if non_blank > min_lines:
                        yield num, lineset1, index1, lineset2, index2
                        skip = max(skip, num)
            # jump past the longest match found so it is not re-reported
            index1 += skip

    def _iter_sims(self):
        """iterate on similarities among all files, by making a cartesian
        product
        """
        for idx, lineset in enumerate(self.linesets[:-1]):
            for lineset2 in self.linesets[idx+1:]:
                for sim in self._find_common(lineset, lineset2):
                    yield sim
+
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
    """return lines with leading/trailing whitespace and any ignored code
    features removed

    ignored constructs (docstrings, imports, comments) are replaced by
    empty strings so line numbering is preserved.
    """
    result = []
    open_quote = None
    for raw_line in lines:
        text = raw_line.strip()
        if ignore_docstrings:
            # detect an opening triple quote and remember which kind it was
            if open_quote is None and text[:3] in ('"""', "'''"):
                open_quote = text[:3]
                text = text[3:]
            if open_quote is not None:
                if text.endswith(open_quote):
                    open_quote = None
                text = ''
        if ignore_imports and (text.startswith('import ')
                               or text.startswith('from ')):
            text = ''
        if ignore_comments:
            # XXX should use regex in checkers/format to avoid cutting
            # at a "#" in a string
            text = text.split('#', 1)[0].strip()
        result.append(text)
    return result
+
class LineSet:
    """Holds and indexes all the lines of a single source file"""

    def __init__(self, name, lines, ignore_comments=False,
                 ignore_docstrings=False, ignore_imports=False):
        self.name = name
        # raw lines, kept for reporting real code to the user
        self._real_lines = lines
        # normalised lines, used for the actual comparisons
        self._stripped_lines = stripped_lines(lines, ignore_comments,
                                              ignore_docstrings,
                                              ignore_imports)
        self._index = self._mk_index()

    def __str__(self):
        return '<Lineset for %s>' % self.name

    def __len__(self):
        return len(self._real_lines)

    def __getitem__(self, index):
        return self._stripped_lines[index]

    def __lt__(self, other):
        # order linesets by file name for stable report output
        return self.name < other.name

    def __hash__(self):
        return id(self)

    def enumerate_stripped(self, start_at=0):
        """return an iterator on stripped lines, starting from a given index
        if specified, else 0
        """
        if start_at:
            selected = self._stripped_lines[start_at:]
        else:
            selected = self._stripped_lines
        return iter(enumerate(selected, start_at))

    def find(self, stripped_line):
        """return positions of the given stripped line in this set"""
        return self._index.get(stripped_line, ())

    def _mk_index(self):
        """create the index for this set"""
        index = {}
        for line_no, line in enumerate(self._stripped_lines):
            # blank lines are not indexed: they never start a match
            if line:
                index.setdefault(line, []).append(line_no)
        return index
+
+
# R0801 is emitted by SimilarChecker.close(); message args are
# (number of occurrences, concatenated "==file:line" headers + code lines)
MSGS = {'R0801': ('Similar lines in %s files\n%s',
                  'duplicate-code',
                  'Indicates that a set of similar lines has been detected \
                  among multiple file. This usually means that the code should \
                  be refactored to avoid this duplication.')}
+
def report_similarities(sect, stats, old_stats):
    """make a layout with some stats about duplication"""
    rows = ['', 'now', 'previous', 'difference']
    rows.extend(table_lines_from_stats(stats, old_stats,
                                       ('nb_duplicated_lines',
                                        'percent_duplicated_lines')))
    # 4 columns, with both row and column headers enabled
    sect.append(Table(children=rows, cols=4, rheaders=1, cheaders=1))
+
+
+# wrapper to get a pylint checker from the similar class
# wrapper to get a pylint checker from the similar class
class SimilarChecker(BaseChecker, Similar):
    """checks for similarities and duplicated code. This computation may be
    memory / CPU intensive, so you should disable it if you experiment some
    problems.
    """

    __implements__ = (IRawChecker,)
    # configuration section name
    name = 'similarities'
    # messages
    msgs = MSGS
    # configuration options
    # for available dict keys/values see the optik parser 'add_option' method
    options = (('min-similarity-lines',
                {'default' : 4, 'type' : "int", 'metavar' : '<int>',
                 'help' : 'Minimum lines number of a similarity.'}),
               ('ignore-comments',
                {'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
                 'help': 'Ignore comments when computing similarities.'}
                ),
               ('ignore-docstrings',
                {'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
                 'help': 'Ignore docstrings when computing similarities.'}
                ),
               ('ignore-imports',
                {'default' : False, 'type' : 'yn', 'metavar' : '<y or n>',
                 'help': 'Ignore imports when computing similarities.'}
                ),
               )
    # reports
    reports = ( ('RP0801', 'Duplication', report_similarities), )

    def __init__(self, linter=None):
        """initialize both parent classes; the Similar defaults below mirror
        the option defaults above and are overwritten by set_option() once
        the configuration is actually read
        """
        BaseChecker.__init__(self, linter)
        Similar.__init__(self, min_lines=4,
                         ignore_comments=True, ignore_docstrings=True)
        self.stats = None

    def set_option(self, optname, value, action=None, optdict=None):
        """method called to set an option (registered in the options list)

        overridden to report options setting to Similar
        """
        BaseChecker.set_option(self, optname, value, action, optdict)
        # forward each recognised option to the matching Similar attribute
        if optname == 'min-similarity-lines':
            self.min_lines = self.config.min_similarity_lines
        elif optname == 'ignore-comments':
            self.ignore_comments = self.config.ignore_comments
        elif optname == 'ignore-docstrings':
            self.ignore_docstrings = self.config.ignore_docstrings
        elif optname == 'ignore-imports':
            self.ignore_imports = self.config.ignore_imports

    def open(self):
        """init the checkers: reset linesets and statistics information"""
        self.linesets = []
        self.stats = self.linter.add_stats(nb_duplicated_lines=0,
                                           percent_duplicated_lines=0)

    def process_module(self, node):
        """process a module

        the module's content is accessible via the stream object

        stream must implement the readlines method
        """
        self.append_stream(self.linter.current_name, node.file_stream)

    def close(self):
        """compute and display similarities on closing (i.e. end of parsing)"""
        total = sum([len(lineset) for lineset in self.linesets])
        duplicated = 0
        stats = self.stats
        for num, couples in self._compute_sims():
            msg = []
            for lineset, idx in couples:
                msg.append("==%s:%s" % (lineset.name, idx))
            msg.sort()
            # pylint: disable=W0631
            # lineset/idx leak from the loop above: the code of the last
            # couple is appended (all couples hold identical lines)
            for line in lineset._real_lines[idx:idx+num]:
                msg.append(line.rstrip())
            self.add_message('R0801', args=(len(couples), '\n'.join(msg)))
            duplicated += num * (len(couples) - 1)
        stats['nb_duplicated_lines'] = duplicated
        # `total and ...` keeps the percentage at 0 when no line was seen
        stats['percent_duplicated_lines'] = total and duplicated * 100. / total
+
+
def register(linter):
    """required method to auto register this checker"""
    checker = SimilarChecker(linter)
    linter.register_checker(checker)
+
def usage(status=0):
    """display command line usage information"""
    # status 0 for --help, callers pass 1 on bad invocation (see Run)
    print "finds copy pasted blocks in a set of files"
    print
    print 'Usage: symilar [-d|--duplicates min_duplicated_lines] \
[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...'
    sys.exit(status)
+
def Run(argv=None):
    """standalone command line access point

    parses the given arguments (defaulting to sys.argv[1:]), runs the
    Similar tool on every listed file and exits with status 0; exits via
    usage(1) when no file argument is given.
    """
    if argv is None:
        argv = sys.argv[1:]
    from getopt import getopt
    s_opts = 'hdi'
    l_opts = ('help', 'duplicates=', 'ignore-comments', 'ignore-imports',
              'ignore-docstrings')
    min_lines = 4
    ignore_comments = False
    ignore_docstrings = False
    ignore_imports = False
    opts, args = getopt(argv, s_opts, l_opts)
    for opt, val in opts:
        if opt in ('-d', '--duplicates'):
            min_lines = int(val)
        elif opt in ('-h', '--help'):
            usage()
        elif opt in ('-i', '--ignore-comments'):
            ignore_comments = True
        # NB: the original used `opt in ('--ignore-docstrings')`, which is a
        # substring test on a plain string (missing tuple comma), not a
        # tuple membership test; equality states the intent unambiguously
        elif opt == '--ignore-docstrings':
            ignore_docstrings = True
        elif opt == '--ignore-imports':
            ignore_imports = True
    if not args:
        usage(1)
    sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
    for filename in args:
        sim.append_stream(filename, open(filename))
    sim.run()
    sys.exit(0)
+
# allow running this module directly as the `symilar` command line tool
if __name__ == '__main__':
    Run()
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/string_format.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/string_format.py
@@ -0,0 +1,163 @@
+# Copyright (c) 2009-2010 Arista Networks, Inc. - James Lingard
+# Copyright (c) 2004-2010 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""Checker for string formatting operations.
+"""
+
+from ..logilab import astng
+from ..interfaces import IASTNGChecker
+from ..checkers import BaseChecker
+from ..checkers import utils
+
+
# message table for the string-format checker: each entry maps a message id
# to (short message template, symbolic name, long description)
MSGS = {
    'E1300': ("Unsupported format character %r (%#02x) at index %d",
              "bad-format-character",
              "Used when a unsupported format character is used in a format\
              string."),
    'E1301': ("Format string ends in middle of conversion specifier",
              "truncated-format-string",
              "Used when a format string terminates before the end of a \
              conversion specifier."),
    'E1302': ("Mixing named and unnamed conversion specifiers in format string",
              "mixed-format-string",
              "Used when a format string contains both named (e.g. '%(foo)d') \
              and unnamed (e.g. '%d') conversion specifiers. This is also \
              used when a named conversion specifier contains * for the \
              minimum field width and/or precision."),
    'E1303': ("Expected mapping for format string, not %s",
              "format-needs-mapping",
              "Used when a format string that uses named conversion specifiers \
              is used with an argument that is not a mapping."),
    'W1300': ("Format string dictionary key should be a string, not %s",
              "bad-format-string-key",
              "Used when a format string that uses named conversion specifiers \
              is used with a dictionary whose keys are not all strings."),
    'W1301': ("Unused key %r in format string dictionary",
              "unused-format-string-key",
              # typo fix: the description previously read "conWtains"
              "Used when a format string that uses named conversion specifiers \
              is used with a dictionary that contains keys not required by the \
              format string."),
    'E1304': ("Missing key %r in format string dictionary",
              "missing-format-string-key",
              "Used when a format string that uses named conversion specifiers \
              is used with a dictionary that doesn't contain all the keys \
              required by the format string."),
    # NB: the long descriptions of E1305/E1306 were swapped in the original
    # ("Too many arguments" was described as "given too few arguments" and
    # vice versa); they now agree with their short messages and symbols
    'E1305': ("Too many arguments for format string",
              "too-many-format-args",
              "Used when a format string that uses unnamed conversion \
              specifiers is given too many arguments."),
    'E1306': ("Not enough arguments for format string",
              "too-few-format-args",
              "Used when a format string that uses unnamed conversion \
              specifiers is given too few arguments"),
    }
+
# node classes that can appear as the right-hand operand of '%' but are
# definitely neither a tuple nor a mapping (see visit_binop below)
OTHER_NODES = (astng.Const, astng.List, astng.Backquote,
               astng.Lambda, astng.Function,
               astng.ListComp, astng.SetComp, astng.GenExpr)
+
class StringFormatChecker(BaseChecker):
    """Checks string formatting operations to ensure that the format string
    is valid and the arguments match the format string.
    """

    __implements__ = (IASTNGChecker,)
    # configuration section name
    name = 'string_format'
    # messages issued by this checker (defined in MSGS above)
    msgs = MSGS

    def visit_binop(self, node):
        """check a binary '%' operation whose left operand is a constant
        string: validate the format string and match the right operand
        against the conversion specifiers it requires
        """
        if node.op != '%':
            return
        left = node.left
        args = node.right

        # only literal format strings can be analysed statically
        if not (isinstance(left, astng.Const)
                and isinstance(left.value, basestring)):
            return
        format_string = left.value
        try:
            required_keys, required_num_args = \
                utils.parse_format_string(format_string)
        except utils.UnsupportedFormatCharacter, e:
            c = format_string[e.index]
            self.add_message('E1300', node=node, args=(c, ord(c), e.index))
            return
        except utils.IncompleteFormatString:
            self.add_message('E1301', node=node)
            return
        if required_keys and required_num_args:
            # The format string uses both named and unnamed format
            # specifiers.
            self.add_message('E1302', node=node)
        elif required_keys:
            # The format string uses only named format specifiers.
            # Check that the RHS of the % operator is a mapping object
            # that contains precisely the set of keys required by the
            # format string.
            if isinstance(args, astng.Dict):
                keys = set()
                unknown_keys = False
                for k, _ in args.items:
                    if isinstance(k, astng.Const):
                        key = k.value
                        if isinstance(key, basestring):
                            keys.add(key)
                        else:
                            self.add_message('W1300', node=node, args=key)
                    else:
                        # One of the keys was something other than a
                        # constant.  Since we can't tell what it is,
                        # supress checks for missing keys in the
                        # dictionary.
                        unknown_keys = True
                if not unknown_keys:
                    for key in required_keys:
                        if key not in keys:
                            self.add_message('E1304', node=node, args=key)
                for key in keys:
                    if key not in required_keys:
                        self.add_message('W1301', node=node, args=key)
            elif isinstance(args, OTHER_NODES + (astng.Tuple,)):
                type_name = type(args).__name__
                self.add_message('E1303', node=node, args=type_name)
            # else:
                # The RHS of the format specifier is a name or
                # expression.  It may be a mapping object, so
                # there's nothing we can check.
        else:
            # The format string uses only unnamed format specifiers.
            # Check that the number of arguments passed to the RHS of
            # the % operator matches the number required by the format
            # string.
            if isinstance(args, astng.Tuple):
                num_args = len(args.elts)
            elif isinstance(args, OTHER_NODES + (astng.Dict, astng.DictComp)):
                # a single non-tuple argument counts as one value
                num_args = 1
            else:
                # The RHS of the format specifier is a name or
                # expression.  It could be a tuple of unknown size, so
                # there's nothing we can check.
                num_args = None
            if num_args is not None:
                if num_args > required_num_args:
                    self.add_message('E1305', node=node)
                elif num_args < required_num_args:
                    self.add_message('E1306', node=node)
+
+
def register(linter):
    """required method to auto register this checker"""
    checker = StringFormatChecker(linter)
    linter.register_checker(checker)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/typecheck.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/typecheck.py
@@ -0,0 +1,387 @@
+# Copyright (c) 2006-2010 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""try to find more bugs in the code using astng inference capabilities
+"""
+
+import re
+import shlex
+
+from ..logilab import astng
+from ..logilab.astng import InferenceError, NotFoundError, YES, Instance
+
+from ..interfaces import IASTNGChecker
+from ..checkers import BaseChecker
+from ..checkers.utils import safe_infer, is_super, check_messages
+
# message table for the type checker: E11xx are inference-based errors,
# W1111 is the lone warning; each entry is
# (short message template, symbolic name, long description)
MSGS = {
    'E1101': ('%s %r has no %r member',
              'no-member',
              'Used when a variable is accessed for an unexistent member.'),
    'E1102': ('%s is not callable',
              'not-callable',
              'Used when an object being called has been inferred to a non \
              callable object'),
    'E1103': ('%s %r has no %r member (but some types could not be inferred)',
              'maybe-no-member',
              'Used when a variable is accessed for an unexistent member, but \
              astng was not able to interpret all possible types of this \
              variable.'),
    'E1111': ('Assigning to function call which doesn\'t return',
              'assignment-from-no-return',
              'Used when an assignment is done on a function call but the \
              inferred function doesn\'t return anything.'),
    'W1111': ('Assigning to function call which only returns None',
              'assignment-from-none',
              'Used when an assignment is done on a function call but the \
              inferred function returns nothing but None.'),

    'E1120': ('No value passed for parameter %s in function call',
              'no-value-for-parameter',
              'Used when a function call passes too few arguments.'),
    'E1121': ('Too many positional arguments for function call',
              'too-many-function-args',
              'Used when a function call passes too many positional \
              arguments.'),
    'E1122': ('Duplicate keyword argument %r in function call',
              'duplicate-keyword-arg',
              'Used when a function call passes the same keyword argument \
              multiple times.'),
    'E1123': ('Passing unexpected keyword argument %r in function call',
              'unexpected-keyword-arg',
              'Used when a function call passes a keyword argument that \
              doesn\'t correspond to one of the function\'s parameter names.'),
    'E1124': ('Multiple values passed for parameter %r in function call',
              'redundant-keyword-arg',
              'Used when a function call would result in assigning multiple \
              values to a function parameter, one value from a positional \
              argument and one from a keyword argument.'),
    }
+
class TypeChecker(BaseChecker):
    """try to find bugs in the code using type inference
    """

    __implements__ = (IASTNGChecker,)

    # configuration section name
    name = 'typecheck'
    # messages
    msgs = MSGS
    priority = -1
    # configuration options
    options = (('ignore-mixin-members',
                {'default' : True, 'type' : 'yn', 'metavar': '<y_or_n>',
                 'help' : 'Tells whether missing members accessed in mixin \
class should be ignored. A mixin class is detected if its name ends with \
"mixin" (case insensitive).'}
                ),

               ('ignored-classes',
                {'default' : ('SQLObject',),
                 'type' : 'csv',
                 'metavar' : '<members names>',
                 'help' : 'List of classes names for which member attributes \
should not be checked (useful for classes with attributes dynamically set).'}
                ),

               ('zope',
                {'default' : False, 'type' : 'yn', 'metavar': '<y_or_n>',
                 'help' : 'When zope mode is activated, add a predefined set \
of Zope acquired attributes to generated-members.'}
                ),
               ('generated-members',
                {'default' : (
                    'REQUEST', 'acl_users', 'aq_parent'),
                 'type' : 'string',
                 'metavar' : '<members names>',
                 'help' : 'List of members which are set dynamically and \
missed by pylint inference system, and so shouldn\'t trigger E0201 when \
accessed. Python regular expressions are accepted.'}
                ),
               )

    def open(self):
        """initialize the list of generated members from the configuration"""
        # do this in open since config not fully initialized in __init__
        self.generated_members = list(self.config.generated_members)
        if self.config.zope:
            self.generated_members.extend(('REQUEST', 'acl_users', 'aq_parent'))

    def visit_assattr(self, node):
        # augmented assignment (`x.a += v`) reads the attribute before
        # writing it, so the attribute must already exist: run the check
        if isinstance(node.ass_type(), astng.AugAssign):
            self.visit_getattr(node)

    def visit_delattr(self, node):
        # `del x.a` also requires the attribute to exist
        self.visit_getattr(node)

    @check_messages('E1101', 'E1103')
    def visit_getattr(self, node):
        """check that the accessed attribute exists

        to avoid to much false positives for now, we'll consider the code as
        correct if a single of the inferred nodes has the accessed attribute.

        function/method, super call and metaclasses are ignored
        """
        # generated_members may containt regular expressions
        # (surrounded by quote `"` and followed by a comma `,`)
        # REQUEST,aq_parent,"[a-zA-Z]+_set{1,2}"' =>
        # ('REQUEST', 'aq_parent', '[a-zA-Z]+_set{1,2}')
        if isinstance(self.config.generated_members, str):
            gen = shlex.shlex(self.config.generated_members)
            gen.whitespace += ','
            gen.wordchars += '[]-+'
            self.config.generated_members = tuple(tok.strip('"') for tok in gen)
        for pattern in self.config.generated_members:
            # attribute is marked as generated, stop here
            if re.match(pattern, node.attrname):
                return
        try:
            infered = list(node.expr.infer())
        except InferenceError:
            return
        # list of (node, nodename) which are missing the attribute
        missingattr = set()
        ignoremim = self.config.ignore_mixin_members
        inference_failure = False
        for owner in infered:
            # skip yes object
            if owner is YES:
                inference_failure = True
                continue
            # skip None anyway
            if isinstance(owner, astng.Const) and owner.value is None:
                continue
            # XXX "super" / metaclass call
            if is_super(owner) or getattr(owner, 'type', None) == 'metaclass':
                continue
            name = getattr(owner, 'name', 'None')
            if name in self.config.ignored_classes:
                continue
            if ignoremim and name[-5:].lower() == 'mixin':
                continue
            try:
                if not [n for n in owner.getattr(node.attrname)
                        if not isinstance(n.statement(), astng.AugAssign)]:
                    missingattr.add((owner, name))
                    continue
            except AttributeError:
                # XXX method / function
                continue
            except NotFoundError:
                if isinstance(owner, astng.Function) and owner.decorators:
                    continue
                if isinstance(owner, Instance) and owner.has_dynamic_getattr():
                    continue
                # explicit skipping of optparse'Values class
                if owner.name == 'Values' and owner.root().name == 'optparse':
                    continue
                missingattr.add((owner, name))
                continue
            # stop on the first found
            break
        else:
            # the for/else body runs only when no `break` occurred, i.e.
            # no inferred owner had the attribute:
            # we have not found any node with the attributes, display the
            # message for infered nodes
            done = set()
            for owner, name in missingattr:
                if isinstance(owner, Instance):
                    actual = owner._proxied
                else:
                    actual = owner
                if actual in done:
                    continue
                done.add(actual)
                if inference_failure:
                    msgid = 'E1103'
                else:
                    msgid = 'E1101'
                self.add_message(msgid, node=node,
                                 args=(owner.display_type(), name,
                                       node.attrname))


    def visit_assign(self, node):
        """check that if assigning to a function call, the function is
        possibly returning something valuable
        """
        if not isinstance(node.value, astng.CallFunc):
            return
        function_node = safe_infer(node.value.func)
        # skip class, generator and incomplete function definition
        if not (isinstance(function_node, astng.Function) and
                function_node.root().fully_defined()):
            return
        if function_node.is_generator() \
               or function_node.is_abstract(pass_is_abstract=False):
            return
        returns = list(function_node.nodes_of_class(astng.Return,
                                                    skip_klass=astng.Function))
        if len(returns) == 0:
            # no return statement at all: assigning from it is an error
            self.add_message('E1111', node=node)
        else:
            for rnode in returns:
                if not (isinstance(rnode.value, astng.Const)
                        and rnode.value.value is None):
                    break
            else:
                # every return is a bare `return None`
                self.add_message('W1111', node=node)

    def visit_callfunc(self, node):
        """check that called functions/methods are inferred to callable objects,
        and that the arguments passed to the function match the parameters in
        the inferred function's definition
        """

        # Build the set of keyword arguments, checking for duplicate keywords,
        # and count the positional arguments.
        keyword_args = set()
        num_positional_args = 0
        for arg in node.args:
            if isinstance(arg, astng.Keyword):
                keyword = arg.arg
                if keyword in keyword_args:
                    self.add_message('E1122', node=node, args=keyword)
                keyword_args.add(keyword)
            else:
                num_positional_args += 1

        called = safe_infer(node.func)
        # only function, generator and object defining __call__ are allowed
        if called is not None and not called.callable():
            self.add_message('E1102', node=node, args=node.func.as_string())

        # Note that BoundMethod is a subclass of UnboundMethod (huh?), so must
        # come first in this 'if..else'.
        if isinstance(called, astng.BoundMethod):
            # Bound methods have an extra implicit 'self' argument.
            num_positional_args += 1
        elif isinstance(called, astng.UnboundMethod):
            if called.decorators is not None:
                for d in called.decorators.nodes:
                    if isinstance(d, astng.Name) and (d.name == 'classmethod'):
                        # Class methods have an extra implicit 'cls' argument.
                        num_positional_args += 1
                        break
        elif (isinstance(called, astng.Function) or
              isinstance(called, astng.Lambda)):
            pass
        else:
            return

        if called.args.args is None:
            # Built-in functions have no argument information.
            return

        if len( called.argnames() ) != len( set( called.argnames() ) ):
            # Duplicate parameter name (see E9801). We can't really make sense
            # of the function call in this case, so just return.
            return

        # Analyze the list of formal parameters.
        num_mandatory_parameters = len(called.args.args) - len(called.args.defaults)
        parameters = []
        parameter_name_to_index = {}
        for i, arg in enumerate(called.args.args):
            if isinstance(arg, astng.Tuple):
                name = None
                # Don't store any parameter names within the tuple, since those
                # are not assignable from keyword arguments.
            else:
                if isinstance(arg, astng.Keyword):
                    name = arg.arg
                else:
                    assert isinstance(arg, astng.AssName)
                    # This occurs with:
                    #    def f( (a), (b) ): pass
                    name = arg.name
                parameter_name_to_index[name] = i
            if i >= num_mandatory_parameters:
                defval = called.args.defaults[i - num_mandatory_parameters]
            else:
                defval = None
            # each entry is [(name, default_value), assigned_flag]
            parameters.append([(name, defval), False])

        # Match the supplied arguments against the function parameters.

        # 1. Match the positional arguments.
        for i in range(num_positional_args):
            if i < len(parameters):
                parameters[i][1] = True
            elif called.args.vararg is not None:
                # The remaining positional arguments get assigned to the *args
                # parameter.
                break
            else:
                # Too many positional arguments.
                self.add_message('E1121', node=node)
                break

        # 2. Match the keyword arguments.
        for keyword in keyword_args:
            if keyword in parameter_name_to_index:
                i = parameter_name_to_index[keyword]
                if parameters[i][1]:
                    # Duplicate definition of function parameter.
                    self.add_message('E1124', node=node, args=keyword)
                else:
                    parameters[i][1] = True
            elif called.args.kwarg is not None:
                # The keyword argument gets assigned to the **kwargs parameter.
                pass
            else:
                # Unexpected keyword argument.
                self.add_message('E1123', node=node, args=keyword)

        # 3. Match the *args, if any.  Note that Python actually processes
        #    *args _before_ any keyword arguments, but we wait until after
        #    looking at the keyword arguments so as to make a more conservative
        #    guess at how many values are in the *args sequence.
        if node.starargs is not None:
            for i in range(num_positional_args, len(parameters)):
                [(name, defval), assigned] = parameters[i]
                # Assume that *args provides just enough values for all
                # non-default parameters after the last parameter assigned by
                # the positional arguments but before the first parameter
                # assigned by the keyword arguments.  This is the best we can
                # get without generating any false positives.
                if (defval is not None) or assigned:
                    break
                parameters[i][1] = True

        # 4. Match the **kwargs, if any.
        if node.kwargs is not None:
            for i, [(name, defval), assigned] in enumerate(parameters):
                # Assume that *kwargs provides values for all remaining
                # unassigned named parameters.
                if name is not None:
                    parameters[i][1] = True
                else:
                    # **kwargs can't assign to tuples.
                    pass

        # Check that any parameters without a default have been assigned
        # values.
        for [(name, defval), assigned] in parameters:
            if (defval is None) and not assigned:
                if name is None:
                    display_name = '<tuple>'
                else:
                    display_name = repr(name)
                self.add_message('E1120', node=node, args=display_name)
+
def register(linter):
    """required method to auto register this checker"""
    checker = TypeChecker(linter)
    linter.register_checker(checker)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/utils.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/utils.py
@@ -0,0 +1,373 @@
+# pylint: disable=W0611
+#
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""some functions that may be useful for various checkers
+"""
+
+import re
+import string
+from ..logilab import astng
+from ..logilab.astng import scoped_nodes
+from ..logilab.common.compat import builtins
+BUILTINS_NAME = builtins.__name__
+
+COMP_NODE_TYPES = astng.ListComp, astng.SetComp, astng.DictComp, astng.GenExpr
+
+
def is_inside_except(node):
    """Returns true if node is inside the name of an except handler."""
    current = node
    # Climb the parent chain until the direct parent is an ExceptHandler,
    # or until we fall off the tree ('current' becomes None).
    while current and not isinstance(current.parent, astng.ExceptHandler):
        current = current.parent

    # NOTE(review): when the match succeeds this returns the node itself
    # (truthy), not True; callers in this package only use the result in
    # boolean context (see visit_module / visit_function in variables.py).
    return current and current is current.parent.name
+
+
def get_all_elements(node):
    """Yield every atom found in arbitrarily nested Tuple/List nodes."""
    if not isinstance(node, (astng.Tuple, astng.List)):
        # A leaf: yield the node itself.
        yield node
    else:
        for child in node.elts:
            for atom in get_all_elements(child):
                yield atom
+
+
def clobber_in_except(node):
    """Checks if an assignment node in an except handler clobbers an existing
    variable.

    Returns (True, args for W0623) if assignment clobbers an existing variable,
    (False, None) otherwise.
    """
    if isinstance(node, astng.AssAttr):
        # Use as_string() rather than node.expr.name: the attribute's base
        # expression may be a call, subscript, etc. that has no 'name'
        # attribute, which previously raised AttributeError here.  For the
        # common Name case as_string() yields the same text.
        return (True, (node.attrname, 'object %r' % (node.expr.as_string(),)))
    elif isinstance(node, astng.AssName):
        name = node.name
        if is_builtin(name):
            return (True, (name, 'builtins'))
        else:
            # Only report when the name was previously bound by something
            # other than a plain/augmented assignment or another handler.
            scope, stmts = node.lookup(name)
            if (stmts and
                not isinstance(stmts[0].ass_type(),
                               (astng.Assign, astng.AugAssign,
                                astng.ExceptHandler))):
                return (True, (name,
                               'outer scope (line %s)' % (stmts[0].fromlineno,)))
    return (False, None)
+
+
def safe_infer(node):
    """return the inferred value for the given node.
    Return None if inference failed or if there is some ambiguity (more than
    one node has been inferred)
    """
    try:
        inferit = node.infer()
        value = inferit.next()
    except astng.InferenceError:
        # inference produced nothing at all
        return
    try:
        # Ask for a second candidate: exactly one candidate means the
        # StopIteration branch below returns the unambiguous value.
        inferit.next()
        return # None if there is ambiguity on the inferred node
    except astng.InferenceError:
        return # there is some kind of ambiguity
    except StopIteration:
        return value
+
def is_super(node):
    """Tell whether the node references the ``super`` builtin function."""
    return (getattr(node, 'name', None) == 'super'
            and node.root().name == BUILTINS_NAME)
+
def is_error(node):
    """Tell whether the function body amounts to raising an exception.

    Heuristic: any direct Raise child of the node qualifies.
    """
    return any(isinstance(child, astng.Raise)
               for child in node.get_children())
+
def is_raising(body):
    """Tell whether the given list of statements contains a Raise node."""
    return any(isinstance(stmt, astng.Raise) for stmt in body)
+
def is_empty(body):
    """Tell whether the body is a single 'pass' statement and nothing else."""
    if len(body) != 1:
        return False
    return isinstance(body[0], astng.Pass)
+
+builtins = builtins.__dict__.copy()
+SPECIAL_BUILTINS = ('__builtins__',) # '__path__', '__file__')
+
def is_builtin(name): # was is_native_builtin
    """Tell whether <name> is a Python builtin or one of the pseudo-builtin
    module attributes listed in SPECIAL_BUILTINS.
    """
    return name in builtins or name in SPECIAL_BUILTINS
+
def is_defined_before(var_node):
    """return True if the variable node is defined by a parent node (list,
    set, dict, or generator comprehension, lambda) or in a previous sibling
    node on the same line (statement_defining ; statement_using)
    """
    varname = var_node.name
    # First pass: walk up the parent chain looking for an enclosing
    # construct that binds the name (comprehension target, for-loop target,
    # 'with ... as' name, lambda/function argument, or the callable's own
    # name).
    _node = var_node.parent
    while _node:
        if isinstance(_node, COMP_NODE_TYPES):
            for ass_node in _node.nodes_of_class(astng.AssName):
                if ass_node.name == varname:
                    return True
        elif isinstance(_node, astng.For):
            for ass_node in _node.target.nodes_of_class(astng.AssName):
                if ass_node.name == varname:
                    return True
        elif isinstance(_node, astng.With):
            if _node.vars is None:
                # quickfix : case in which 'with' is used without 'as'
                return False
            if _node.vars.name == varname:
                return True
        elif isinstance(_node, (astng.Lambda, astng.Function)):
            if _node.args.is_argument(varname):
                return True
            if getattr(_node, 'name', None) == varname:
                return True
            # stop at the enclosing callable: names bound outside it are
            # not "defined before" in the sense this helper checks
            break
        _node = _node.parent
    # possibly multiple statements on the same line using semi colon separator
    stmt = var_node.statement()
    _node = stmt.previous_sibling()
    lineno = stmt.fromlineno
    # Second pass: scan earlier sibling statements that share the same
    # source line for an assignment or import that binds the name.
    while _node and _node.fromlineno == lineno:
        for ass_node in _node.nodes_of_class(astng.AssName):
            if ass_node.name == varname:
                return True
        for imp_node in _node.nodes_of_class( (astng.From, astng.Import)):
            if varname in [name[1] or name[0] for name in imp_node.names]:
                return True
        _node = _node.previous_sibling()
    return False
+
def is_func_default(node):
    """Tell whether the given Name node appears inside a default-argument
    expression of its enclosing function.
    """
    scope = node.scope()
    if not isinstance(scope, astng.Function):
        return False
    # The very same node object must occur inside one of the defaults.
    for default_expr in scope.args.defaults:
        if any(candidate is node
               for candidate in default_expr.nodes_of_class(astng.Name)):
            return True
    return False
+
def is_func_decorator(node):
    """return true if the name is used in function decorator"""
    parent = node.parent
    while parent is not None:
        if isinstance(parent, astng.Decorators):
            return True
        # Stop climbing once a statement or a scope boundary is reached:
        # a decorator expression can only sit between the name and its
        # Decorators node, never across a statement or nested scope.
        if (parent.is_statement or
            isinstance(parent, astng.Lambda) or
            isinstance(parent, (scoped_nodes.ComprehensionScope,
                                scoped_nodes.ListComp))):
            break
        parent = parent.parent
    return False
+
def is_ancestor_name(frame, node):
    """Tell whether `frame` is an astng.Class whose bases subtree contains
    the given Name node.
    """
    try:
        bases = frame.bases
    except AttributeError:
        # not a class-like node
        return False
    return any(node in base.nodes_of_class(astng.Name) for base in bases)
+
def assign_parent(node):
    """Climb out of assignment-target wrappers (AssName, Tuple, List) and
    return the first ancestor that is none of them.
    """
    wrappers = (astng.AssName, astng.Tuple, astng.List)
    while node and isinstance(node, wrappers):
        node = node.parent
    return node
+
def overrides_an_abstract_method(class_node, name):
    """Return True when <name> on class_node overrides a method that an
    ancestor class declares as abstract.

    (The previous docstring, "return True if pnode is a parent of node",
    was a copy-paste error and described a different helper.)
    """
    for ancestor in class_node.ancestors():
        if name in ancestor and isinstance(ancestor[name], astng.Function) and \
               ancestor[name].is_abstract(pass_is_abstract=False):
            return True
    return False
+
def overrides_a_method(class_node, name):
    """Tell whether <name> redefines a method found on some ancestor class."""
    return any(name in ancestor and isinstance(ancestor[name], astng.Function)
               for ancestor in class_node.ancestors())
+
+PYMETHODS = set(('__new__', '__init__', '__del__', '__hash__',
+ '__str__', '__repr__',
+ '__len__', '__iter__',
+ '__delete__', '__get__', '__set__',
+ '__getitem__', '__setitem__', '__delitem__', '__contains__',
+ '__getattribute__', '__getattr__', '__setattr__', '__delattr__',
+ '__call__',
+ '__enter__', '__exit__',
+ '__cmp__', '__ge__', '__gt__', '__le__', '__lt__', '__eq__',
+ '__nonzero__', '__neg__', '__invert__',
+ '__mul__', '__imul__', '__rmul__',
+ '__div__', '__idiv__', '__rdiv__',
+ '__add__', '__iadd__', '__radd__',
+ '__sub__', '__isub__', '__rsub__',
+ '__pow__', '__ipow__', '__rpow__',
+ '__mod__', '__imod__', '__rmod__',
+ '__and__', '__iand__', '__rand__',
+ '__or__', '__ior__', '__ror__',
+ '__xor__', '__ixor__', '__rxor__',
+ # XXX To be continued
+ ))
+
def check_messages(*messages):
    """Decorator recording which message ids a checker method handles.

    The linter reads the ``checks_msgs`` attribute to decide whether a
    visit/leave method needs to run.
    """
    def decorate(func):
        # Stash the ids directly on the function object.
        func.checks_msgs = messages
        return func
    return decorate
+
class IncompleteFormatString(Exception):
    """Raised when a format string ends in the middle of a format specifier."""
+
class UnsupportedFormatCharacter(Exception):
    """Raised when a format string uses a conversion character outside the
    supported set."""

    def __init__(self, index):
        # index: position of the offending character in the format string.
        super(UnsupportedFormatCharacter, self).__init__(index)
        self.index = index
+
def parse_format_string(format_string):
    """Parse a %-style format string.

    Returns (keys, num_args) where ``keys`` is the set of mapping keys the
    string uses and ``num_args`` the number of positional arguments it
    consumes.  Raises IncompleteFormatString when the string ends inside a
    specifier, and UnsupportedFormatCharacter for an unknown conversion
    type.
    """
    keys = set()
    num_args = 0

    def advance(pos):
        # Move to the next character; a specifier must not be cut short.
        pos += 1
        if pos == len(format_string):
            raise IncompleteFormatString
        return pos, format_string[pos]

    pos = 0
    while pos < len(format_string):
        char = format_string[pos]
        if char == '%':
            pos, char = advance(pos)
            key = None
            if char == '(':
                # Mapping key: consume through the matching close paren.
                depth = 1
                pos, char = advance(pos)
                key_start = pos
                while depth != 0:
                    if char == '(':
                        depth += 1
                    elif char == ')':
                        depth -= 1
                    pos, char = advance(pos)
                key = format_string[key_start:pos - 1]
            # Conversion flags (optional).
            while char in '#0- +':
                pos, char = advance(pos)
            # Minimum field width; '*' consumes one positional argument.
            if char == '*':
                num_args += 1
                pos, char = advance(pos)
            else:
                while char in string.digits:
                    pos, char = advance(pos)
            # Precision; '.*' also consumes one positional argument.
            if char == '.':
                pos, char = advance(pos)
                if char == '*':
                    num_args += 1
                    pos, char = advance(pos)
                else:
                    while char in string.digits:
                        pos, char = advance(pos)
            # Length modifier, accepted but meaningless in Python.
            if char in 'hlL':
                pos, char = advance(pos)
            # Conversion type (mandatory).
            if char not in 'diouxXeEfFgGcrs%':
                raise UnsupportedFormatCharacter(pos)
            if key:
                keys.add(key)
            elif char != '%':
                num_args += 1
        pos += 1
    return keys, num_args
+
def is_attr_protected(attrname):
    """Return True if the attribute name is "protected": it starts with an
    underscore, is not just '_', and is not a dunder name.
    """
    # startswith() is safe on the empty string, unlike attrname[0] which
    # raised IndexError for an empty name.
    return (attrname.startswith('_') and attrname != '_' and
            not (attrname.startswith('__') and attrname.endswith('__')))
+
def node_frame_class(node):
    """Return the enclosing Class node for a method node (plain method,
    staticmethod or classmethod); return None when no class frame encloses
    the node.
    """
    frame = node.frame()
    # Walk outwards frame by frame until a Class is found or the tree root
    # (parent is None) is reached.
    while frame is not None and not isinstance(frame, astng.Class):
        frame = None if frame.parent is None else frame.parent.frame()
    return frame
+
def is_super_call(expr):
    """Tell whether ``expr`` is a call to the plain name ``super``.

    Callers are expected to have verified they are inside a method first.
    """
    if not isinstance(expr, astng.CallFunc):
        return False
    target = expr.func
    return isinstance(target, astng.Name) and target.name == 'super'
def is_attr_private(attrname):
    """Check that attribute name is private (at least two leading underscores,
    at most one trailing underscore)
    """
    # re keeps a cache of compiled patterns, so matching directly is
    # equivalent to compiling the pattern first on every call.
    return re.match('^_{2,}.*[^_]+_?$', attrname)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/variables.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/checkers/variables.py
@@ -0,0 +1,588 @@
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""variables checkers for Python code
+"""
+
+import sys
+from copy import copy
+
+from ..logilab import astng
+from ..logilab.astng import are_exclusive, builtin_lookup, ASTNGBuildingException
+
+from ..interfaces import IASTNGChecker
+from ..checkers import BaseChecker
+from ..checkers.utils import (PYMETHODS, is_ancestor_name, is_builtin,
+ is_defined_before, is_error, is_func_default, is_func_decorator,
+ assign_parent, check_messages, is_inside_except, clobber_in_except,
+ get_all_elements)
+
+
def in_for_else_branch(parent, stmt):
    """Tell whether ``stmt`` lives inside the ``else`` clause of the given
    For node."""
    if not isinstance(parent, astng.For):
        return False
    return any(else_stmt.parent_of(stmt) for else_stmt in parent.orelse)
+
def overridden_method(klass, name):
    """get overridden method if any"""
    try:
        # nearest ancestor that declares <name> locally (Python 2
        # iterator protocol: .next())
        parent = klass.local_attr_ancestors(name).next()
    except (StopIteration, KeyError):
        # no ancestor declares the name at all
        return None
    try:
        meth_node = parent[name]
    except KeyError:
        # We have found an ancestor defining <name> but it's not in the local
        # dictionary. This may happen with astng built from living objects.
        return None
    if isinstance(meth_node, astng.Function):
        return meth_node
    return None
+
+
+MSGS = {
+ 'E0601': ('Using variable %r before assignment',
+ 'used-before-assignment',
+ 'Used when a local variable is accessed before it\'s \
+ assignment.'),
+ 'E0602': ('Undefined variable %r',
+ 'undefined-variable',
+ 'Used when an undefined variable is accessed.'),
+ 'E0603': ('Undefined variable name %r in __all__',
+ 'undefined-all-variable',
+ 'Used when an undefined variable name is referenced in __all__.'),
+ 'E0604': ('Invalid object %r in __all__, must contain only strings',
+ 'invalid-all-object',
+ 'Used when an invalid (non-string) object occurs in __all__.'),
+ 'E0611': ('No name %r in module %r',
+ 'no-name-in-module',
+ 'Used when a name cannot be found in a module.'),
+
+ 'W0601': ('Global variable %r undefined at the module level',
+ 'global-variable-undefined',
+ 'Used when a variable is defined through the "global" statement \
+ but the variable is not defined in the module scope.'),
+ 'W0602': ('Using global for %r but no assignment is done',
+ 'global-variable-not-assigned',
+ 'Used when a variable is defined through the "global" statement \
+ but no assignment to this variable is done.'),
+ 'W0603': ('Using the global statement', # W0121
+ 'global-statement',
+ 'Used when you use the "global" statement to update a global \
+ variable. PyLint just try to discourage this \
+ usage. That doesn\'t mean you can not use it !'),
+ 'W0604': ('Using the global statement at the module level', # W0103
+ 'global-at-module-level',
+ 'Used when you use the "global" statement at the module level \
+ since it has no effect'),
+ 'W0611': ('Unused import %s',
+ 'unused-import',
+ 'Used when an imported module or variable is not used.'),
+ 'W0612': ('Unused variable %r',
+ 'unused-variable',
+ 'Used when a variable is defined but not used.'),
+ 'W0613': ('Unused argument %r',
+ 'unused-argument',
+ 'Used when a function or method argument is not used.'),
+ 'W0614': ('Unused import %s from wildcard import',
+ 'unused-wildcard-import',
+ 'Used when an imported module or variable is not used from a \
+ \'from X import *\' style import.'),
+
+ 'W0621': ('Redefining name %r from outer scope (line %s)',
+ 'redefined-outer-name',
+ 'Used when a variable\'s name hide a name defined in the outer \
+ scope.'),
+ 'W0622': ('Redefining built-in %r',
+ 'redefined-builtin',
+ 'Used when a variable or function override a built-in.'),
+ 'W0623': ('Redefining name %r from %s in exception handler',
+ 'redefine-in-handler',
+ 'Used when an exception handler assigns the exception \
+ to an existing name'),
+
+ 'W0631': ('Using possibly undefined loop variable %r',
+ 'undefined-loop-variable',
+ 'Used when an loop variable (i.e. defined by a for loop or \
+ a list comprehension or a generator expression) is used outside \
+ the loop.'),
+ }
+
+class VariablesChecker(BaseChecker):
+ """checks for
+ * unused variables / imports
+ * undefined variables
+ * redefinition of variable from builtins or from an outer scope
+ * use of variable before assignment
+ * __all__ consistency
+ """
+
+ __implements__ = IASTNGChecker
+
+ name = 'variables'
+ msgs = MSGS
+ priority = -1
+ options = (
+ ("init-import",
+ {'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>',
+ 'help' : 'Tells whether we should check for unused import in \
+__init__ files.'}),
+ ("dummy-variables-rgx",
+ {'default': ('_|dummy'),
+ 'type' :'regexp', 'metavar' : '<regexp>',
+ 'help' : 'A regular expression matching the beginning of \
+ the name of dummy variables (i.e. not used).'}),
+ ("additional-builtins",
+ {'default': (), 'type' : 'csv',
+ 'metavar' : '<comma separated list>',
+ 'help' : 'List of additional names supposed to be defined in \
+builtins. Remember that you should avoid to define new builtins when possible.'
+ }),
+ )
+ def __init__(self, linter=None):
+ BaseChecker.__init__(self, linter)
+ self._to_consume = None
+ self._checking_mod_attr = None
+
+ def visit_module(self, node):
+ """visit module : update consumption analysis variable
+ checks globals doesn't overrides builtins
+ """
+ self._to_consume = [(copy(node.locals), {}, 'module')]
+ for name, stmts in node.locals.iteritems():
+ if is_builtin(name) and not is_inside_except(stmts[0]):
+ # do not print Redefining builtin for additional builtins
+ self.add_message('W0622', args=name, node=stmts[0])
+
+ @check_messages('W0611', 'W0614')
+ def leave_module(self, node):
+ """leave module: check globals
+ """
+ assert len(self._to_consume) == 1
+ not_consumed = self._to_consume.pop()[0]
+ # attempt to check for __all__ if defined
+ if '__all__' in node.locals:
+ assigned = node.igetattr('__all__').next()
+ if assigned is not astng.YES:
+ for elt in getattr(assigned, 'elts', ()):
+ try:
+ elt_name = elt.infer().next()
+ except astng.InferenceError:
+ continue
+
+ if not isinstance(elt_name, astng.Const) or not isinstance(elt_name.value, basestring):
+ self.add_message('E0604', args=elt.as_string(), node=elt)
+ continue
+ elt_name = elt.value
+ # If elt is in not_consumed, remove it from not_consumed
+ if elt_name in not_consumed:
+ del not_consumed[elt_name]
+ continue
+ if elt_name not in node.locals:
+ self.add_message('E0603', args=elt_name, node=elt)
+ # don't check unused imports in __init__ files
+ if not self.config.init_import and node.package:
+ return
+ for name, stmts in not_consumed.iteritems():
+ stmt = stmts[0]
+ if isinstance(stmt, astng.Import):
+ self.add_message('W0611', args=name, node=stmt)
+ elif isinstance(stmt, astng.From) and stmt.modname != '__future__':
+ if stmt.names[0][0] == '*':
+ self.add_message('W0614', args=name, node=stmt)
+ else:
+ self.add_message('W0611', args=name, node=stmt)
+ del self._to_consume
+
+ def visit_class(self, node):
+ """visit class: update consumption analysis variable
+ """
+ self._to_consume.append((copy(node.locals), {}, 'class'))
+
+ def leave_class(self, _):
+ """leave class: update consumption analysis variable
+ """
+ # do not check for not used locals here (no sense)
+ self._to_consume.pop()
+
+ def visit_lambda(self, node):
+ """visit lambda: update consumption analysis variable
+ """
+ self._to_consume.append((copy(node.locals), {}, 'lambda'))
+
+ def leave_lambda(self, _):
+ """leave lambda: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_genexpr(self, node):
+ """visit genexpr: update consumption analysis variable
+ """
+ self._to_consume.append((copy(node.locals), {}, 'comprehension'))
+
+ def leave_genexpr(self, _):
+ """leave genexpr: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_dictcomp(self, node):
+ """visit dictcomp: update consumption analysis variable
+ """
+ self._to_consume.append((copy(node.locals), {}, 'comprehension'))
+
+ def leave_dictcomp(self, _):
+ """leave dictcomp: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_setcomp(self, node):
+ """visit setcomp: update consumption analysis variable
+ """
+ self._to_consume.append((copy(node.locals), {}, 'comprehension'))
+
+ def leave_setcomp(self, _):
+ """leave setcomp: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_function(self, node):
+ """visit function: update consumption analysis variable and check locals
+ """
+ self._to_consume.append((copy(node.locals), {}, 'function'))
+ if not set(('W0621', 'W0622')) & self.active_msgs:
+ return
+ globs = node.root().globals
+ for name, stmt in node.items():
+ if is_inside_except(stmt):
+ continue
+ if name in globs and not isinstance(stmt, astng.Global):
+ line = globs[name][0].fromlineno
+ self.add_message('W0621', args=(name, line), node=stmt)
+ elif is_builtin(name):
+ # do not print Redefining builtin for additional builtins
+ self.add_message('W0622', args=name, node=stmt)
+
+ def leave_function(self, node):
+ """leave function: check function's locals are consumed"""
+ not_consumed = self._to_consume.pop()[0]
+ if not set(('W0612', 'W0613')) & self.active_msgs:
+ return
+ # don't check arguments of function which are only raising an exception
+ if is_error(node):
+ return
+ # don't check arguments of abstract methods or within an interface
+ is_method = node.is_method()
+ klass = node.parent.frame()
+ if is_method and (klass.type == 'interface' or node.is_abstract()):
+ return
+ authorized_rgx = self.config.dummy_variables_rgx
+ called_overridden = False
+ argnames = node.argnames()
+ for name, stmts in not_consumed.iteritems():
+ # ignore some special names specified by user configuration
+ if authorized_rgx.match(name):
+ continue
+ # ignore names imported by the global statement
+ # FIXME: should only ignore them if it's assigned latter
+ stmt = stmts[0]
+ if isinstance(stmt, astng.Global):
+ continue
+ # care about functions with unknown argument (builtins)
+ if name in argnames:
+ if is_method:
+ # don't warn for the first argument of a (non static) method
+ if node.type != 'staticmethod' and name == argnames[0]:
+ continue
+ # don't warn for argument of an overridden method
+ if not called_overridden:
+ overridden = overridden_method(klass, node.name)
+ called_overridden = True
+ if overridden is not None and name in overridden.argnames():
+ continue
+ if node.name in PYMETHODS and node.name not in ('__init__', '__new__'):
+ continue
+ # don't check callback arguments XXX should be configurable
+ if node.name.startswith('cb_') or node.name.endswith('_cb'):
+ continue
+ self.add_message('W0613', args=name, node=stmt)
+ else:
+ self.add_message('W0612', args=name, node=stmt)
+
+ @check_messages('W0601', 'W0602', 'W0603', 'W0604', 'W0622')
+ def visit_global(self, node):
+ """check names imported exists in the global scope"""
+ frame = node.frame()
+ if isinstance(frame, astng.Module):
+ self.add_message('W0604', node=node)
+ return
+ module = frame.root()
+ default_message = True
+ for name in node.names:
+ try:
+ assign_nodes = module.getattr(name)
+ except astng.NotFoundError:
+ # unassigned global, skip
+ assign_nodes = []
+ for anode in assign_nodes:
+ if anode.parent is None:
+ # node returned for builtin attribute such as __file__,
+ # __doc__, etc...
+ continue
+ if anode.frame() is frame:
+ # same scope level assignment
+ break
+ else:
+ # global but no assignment
+ self.add_message('W0602', args=name, node=node)
+ default_message = False
+ if not assign_nodes:
+ continue
+ for anode in assign_nodes:
+ if anode.parent is None:
+ self.add_message('W0622', args=name, node=node)
+ break
+ if anode.frame() is module:
+ # module level assignment
+ break
+ else:
+ # global undefined at the module scope
+ self.add_message('W0601', args=name, node=node)
+ default_message = False
+ if default_message:
+ self.add_message('W0603', node=node)
+
+ def _loopvar_name(self, node, name):
+ # filter variables according to node's scope
+ # XXX used to filter parents but don't remember why, and removing this
+ # fixes a W0631 false positive reported by Paul Hachmann on 2008/12 on
+ # python-projects (added to func_use_for_or_listcomp_var test)
+ #astmts = [stmt for stmt in node.lookup(name)[1]
+ # if hasattr(stmt, 'ass_type')] and
+ # not stmt.statement().parent_of(node)]
+ if 'W0631' not in self.active_msgs:
+ return
+ astmts = [stmt for stmt in node.lookup(name)[1]
+ if hasattr(stmt, 'ass_type')]
+ # filter variables according their respective scope test is_statement
+ # and parent to avoid #74747. This is not a total fix, which would
+ # introduce a mechanism similar to special attribute lookup in
+ # modules. Also, in order to get correct inference in this case, the
+ # scope lookup rules would need to be changed to return the initial
+ # assignment (which does not exist in code per se) as well as any later
+ # modifications.
+ if not astmts or (astmts[0].is_statement or astmts[0].parent) \
+ and astmts[0].statement().parent_of(node):
+ _astmts = []
+ else:
+ _astmts = astmts[:1]
+ for i, stmt in enumerate(astmts[1:]):
+ if (astmts[i].statement().parent_of(stmt)
+ and not in_for_else_branch(astmts[i].statement(), stmt)):
+ continue
+ _astmts.append(stmt)
+ astmts = _astmts
+ if len(astmts) == 1:
+ ass = astmts[0].ass_type()
+ if isinstance(ass, (astng.For, astng.Comprehension, astng.GenExpr)) \
+ and not ass.statement() is node.statement():
+ self.add_message('W0631', args=name, node=node)
+
+ def visit_excepthandler(self, node):
+ for name in get_all_elements(node.name):
+ clobbering, args = clobber_in_except(name)
+ if clobbering:
+ self.add_message('W0623', args=args, node=name)
+
+ def visit_assname(self, node):
+ if isinstance(node.ass_type(), astng.AugAssign):
+ self.visit_name(node)
+
+ def visit_delname(self, node):
+ self.visit_name(node)
+
+ def visit_name(self, node):
+ """check that a name is defined if the current scope and doesn't
+ redefine a built-in
+ """
+ stmt = node.statement()
+ if stmt.fromlineno is None:
+ # name node from a astng built from live code, skip
+ assert not stmt.root().file.endswith('.py')
+ return
+ name = node.name
+ frame = stmt.scope()
+ # if the name node is used as a function default argument's value or as
+ # a decorator, then start from the parent frame of the function instead
+ # of the function frame - and thus open an inner class scope
+ if (is_func_default(node) or is_func_decorator(node)
+ or is_ancestor_name(frame, node)):
+ start_index = len(self._to_consume) - 2
+ else:
+ start_index = len(self._to_consume) - 1
+ # iterates through parent scopes, from the inner to the outer
+ base_scope_type = self._to_consume[start_index][-1]
+ for i in range(start_index, -1, -1):
+ to_consume, consumed, scope_type = self._to_consume[i]
+ # if the current scope is a class scope but it's not the inner
+ # scope, ignore it. This prevents to access this scope instead of
+ # the globals one in function members when there are some common
+ # names. The only exception is when the starting scope is a
+ # comprehension and its direct outer scope is a class
+ if scope_type == 'class' and i != start_index and not (
+ base_scope_type == 'comprehension' and i == start_index-1):
+ # XXX find a way to handle class scope in a smoother way
+ continue
+ # the name has already been consumed, only check it's not a loop
+ # variable used outside the loop
+ if name in consumed:
+ self._loopvar_name(node, name)
+ break
+ # mark the name as consumed if it's defined in this scope
+ # (i.e. no KeyError is raised by "to_consume[name]")
+ try:
+ consumed[name] = to_consume[name]
+ except KeyError:
+ continue
+ # checks for use before assignment
+ defnode = assign_parent(to_consume[name][0])
+ if defnode is not None:
+ defstmt = defnode.statement()
+ defframe = defstmt.frame()
+ maybee0601 = True
+ if not frame is defframe:
+ maybee0601 = False
+ elif defframe.parent is None:
+ # we are at the module level, check the name is not
+ # defined in builtins
+ if name in defframe.scope_attrs or builtin_lookup(name)[1]:
+ maybee0601 = False
+ else:
+ # we are in a local scope, check the name is not
+ # defined in global or builtin scope
+ if defframe.root().lookup(name)[1]:
+ maybee0601 = False
+ if (maybee0601
+ and stmt.fromlineno <= defstmt.fromlineno
+ and not is_defined_before(node)
+ and not are_exclusive(stmt, defstmt, ('NameError', 'Exception', 'BaseException'))):
+ if defstmt is stmt and isinstance(node, (astng.DelName,
+ astng.AssName)):
+ self.add_message('E0602', args=name, node=node)
+ elif self._to_consume[-1][-1] != 'lambda':
+ # E0601 may *not* occurs in lambda scope
+ self.add_message('E0601', args=name, node=node)
+ if not isinstance(node, astng.AssName): # Aug AssName
+ del to_consume[name]
+ else:
+ del consumed[name]
+ # check it's not a loop variable used outside the loop
+ self._loopvar_name(node, name)
+ break
+ else:
+ # we have not found the name, if it isn't a builtin, that's an
+ # undefined name !
+ if not (name in astng.Module.scope_attrs or is_builtin(name)
+ or name in self.config.additional_builtins):
+ self.add_message('E0602', args=name, node=node)
+
+ @check_messages('E0611')
+ def visit_import(self, node):
+ """check modules attribute accesses"""
+ for name, _ in node.names:
+ parts = name.split('.')
+ try:
+ module = node.infer_name_module(parts[0]).next()
+ except astng.ResolveError:
+ continue
+ self._check_module_attrs(node, module, parts[1:])
+
+ @check_messages('E0611')
+ def visit_from(self, node):
+ """check modules attribute accesses"""
+ name_parts = node.modname.split('.')
+ level = getattr(node, 'level', None)
+ try:
+ module = node.root().import_module(name_parts[0], level=level)
+ except ASTNGBuildingException:
+ return
+ except Exception, exc:
+ print 'Unhandled exception in VariablesChecker:', exc
+ return
+ module = self._check_module_attrs(node, module, name_parts[1:])
+ if not module:
+ return
+ for name, _ in node.names:
+ if name == '*':
+ continue
+ self._check_module_attrs(node, module, name.split('.'))
+
+ def _check_module_attrs(self, node, module, module_names):
+ """check that module_names (list of string) are accessible through the
+ given module
+ if the latest access name corresponds to a module, return it
+ """
+ assert isinstance(module, astng.Module), module
+ while module_names:
+ name = module_names.pop(0)
+ if name == '__dict__':
+ module = None
+ break
+ try:
+ module = module.getattr(name)[0].infer().next()
+ if module is astng.YES:
+ return None
+ except astng.NotFoundError:
+ self.add_message('E0611', args=(name, module.name), node=node)
+ return None
+ except astng.InferenceError:
+ return None
+ if module_names:
+ # FIXME: other message if name is not the latest part of
+ # module_names ?
+ modname = module and module.name or '__dict__'
+ self.add_message('E0611', node=node,
+ args=('.'.join(module_names), modname))
+ return None
+ if isinstance(module, astng.Module):
+ return module
+ return None
+
+
class VariablesChecker3k(VariablesChecker):
    '''Modified variables checker for 3k'''
    # listcomp have now also their scope

    def visit_listcomp(self, node):
        """visit listcomp: update consumption analysis variable
        """
        self._to_consume.append((copy(node.locals), {}, 'comprehension'))

    def leave_listcomp(self, _):
        """leave listcomp: update consumption analysis variable
        """
        # do not check for not used locals here
        self._to_consume.pop()
+
+if sys.version_info >= (3, 0):
+ VariablesChecker = VariablesChecker3k
+
+
def register(linter):
    """required method to auto register this checker"""
    # pylint discovers checker plugins by calling this module-level hook.
    linter.register_checker(VariablesChecker(linter))
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/config.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/config.py
@@ -0,0 +1,152 @@
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""utilities for PyLint configuration :
+
+* pylintrc
+* pylint.d (PYLINTHOME)
+"""
+
+import pickle
+import os
+import sys
+from os.path import exists, isfile, join, expanduser, abspath, dirname
+
+# pylint home is used to save old runs results ################################
+
+USER_HOME = expanduser('~')
+if 'PYLINTHOME' in os.environ:
+ PYLINT_HOME = os.environ['PYLINTHOME']
+ if USER_HOME == '~':
+ USER_HOME = dirname(PYLINT_HOME)
+elif USER_HOME == '~':
+ PYLINT_HOME = ".pylint.d"
+else:
+ PYLINT_HOME = join(USER_HOME, '.pylint.d')
+
+if not exists(PYLINT_HOME):
+ try:
+ os.mkdir(PYLINT_HOME)
+ except OSError:
+ print >> sys.stderr, 'Unable to create directory %s' % PYLINT_HOME
+
+def get_pdata_path(base_name, recurs):
+ """return the path of the file which should contain old search data for the
+ given base_name with the given options values
+ """
+ base_name = base_name.replace(os.sep, '_')
+ return join(PYLINT_HOME, "%s%s%s"%(base_name, recurs, '.stats'))
+
+def load_results(base):
+ """try to unpickle and return data from file if it exists and is not
+ corrupted
+
+ return an empty dictionary if it doesn't exists
+ """
+ data_file = get_pdata_path(base, 1)
+ try:
+ return pickle.load(open(data_file))
+ except:
+ return {}
+
+if sys.version_info < (3, 0):
+ _PICK_MOD = 'w'
+else:
+ _PICK_MOD = 'wb'
+
+def save_results(results, base):
+ """pickle results"""
+ data_file = get_pdata_path(base, 1)
+ try:
+ pickle.dump(results, open(data_file, _PICK_MOD))
+ except (IOError, OSError), ex:
+ print >> sys.stderr, 'Unable to create file %s: %s' % (data_file, ex)
+
+# location of the configuration file ##########################################
+
+
+def find_pylintrc():
+ """search the pylint rc file and return its path if it find it, else None
+ """
+ # is there a pylint rc file in the current directory ?
+ if exists('pylintrc'):
+ return abspath('pylintrc')
+ if isfile('__init__.py'):
+ curdir = abspath(os.getcwd())
+ while isfile(join(curdir, '__init__.py')):
+ curdir = abspath(join(curdir, '..'))
+ if isfile(join(curdir, 'pylintrc')):
+ return join(curdir, 'pylintrc')
+ if 'PYLINTRC' in os.environ and exists(os.environ['PYLINTRC']):
+ pylintrc = os.environ['PYLINTRC']
+ else:
+ user_home = expanduser('~')
+ if user_home == '~' or user_home == '/root':
+ pylintrc = ".pylintrc"
+ else:
+ pylintrc = join(user_home, '.pylintrc')
+ if not isfile(pylintrc):
+ if isfile('/etc/pylintrc'):
+ pylintrc = '/etc/pylintrc'
+ else:
+ pylintrc = None
+ return pylintrc
+
+PYLINTRC = find_pylintrc()
+
+ENV_HELP = '''
+The following environment variables are used:
+ * PYLINTHOME
+ path to the directory where data of persistent run will be stored. If not
+found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working
+directory).
+ * PYLINTRC
+ path to the configuration file. If not found, it will use the first
+existing file among (~/.pylintrc, /etc/pylintrc).
+''' % globals()
+
+# evaluation messages #########################################################
+
+def get_note_message(note):
+ """return a message according to note
+ note is a float < 10 (10 is the highest note)
+ """
+ assert note <= 10, "Note is %.2f. Either you cheated, or pylint's \
+broken!" % note
+ if note < 0:
+ msg = 'You have to do something quick !'
+ elif note < 1:
+ msg = 'Hey! This is really dreadful. Or maybe pylint is buggy?'
+ elif note < 2:
+ msg = "Come on! You can't be proud of this code"
+ elif note < 3:
+ msg = 'Hum... Needs work.'
+ elif note < 4:
+ msg = 'Wouldn\'t you be a bit lazy?'
+ elif note < 5:
+ msg = 'A little more work would make it acceptable.'
+ elif note < 6:
+ msg = 'Just the bare minimum. Give it a bit more polish. '
+ elif note < 7:
+ msg = 'This is okay-ish, but I\'m sure you can do better.'
+ elif note < 8:
+ msg = 'If you commit now, people should not be making nasty \
+comments about you on c.l.py'
+ elif note < 9:
+ msg = 'That\'s pretty good. Good work mate.'
+ elif note < 10:
+ msg = 'So close to being perfect...'
+ else:
+ msg = 'Wow ! Now this deserves our uttermost respect.\nPlease send \
+your code to python-projects@logilab.org'
+ return msg
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/interfaces.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/interfaces.py
@@ -0,0 +1,98 @@
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+""" Copyright (c) 2002-2003 LOGILAB S.A. (Paris, FRANCE).
+ http://www.logilab.fr/ -- mailto:contact@logilab.fr
+
+Interfaces for PyLint objects
+"""
+
+__revision__ = "$Id: interfaces.py,v 1.9 2004-04-24 12:14:53 syt Exp $"
+
+from .logilab.common.interface import Interface
+
+
+class IChecker(Interface):
+ """This is an base interface, not designed to be used elsewhere than for
+ sub interfaces definition.
+ """
+
+ def open(self):
+ """called before visiting project (i.e set of modules)"""
+
+ def close(self):
+ """called after visiting project (i.e set of modules)"""
+
+## def open_module(self):
+## """called before visiting a module"""
+
+## def close_module(self):
+## """called after visiting a module"""
+
+
+class IRawChecker(IChecker):
+ """interface for checker which need to parse the raw file
+ """
+
+ def process_module(self, astng):
+ """ process a module
+
+ the module's content is accessible via astng.file_stream
+ """
+
+
+class IASTNGChecker(IChecker):
+ """ interface for checker which prefers receive events according to
+ statement type
+ """
+
+
+class ILinter(Interface):
+ """interface for the linter class
+
+ the linter class will generate events to its registered checkers.
+ Each checker may interact with the linter instance using this API
+ """
+
+ def register_checker(self, checker):
+ """register a new checker class
+
+ checker is a class implementing IrawChecker or / and IASTNGChecker
+ """
+
+ def add_message(self, msg_id, line=None, node=None, args=None):
+ """add the message corresponding to the given id.
+
+ If provided, msg is expanded using args
+
+ astng checkers should provide the node argument,
+ raw checkers should provide the line argument.
+ """
+
+
+class IReporter(Interface):
+ """ reporter collect messages and display results encapsulated in a layout
+ """
+ def add_message(self, msg_id, location, msg):
+ """add a message of a given type
+
+ msg_id is a message identifier
+ location is a 3-uple (module, object, line)
+ msg is the actual message
+ """
+
+ def display_results(self, layout):
+ """display results encapsulated in the layout tree
+ """
+
+
+__all__ = ('IRawChecker', 'ILinter', 'IReporter')
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/lint.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/lint.py
@@ -0,0 +1,1046 @@
+# Copyright (c) 2003-2010 Sylvain Thenault (thenault@gmail.com).
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+""" %prog [options] module_or_package
+
+ Check that a module satisfies a coding standard (and more !).
+
+ %prog --help
+
+ Display this help message and exit.
+
+ %prog --help-msg <msg-id>[,<msg-id>]
+
+ Display help messages about given message identifiers and exit.
+"""
+
+# import this first to avoid builtin namespace pollution
+from .checkers import utils
+
+import sys
+import os
+import re
+import tokenize
+from warnings import warn
+
+from .logilab.common.configuration import UnsupportedAction, OptionsManagerMixIn
+from .logilab.common.optik_ext import check_csv
+from .logilab.common.modutils import load_module_from_name, get_module_part
+from .logilab.common.interface import implements
+from .logilab.common.textutils import splitstrip
+from .logilab.common.ureports import Table, Text, Section
+from .logilab.common.__pkginfo__ import version as common_version
+
+from .logilab.astng import MANAGER, nodes, ASTNGBuildingException
+from .logilab.astng.__pkginfo__ import version as astng_version
+
+from .utils import (PyLintASTWalker, UnknownMessage, MessagesHandlerMixIn,
+ ReportsHandlerMixIn, MSG_TYPES, expand_modules)
+from .interfaces import ILinter, IRawChecker, IASTNGChecker
+from .checkers import (BaseRawChecker, EmptyReport,
+ table_lines_from_stats)
+from .reporters.text import (TextReporter, ParseableTextReporter,
+ VSTextReporter, ColorizedTextReporter)
+from .reporters.html import HTMLReporter
+from . import config
+
+from .__pkginfo__ import version
+
+
+OPTION_RGX = re.compile(r'\s*#.*\bpylint:(.*)')
+REPORTER_OPT_MAP = {'text': TextReporter,
+ 'parseable': ParseableTextReporter,
+ 'msvs': VSTextReporter,
+ 'colorized': ColorizedTextReporter,
+ 'html': HTMLReporter,}
+
+
+def _get_python_path(filepath):
+ dirname = os.path.dirname(os.path.realpath(
+ os.path.expanduser(filepath)))
+ while True:
+ if not os.path.exists(os.path.join(dirname, "__init__.py")):
+ return dirname
+ old_dirname = dirname
+ dirname = os.path.dirname(dirname)
+ if old_dirname == dirname:
+ return os.getcwd()
+
+
+# Python Linter class #########################################################
+
+MSGS = {
+ 'F0001': ('%s',
+ 'fatal',
+ 'Used when an error occurred preventing the analysis of a \
+ module (unable to find it for instance).'),
+ 'F0002': ('%s: %s',
+ 'astng-error',
+ 'Used when an unexpected error occurred while building the ASTNG \
+ representation. This is usually accompanied by a traceback. \
+ Please report such errors !'),
+ 'F0003': ('ignored builtin module %s',
+ 'ignored-builtin-module',
+ 'Used to indicate that the user asked to analyze a builtin module \
+ which has been skipped.'),
+ 'F0004': ('unexpected inferred value %s',
+ 'unexpected-inferred-value',
+ 'Used to indicate that some value of an unexpected type has been \
+ inferred.'),
+ 'F0010': ('error while code parsing: %s',
+ 'parse-error',
+ 'Used when an exception occured while building the ASTNG \
+ representation which could be handled by astng.'),
+
+ 'I0001': ('Unable to run raw checkers on built-in module %s',
+ 'raw-checker-failed',
+ 'Used to inform that a built-in module has not been checked \
+ using the raw checkers.'),
+
+ 'I0010': ('Unable to consider inline option %r',
+ 'bad-inline-option',
+ 'Used when an inline option is either badly formatted or can\'t \
+ be used inside modules.'),
+
+ 'I0011': ('Locally disabling %s',
+ 'locally-disabled',
+ 'Used when an inline option disables a message or a messages \
+ category.'),
+ 'I0012': ('Locally enabling %s',
+ 'locally-enabled',
+ 'Used when an inline option enables a message or a messages \
+ category.'),
+ 'I0013': ('Ignoring entire file',
+ 'file-ignored',
+ 'Used to inform that the file will not be checked'),
+ 'I0014': ('Used deprecated directive "pylint:disable-all" or "pylint:disable=all"',
+ 'deprecated-disable-all',
+ 'You should preferably use "pylint:skip-file" as this directive '
+ 'has a less confusing name. Do this only if you are sure that all '
+ 'people running Pylint on your code have version >= 0.26'),
+ 'I0020': ('Suppressed %s (from line %d)',
+ 'suppressed-message',
+ 'A message was triggered on a line, but suppressed explicitly '
+ 'by a disable= comment in the file. This message is not '
+ 'generated for messages that are ignored due to configuration '
+ 'settings.'),
+ 'I0021': ('Useless suppression of %s',
+ 'useless-suppression',
+ 'Reported when a message is explicitly disabled for a line or '
+ 'a block of code, but never triggered.'),
+
+
+ 'E0001': ('%s',
+ 'syntax-error',
+ 'Used when a syntax error is raised for a module.'),
+
+ 'E0011': ('Unrecognized file option %r',
+ 'unrecognized-inline-option',
+ 'Used when an unknown inline option is encountered.'),
+ 'E0012': ('Bad option value %r',
+ 'bad-option-value',
+ 'Used when a bad value for an inline option is encountered.'),
+ }
+
+
+class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
+ BaseRawChecker):
+ """lint Python modules using external checkers.
+
+ This is the main checker controlling the other ones and the reports
+ generation. It is itself both a raw checker and an astng checker in order
+ to:
+ * handle message activation / deactivation at the module level
+ * handle some basic but necessary stats'data (number of classes, methods...)
+
+ IDE plugins developpers: you may have to call
+ `logilab.astng.builder.MANAGER.astng_cache.clear()` accross run if you want
+ to ensure the latest code version is actually checked.
+ """
+
+ __implements__ = (ILinter, IRawChecker)
+
+ name = 'master'
+ priority = 0
+ level = 0
+ msgs = MSGS
+ may_be_disabled = False
+
+ @staticmethod
+ def make_options():
+ return (('ignore',
+ {'type' : 'csv', 'metavar' : '<file>[,<file>...]',
+ 'dest' : 'black_list', 'default' : ('CVS',),
+ 'help' : 'Add files or directories to the blacklist. \
+They should be base names, not paths.'}),
+ ('persistent',
+ {'default': True, 'type' : 'yn', 'metavar' : '<y_or_n>',
+ 'level': 1,
+ 'help' : 'Pickle collected data for later comparisons.'}),
+
+ ('load-plugins',
+ {'type' : 'csv', 'metavar' : '<modules>', 'default' : (),
+ 'level': 1,
+ 'help' : 'List of plugins (as comma separated values of \
+python modules names) to load, usually to register additional checkers.'}),
+
+ ('output-format',
+ {'default': 'text', 'type': 'string', 'metavar' : '<format>',
+ 'short': 'f',
+ 'group': 'Reports',
+ 'help' : 'Set the output format. Available formats are text,'
+ ' parseable, colorized, msvs (visual studio) and html. You '
+ 'can also give a reporter class, eg mypackage.mymodule.'
+ 'MyReporterClass.'}),
+
+ ('include-ids',
+ {'type' : 'yn', 'metavar' : '<y_or_n>', 'default' : 0,
+ 'short': 'i',
+ 'group': 'Reports',
+ 'help' : 'Include message\'s id in output'}),
+
+ ('symbols',
+ {'type' : 'yn', 'metavar' : '<y_or_n>', 'default' : 0,
+ 'short': 's',
+ 'group': 'Reports',
+ 'help' : 'Include symbolic ids of messages in output'}),
+
+ ('files-output',
+ {'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>',
+ 'group': 'Reports', 'level': 1,
+ 'help' : 'Put messages in a separate file for each module / \
+package specified on the command line instead of printing them on stdout. \
+Reports (if any) will be written in a file name "pylint_global.[txt|html]".'}),
+
+ ('reports',
+ {'default': 1, 'type' : 'yn', 'metavar' : '<y_or_n>',
+ 'short': 'r',
+ 'group': 'Reports',
+ 'help' : 'Tells whether to display a full report or only the\
+ messages'}),
+
+ ('evaluation',
+ {'type' : 'string', 'metavar' : '<python_expression>',
+ 'group': 'Reports', 'level': 1,
+ 'default': '10.0 - ((float(5 * error + warning + refactor + \
+convention) / statement) * 10)',
+ 'help' : 'Python expression which should return a note less \
+than 10 (10 is the highest note). You have access to the variables errors \
+warning, statement which respectively contain the number of errors / warnings\
+ messages and the total number of statements analyzed. This is used by the \
+ global evaluation report (RP0004).'}),
+
+ ('comment',
+ {'default': 0, 'type' : 'yn', 'metavar' : '<y_or_n>',
+ 'group': 'Reports', 'level': 1,
+ 'help' : 'Add a comment according to your evaluation note. \
+This is used by the global evaluation report (RP0004).'}),
+
+ ('enable',
+ {'type' : 'csv', 'metavar': '<msg ids>',
+ 'short': 'e',
+ 'group': 'Messages control',
+ 'help' : 'Enable the message, report, category or checker with the '
+ 'given id(s). You can either give multiple identifier '
+ 'separated by comma (,) or put this option multiple time. '
+ 'See also the "--disable" option for examples. '}),
+
+ ('disable',
+ {'type' : 'csv', 'metavar': '<msg ids>',
+ 'short': 'd',
+ 'group': 'Messages control',
+ 'help' : 'Disable the message, report, category or checker '
+ 'with the given id(s). You can either give multiple identifiers'
+ ' separated by comma (,) or put this option multiple times '
+ '(only on the command line, not in the configuration file '
+ 'where it should appear only once).'
+ 'You can also use "--disable=all" to disable everything first '
+ 'and then reenable specific checks. For example, if you want '
+ 'to run only the similarities checker, you can use '
+ '"--disable=all --enable=similarities". '
+ 'If you want to run only the classes checker, but have no '
+ 'Warning level messages displayed, use'
+ '"--disable=all --enable=classes --disable=W"'}),
+ )
+
+ option_groups = (
+ ('Messages control', 'Options controling analysis messages'),
+ ('Reports', 'Options related to output formating and reporting'),
+ )
+
+ def __init__(self, options=(), reporter=None, option_groups=(),
+ pylintrc=None):
+ # some stuff has to be done before ancestors initialization...
+ #
+ # checkers / reporter / astng manager
+ self.reporter = None
+ self._checkers = {}
+ self._ignore_file = False
+ # visit variables
+ self.base_name = None
+ self.base_file = None
+ self.current_name = None
+ self.current_file = None
+ self.stats = None
+ # init options
+ self.options = options + PyLinter.make_options()
+ self.option_groups = option_groups + PyLinter.option_groups
+ self._options_methods = {
+ 'enable': self.enable,
+ 'disable': self.disable}
+ self._bw_options_methods = {'disable-msg': self.disable,
+ 'enable-msg': self.enable}
+ full_version = '%%prog %s, \nastng %s, common %s\nPython %s' % (
+ version, astng_version, common_version, sys.version)
+ OptionsManagerMixIn.__init__(self, usage=__doc__,
+ version=full_version,
+ config_file=pylintrc or config.PYLINTRC)
+ MessagesHandlerMixIn.__init__(self)
+ ReportsHandlerMixIn.__init__(self)
+ BaseRawChecker.__init__(self)
+ # provided reports
+ self.reports = (('RP0001', 'Messages by category',
+ report_total_messages_stats),
+ ('RP0002', '% errors / warnings by module',
+ report_messages_by_module_stats),
+ ('RP0003', 'Messages',
+ report_messages_stats),
+ ('RP0004', 'Global evaluation',
+ self.report_evaluation),
+ )
+ self.register_checker(self)
+ self._dynamic_plugins = []
+ self.load_provider_defaults()
+ self.set_reporter(reporter or TextReporter(sys.stdout))
+
+ def load_default_plugins(self):
+ from . import checkers
+ checkers.initialize(self)
+
+ def prepare_import_path(self, args):
+ """Prepare sys.path for running the linter checks."""
+ if len(args) == 1:
+ sys.path.insert(0, _get_python_path(args[0]))
+ else:
+ sys.path.insert(0, os.getcwd())
+
+ def cleanup_import_path(self):
+ """Revert any changes made to sys.path in prepare_import_path."""
+ sys.path.pop(0)
+
+ def load_plugin_modules(self, modnames):
+ """take a list of module names which are pylint plugins and load
+ and register them
+ """
+ for modname in modnames:
+ if modname in self._dynamic_plugins:
+ continue
+ self._dynamic_plugins.append(modname)
+ module = load_module_from_name(modname)
+ module.register(self)
+
+ def set_reporter(self, reporter):
+ """set the reporter used to display messages and reports"""
+ self.reporter = reporter
+ reporter.linter = self
+
+ def set_option(self, optname, value, action=None, optdict=None):
+ """overridden from configuration.OptionsProviderMixin to handle some
+ special options
+ """
+ if optname in self._options_methods or optname in self._bw_options_methods:
+ if value:
+ try:
+ meth = self._options_methods[optname]
+ except KeyError:
+ meth = self._bw_options_methods[optname]
+ warn('%s is deprecated, replace it by %s' % (
+ optname, optname.split('-')[0]), DeprecationWarning)
+ value = check_csv(None, optname, value)
+ if isinstance(value, (list, tuple)):
+ for _id in value :
+ meth(_id)
+ else :
+ meth(value)
+ elif optname == 'output-format':
+ if value.lower() in REPORTER_OPT_MAP:
+ self.set_reporter(REPORTER_OPT_MAP[value.lower()]())
+ else:
+ module = load_module_from_name(get_module_part(value))
+ class_name = value.split('.')[-1]
+ reporter_class = getattr(module, class_name)
+ self.set_reporter(reporter_class())
+
+ try:
+ BaseRawChecker.set_option(self, optname, value, action, optdict)
+ except UnsupportedAction:
+ print >> sys.stderr, 'option %s can\'t be read from config file' % \
+ optname
+
+ # checkers manipulation methods ############################################
+
+ def register_checker(self, checker):
+ """register a new checker
+
+ checker is an object implementing IRawChecker or / and IASTNGChecker
+ """
+ assert checker.priority <= 0, 'checker priority can\'t be >= 0'
+ self._checkers.setdefault(checker.name, []).append(checker)
+ for r_id, r_title, r_cb in checker.reports:
+ self.register_report(r_id, r_title, r_cb, checker)
+ self.register_options_provider(checker)
+ if hasattr(checker, 'msgs'):
+ self.register_messages(checker)
+ checker.load_defaults()
+
+ def disable_noerror_messages(self):
+ for msgcat, msgids in self._msgs_by_category.iteritems():
+ if msgcat == 'E':
+ for msgid in msgids:
+ self.enable(msgid)
+ else:
+ for msgid in msgids:
+ self.disable(msgid)
+
+ def disable_reporters(self):
+ """disable all reporters"""
+ for reporters in self._reports.itervalues():
+ for report_id, _title, _cb in reporters:
+ self.disable_report(report_id)
+
+ def error_mode(self):
+ """error mode: enable only errors; no reports, no persistent"""
+ self.disable_noerror_messages()
+ self.disable('miscellaneous')
+ self.set_option('reports', False)
+ self.set_option('persistent', False)
+
+ # block level option handling #############################################
+ #
+ # see func_block_disable_msg.py test case for expected behaviour
+
+ def process_tokens(self, tokens):
+ """process tokens from the current module to search for module/block
+ level options
+ """
+ comment = tokenize.COMMENT
+ newline = tokenize.NEWLINE
+ for (tok_type, _, start, _, line) in tokens:
+ if tok_type not in (comment, newline):
+ continue
+ match = OPTION_RGX.search(line)
+ if match is None:
+ continue
+ if match.group(1).strip() == "disable-all" or match.group(1).strip() == 'skip-file':
+ if match.group(1).strip() == "disable-all":
+ self.add_message('I0014', line=start[0])
+ self.add_message('I0013', line=start[0])
+ self._ignore_file = True
+ return
+ try:
+ opt, value = match.group(1).split('=', 1)
+ except ValueError:
+ self.add_message('I0010', args=match.group(1).strip(),
+ line=start[0])
+ continue
+ opt = opt.strip()
+ if opt in self._options_methods or opt in self._bw_options_methods:
+ try:
+ meth = self._options_methods[opt]
+ except KeyError:
+ meth = self._bw_options_methods[opt]
+ warn('%s is deprecated, replace it with %s (%s, line %s)' % (
+ opt, opt.split('-')[0], self.current_file, line),
+ DeprecationWarning)
+ for msgid in splitstrip(value):
+ try:
+ if (opt, msgid) == ('disable', 'all'):
+ self.add_message('I0014', line=start[0])
+ self.add_message('I0013', line=start[0])
+ self._ignore_file = True
+ return
+ meth(msgid, 'module', start[0])
+ except UnknownMessage:
+ self.add_message('E0012', args=msgid, line=start[0])
+ else:
+ self.add_message('E0011', args=opt, line=start[0])
+
+ def collect_block_lines(self, node, msg_state):
+ """walk ast to collect block level options line numbers"""
+ # recurse on children (depth first)
+ for child in node.get_children():
+ self.collect_block_lines(child, msg_state)
+ first = node.fromlineno
+ last = node.tolineno
+ # first child line number used to distinguish between disable
+ # which are the first child of scoped node with those defined later.
+ # For instance in the code below:
+ #
+ # 1. def meth8(self):
+ # 2. """test late disabling"""
+ # 3. # pylint: disable=E1102
+ # 4. print self.blip
+ # 5. # pylint: disable=E1101
+ # 6. print self.bla
+ #
+ # E1102 should be disabled from line 1 to 6 while E1101 from line 5 to 6
+ #
+ # this is necessary to disable locally messages applying to class /
+ # function using their fromlineno
+ if isinstance(node, (nodes.Module, nodes.Class, nodes.Function)) and node.body:
+ firstchildlineno = node.body[0].fromlineno
+ else:
+ firstchildlineno = last
+ for msgid, lines in msg_state.iteritems():
+ for lineno, state in lines.items():
+ original_lineno = lineno
+ if first <= lineno <= last:
+ if lineno > firstchildlineno:
+ state = True
+ # set state for all lines for this block
+ first, last = node.block_range(lineno)
+ for line in xrange(first, last+1):
+ # do not override existing entries
+ if not line in self._module_msgs_state.get(msgid, ()):
+ if line in lines: # state change in the same block
+ state = lines[line]
+ original_lineno = line
+ if not state:
+ self._suppression_mapping[(msgid, line)] = original_lineno
+ try:
+ self._module_msgs_state[msgid][line] = state
+ except KeyError:
+ self._module_msgs_state[msgid] = {line: state}
+ del lines[lineno]
+
+
+ # code checking methods ###################################################
+
+ def get_checkers(self):
+ """return all available checkers as a list"""
+ return [self] + [c for checkers in self._checkers.itervalues()
+ for c in checkers if c is not self]
+
+ def prepare_checkers(self):
+ """return checkers needed for activated messages and reports"""
+ if not self.config.reports:
+ self.disable_reporters()
+ # get needed checkers
+ neededcheckers = [self]
+ for checker in self.get_checkers()[1:]:
+ # fatal errors should not trigger enable / disabling a checker
+ messages = set(msg for msg in checker.msgs
+ if msg[0] != 'F' and self.is_message_enabled(msg))
+ if (messages or
+ any(self.report_is_enabled(r[0]) for r in checker.reports)):
+ neededcheckers.append(checker)
+ checker.active_msgs = messages
+ return neededcheckers
+
+ def check(self, files_or_modules):
+ """main checking entry: check a list of files or modules from their
+ name.
+ """
+ self.reporter.include_ids = self.config.include_ids
+ self.reporter.symbols = self.config.symbols
+ if not isinstance(files_or_modules, (list, tuple)):
+ files_or_modules = (files_or_modules,)
+ walker = PyLintASTWalker(self)
+ checkers = self.prepare_checkers()
+ rawcheckers = [c for c in checkers if implements(c, IRawChecker)
+ and c is not self]
+ # notify global begin
+ for checker in checkers:
+ checker.open()
+ if implements(checker, IASTNGChecker):
+ walker.add_checker(checker)
+ # build ast and check modules or packages
+ for descr in self.expand_files(files_or_modules):
+ modname, filepath = descr['name'], descr['path']
+ if self.config.files_output:
+ reportfile = 'pylint_%s.%s' % (modname, self.reporter.extension)
+ self.reporter.set_output(open(reportfile, 'w'))
+ self.set_current_module(modname, filepath)
+ # get the module representation
+ astng = self.get_astng(filepath, modname)
+ if astng is None:
+ continue
+ self.base_name = descr['basename']
+ self.base_file = descr['basepath']
+ self._ignore_file = False
+ # fix the current file (if the source file was not available or
+ # if it's actually a c extension)
+ self.current_file = astng.file
+ self.check_astng_module(astng, walker, rawcheckers)
+ self._add_suppression_messages()
+ # notify global end
+ self.set_current_module('')
+ self.stats['statement'] = walker.nbstatements
+ checkers.reverse()
+ for checker in checkers:
+ checker.close()
+
+ def expand_files(self, modules):
+ """get modules and errors from a list of modules and handle errors
+ """
+ result, errors = expand_modules(modules, self.config.black_list)
+ for error in errors:
+ message = modname = error["mod"]
+ key = error["key"]
+ self.set_current_module(modname)
+ if key == "F0001":
+ message = str(error["ex"]).replace(os.getcwd() + os.sep, '')
+ self.add_message(key, args=message)
+ return result
+
+ def set_current_module(self, modname, filepath=None):
+ """set the name of the currently analyzed module and
+ init statistics for it
+ """
+ if not modname and filepath is None:
+ return
+ self.reporter.on_set_current_module(modname, filepath)
+ self.current_name = modname
+ self.current_file = filepath or modname
+ self.stats['by_module'][modname] = {}
+ self.stats['by_module'][modname]['statement'] = 0
+ for msg_cat in MSG_TYPES.itervalues():
+ self.stats['by_module'][modname][msg_cat] = 0
+ # XXX hack, to be correct we need to keep module_msgs_state
+ # for every analyzed module (the problem stands with localized
+ # messages which are only detected in the .close step)
+ if modname:
+ self._module_msgs_state = {}
+ self._module_msg_cats_state = {}
+ self._raw_module_msgs_state = {}
+ self._ignored_msgs = {}
+
+ def get_astng(self, filepath, modname):
+ """return a astng representation for a module"""
+ try:
+ return MANAGER.astng_from_file(filepath, modname, source=True)
+ except SyntaxError, ex:
+ self.add_message('E0001', line=ex.lineno, args=ex.msg)
+ except ASTNGBuildingException, ex:
+ self.add_message('F0010', args=ex)
+ except Exception, ex:
+ import traceback
+ traceback.print_exc()
+ self.add_message('F0002', args=(ex.__class__, ex))
+
+ def check_astng_module(self, astng, walker, rawcheckers):
+ """check a module from its astng representation, real work"""
+ # call raw checkers if possible
+ if not astng.pure_python:
+ self.add_message('I0001', args=astng.name)
+ else:
+ #assert astng.file.endswith('.py')
+ # invoke IRawChecker interface on self to fetch module/block
+ # level options
+ self.process_module(astng)
+ if self._ignore_file:
+ return False
+ # walk ast to collect line numbers
+ for msg, lines in self._module_msgs_state.iteritems():
+ self._raw_module_msgs_state[msg] = lines.copy()
+ orig_state = self._module_msgs_state.copy()
+ self._module_msgs_state = {}
+ self._suppression_mapping = {}
+ self.collect_block_lines(astng, orig_state)
+ for checker in rawcheckers:
+ checker.process_module(astng)
+ # generate events to astng checkers
+ walker.walk(astng)
+ return True
+
+ # IASTNGChecker interface #################################################
+
+ def open(self):
+ """initialize counters"""
+ self.stats = { 'by_module' : {},
+ 'by_msg' : {},
+ }
+ for msg_cat in MSG_TYPES.itervalues():
+ self.stats[msg_cat] = 0
+
    def close(self):
        """close the whole package /module, it's time to make reports !

        if persistent run, pickle results for later comparison
        """
        if self.base_name is not None:
            # load previous results if any
            previous_stats = config.load_results(self.base_name)
            # XXX code below needs refactoring to be more reporter agnostic
            self.reporter.on_close(self.stats, previous_stats)
            if self.config.reports:
                sect = self.make_reports(self.stats, previous_stats)
                if self.config.files_output:
                    # write the global report to a 'pylint_global.<ext>' file
                    # instead of the current output stream
                    filename = 'pylint_global.' + self.reporter.extension
                    self.reporter.set_output(open(filename, 'w'))
            else:
                # no reports requested: display an empty section at most
                sect = Section()
            if self.config.reports or self.config.output_format == 'html':
                self.reporter.display_results(sect)
            # save results if persistent run
            if self.config.persistent:
                config.save_results(self.stats, self.base_name)
+
+ # specific reports ########################################################
+
    def _add_suppression_messages(self):
        """emit informational messages about local suppressions

        * I0021: a message was locally disabled on a line where it would not
          have been emitted anyway (useless suppression)
        * I0020: a message was actually suppressed by a local pragma
        """
        for warning, lines in self._raw_module_msgs_state.iteritems():
            for line, enable in lines.iteritems():
                if not enable and (warning, line) not in self._ignored_msgs:
                    self.add_message('I0021', line, None,
                                     (self.get_msg_display_string(warning),))

        for (warning, from_), lines in self._ignored_msgs.iteritems():
            for line in lines:
                self.add_message('I0020', line, None,
                                 (self.get_msg_display_string(warning), from_))
+
    def report_evaluation(self, sect, stats, previous_stats):
        """make the global evaluation report"""
        # check with at least check 1 statements (usually 0 when there is a
        # syntax error preventing pylint from further processing)
        if stats['statement'] == 0:
            raise EmptyReport()
        # get a global note for the code
        evaluation = self.config.evaluation
        try:
            # the evaluation option is a user-supplied python expression over
            # the collected statistics; it is eval'ed with self.stats as
            # locals (NOTE(review): not the <stats> argument — presumably the
            # same object in practice, verify against callers)
            note = eval(evaluation, {}, self.stats)
        except Exception, ex:
            msg = 'An exception occurred while rating: %s' % ex
        else:
            stats['global_note'] = note
            msg = 'Your code has been rated at %.2f/10' % note
            if 'global_note' in previous_stats:
                msg += ' (previous run: %.2f/10)' % previous_stats['global_note']
            if self.config.comment:
                msg = '%s\n%s' % (msg, config.get_note_message(note))
        sect.append(Text(msg))
+
+# some reporting functions ####################################################
+
def report_total_messages_stats(sect, stats, previous_stats):
    """make total errors / warnings report"""
    # header row followed, for each category, by current count, previous
    # count and the difference between the two runs
    lines = ['type', 'number', 'previous', 'difference']
    lines += table_lines_from_stats(stats, previous_stats,
                                    ('convention', 'refactor',
                                     'warning', 'error'))
    sect.append(Table(children=lines, cols=4, rheaders=1))
+
def report_messages_stats(sect, stats, _):
    """make messages type report"""
    if not stats['by_msg']:
        # don't print this report when we didn't detected any errors
        raise EmptyReport()
    # sort message ids by decreasing number of occurrences, leaving out the
    # informational (I...) messages
    in_order = sorted([(value, msg_id)
                       for msg_id, value in stats['by_msg'].iteritems()
                       if not msg_id.startswith('I')])
    in_order.reverse()
    lines = ('message id', 'occurrences')
    for value, msg_id in in_order:
        lines += (msg_id, str(value))
    sect.append(Table(children=lines, cols=2, rheaders=1))
+
def report_messages_by_module_stats(sect, stats, _):
    """make errors / warnings by modules report"""
    if len(stats['by_module']) == 1:
        # don't print this report when we are analysing a single module
        raise EmptyReport()
    # for each module, compute the percentage of each message category it
    # accounts for
    by_mod = {}
    for m_type in ('fatal', 'error', 'warning', 'refactor', 'convention'):
        total = stats[m_type]
        for module in stats['by_module'].iterkeys():
            mod_total = stats['by_module'][module][m_type]
            if total == 0:
                percent = 0
            else:
                percent = float((mod_total)*100) / total
            by_mod.setdefault(module, {})[m_type] = percent
    # sort modules by decreasing (error, warning, refactor, convention) share
    sorted_result = []
    for module, mod_info in by_mod.iteritems():
        sorted_result.append((mod_info['error'],
                              mod_info['warning'],
                              mod_info['refactor'],
                              mod_info['convention'],
                              module))
    sorted_result.sort()
    sorted_result.reverse()
    lines = ['module', 'error', 'warning', 'refactor', 'convention']
    for line in sorted_result:
        # stop at the first module without errors nor warnings (the list is
        # sorted, so following ones have none either)
        if line[0] == 0 and line[1] == 0:
            break
        lines.append(line[-1])
        for val in line[:-1]:
            lines.append('%.2f' % val)
    if len(lines) == 5:
        # only the header row was produced: nothing to report
        raise EmptyReport()
    sect.append(Table(children=lines, cols=5, rheaders=1))
+
+
+# utilities ###################################################################
+
+# this may help to import modules using gettext
+# XXX syt, actually needed since we don't import code?
+
+from .logilab.common.compat import builtins
+builtins._ = str
+
+
class ArgumentPreprocessingError(Exception):
    """Raised if an error occurs during argument preprocessing."""


def preprocess_options(args, search_for):
    """look for some options (keys of <search_for>) which have to be processed
    before others

    values of <search_for> are callback functions to call when the option is
    found

    Processed options (and their value, when they take one) are removed from
    <args> in place; unrecognized arguments are left untouched for the
    regular option parser.

    Raises ArgumentPreprocessingError when an option expecting a value is the
    last argument or is followed by another option.
    """
    i = 0
    while i < len(args):
        arg = args[i]
        if arg.startswith('--'):
            try:
                option, val = arg[2:].split('=', 1)
            except ValueError:
                # bare "--option" with no "=value" part
                option, val = arg[2:], None
            try:
                cb, takearg = search_for[option]
            except KeyError:
                # unknown option: skip it. Only the dictionary lookup is
                # guarded here -- previously the callback call was inside the
                # same try block, so a KeyError raised by the callback itself
                # was silently swallowed and the option dropped
                i += 1
            else:
                del args[i]
                if takearg and val is None:
                    # value expected in the following argument
                    if i >= len(args) or args[i].startswith('-'):
                        raise ArgumentPreprocessingError(arg)
                    val = args[i]
                    del args[i]
                cb(option, val)
        else:
            i += 1
+
class Run:
    """helper class to use as main for pylint :

    run(*sys.argv[1:])
    """
    # linter class to instantiate; may be overridden by subclasses
    LinterClass = PyLinter
    option_groups = (
        ('Commands', 'Options which are actually commands. Options in this \
group are mutually exclusive.'),
        )

    def __init__(self, args, reporter=None, exit=True):
        # handle 'rcfile' and 'load-plugins' before anything else: they
        # change how the regular option parser built below is configured
        self._rcfile = None
        self._plugins = []
        try:
            preprocess_options(args, {
                # option: (callback, takearg)
                'rcfile': (self.cb_set_rcfile, True),
                'load-plugins': (self.cb_add_plugins, True),
                })
        except ArgumentPreprocessingError, e:
            print >> sys.stderr, 'Argument %s expects a value.' % (e.args[0],)
            sys.exit(32)

        # build the linter, declaring the command line only options
        self.linter = linter = self.LinterClass((
            ('rcfile',
             {'action' : 'callback', 'callback' : lambda *args: 1,
              'type': 'string', 'metavar': '<file>',
              'help' : 'Specify a configuration file.'}),

            ('init-hook',
             {'action' : 'callback', 'type' : 'string', 'metavar': '<code>',
              'callback' : cb_init_hook, 'level': 1,
              'help' : 'Python code to execute, usually for sys.path \
manipulation such as pygtk.require().'}),

            ('help-msg',
             {'action' : 'callback', 'type' : 'string', 'metavar': '<msg-id>',
              'callback' : self.cb_help_message,
              'group': 'Commands',
              'help' : '''Display a help message for the given message id and \
exit. The value may be a comma separated list of message ids.'''}),

            ('list-msgs',
             {'action' : 'callback', 'metavar': '<msg-id>',
              'callback' : self.cb_list_messages,
              'group': 'Commands', 'level': 1,
              'help' : "Generate pylint's messages."}),

            ('full-documentation',
             {'action' : 'callback', 'metavar': '<msg-id>',
              'callback' : self.cb_full_documentation,
              'group': 'Commands', 'level': 1,
              'help' : "Generate pylint's full documentation."}),

            ('generate-rcfile',
             {'action' : 'callback', 'callback' : self.cb_generate_config,
              'group': 'Commands',
              'help' : '''Generate a sample configuration file according to \
the current configuration. You can put other options before this one to get \
them in the generated configuration.'''}),

            ('generate-man',
             {'action' : 'callback', 'callback' : self.cb_generate_manpage,
              'group': 'Commands',
              'help' : "Generate pylint's man page.",'hide': True}),

            ('errors-only',
             {'action' : 'callback', 'callback' : self.cb_error_mode,
              'short': 'E',
              'help' : '''In error mode, checkers without error messages are \
disabled and for others, only the ERROR messages are displayed, and no reports \
are done by default'''}),

            ('profile',
             {'type' : 'yn', 'metavar' : '<y_or_n>',
              'default': False, 'hide': True,
              'help' : 'Profiled execution.'}),

            ), option_groups=self.option_groups,
               reporter=reporter, pylintrc=self._rcfile)
        # register standard checkers
        linter.load_default_plugins()
        # load command line plugins
        linter.load_plugin_modules(self._plugins)
        # add some help section
        linter.add_help_section('Environment variables', config.ENV_HELP, level=1)
        linter.add_help_section('Output',
'Using the default text output, the message format is : \n'
' \n'
' MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE \n'
' \n'
'There are 5 kind of message types : \n'
' * (C) convention, for programming standard violation \n'
' * (R) refactor, for bad code smell \n'
' * (W) warning, for python specific problems \n'
' * (E) error, for probable bugs in the code \n'
' * (F) fatal, if an error occurred which prevented pylint from doing further\n'
'processing.\n'
          , level=1)
        linter.add_help_section('Output status code',
'Pylint should leave with following status code: \n'
' * 0 if everything went fine \n'
' * 1 if a fatal message was issued \n'
' * 2 if an error message was issued \n'
' * 4 if a warning message was issued \n'
' * 8 if a refactor message was issued \n'
' * 16 if a convention message was issued \n'
' * 32 on usage error \n'
' \n'
'status 1 to 16 will be bit-ORed so you can know which different categories has\n'
'been issued by analysing pylint output status code\n',
          level=1)
        # read configuration
        linter.disable('W0704')
        linter.disable('I0020')
        linter.disable('I0021')
        linter.read_config_file()
        # is there some additional plugins in the file configuration, in
        config_parser = linter.cfgfile_parser
        if config_parser.has_option('MASTER', 'load-plugins'):
            plugins = splitstrip(config_parser.get('MASTER', 'load-plugins'))
            linter.load_plugin_modules(plugins)
        # now we can load file config and command line, plugins (which can
        # provide options) have been registered
        linter.load_config_file()
        if reporter:
            # if a custom reporter is provided as argument, it may be overridden
            # by file parameters, so re-set it here, but before command line
            # parsing so it's still overrideable by command line option
            linter.set_reporter(reporter)
        try:
            args = linter.load_command_line_configuration(args)
        except SystemExit, exc:
            if exc.code == 2: # bad options
                exc.code = 32
            raise
        if not args:
            # no module/package to check: display help and exit with the
            # usage-error status
            print linter.help()
            sys.exit(32)
        # insert current working directory to the python path to have a correct
        # behaviour
        linter.prepare_import_path(args)
        if self.linter.config.profile:
            # profiled run: check under cProfile and print the hot spots
            print >> sys.stderr, '** profiled run'
            import cProfile, pstats
            cProfile.runctx('linter.check(%r)' % args, globals(), locals(), 'stones.prof' )
            data = pstats.Stats('stones.prof')
            data.strip_dirs()
            data.sort_stats('time', 'calls')
            data.print_stats(30)
        else:
            linter.check(args)
        linter.cleanup_import_path()
        if exit:
            sys.exit(self.linter.msg_status)

    def cb_set_rcfile(self, name, value):
        """callback for option preprocessing (i.e. before optik parsing)"""
        self._rcfile = value

    def cb_add_plugins(self, name, value):
        """callback for option preprocessing (i.e. before optik parsing)"""
        self._plugins.extend(splitstrip(value))

    def cb_error_mode(self, *args, **kwargs):
        """error mode:
        * disable all but error messages
        * disable the 'miscellaneous' checker which can be safely deactivated in
          debug
        * disable reports
        * do not save execution information
        """
        self.linter.error_mode()

    def cb_generate_config(self, *args, **kwargs):
        """optik callback for sample config file generation"""
        self.linter.generate_config(skipsections=('COMMANDS',))
        sys.exit(0)

    def cb_generate_manpage(self, *args, **kwargs):
        """optik callback for man page generation"""
        from . import __pkginfo__
        self.linter.generate_manpage(__pkginfo__)
        sys.exit(0)

    def cb_help_message(self, option, optname, value, parser):
        """optik callback for printing some help about a particular message"""
        self.linter.help_message(splitstrip(value))
        sys.exit(0)

    def cb_full_documentation(self, option, optname, value, parser):
        """optik callback for printing full documentation"""
        self.linter.print_full_documentation()
        sys.exit(0)

    def cb_list_messages(self, option, optname, value, parser): # FIXME
        """optik callback for printing available messages"""
        self.linter.list_messages()
        sys.exit(0)
+
def cb_init_hook(option, optname, value, parser):
    """exec arbitrary code to set sys.path for instance"""
    # NOTE(review): this executes user-supplied code from the command line or
    # configuration file; only use with trusted configuration sources
    exec value
+
+
if __name__ == '__main__':
    # command-line entry point; exits with the linter's bit-ORed status code
    Run(sys.argv[1:])
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/__init__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/__init__.py
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/__init__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/__init__.py
@@ -0,0 +1,78 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""Python Abstract Syntax Tree New Generation
+
+The aim of this module is to provide a common base representation of
+python source code for projects such as pychecker, pyreverse,
+pylint... Well, actually the development of this library is essentially
+governed by pylint's needs.
+
+It extends class defined in the python's _ast module with some
+additional methods and attributes. Instance attributes are added by a
+builder object, which can either generate extended ast (let's call
+them astng ;) by visiting an existent ast tree or by inspecting living
+object. Methods are added by monkey patching ast classes.
+
+Main modules are:
+
+* nodes and scoped_nodes for more information about methods and
+ attributes added to different node classes
+
+* the manager contains a high level object to get astng trees from
+ source files and living objects. It maintains a cache of previously
+ constructed tree for quick access
+
+* builder contains the class responsible to build astng trees
+"""
+__doctype__ = "restructuredtext en"
+
import sys
# the builtins module is named differently under python 2 and 3; checkers use
# BUILTINS_MODULE to look up builtin objects in a version-agnostic way
if sys.version_info >= (3, 0):
    BUILTINS_MODULE = 'builtins'
else:
    BUILTINS_MODULE = '__builtin__'
+
+# WARNING: internal imports order matters !
+
+# make all exception classes accessible from astng package
+from .exceptions import *
+
+# make all node classes accessible from astng package
+from .nodes import *
+
+# trigger extra monkey-patching
+from . import inference
+
+# more stuff available
+from . import raw_building
+from .bases import YES, Instance, BoundMethod, UnboundMethod
+from .node_classes import are_exclusive, unpack_infer
+from .scoped_nodes import builtin_lookup
+
+# make a manager instance (borg) as well as Project and Package classes
+# accessible from astng package
+from .manager import ASTNGManager, Project
+MANAGER = ASTNGManager()
+del ASTNGManager
+
+# load brain plugins
+from .brain import py2mechanize
+from .brain import py2qt4
+from .brain import py2stdlib
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/__pkginfo__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/__pkginfo__.py
@@ -0,0 +1,52 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""logilab.astng packaging information"""
+
# distribution (PyPI) name
distname = 'logilab-astng'

# importable package: logilab.astng
modname = 'astng'
subpackage_of = 'logilab'

numversion = (0, 24, 2)
version = '.'.join([str(num) for num in numversion])

install_requires = ['logilab-common >= 0.53.0']

license = 'LGPL'

# contact / distribution locations
author = 'Logilab'
author_email = 'python-projects@lists.logilab.org'
mailinglist = "mailto://%s" % author_email
web = "http://www.logilab.org/project/%s" % distname
ftp = "ftp://ftp.logilab.org/pub/%s" % modname

description = "rebuild a new abstract syntax tree from Python's ast"

from os.path import join
# extra data directories shipped with the source distribution
include_dirs = ['brain',
                join('test', 'regrtest_data'),
                join('test', 'data'), join('test', 'data2')]

classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
               "Topic :: Software Development :: Quality Assurance",
               "Programming Language :: Python",
               "Programming Language :: Python :: 2",
               "Programming Language :: Python :: 3",
               ]
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/as_string.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/as_string.py
@@ -0,0 +1,427 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""This module renders ASTNG nodes to string representation.
+
+It will probably not work on bare _ast trees.
+"""
+import sys
+
+
+INDENT = ' ' # 4 spaces ; keep indentation variable
+
+
+def _import_string(names):
+ """return a list of (name, asname) formatted as a string"""
+ _names = []
+ for name, asname in names:
+ if asname is not None:
+ _names.append('%s as %s' % (name, asname))
+ else:
+ _names.append(name)
+ return ', '.join(_names)
+
+
class AsStringVisitor(object):
    """Visitor to render an ASTNG node as string

    Each visit_<node> method returns the source string for its node kind;
    ``__call__`` dispatches through ``node.accept``.  This base class renders
    python 2 only statements (print, exec, backquotes, ``except X, e``);
    AsStringVisitor3k overrides the python 3 variants.
    """

    def __call__(self, node):
        """Makes this visitor behave as a simple function"""
        return node.accept(self)

    def _stmt_list(self, stmts):
        """return a list of nodes to string"""
        # render each statement, drop empty renderings, then indent the whole
        # suite one level
        stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr])
        return INDENT + stmts.replace('\n', '\n'+INDENT)


    ## visit_<node> methods ###########################################

    def visit_arguments(self, node):
        """return an astng.Arguments node as string"""
        return node.format_args()

    def visit_assattr(self, node):
        """return an astng.AssAttr node as string"""
        return self.visit_getattr(node)

    def visit_assert(self, node):
        """return an astng.Assert node as string"""
        if node.fail:
            return 'assert %s, %s' % (node.test.accept(self),
                                      node.fail.accept(self))
        return 'assert %s' % node.test.accept(self)

    def visit_assname(self, node):
        """return an astng.AssName node as string"""
        return node.name

    def visit_assign(self, node):
        """return an astng.Assign node as string"""
        # chained assignment: all targets joined with ' = '
        lhs = ' = '.join([n.accept(self) for n in node.targets])
        return '%s = %s' % (lhs, node.value.accept(self))

    def visit_augassign(self, node):
        """return an astng.AugAssign node as string"""
        return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self))

    def visit_backquote(self, node):
        """return an astng.Backquote node as string (python 2 repr syntax)"""
        return '`%s`' % node.value.accept(self)

    def visit_binop(self, node):
        """return an astng.BinOp node as string"""
        # operands are always parenthesized so no precedence handling is needed
        return '(%s) %s (%s)' % (node.left.accept(self), node.op, node.right.accept(self))

    def visit_boolop(self, node):
        """return an astng.BoolOp node as string"""
        return (' %s ' % node.op).join(['(%s)' % n.accept(self)
                                        for n in node.values])

    def visit_break(self, node):
        """return an astng.Break node as string"""
        return 'break'

    def visit_callfunc(self, node):
        """return an astng.CallFunc node as string"""
        expr_str = node.func.accept(self)
        args = [arg.accept(self) for arg in node.args]
        if node.starargs:
            args.append( '*' + node.starargs.accept(self))
        if node.kwargs:
            args.append( '**' + node.kwargs.accept(self))
        return '%s(%s)' % (expr_str, ', '.join(args))

    def visit_class(self, node):
        """return an astng.Class node as string"""
        decorate = node.decorators and node.decorators.accept(self) or ''
        bases = ', '.join([n.accept(self) for n in node.bases])
        bases = bases and '(%s)' % bases or ''
        docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
        return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs,
                                              self._stmt_list( node.body))

    def visit_compare(self, node):
        """return an astng.Compare node as string"""
        # node.ops is a list of (operator, operand) pairs for chained
        # comparisons like 'a < b < c'
        rhs_str = ' '.join(['%s %s' % (op, expr.accept(self))
                            for op, expr in node.ops])
        return '%s %s' % (node.left.accept(self), rhs_str)

    def visit_comprehension(self, node):
        """return an astng.Comprehension node as string"""
        ifs = ''.join([ ' if %s' % n.accept(self) for n in node.ifs])
        return 'for %s in %s%s' % (node.target.accept(self),
                                   node.iter.accept(self), ifs )

    def visit_const(self, node):
        """return an astng.Const node as string"""
        return repr(node.value)

    def visit_continue(self, node):
        """return an astng.Continue node as string"""
        return 'continue'

    def visit_delete(self, node): # XXX check if correct
        """return an astng.Delete node as string"""
        return 'del %s' % ', '.join([child.accept(self)
                                     for child in node.targets])

    def visit_delattr(self, node):
        """return an astng.DelAttr node as string"""
        return self.visit_getattr(node)

    def visit_delname(self, node):
        """return an astng.DelName node as string"""
        return node.name

    def visit_decorators(self, node):
        """return an astng.Decorators node as string"""
        return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes])

    def visit_dict(self, node):
        """return an astng.Dict node as string"""
        return '{%s}' % ', '.join(['%s: %s' % (key.accept(self),
                                               value.accept(self)) for key, value in node.items])

    def visit_dictcomp(self, node):
        """return an astng.DictComp node as string"""
        return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self),
                                ' '.join([n.accept(self) for n in node.generators]))

    def visit_discard(self, node):
        """return an astng.Discard node as string"""
        return node.value.accept(self)

    def visit_emptynode(self, node):
        """dummy method for visiting an Empty node"""
        return ''

    def visit_excepthandler(self, node):
        """return an astng.ExceptHandler node as string (python 2 style)"""
        if node.type:
            if node.name:
                excs = 'except %s, %s' % (node.type.accept(self),
                                          node.name.accept(self))
            else:
                excs = 'except %s' % node.type.accept(self)
        else:
            excs = 'except'
        return '%s:\n%s' % (excs, self._stmt_list(node.body))

    def visit_ellipsis(self, node):
        """return an astng.Ellipsis node as string"""
        return '...'

    def visit_empty(self, node):
        """return an Empty node as string"""
        return ''

    def visit_exec(self, node):
        """return an astng.Exec node as string (python 2 statement)"""
        if node.locals:
            return 'exec %s in %s, %s' % (node.expr.accept(self),
                                          node.locals.accept(self),
                                          node.globals.accept(self))
        if node.globals:
            return 'exec %s in %s' % (node.expr.accept(self),
                                      node.globals.accept(self))
        return 'exec %s' % node.expr.accept(self)

    def visit_extslice(self, node):
        """return an astng.ExtSlice node as string"""
        return ','.join( [dim.accept(self) for dim in node.dims] )

    def visit_for(self, node):
        """return an astng.For node as string"""
        fors = 'for %s in %s:\n%s' % (node.target.accept(self),
                                      node.iter.accept(self),
                                      self._stmt_list( node.body))
        if node.orelse:
            fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse))
        return fors

    def visit_from(self, node):
        """return an astng.From node as string"""
        # node.level counts the leading dots of a relative import
        return 'from %s import %s' % ('.' * (node.level or 0) + node.modname,
                                      _import_string(node.names))

    def visit_function(self, node):
        """return an astng.Function node as string"""
        decorate = node.decorators and node.decorators.accept(self) or ''
        docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
        return '\n%sdef %s(%s):%s\n%s' % (decorate, node.name, node.args.accept(self),
                                          docs, self._stmt_list(node.body))

    def visit_genexpr(self, node):
        """return an astng.GenExpr node as string"""
        return '(%s %s)' % (node.elt.accept(self), ' '.join([n.accept(self)
                                                             for n in node.generators]))

    def visit_getattr(self, node):
        """return an astng.Getattr node as string"""
        return '%s.%s' % (node.expr.accept(self), node.attrname)

    def visit_global(self, node):
        """return an astng.Global node as string"""
        return 'global %s' % ', '.join(node.names)

    def visit_if(self, node):
        """return an astng.If node as string"""
        ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))]
        if node.orelse:# XXX use elif ???
            ifs.append('else:\n%s' % self._stmt_list(node.orelse))
        return '\n'.join(ifs)

    def visit_ifexp(self, node):
        """return an astng.IfExp node as string"""
        return '%s if %s else %s' % (node.body.accept(self),
                                     node.test.accept(self), node.orelse.accept(self))

    def visit_import(self, node):
        """return an astng.Import node as string"""
        return 'import %s' % _import_string(node.names)

    def visit_keyword(self, node):
        """return an astng.Keyword node as string"""
        return '%s=%s' % (node.arg, node.value.accept(self))

    def visit_lambda(self, node):
        """return an astng.Lambda node as string"""
        return 'lambda %s: %s' % (node.args.accept(self), node.body.accept(self))

    def visit_list(self, node):
        """return an astng.List node as string"""
        return '[%s]' % ', '.join([child.accept(self) for child in node.elts])

    def visit_listcomp(self, node):
        """return an astng.ListComp node as string"""
        return '[%s %s]' % (node.elt.accept(self), ' '.join([n.accept(self)
                                                             for n in node.generators]))

    def visit_module(self, node):
        """return an astng.Module node as string"""
        docs = node.doc and '"""%s"""\n\n' % node.doc or ''
        return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n'

    def visit_name(self, node):
        """return an astng.Name node as string"""
        return node.name

    def visit_pass(self, node):
        """return an astng.Pass node as string"""
        return 'pass'

    def visit_print(self, node):
        """return an astng.Print node as string (python 2 statement)"""
        nodes = ', '.join([n.accept(self) for n in node.values])
        if not node.nl:
            # trailing comma suppresses the newline
            nodes = '%s,' % nodes
        if node.dest:
            return 'print >> %s, %s' % (node.dest.accept(self), nodes)
        return 'print %s' % nodes

    def visit_raise(self, node):
        """return an astng.Raise node as string (python 2 three-expression form)"""
        if node.exc:
            if node.inst:
                if node.tback:
                    return 'raise %s, %s, %s' % (node.exc.accept(self),
                                                 node.inst.accept(self),
                                                 node.tback.accept(self))
                return 'raise %s, %s' % (node.exc.accept(self),
                                         node.inst.accept(self))
            return 'raise %s' % node.exc.accept(self)
        return 'raise'

    def visit_return(self, node):
        """return an astng.Return node as string"""
        if node.value:
            return 'return %s' % node.value.accept(self)
        else:
            return 'return'

    def visit_index(self, node):
        """return a astng.Index node as string"""
        return node.value.accept(self)

    def visit_set(self, node):
        """return an astng.Set node as string"""
        return '{%s}' % ', '.join([child.accept(self) for child in node.elts])

    def visit_setcomp(self, node):
        """return an astng.SetComp node as string"""
        return '{%s %s}' % (node.elt.accept(self), ' '.join([n.accept(self)
                                                             for n in node.generators]))

    def visit_slice(self, node):
        """return a astng.Slice node as string"""
        lower = node.lower and node.lower.accept(self) or ''
        upper = node.upper and node.upper.accept(self) or ''
        step = node.step and node.step.accept(self) or ''
        if step:
            return '%s:%s:%s' % (lower, upper, step)
        return '%s:%s' % (lower, upper)

    def visit_subscript(self, node):
        """return an astng.Subscript node as string"""
        return '%s[%s]' % (node.value.accept(self), node.slice.accept(self))

    def visit_tryexcept(self, node):
        """return an astng.TryExcept node as string"""
        trys = ['try:\n%s' % self._stmt_list( node.body)]
        for handler in node.handlers:
            trys.append(handler.accept(self))
        if node.orelse:
            trys.append('else:\n%s' % self._stmt_list(node.orelse))
        return '\n'.join(trys)

    def visit_tryfinally(self, node):
        """return an astng.TryFinally node as string"""
        return 'try:\n%s\nfinally:\n%s' % (self._stmt_list( node.body),
                                           self._stmt_list(node.finalbody))

    def visit_tuple(self, node):
        """return an astng.Tuple node as string"""
        # NOTE(review): a one-element tuple is rendered without its trailing
        # comma ('(x)' instead of '(x,)'), so the rendering does not
        # round-trip in that case
        return '(%s)' % ', '.join([child.accept(self) for child in node.elts])

    def visit_unaryop(self, node):
        """return an astng.UnaryOp node as string"""
        if node.op == 'not':
            # 'not' needs a separating space, symbolic operators do not
            operator = 'not '
        else:
            operator = node.op
        return '%s%s' % (operator, node.operand.accept(self))

    def visit_while(self, node):
        """return an astng.While node as string"""
        whiles = 'while %s:\n%s' % (node.test.accept(self),
                                    self._stmt_list(node.body))
        if node.orelse:
            whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse))
        return whiles

    def visit_with(self, node): # 'with' without 'as' is possible
        """return an astng.With node as string"""
        as_var = node.vars and " as (%s)" % (node.vars.accept(self)) or ""
        withs = 'with (%s)%s:\n%s' % (node.expr.accept(self), as_var,
                                      self._stmt_list( node.body))
        return withs

    def visit_yield(self, node):
        """return an astng.Yield node as string"""
        yi_val = node.value and (" " + node.value.accept(self)) or ""
        return 'yield' + yi_val
+
+
class AsStringVisitor3k(AsStringVisitor):
    """AsStringVisitor3k overwrites some AsStringVisitor methods

    Handles the python 3 only syntax: ``except ... as ...``, ``nonlocal``,
    ``raise ... from ...`` and starred expressions.
    """

    def visit_excepthandler(self, node):
        """return an astng.ExceptHandler node as string (python 3 style)"""
        if node.type:
            if node.name:
                excs = 'except %s as %s' % (node.type.accept(self),
                                            node.name.accept(self))
            else:
                excs = 'except %s' % node.type.accept(self)
        else:
            excs = 'except'
        return '%s:\n%s' % (excs, self._stmt_list(node.body))

    def visit_nonlocal(self, node):
        """return an astng.Nonlocal node as string"""
        return 'nonlocal %s' % ', '.join(node.names)

    def visit_raise(self, node):
        """return an astng.Raise node as string (python 3 'from' form)"""
        if node.exc:
            if node.cause:
                return 'raise %s from %s' % (node.exc.accept(self),
                                             node.cause.accept(self))
            return 'raise %s' % node.exc.accept(self)
        return 'raise'

    def visit_starred(self, node):
        """return Starred node as string"""
        return "*" + node.value.accept(self)
+
if sys.version_info >= (3, 0):
    # under python 3, transparently substitute the visitor that handles the
    # python 3 only constructs
    AsStringVisitor = AsStringVisitor3k

# this visitor is stateless, thus it can be reused
as_string = AsStringVisitor()
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/bases.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/bases.py
@@ -0,0 +1,631 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""This module contains base classes and functions for the nodes and some
+inference utils.
+"""
+
+__docformat__ = "restructuredtext en"
+
+from contextlib import contextmanager
+
+from ..common.compat import builtins
+
+from . import BUILTINS_MODULE
+from .exceptions import InferenceError, ASTNGError, \
+ NotFoundError, UnresolvableName
+from .as_string import as_string
+
+BUILTINS_NAME = builtins.__name__
+
+class Proxy(object):
+ """a simple proxy object"""
+ _proxied = None
+
+ def __init__(self, proxied=None):
+ if proxied is not None:
+ self._proxied = proxied
+
+ def __getattr__(self, name):
+ if name == '_proxied':
+ return getattr(self.__class__, '_proxied')
+ if name in self.__dict__:
+ return self.__dict__[name]
+ return getattr(self._proxied, name)
+
+ def infer(self, context=None):
+ yield self
+
+
+# Inference ##################################################################
+
+class InferenceContext(object):
+ __slots__ = ('path', 'lookupname', 'callcontext', 'boundnode')
+
+ def __init__(self, path=None):
+ if path is None:
+ self.path = set()
+ else:
+ self.path = path
+ self.lookupname = None
+ self.callcontext = None
+ self.boundnode = None
+
+ def push(self, node):
+ name = self.lookupname
+ if (node, name) in self.path:
+ raise StopIteration()
+ self.path.add( (node, name) )
+
+ def clone(self):
+ # XXX copy lookupname/callcontext ?
+ clone = InferenceContext(self.path)
+ clone.callcontext = self.callcontext
+ clone.boundnode = self.boundnode
+ return clone
+
+ @contextmanager
+ def restore_path(self):
+ path = set(self.path)
+ yield
+ self.path = path
+
+def copy_context(context):
+ if context is not None:
+ return context.clone()
+ else:
+ return InferenceContext()
+
+
+def _infer_stmts(stmts, context, frame=None):
+ """return an iterator on statements inferred by each statement in <stmts>
+ """
+ stmt = None
+ infered = False
+ if context is not None:
+ name = context.lookupname
+ context = context.clone()
+ else:
+ name = None
+ context = InferenceContext()
+ for stmt in stmts:
+ if stmt is YES:
+ yield stmt
+ infered = True
+ continue
+ context.lookupname = stmt._infer_name(frame, name)
+ try:
+ for infered in stmt.infer(context):
+ yield infered
+ infered = True
+ except UnresolvableName:
+ continue
+ except InferenceError:
+ yield YES
+ infered = True
+ if not infered:
+ raise InferenceError(str(stmt))
+
+
+# special inference objects (e.g. may be returned as nodes by .infer()) #######
+
+class _Yes(object):
+ """a yes object"""
+ def __repr__(self):
+ return 'YES'
+ def __getattribute__(self, name):
+ if name == 'next':
+ raise AttributeError('next method should not be called')
+ if name.startswith('__') and name.endswith('__'):
+ # to avoid inspection pb
+ return super(_Yes, self).__getattribute__(name)
+ return self
+ def __call__(self, *args, **kwargs):
+ return self
+
+
+YES = _Yes()
+
+
+class Instance(Proxy):
+ """a special node representing a class instance"""
+ def getattr(self, name, context=None, lookupclass=True):
+ try:
+ values = self._proxied.instance_attr(name, context)
+ except NotFoundError:
+ if name == '__class__':
+ return [self._proxied]
+ if lookupclass:
+ # class attributes not available through the instance
+ # unless they are explicitly defined
+ if name in ('__name__', '__bases__', '__mro__', '__subclasses__'):
+ return self._proxied.local_attr(name)
+ return self._proxied.getattr(name, context)
+ raise NotFoundError(name)
+ # since we've no context information, return matching class members as
+ # well
+ if lookupclass:
+ try:
+ return values + self._proxied.getattr(name, context)
+ except NotFoundError:
+ pass
+ return values
+
+ def igetattr(self, name, context=None):
+ """inferred getattr"""
+ try:
+ # XXX frame should be self._proxied, or not ?
+ get_attr = self.getattr(name, context, lookupclass=False)
+ return _infer_stmts(self._wrap_attr(get_attr, context), context,
+ frame=self)
+ except NotFoundError:
+ try:
+                # fall back to the class's igetattr since it has some logic to handle
+ # descriptors
+ return self._wrap_attr(self._proxied.igetattr(name, context),
+ context)
+ except NotFoundError:
+ raise InferenceError(name)
+
+ def _wrap_attr(self, attrs, context=None):
+ """wrap bound methods of attrs in a InstanceMethod proxies"""
+ for attr in attrs:
+ if isinstance(attr, UnboundMethod):
+ if BUILTINS_NAME + '.property' in attr.decoratornames():
+ for infered in attr.infer_call_result(self, context):
+ yield infered
+ else:
+ yield BoundMethod(attr, self)
+ else:
+ yield attr
+
+ def infer_call_result(self, caller, context=None):
+ """infer what a class instance is returning when called"""
+ infered = False
+ for node in self._proxied.igetattr('__call__', context):
+ for res in node.infer_call_result(caller, context):
+ infered = True
+ yield res
+ if not infered:
+ raise InferenceError()
+
+ def __repr__(self):
+ return '<Instance of %s.%s at 0x%s>' % (self._proxied.root().name,
+ self._proxied.name,
+ id(self))
+ def __str__(self):
+ return 'Instance of %s.%s' % (self._proxied.root().name,
+ self._proxied.name)
+
+ def callable(self):
+ try:
+ self._proxied.getattr('__call__')
+ return True
+ except NotFoundError:
+ return False
+
+ def pytype(self):
+ return self._proxied.qname()
+
+ def display_type(self):
+ return 'Instance of'
+
+
+class UnboundMethod(Proxy):
+ """a special node representing a method not bound to an instance"""
+ def __repr__(self):
+ frame = self._proxied.parent.frame()
+ return '<%s %s of %s at 0x%s' % (self.__class__.__name__,
+ self._proxied.name,
+ frame.qname(), id(self))
+
+ def is_bound(self):
+ return False
+
+ def getattr(self, name, context=None):
+ if name == 'im_func':
+ return [self._proxied]
+ return super(UnboundMethod, self).getattr(name, context)
+
+ def igetattr(self, name, context=None):
+ if name == 'im_func':
+ return iter((self._proxied,))
+ return super(UnboundMethod, self).igetattr(name, context)
+
+ def infer_call_result(self, caller, context):
+ # If we're unbound method __new__ of builtin object, the result is an
+ # instance of the class given as first argument.
+ if (self._proxied.name == '__new__' and
+ self._proxied.parent.frame().qname() == '%s.object' % BUILTINS_MODULE):
+ return (x is YES and x or Instance(x) for x in caller.args[0].infer())
+ return self._proxied.infer_call_result(caller, context)
+
+
+class BoundMethod(UnboundMethod):
+ """a special node representing a method bound to an instance"""
+ def __init__(self, proxy, bound):
+ UnboundMethod.__init__(self, proxy)
+ self.bound = bound
+
+ def is_bound(self):
+ return True
+
+ def infer_call_result(self, caller, context):
+ context = context.clone()
+ context.boundnode = self.bound
+ return self._proxied.infer_call_result(caller, context)
+
+
+class Generator(Instance):
+ """a special node representing a generator"""
+ def callable(self):
+ return True
+
+ def pytype(self):
+ return '%s.generator' % BUILTINS_MODULE
+
+ def display_type(self):
+ return 'Generator'
+
+ def __repr__(self):
+ return '<Generator(%s) l.%s at 0x%s>' % (self._proxied.name, self.lineno, id(self))
+
+ def __str__(self):
+ return 'Generator(%s)' % (self._proxied.name)
+
+
+# decorators ##################################################################
+
+def path_wrapper(func):
+ """return the given infer function wrapped to handle the path"""
+ def wrapped(node, context=None, _func=func, **kwargs):
+ """wrapper function handling context"""
+ if context is None:
+ context = InferenceContext()
+ context.push(node)
+ yielded = set()
+ for res in _func(node, context, **kwargs):
+ # unproxy only true instance, not const, tuple, dict...
+ if res.__class__ is Instance:
+ ares = res._proxied
+ else:
+ ares = res
+ if not ares in yielded:
+ yield res
+ yielded.add(ares)
+ return wrapped
+
+def yes_if_nothing_infered(func):
+ def wrapper(*args, **kwargs):
+ infered = False
+ for node in func(*args, **kwargs):
+ infered = True
+ yield node
+ if not infered:
+ yield YES
+ return wrapper
+
+def raise_if_nothing_infered(func):
+ def wrapper(*args, **kwargs):
+ infered = False
+ for node in func(*args, **kwargs):
+ infered = True
+ yield node
+ if not infered:
+ raise InferenceError()
+ return wrapper
+
+
+# Node ######################################################################
+
+class NodeNG(object):
+ """Base Class for all ASTNG node classes.
+
+ It represents a node of the new abstract syntax tree.
+ """
+ is_statement = False
+ optional_assign = False # True for For (and for Comprehension if py <3.0)
+ is_function = False # True for Function nodes
+ # attributes below are set by the builder module or by raw factories
+ lineno = None
+ fromlineno = None
+ tolineno = None
+ col_offset = None
+ # parent node in the tree
+ parent = None
+ # attributes containing child node(s) redefined in most concrete classes:
+ _astng_fields = ()
+
+ def _repr_name(self):
+ """return self.name or self.attrname or '' for nice representation"""
+ return getattr(self, 'name', getattr(self, 'attrname', ''))
+
+ def __str__(self):
+ return '%s(%s)' % (self.__class__.__name__, self._repr_name())
+
+ def __repr__(self):
+ return '<%s(%s) l.%s [%s] at Ox%x>' % (self.__class__.__name__,
+ self._repr_name(),
+ self.fromlineno,
+ self.root().name,
+ id(self))
+
+
+ def accept(self, visitor):
+ klass = self.__class__.__name__
+ func = getattr(visitor, "visit_" + self.__class__.__name__.lower())
+ return func(self)
+
+ def get_children(self):
+ for field in self._astng_fields:
+ attr = getattr(self, field)
+ if attr is None:
+ continue
+ if isinstance(attr, (list, tuple)):
+ for elt in attr:
+ yield elt
+ else:
+ yield attr
+
+ def last_child(self):
+ """an optimized version of list(get_children())[-1]"""
+ for field in self._astng_fields[::-1]:
+ attr = getattr(self, field)
+            if not attr: # None or empty list / tuple
+ continue
+ if isinstance(attr, (list, tuple)):
+ return attr[-1]
+ else:
+ return attr
+ return None
+
+ def parent_of(self, node):
+ """return true if i'm a parent of the given node"""
+ parent = node.parent
+ while parent is not None:
+ if self is parent:
+ return True
+ parent = parent.parent
+ return False
+
+ def statement(self):
+ """return the first parent node marked as statement node"""
+ if self.is_statement:
+ return self
+ return self.parent.statement()
+
+ def frame(self):
+ """return the first parent frame node (i.e. Module, Function or Class)
+ """
+ return self.parent.frame()
+
+ def scope(self):
+ """return the first node defining a new scope (i.e. Module, Function,
+ Class, Lambda but also GenExpr)
+ """
+ return self.parent.scope()
+
+ def root(self):
+ """return the root node of the tree, (i.e. a Module)"""
+ if self.parent:
+ return self.parent.root()
+ return self
+
+ def child_sequence(self, child):
+ """search for the right sequence where the child lies in"""
+ for field in self._astng_fields:
+ node_or_sequence = getattr(self, field)
+ if node_or_sequence is child:
+ return [node_or_sequence]
+ # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
+ if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
+ return node_or_sequence
+ else:
+ msg = 'Could not found %s in %s\'s children'
+ raise ASTNGError(msg % (repr(child), repr(self)))
+
+ def locate_child(self, child):
+ """return a 2-uple (child attribute name, sequence or node)"""
+ for field in self._astng_fields:
+ node_or_sequence = getattr(self, field)
+ # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
+ if child is node_or_sequence:
+ return field, child
+ if isinstance(node_or_sequence, (tuple, list)) and child in node_or_sequence:
+ return field, node_or_sequence
+ msg = 'Could not found %s in %s\'s children'
+ raise ASTNGError(msg % (repr(child), repr(self)))
+ # FIXME : should we merge child_sequence and locate_child ? locate_child
+ # is only used in are_exclusive, child_sequence one time in pylint.
+
+ def next_sibling(self):
+ """return the next sibling statement"""
+ return self.parent.next_sibling()
+
+ def previous_sibling(self):
+ """return the previous sibling statement"""
+ return self.parent.previous_sibling()
+
+ def nearest(self, nodes):
+ """return the node which is the nearest before this one in the
+ given list of nodes
+ """
+ myroot = self.root()
+ mylineno = self.fromlineno
+ nearest = None, 0
+ for node in nodes:
+ assert node.root() is myroot, \
+ 'nodes %s and %s are not from the same module' % (self, node)
+ lineno = node.fromlineno
+ if node.fromlineno > mylineno:
+ break
+ if lineno > nearest[1]:
+ nearest = node, lineno
+ # FIXME: raise an exception if nearest is None ?
+ return nearest[0]
+
+ def set_line_info(self, lastchild):
+ if self.lineno is None:
+ self.fromlineno = self._fixed_source_line()
+ else:
+ self.fromlineno = self.lineno
+ if lastchild is None:
+ self.tolineno = self.fromlineno
+ else:
+ self.tolineno = lastchild.tolineno
+ return
+ # TODO / FIXME:
+ assert self.fromlineno is not None, self
+ assert self.tolineno is not None, self
+
+ def _fixed_source_line(self):
+ """return the line number where the given node appears
+
+ we need this method since not all nodes have the lineno attribute
+ correctly set...
+ """
+ line = self.lineno
+ _node = self
+ try:
+ while line is None:
+ _node = _node.get_children().next()
+ line = _node.lineno
+ except StopIteration:
+ _node = self.parent
+ while _node and line is None:
+ line = _node.lineno
+ _node = _node.parent
+ return line
+
+ def block_range(self, lineno):
+ """handle block line numbers range for non block opening statements
+ """
+ return lineno, self.tolineno
+
+ def set_local(self, name, stmt):
+ """delegate to a scoped parent handling a locals dictionary"""
+ self.parent.set_local(name, stmt)
+
+ def nodes_of_class(self, klass, skip_klass=None):
+ """return an iterator on nodes which are instance of the given class(es)
+
+ klass may be a class object or a tuple of class objects
+ """
+ if isinstance(self, klass):
+ yield self
+ for child_node in self.get_children():
+ if skip_klass is not None and isinstance(child_node, skip_klass):
+ continue
+ for matching in child_node.nodes_of_class(klass, skip_klass):
+ yield matching
+
+ def _infer_name(self, frame, name):
+ # overridden for From, Import, Global, TryExcept and Arguments
+ return None
+
+ def infer(self, context=None):
+ """we don't know how to resolve a statement by default"""
+ # this method is overridden by most concrete classes
+ raise InferenceError(self.__class__.__name__)
+
+ def infered(self):
+ '''return list of infered values for a more simple inference usage'''
+ return list(self.infer())
+
+ def instanciate_class(self):
+ """instanciate a node if it is a Class node, else return self"""
+ return self
+
+ def has_base(self, node):
+ return False
+
+ def callable(self):
+ return False
+
+ def eq(self, value):
+ return False
+
+ def as_string(self):
+ return as_string(self)
+
+ def repr_tree(self, ids=False):
+ """print a nice astng tree representation.
+
+        :param ids: if true, we also print the ids (useful for debugging)"""
+ result = []
+ _repr_tree(self, result, ids=ids)
+ return "\n".join(result)
+
+
+class Statement(NodeNG):
+ """Statement node adding a few attributes"""
+ is_statement = True
+
+ def next_sibling(self):
+ """return the next sibling statement"""
+ stmts = self.parent.child_sequence(self)
+ index = stmts.index(self)
+ try:
+ return stmts[index +1]
+ except IndexError:
+ pass
+
+ def previous_sibling(self):
+ """return the previous sibling statement"""
+ stmts = self.parent.child_sequence(self)
+ index = stmts.index(self)
+ if index >= 1:
+ return stmts[index -1]
+
+INDENT = " "
+
+def _repr_tree(node, result, indent='', _done=None, ids=False):
+ """built a tree representation of a node as a list of lines"""
+ if _done is None:
+ _done = set()
+    if not hasattr(node, '_astng_fields'): # not an astng node
+ return
+ if node in _done:
+ result.append( indent + 'loop in tree: %s' % node )
+ return
+ _done.add(node)
+ node_str = str(node)
+ if ids:
+ node_str += ' . \t%x' % id(node)
+ result.append( indent + node_str )
+ indent += INDENT
+ for field in node._astng_fields:
+ value = getattr(node, field)
+ if isinstance(value, (list, tuple) ):
+ result.append( indent + field + " = [" )
+ for child in value:
+ if isinstance(child, (list, tuple) ):
+ # special case for Dict # FIXME
+ _repr_tree(child[0], result, indent, _done, ids)
+ _repr_tree(child[1], result, indent, _done, ids)
+ result.append(indent + ',')
+ else:
+ _repr_tree(child, result, indent, _done, ids)
+ result.append( indent + "]" )
+ else:
+ result.append( indent + field + " = " )
+ _repr_tree(value, result, indent, _done, ids)
+
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/brain/__init__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/brain/__init__.py
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/brain/py2mechanize.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/brain/py2mechanize.py
@@ -0,0 +1,20 @@
+from .. import MANAGER
+from ..builder import ASTNGBuilder
+
+def mechanize_transform(module):
+ fake = ASTNGBuilder(MANAGER).string_build('''
+
+class Browser(object):
+ def open(self, url, data=None, timeout=None):
+ return None
+ def open_novisit(self, url, data=None, timeout=None):
+ return None
+ def open_local_file(self, filename):
+ return None
+
+''')
+ module.locals['Browser'] = fake.locals['Browser']
+
+import py2stdlib
+py2stdlib.MODULE_TRANSFORMS['mechanize'] = mechanize_transform
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/brain/py2qt4.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/brain/py2qt4.py
@@ -0,0 +1,25 @@
+"""ASTNG hooks for the Python 2 qt4 module.
+
+Currently help understanding of :
+
+* PyQT4.QtCore
+"""
+
+from .. import MANAGER
+from ..builder import ASTNGBuilder
+
+
+def pyqt4_qtcore_transform(module):
+ fake = ASTNGBuilder(MANAGER).string_build('''
+
+def SIGNAL(signal_name): pass
+
+class QObject(object):
+ def emit(self, signal): pass
+''')
+ for klass in ('QObject',):
+ module.locals[klass] = fake.locals[klass]
+
+
+import py2stdlib
+py2stdlib.MODULE_TRANSFORMS['PyQt4.QtCore'] = pyqt4_qtcore_transform
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/brain/py2stdlib.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/brain/py2stdlib.py
@@ -0,0 +1,176 @@
+"""ASTNG hooks for the Python 2 standard library.
+
+Currently help understanding of :
+
+* hashlib.md5 and hashlib.sha1
+"""
+
+from .. import MANAGER
+from ..builder import ASTNGBuilder
+
+MODULE_TRANSFORMS = {}
+
+def hashlib_transform(module):
+ fake = ASTNGBuilder(MANAGER).string_build('''
+
+class md5(object):
+ def __init__(self, value): pass
+ def hexdigest(self):
+ return u''
+
+class sha1(object):
+ def __init__(self, value): pass
+ def hexdigest(self):
+ return u''
+
+''')
+ for hashfunc in ('sha1', 'md5'):
+ module.locals[hashfunc] = fake.locals[hashfunc]
+
+def collections_transform(module):
+ fake = ASTNGBuilder(MANAGER).string_build('''
+
+class defaultdict(dict):
+ default_factory = None
+ def __missing__(self, key): pass
+
+class deque(object):
+ maxlen = 0
+ def __init__(iterable=None, maxlen=None): pass
+ def append(self, x): pass
+ def appendleft(self, x): pass
+ def clear(self): pass
+ def count(self, x): return 0
+ def extend(self, iterable): pass
+ def extendleft(self, iterable): pass
+ def pop(self): pass
+ def popleft(self): pass
+ def remove(self, value): pass
+ def reverse(self): pass
+ def rotate(self, n): pass
+
+''')
+
+ for klass in ('deque', 'defaultdict'):
+ module.locals[klass] = fake.locals[klass]
+
+def pkg_resources_transform(module):
+ fake = ASTNGBuilder(MANAGER).string_build('''
+
+def resource_exists(package_or_requirement, resource_name):
+ pass
+
+def resource_isdir(package_or_requirement, resource_name):
+ pass
+
+def resource_filename(package_or_requirement, resource_name):
+ pass
+
+def resource_stream(package_or_requirement, resource_name):
+ pass
+
+def resource_string(package_or_requirement, resource_name):
+ pass
+
+def resource_listdir(package_or_requirement, resource_name):
+ pass
+
+def extraction_error():
+ pass
+
+def get_cache_path(archive_name, names=()):
+ pass
+
+def postprocess(tempname, filename):
+ pass
+
+def set_extraction_path(path):
+ pass
+
+def cleanup_resources(force=False):
+ pass
+
+''')
+
+ for func_name, func in fake.locals.items():
+ module.locals[func_name] = func
+
+
+def urlparse_transform(module):
+ fake = ASTNGBuilder(MANAGER).string_build('''
+
+def urlparse(urlstring, default_scheme='', allow_fragments=True):
+ return ParseResult()
+
+class ParseResult(object):
+ def __init__(self):
+ self.scheme = ''
+ self.netloc = ''
+ self.path = ''
+ self.params = ''
+ self.query = ''
+ self.fragment = ''
+ self.username = None
+ self.password = None
+ self.hostname = None
+ self.port = None
+
+ def geturl(self):
+ return ''
+''')
+
+ for func_name, func in fake.locals.items():
+ module.locals[func_name] = func
+
+def subprocess_transform(module):
+ fake = ASTNGBuilder(MANAGER).string_build('''
+
+class Popen(object):
+ returncode = pid = 0
+ stdin = stdout = stderr = file()
+
+ def __init__(self, args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=False, shell=False,
+ cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0):
+ pass
+
+ def communicate(self, input=None):
+ return ('string', 'string')
+ def wait(self):
+ return self.returncode
+ def poll(self):
+ return self.returncode
+ def send_signal(self, signal):
+ pass
+ def terminate(self):
+ pass
+ def kill(self):
+ pass
+ ''')
+
+ for func_name, func in fake.locals.items():
+ module.locals[func_name] = func
+
+
+
+MODULE_TRANSFORMS['hashlib'] = hashlib_transform
+MODULE_TRANSFORMS['collections'] = collections_transform
+MODULE_TRANSFORMS['pkg_resources'] = pkg_resources_transform
+MODULE_TRANSFORMS['urlparse'] = urlparse_transform
+MODULE_TRANSFORMS['subprocess'] = subprocess_transform
+
+
+def transform(module):
+ try:
+ tr = MODULE_TRANSFORMS[module.name]
+ except KeyError:
+ pass
+ else:
+ tr(module)
+
+from .. import MANAGER
+MANAGER.register_transformer(transform)
+
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/builder.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/builder.py
@@ -0,0 +1,226 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""The ASTNGBuilder makes astng from living object and / or from _ast
+
+The builder is not thread safe and can't be used to parse different sources
+at the same time.
+"""
+
+__docformat__ = "restructuredtext en"
+
+import sys, re
+from os.path import splitext, basename, dirname, exists, abspath
+
+from ..common.modutils import modpath_from_file
+
+from .exceptions import ASTNGBuildingException, InferenceError
+from .raw_building import InspectBuilder
+from .rebuilder import TreeRebuilder
+from .manager import ASTNGManager
+from .bases import YES, Instance
+
+from _ast import PyCF_ONLY_AST
+def parse(string):
+ return compile(string, "<string>", 'exec', PyCF_ONLY_AST)
+
+if sys.version_info >= (3, 0):
+ from tokenize import detect_encoding
+
+ def open_source_file(filename):
+ byte_stream = open(filename, 'bU')
+ encoding = detect_encoding(byte_stream.readline)[0]
+ stream = open(filename, 'U', encoding=encoding)
+ try:
+ data = stream.read()
+            except UnicodeError, uex: # wrong encoding
+ # detect_encoding returns utf-8 if no encoding specified
+ msg = 'Wrong (%s) or no encoding specified' % encoding
+ raise ASTNGBuildingException(msg)
+ return stream, encoding, data
+
+else:
+ import re
+
+ _ENCODING_RGX = re.compile("\s*#+.*coding[:=]\s*([-\w.]+)")
+
+ def _guess_encoding(string):
+ """get encoding from a python file as string or return None if not found
+ """
+ # check for UTF-8 byte-order mark
+ if string.startswith('\xef\xbb\xbf'):
+ return 'UTF-8'
+ for line in string.split('\n', 2)[:2]:
+ # check for encoding declaration
+ match = _ENCODING_RGX.match(line)
+ if match is not None:
+ return match.group(1)
+
+ def open_source_file(filename):
+ """get data for parsing a file"""
+ stream = open(filename, 'U')
+ data = stream.read()
+ encoding = _guess_encoding(data)
+ return stream, encoding, data
+
+# ast NG builder ##############################################################
+
+MANAGER = ASTNGManager()
+
+class ASTNGBuilder(InspectBuilder):
+ """provide astng building methods"""
+ rebuilder = TreeRebuilder()
+
+ def __init__(self, manager=None):
+ self._manager = manager or MANAGER
+
+ def module_build(self, module, modname=None):
+ """build an astng from a living module instance
+ """
+ node = None
+ path = getattr(module, '__file__', None)
+ if path is not None:
+ path_, ext = splitext(module.__file__)
+ if ext in ('.py', '.pyc', '.pyo') and exists(path_ + '.py'):
+ node = self.file_build(path_ + '.py', modname)
+ if node is None:
+ # this is a built-in module
+ # get a partial representation by introspection
+ node = self.inspect_build(module, modname=modname, path=path)
+ return node
+
+ def file_build(self, path, modname=None):
+ """build astng from a source code file (i.e. from an ast)
+
+ path is expected to be a python source file
+ """
+ try:
+ stream, encoding, data = open_source_file(path)
+ except IOError, exc:
+ msg = 'Unable to load file %r (%s)' % (path, exc)
+ raise ASTNGBuildingException(msg)
+ except SyntaxError, exc: # py3k encoding specification error
+ raise ASTNGBuildingException(exc)
+ except LookupError, exc: # unknown encoding
+ raise ASTNGBuildingException(exc)
+ # get module name if necessary
+ if modname is None:
+ try:
+ modname = '.'.join(modpath_from_file(path))
+ except ImportError:
+ modname = splitext(basename(path))[0]
+ # build astng representation
+ node = self.string_build(data, modname, path)
+ node.file_encoding = encoding
+ return node
+
+ def string_build(self, data, modname='', path=None):
+        """build astng from source code string and return the rebuilt astng"""
+ module = self._data_build(data, modname, path)
+ self._manager.astng_cache[module.name] = module
+ # post tree building steps after we stored the module in the cache:
+ for from_node in module._from_nodes:
+ self.add_from_names_to_locals(from_node)
+ # handle delayed assattr nodes
+ for delayed in module._delayed_assattr:
+ self.delayed_assattr(delayed)
+ if modname:
+ for transformer in self._manager.transformers:
+ transformer(module)
+ return module
+
+ def _data_build(self, data, modname, path):
+        """build tree node from data and add some information"""
+ # this method could be wrapped with a pickle/cache function
+ node = parse(data + '\n')
+ if path is not None:
+ node_file = abspath(path)
+ else:
+ node_file = '<?>'
+ if modname.endswith('.__init__'):
+ modname = modname[:-9]
+ package = True
+ else:
+ package = path and path.find('__init__.py') > -1 or False
+ self.rebuilder.init()
+ module = self.rebuilder.visit_module(node, modname, package)
+ module.file = module.path = node_file
+ module._from_nodes = self.rebuilder._from_nodes
+ module._delayed_assattr = self.rebuilder._delayed_assattr
+ return module
+
+ def add_from_names_to_locals(self, node):
+ """store imported names to the locals;
+ resort the locals if coming from a delayed node
+ """
+
+ _key_func = lambda node: node.fromlineno
+ def sort_locals(my_list):
+ my_list.sort(key=_key_func)
+ for (name, asname) in node.names:
+ if name == '*':
+ try:
+ imported = node.root().import_module(node.modname)
+ except ASTNGBuildingException:
+ continue
+ for name in imported.wildcard_import_names():
+ node.parent.set_local(name, node)
+ sort_locals(node.parent.scope().locals[name])
+ else:
+ node.parent.set_local(asname or name, node)
+ sort_locals(node.parent.scope().locals[asname or name])
+
+ def delayed_assattr(self, node):
+ """visit a AssAttr node -> add name to locals, handle members
+ definition
+ """
+ try:
+ frame = node.frame()
+ for infered in node.expr.infer():
+ if infered is YES:
+ continue
+ try:
+ if infered.__class__ is Instance:
+ infered = infered._proxied
+ iattrs = infered.instance_attrs
+ elif isinstance(infered, Instance):
+ # Const, Tuple, ... we may be wrong, may be not, but
+ # anyway we don't want to pollute builtin's namespace
+ continue
+ elif infered.is_function:
+ iattrs = infered.instance_attrs
+ else:
+ iattrs = infered.locals
+ except AttributeError:
+ # XXX log error
+ #import traceback
+ #traceback.print_exc()
+ continue
+ values = iattrs.setdefault(node.attrname, [])
+ if node in values:
+ continue
+ # get assign in __init__ first XXX useful ?
+ if frame.name == '__init__' and values and not \
+ values[0].frame().name == '__init__':
+ values.insert(0, node)
+ else:
+ values.append(node)
+ except InferenceError:
+ pass
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/exceptions.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/exceptions.py
@@ -0,0 +1,60 @@
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""this module contains exceptions used in the astng library
+
+"""
+
+__docformat__ = "restructuredtext en"
+
+class ASTNGError(Exception):
+ """base exception class for all astng related exceptions"""
+
+class ASTNGBuildingException(ASTNGError):
+ """exception class when we are unable to build an astng representation"""
+
+class ResolveError(ASTNGError):
+ """base class of astng resolution/inference error"""
+
+class NotFoundError(ResolveError):
+ """raised when we are unable to resolve a name"""
+
+class InferenceError(ResolveError):
+ """raised when we are unable to infer a node"""
+
+class UnresolvableName(InferenceError):
+ """raised when we are unable to resolve a name"""
+
+class NoDefault(ASTNGError):
+ """raised by function's `default_value` method when an argument has
+ no default value
+ """
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/inference.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/inference.py
@@ -0,0 +1,383 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""this module contains a set of functions to handle inference on astng trees
+"""
+
+__docformat__ = "restructuredtext en"
+
+from itertools import chain
+import sys
+
+from . import nodes
+
+from .manager import ASTNGManager
+from .exceptions import (ASTNGBuildingException, ASTNGError,
+ InferenceError, NoDefault, NotFoundError, UnresolvableName)
+from .bases import YES, Instance, InferenceContext, Generator, \
+ _infer_stmts, copy_context, path_wrapper, raise_if_nothing_infered
+from .protocols import _arguments_infer_argname
+
+MANAGER = ASTNGManager()
+
+
+class CallContext:
+ """when inferring a function call, this class is used to remember values
+ given as argument
+ """
+ def __init__(self, args, starargs, dstarargs):
+ self.args = []
+ self.nargs = {}
+ for arg in args:
+ if isinstance(arg, nodes.Keyword):
+ self.nargs[arg.arg] = arg.value
+ else:
+ self.args.append(arg)
+ self.starargs = starargs
+ self.dstarargs = dstarargs
+
+ def infer_argument(self, funcnode, name, context):
+ """infer a function argument value according to the call context"""
+ # 1. search in named keywords
+ try:
+ return self.nargs[name].infer(context)
+ except KeyError:
+ # Function.args.args can be None in astng (means that we don't have
+ # information on argnames)
+ argindex = funcnode.args.find_argname(name)[0]
+ if argindex is not None:
+ # 2. first argument of instance/class method
+ if argindex == 0 and funcnode.type in ('method', 'classmethod'):
+ if context.boundnode is not None:
+ boundnode = context.boundnode
+ else:
+ # XXX can do better ?
+ boundnode = funcnode.parent.frame()
+ if funcnode.type == 'method':
+ if not isinstance(boundnode, Instance):
+ boundnode = Instance(boundnode)
+ return iter((boundnode,))
+ if funcnode.type == 'classmethod':
+ return iter((boundnode,))
+ # 2. search arg index
+ try:
+ return self.args[argindex].infer(context)
+ except IndexError:
+ pass
+ # 3. search in *args (.starargs)
+ if self.starargs is not None:
+ its = []
+ for infered in self.starargs.infer(context):
+ if infered is YES:
+ its.append((YES,))
+ continue
+ try:
+ its.append(infered.getitem(argindex, context).infer(context))
+ except (InferenceError, AttributeError):
+ its.append((YES,))
+ except (IndexError, TypeError):
+ continue
+ if its:
+ return chain(*its)
+ # 4. XXX search in **kwargs (.dstarargs)
+ if self.dstarargs is not None:
+ its = []
+ for infered in self.dstarargs.infer(context):
+ if infered is YES:
+ its.append((YES,))
+ continue
+ try:
+ its.append(infered.getitem(name, context).infer(context))
+ except (InferenceError, AttributeError):
+ its.append((YES,))
+ except (IndexError, TypeError):
+ continue
+ if its:
+ return chain(*its)
+ # 5. */** argument, (Tuple or Dict)
+        if name == funcnode.args.vararg:
+            return iter((nodes.const_factory(()),))
+        if name == funcnode.args.kwarg:
+            return iter((nodes.const_factory({}),))
+ # 6. return default value if any
+ try:
+ return funcnode.args.default_value(name).infer(context)
+ except NoDefault:
+ raise InferenceError(name)
+
+
+# .infer method ###############################################################
+
+
+def infer_end(self, context=None):
+ """inference's end for node such as Module, Class, Function, Const...
+ """
+ yield self
+nodes.Module.infer = infer_end
+nodes.Class.infer = infer_end
+nodes.Function.infer = infer_end
+nodes.Lambda.infer = infer_end
+nodes.Const.infer = infer_end
+nodes.List.infer = infer_end
+nodes.Tuple.infer = infer_end
+nodes.Dict.infer = infer_end
+
+
+def infer_name(self, context=None):
+ """infer a Name: use name lookup rules"""
+ frame, stmts = self.lookup(self.name)
+ if not stmts:
+ raise UnresolvableName(self.name)
+ context = context.clone()
+ context.lookupname = self.name
+ return _infer_stmts(stmts, context, frame)
+nodes.Name.infer = path_wrapper(infer_name)
+nodes.AssName.infer_lhs = infer_name # won't work with a path wrapper
+
+
+def infer_callfunc(self, context=None):
+ """infer a CallFunc node by trying to guess what the function returns"""
+ callcontext = context.clone()
+ callcontext.callcontext = CallContext(self.args, self.starargs, self.kwargs)
+ callcontext.boundnode = None
+ for callee in self.func.infer(context):
+ if callee is YES:
+ yield callee
+ continue
+ try:
+ if hasattr(callee, 'infer_call_result'):
+ for infered in callee.infer_call_result(self, callcontext):
+ yield infered
+ except InferenceError:
+ ## XXX log error ?
+ continue
+nodes.CallFunc.infer = path_wrapper(raise_if_nothing_infered(infer_callfunc))
+
+
+def infer_import(self, context=None, asname=True):
+ """infer an Import node: return the imported module/object"""
+ name = context.lookupname
+ if name is None:
+ raise InferenceError()
+ if asname:
+ yield self.do_import_module(self.real_name(name))
+ else:
+ yield self.do_import_module(name)
+nodes.Import.infer = path_wrapper(infer_import)
+
+def infer_name_module(self, name):
+ context = InferenceContext()
+ context.lookupname = name
+ return self.infer(context, asname=False)
+nodes.Import.infer_name_module = infer_name_module
+
+
+def infer_from(self, context=None, asname=True):
+ """infer a From nodes: return the imported module/object"""
+ name = context.lookupname
+ if name is None:
+ raise InferenceError()
+ if asname:
+ name = self.real_name(name)
+ module = self.do_import_module(self.modname)
+ try:
+ context = copy_context(context)
+ context.lookupname = name
+ return _infer_stmts(module.getattr(name, ignore_locals=module is self.root()), context)
+ except NotFoundError:
+ raise InferenceError(name)
+nodes.From.infer = path_wrapper(infer_from)
+
+
+def infer_getattr(self, context=None):
+ """infer a Getattr node by using getattr on the associated object"""
+ #context = context.clone()
+ for owner in self.expr.infer(context):
+ if owner is YES:
+ yield owner
+ continue
+ try:
+ context.boundnode = owner
+ for obj in owner.igetattr(self.attrname, context):
+ yield obj
+ context.boundnode = None
+ except (NotFoundError, InferenceError):
+ context.boundnode = None
+ except AttributeError:
+ # XXX method / function
+ context.boundnode = None
+nodes.Getattr.infer = path_wrapper(raise_if_nothing_infered(infer_getattr))
+nodes.AssAttr.infer_lhs = raise_if_nothing_infered(infer_getattr) # won't work with a path wrapper
+
+
+def infer_global(self, context=None):
+ if context.lookupname is None:
+ raise InferenceError()
+ try:
+ return _infer_stmts(self.root().getattr(context.lookupname), context)
+ except NotFoundError:
+ raise InferenceError()
+nodes.Global.infer = path_wrapper(infer_global)
+
+
+def infer_subscript(self, context=None):
+ """infer simple subscription such as [1,2,3][0] or (1,2,3)[-1]"""
+ if isinstance(self.slice, nodes.Index):
+ index = self.slice.value.infer(context).next()
+ if index is YES:
+ yield YES
+ return
+ try:
+ # suppose it's a Tuple/List node (attribute error else)
+ # XXX infer self.value?
+ assigned = self.value.getitem(index.value, context)
+ except AttributeError:
+ raise InferenceError()
+ except (IndexError, TypeError):
+ yield YES
+ return
+ for infered in assigned.infer(context):
+ yield infered
+ else:
+ raise InferenceError()
+nodes.Subscript.infer = path_wrapper(infer_subscript)
+nodes.Subscript.infer_lhs = raise_if_nothing_infered(infer_subscript)
+
+
+UNARY_OP_METHOD = {'+': '__pos__',
+ '-': '__neg__',
+ '~': '__invert__',
+ 'not': None, # XXX not '__nonzero__'
+ }
+
+def infer_unaryop(self, context=None):
+ for operand in self.operand.infer(context):
+ try:
+ yield operand.infer_unary_op(self.op)
+ except TypeError:
+ continue
+ except AttributeError:
+ meth = UNARY_OP_METHOD[self.op]
+ if meth is None:
+ yield YES
+ else:
+ try:
+ # XXX just suppose if the type implement meth, returned type
+ # will be the same
+ operand.getattr(meth)
+ yield operand
+ except GeneratorExit:
+ raise
+ except:
+ yield YES
+nodes.UnaryOp.infer = path_wrapper(infer_unaryop)
+
+
+BIN_OP_METHOD = {'+': '__add__',
+ '-': '__sub__',
+ '/': '__div__',
+ '//': '__floordiv__',
+ '*': '__mul__',
+                 '**': '__pow__',
+ '%': '__mod__',
+ '&': '__and__',
+ '|': '__or__',
+ '^': '__xor__',
+ '<<': '__lshift__',
+ '>>': '__rshift__',
+ }
+
+def _infer_binop(operator, operand1, operand2, context, failures=None):
+ if operand1 is YES:
+ yield operand1
+ return
+ try:
+ for valnode in operand1.infer_binary_op(operator, operand2, context):
+ yield valnode
+ except AttributeError:
+ try:
+ # XXX just suppose if the type implement meth, returned type
+ # will be the same
+ operand1.getattr(BIN_OP_METHOD[operator])
+ yield operand1
+ except:
+ if failures is None:
+ yield YES
+ else:
+ failures.append(operand1)
+
+def infer_binop(self, context=None):
+ failures = []
+ for lhs in self.left.infer(context):
+ for val in _infer_binop(self.op, lhs, self.right, context, failures):
+ yield val
+ for lhs in failures:
+ for rhs in self.right.infer(context):
+ for val in _infer_binop(self.op, rhs, lhs, context):
+ yield val
+nodes.BinOp.infer = path_wrapper(infer_binop)
+
+
+def infer_arguments(self, context=None):
+ name = context.lookupname
+ if name is None:
+ raise InferenceError()
+ return _arguments_infer_argname(self, name, context)
+nodes.Arguments.infer = infer_arguments
+
+
+def infer_ass(self, context=None):
+ """infer a AssName/AssAttr: need to inspect the RHS part of the
+ assign node
+ """
+ stmt = self.statement()
+ if isinstance(stmt, nodes.AugAssign):
+ return stmt.infer(context)
+ stmts = list(self.assigned_stmts(context=context))
+ return _infer_stmts(stmts, context)
+nodes.AssName.infer = path_wrapper(infer_ass)
+nodes.AssAttr.infer = path_wrapper(infer_ass)
+
+def infer_augassign(self, context=None):
+ failures = []
+ for lhs in self.target.infer_lhs(context):
+ for val in _infer_binop(self.op, lhs, self.value, context, failures):
+ yield val
+ for lhs in failures:
+ for rhs in self.value.infer(context):
+ for val in _infer_binop(self.op, rhs, lhs, context):
+ yield val
+nodes.AugAssign.infer = path_wrapper(infer_augassign)
+
+
+# no infer method on DelName and DelAttr (expected InferenceError)
+
+
+def infer_empty_node(self, context=None):
+ if not self.has_underlying_object():
+ yield YES
+ else:
+ try:
+ for infered in MANAGER.infer_astng_from_something(self.object,
+ context=context):
+ yield infered
+ except ASTNGError:
+ yield YES
+nodes.EmptyNode.infer = path_wrapper(infer_empty_node)
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/manager.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/manager.py
@@ -0,0 +1,299 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""astng manager: avoid multiple astng build of a same module when
+possible by providing a class responsible to get astng representation
+from various source and using a cache of built modules)
+"""
+
+__docformat__ = "restructuredtext en"
+
+import sys
+import os
+from os.path import dirname, basename, abspath, join, isdir, exists
+
+from ..common.modutils import NoSourceFile, is_python_source, \
+ file_from_modpath, load_module_from_name, modpath_from_file, \
+ get_module_files, get_source_file, zipimport
+from ..common.configuration import OptionsProviderMixIn
+
+from .exceptions import ASTNGBuildingException
+
+def astng_wrapper(func, modname):
+ """wrapper to give to ASTNGManager.project_from_files"""
+ print 'parsing %s...' % modname
+ try:
+ return func(modname)
+ except ASTNGBuildingException, exc:
+ print exc
+ except Exception, exc:
+ import traceback
+ traceback.print_exc()
+
+def _silent_no_wrap(func, modname):
+ """silent wrapper that doesn't do anything; can be used for tests"""
+ return func(modname)
+
+def safe_repr(obj):
+ try:
+ return repr(obj)
+ except:
+ return '???'
+
+
+
+class ASTNGManager(OptionsProviderMixIn):
+ """the astng manager, responsible to build astng from files
+ or modules.
+
+ Use the Borg pattern.
+ """
+
+ name = 'astng loader'
+ options = (("ignore",
+ {'type' : "csv", 'metavar' : "<file>",
+ 'dest' : "black_list", "default" : ('CVS',),
+ 'help' : "add <file> (may be a directory) to the black list\
+. It should be a base name, not a path. You may set this option multiple times\
+."}),
+ ("project",
+ {'default': "No Name", 'type' : 'string', 'short': 'p',
+ 'metavar' : '<project name>',
+ 'help' : 'set the project name.'}),
+ )
+ brain = {}
+ def __init__(self):
+ self.__dict__ = ASTNGManager.brain
+ if not self.__dict__:
+ OptionsProviderMixIn.__init__(self)
+ self.load_defaults()
+ # NOTE: cache entries are added by the [re]builder
+ self.astng_cache = {}
+ self._mod_file_cache = {}
+ self.transformers = []
+
+ def astng_from_file(self, filepath, modname=None, fallback=True, source=False):
+ """given a module name, return the astng object"""
+ try:
+ filepath = get_source_file(filepath, include_no_ext=True)
+ source = True
+ except NoSourceFile:
+ pass
+ if modname is None:
+ try:
+ modname = '.'.join(modpath_from_file(filepath))
+ except ImportError:
+ modname = filepath
+ if modname in self.astng_cache:
+ return self.astng_cache[modname]
+ if source:
+ from .builder import ASTNGBuilder
+ return ASTNGBuilder(self).file_build(filepath, modname)
+ elif fallback and modname:
+ return self.astng_from_module_name(modname)
+ raise ASTNGBuildingException('unable to get astng for file %s' %
+ filepath)
+
+ def astng_from_module_name(self, modname, context_file=None):
+ """given a module name, return the astng object"""
+ if modname in self.astng_cache:
+ return self.astng_cache[modname]
+ if modname == '__main__':
+ from .builder import ASTNGBuilder
+ return ASTNGBuilder(self).string_build('', modname)
+ old_cwd = os.getcwd()
+ if context_file:
+ os.chdir(dirname(context_file))
+ try:
+ filepath = self.file_from_module_name(modname, context_file)
+ if filepath is not None and not is_python_source(filepath):
+ module = self.zip_import_data(filepath)
+ if module is not None:
+ return module
+ if filepath is None or not is_python_source(filepath):
+ try:
+ module = load_module_from_name(modname)
+ except Exception, ex:
+ msg = 'Unable to load module %s (%s)' % (modname, ex)
+ raise ASTNGBuildingException(msg)
+ return self.astng_from_module(module, modname)
+ return self.astng_from_file(filepath, modname, fallback=False)
+ finally:
+ os.chdir(old_cwd)
+
+ def zip_import_data(self, filepath):
+ if zipimport is None:
+ return None
+ from .builder import ASTNGBuilder
+ builder = ASTNGBuilder(self)
+ for ext in ('.zip', '.egg'):
+ try:
+ eggpath, resource = filepath.rsplit(ext + '/', 1)
+ except ValueError:
+ continue
+ try:
+ importer = zipimport.zipimporter(eggpath + ext)
+ zmodname = resource.replace('/', '.')
+ if importer.is_package(resource):
+ zmodname = zmodname + '.__init__'
+ module = builder.string_build(importer.get_source(resource),
+ zmodname, filepath)
+ return module
+ except:
+ continue
+ return None
+
+ def file_from_module_name(self, modname, contextfile):
+ try:
+ value = self._mod_file_cache[(modname, contextfile)]
+ except KeyError:
+ try:
+ value = file_from_modpath(modname.split('.'),
+ context_file=contextfile)
+ except ImportError, ex:
+ msg = 'Unable to load module %s (%s)' % (modname, ex)
+ value = ASTNGBuildingException(msg)
+ self._mod_file_cache[(modname, contextfile)] = value
+ if isinstance(value, ASTNGBuildingException):
+ raise value
+ return value
+
+ def astng_from_module(self, module, modname=None):
+ """given an imported module, return the astng object"""
+ modname = modname or module.__name__
+ if modname in self.astng_cache:
+ return self.astng_cache[modname]
+ try:
+ # some builtin modules don't have __file__ attribute
+ filepath = module.__file__
+ if is_python_source(filepath):
+ return self.astng_from_file(filepath, modname)
+ except AttributeError:
+ pass
+ from .builder import ASTNGBuilder
+ return ASTNGBuilder(self).module_build(module, modname)
+
+ def astng_from_class(self, klass, modname=None):
+ """get astng for the given class"""
+ if modname is None:
+ try:
+ modname = klass.__module__
+ except AttributeError:
+ raise ASTNGBuildingException(
+ 'Unable to get module for class %s' % safe_repr(klass))
+ modastng = self.astng_from_module_name(modname)
+ return modastng.getattr(klass.__name__)[0] # XXX
+
+
+ def infer_astng_from_something(self, obj, context=None):
+ """infer astng for the given class"""
+ if hasattr(obj, '__class__') and not isinstance(obj, type):
+ klass = obj.__class__
+ else:
+ klass = obj
+ try:
+ modname = klass.__module__
+ except AttributeError:
+ raise ASTNGBuildingException(
+ 'Unable to get module for %s' % safe_repr(klass))
+ except Exception, ex:
+ raise ASTNGBuildingException(
+ 'Unexpected error while retrieving module for %s: %s'
+ % (safe_repr(klass), ex))
+ try:
+ name = klass.__name__
+ except AttributeError:
+ raise ASTNGBuildingException(
+ 'Unable to get name for %s' % safe_repr(klass))
+ except Exception, ex:
+ raise ASTNGBuildingException(
+ 'Unexpected error while retrieving name for %s: %s'
+ % (safe_repr(klass), ex))
+ # take care, on living object __module__ is regularly wrong :(
+ modastng = self.astng_from_module_name(modname)
+ if klass is obj:
+ for infered in modastng.igetattr(name, context):
+ yield infered
+ else:
+ for infered in modastng.igetattr(name, context):
+ yield infered.instanciate_class()
+
+ def project_from_files(self, files, func_wrapper=astng_wrapper,
+ project_name=None, black_list=None):
+ """return a Project from a list of files or modules"""
+ # build the project representation
+ project_name = project_name or self.config.project
+ black_list = black_list or self.config.black_list
+ project = Project(project_name)
+ for something in files:
+ if not exists(something):
+ fpath = file_from_modpath(something.split('.'))
+ elif isdir(something):
+ fpath = join(something, '__init__.py')
+ else:
+ fpath = something
+ astng = func_wrapper(self.astng_from_file, fpath)
+ if astng is None:
+ continue
+ # XXX why is first file defining the project.path ?
+ project.path = project.path or astng.file
+ project.add_module(astng)
+ base_name = astng.name
+ # recurse in package except if __init__ was explicitly given
+ if astng.package and something.find('__init__') == -1:
+ # recurse on others packages / modules if this is a package
+ for fpath in get_module_files(dirname(astng.file),
+ black_list):
+ astng = func_wrapper(self.astng_from_file, fpath)
+ if astng is None or astng.name == base_name:
+ continue
+ project.add_module(astng)
+ return project
+
+ def register_transformer(self, transformer):
+ self.transformers.append(transformer)
+
+class Project:
+ """a project handle a set of modules / packages"""
+ def __init__(self, name=''):
+ self.name = name
+ self.path = None
+ self.modules = []
+ self.locals = {}
+ self.__getitem__ = self.locals.__getitem__
+ self.__iter__ = self.locals.__iter__
+ self.values = self.locals.values
+ self.keys = self.locals.keys
+ self.items = self.locals.items
+
+ def add_module(self, node):
+ self.locals[node.name] = node
+ self.modules.append(node)
+
+ def get_module(self, name):
+ return self.locals[name]
+
+ def get_children(self):
+ return self.modules
+
+ def __repr__(self):
+ return '<Project %r at %s (%s modules)>' % (self.name, id(self),
+ len(self.modules))
+
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/mixins.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/mixins.py
@@ -0,0 +1,136 @@
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""This module contains some mixins for the different nodes.
+"""
+
+from .exceptions import (ASTNGBuildingException, InferenceError,
+ NotFoundError)
+
+
+class BlockRangeMixIn(object):
+ """override block range """
+ def set_line_info(self, lastchild):
+ self.fromlineno = self.lineno
+ self.tolineno = lastchild.tolineno
+ self.blockstart_tolineno = self._blockstart_toline()
+
+ def _elsed_block_range(self, lineno, orelse, last=None):
+ """handle block line numbers range for try/finally, for, if and while
+ statements
+ """
+ if lineno == self.fromlineno:
+ return lineno, lineno
+ if orelse:
+ if lineno >= orelse[0].fromlineno:
+ return lineno, orelse[-1].tolineno
+ return lineno, orelse[0].fromlineno - 1
+ return lineno, last or self.tolineno
+
+class FilterStmtsMixin(object):
+ """Mixin for statement filtering and assignment type"""
+
+ def _get_filtered_stmts(self, _, node, _stmts, mystmt):
+ """method used in _filter_stmts to get statemtents and trigger break"""
+ if self.statement() is mystmt:
+ # original node's statement is the assignment, only keep
+ # current node (gen exp, list comp)
+ return [node], True
+ return _stmts, False
+
+ def ass_type(self):
+ return self
+
+
+class AssignTypeMixin(object):
+
+ def ass_type(self):
+ return self
+
+ def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt):
+ """method used in filter_stmts"""
+ if self is mystmt:
+ return _stmts, True
+ if self.statement() is mystmt:
+ # original node's statement is the assignment, only keep
+ # current node (gen exp, list comp)
+ return [node], True
+ return _stmts, False
+
+
+class ParentAssignTypeMixin(AssignTypeMixin):
+
+ def ass_type(self):
+ return self.parent.ass_type()
+
+
+
+class FromImportMixIn(FilterStmtsMixin):
+ """MixIn for From and Import Nodes"""
+
+ def _infer_name(self, frame, name):
+ return name
+
+ def do_import_module(self, modname):
+ """return the ast for a module whose name is <modname> imported by <self>
+ """
+ # handle special case where we are on a package node importing a module
+ # using the same name as the package, which may end in an infinite loop
+ # on relative imports
+ # XXX: no more needed ?
+ mymodule = self.root()
+ level = getattr(self, 'level', None) # Import as no level
+ # XXX we should investigate deeper if we really want to check
+ # importing itself: modname and mymodule.name be relative or absolute
+ if mymodule.relative_to_absolute_name(modname, level) == mymodule.name:
+ # FIXME: we used to raise InferenceError here, but why ?
+ return mymodule
+ try:
+ return mymodule.import_module(modname, level=level)
+ except ASTNGBuildingException:
+ raise InferenceError(modname)
+ except SyntaxError, ex:
+ raise InferenceError(str(ex))
+
+ def real_name(self, asname):
+ """get name from 'as' name"""
+ for name, _asname in self.names:
+ if name == '*':
+ return asname
+ if not _asname:
+ name = name.split('.', 1)[0]
+ _asname = name
+ if asname == _asname:
+ return name
+ raise NotFoundError(asname)
+
+
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/node_classes.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/node_classes.py
@@ -0,0 +1,908 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""Module for some node classes. More nodes in scoped_nodes.py
+"""
+
+import sys
+
+from . import BUILTINS_MODULE
+from .exceptions import NoDefault
+from .bases import (NodeNG, Statement, Instance, InferenceContext,
+ _infer_stmts, YES)
+from .mixins import BlockRangeMixIn, AssignTypeMixin, \
+ ParentAssignTypeMixin, FromImportMixIn
+
+
def unpack_infer(stmt, context=None):
    """recursively generate nodes inferred by the given statement.
    If the inferred value is a list or a tuple, recurse on the elements
    """
    if isinstance(stmt, (List, Tuple)):
        # container node: flatten by recursing on every element
        for elt in stmt.elts:
            for unpacked in unpack_infer(elt, context):
                yield unpacked
        return
    # a node inferring to itself is a final node: yield it and stop
    if stmt.infer(context).next() is stmt:
        yield stmt
        return
    # otherwise infer recursively, except for YES which is yielded as is
    for infered in stmt.infer(context):
        if infered is YES:
            yield infered
        else:
            for deep_infered in unpack_infer(infered, context):
                yield deep_infered
+
+
def are_exclusive(stmt1, stmt2, exceptions=None):
    """return true if the two given statements are mutually exclusive

    `exceptions` may be a list of exception names. If specified, discard If
    branches and check one of the statement is in an exception handler catching
    one of the given exceptions.

    algorithm :
     1) index stmt1's parents
     2) climb among stmt2's parents until we find a common parent
     3) if the common parent is a If or TryExcept statement, look if nodes are
        in exclusive branches
    """
    # index stmt1's parents
    stmt1_parents = {}
    children = {}
    node = stmt1.parent
    previous = stmt1
    while node:
        stmt1_parents[node] = 1
        # remember through which child of `node` stmt1 was reached
        children[node] = previous
        previous = node
        node = node.parent
    # climb among stmt2's parents until we find a common parent
    node = stmt2.parent
    previous = stmt2
    while node:
        if node in stmt1_parents:
            # if the common parent is a If or TryExcept statement, look if
            # nodes are in exclusive branches
            if isinstance(node, If) and exceptions is None:
                # different branches of the same If (body vs orelse)
                # can never both execute
                if (node.locate_child(previous)[1]
                    is not node.locate_child(children[node])[1]):
                    return True
            elif isinstance(node, TryExcept):
                c2attr, c2node = node.locate_child(previous)
                c1attr, c1node = node.locate_child(children[node])
                if c1node is not c2node:
                    # body vs a handler that catches one of `exceptions`,
                    # or a handler vs the else clause, are exclusive
                    if ((c2attr == 'body' and c1attr == 'handlers' and children[node].catch(exceptions)) or
                        (c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or
                        (c2attr == 'handlers' and c1attr == 'orelse') or
                        (c2attr == 'orelse' and c1attr == 'handlers')):
                        return True
                elif c2attr == 'handlers' and c1attr == 'handlers':
                    # two distinct handlers of the same try are exclusive
                    return previous is not children[node]
            return False
        previous = node
        node = node.parent
    return False
+
+
class LookupMixIn(object):
    """Mixin looking up a name in the right scope
    """

    def lookup(self, name):
        """lookup a variable name

        return the scope node and the list of assignments associated to the given
        name according to the scope where it has been found (locals, globals or
        builtin)

        The lookup is starting from self's scope. If self is not a frame itself and
        the name is found in the inner frame locals, statements will be filtered
        to remove ignorable statements according to self's location
        """
        return self.scope().scope_lookup(self, name)

    def ilookup(self, name):
        """infered lookup

        return an iterator on infered values of the statements returned by
        the lookup method
        """
        frame, stmts = self.lookup(name)
        context = InferenceContext()
        return _infer_stmts(stmts, context, frame)

    def _filter_stmts(self, stmts, frame, offset):
        """filter statements to remove ignorable statements.

        If self is not a frame itself and the name is found in the inner
        frame locals, statements will be filtered to remove ignorable
        statements according to self's location
        """
        # if offset == -1, my actual frame is not the inner frame but its parent
        #
        # class A(B): pass
        #
        # we need this to resolve B correctly
        if offset == -1:
            myframe = self.frame().parent.frame()
        else:
            myframe = self.frame()
        if not myframe is frame or self is frame:
            # name found in another frame (or self is the frame itself):
            # no location-based filtering is possible
            return stmts
        mystmt = self.statement()
        # line filtering if we are in the same frame
        #
        # take care node may be missing lineno information (this is the case for
        # nodes inserted for living objects)
        if myframe is frame and mystmt.fromlineno is not None:
            assert mystmt.fromlineno is not None, mystmt
            mylineno = mystmt.fromlineno + offset
        else:
            # disabling lineno filtering
            mylineno = 0
        _stmts = []
        _stmt_parents = []
        for node in stmts:
            stmt = node.statement()
            # line filtering is on and we have reached our location, break
            if mylineno > 0 and stmt.fromlineno > mylineno:
                break
            assert hasattr(node, 'ass_type'), (node, node.scope(),
                                               node.scope().locals)
            ass_type = node.ass_type()

            if node.has_base(self):
                # we are looking up a base class name from within the class
                # statement itself: later statements are irrelevant
                break

            _stmts, done = ass_type._get_filtered_stmts(self, node, _stmts, mystmt)
            if done:
                break

            optional_assign = ass_type.optional_assign
            if optional_assign and ass_type.parent_of(self):
                # we are inside a loop, loop var assigment is hidding previous
                # assigment
                _stmts = [node]
                _stmt_parents = [stmt.parent]
                continue

            # XXX comment various branches below!!!
            try:
                pindex = _stmt_parents.index(stmt.parent)
            except ValueError:
                pass
            else:
                # we got a parent index, this means the currently visited node
                # is at the same block level as a previously visited node
                if _stmts[pindex].ass_type().parent_of(ass_type):
                    # both statements are not at the same block level
                    continue
                # if currently visited node is following previously considered
                # assignement and both are not exclusive, we can drop the
                # previous one. For instance in the following code ::
                #
                #   if a:
                #       x = 1
                #   else:
                #       x = 2
                #   print x
                #
                # we can't remove neither x = 1 nor x = 2 when looking for 'x'
                # of 'print x'; while in the following ::
                #
                #   x = 1
                #   x = 2
                #   print x
                #
                # we can remove x = 1 when we see x = 2
                #
                # moreover, on loop assignment types, assignment won't
                # necessarily be done if the loop has no iteration, so we don't
                # want to clear previous assigments if any (hence the test on
                # optional_assign)
                if not (optional_assign or are_exclusive(_stmts[pindex], node)):
                    del _stmt_parents[pindex]
                    del _stmts[pindex]
            if isinstance(node, AssName):
                if not optional_assign and stmt.parent is mystmt.parent:
                    # unconditional assignment in the same block shadows
                    # everything collected so far
                    _stmts = []
                    _stmt_parents = []
            elif isinstance(node, DelName):
                # 'del name' kills previous assignments; the DelName node
                # itself is not a binding, so do not record it
                _stmts = []
                _stmt_parents = []
                continue
            if not are_exclusive(self, node):
                _stmts.append(node)
                _stmt_parents.append(stmt.parent)
        return _stmts
+
+# Name classes
+
class AssName(LookupMixIn, ParentAssignTypeMixin, NodeNG):
    """class representing an AssName node (a name being assigned to)"""


class DelName(LookupMixIn, ParentAssignTypeMixin, NodeNG):
    """class representing a DelName node (a name being deleted)"""


class Name(LookupMixIn, NodeNG):
    """class representing a Name node (a name being read)"""
+
+
+
+
+##################### node classes ########################################
+
class Arguments(NodeNG, AssignTypeMixin):
    """class representing an Arguments node"""
    _astng_fields = ('args', 'defaults')
    args = None
    defaults = None

    def __init__(self, vararg=None, kwarg=None):
        # names of the *args / **kwargs parameters, when present
        self.vararg = vararg
        self.kwarg = kwarg

    def _infer_name(self, frame, name):
        # an argument name is only inferable within its own function frame
        return name if self.parent is frame else None

    def format_args(self):
        """return arguments formatted as string"""
        parts = [_format_args(self.args, self.defaults)]
        if self.vararg:
            parts.append('*%s' % self.vararg)
        if self.kwarg:
            parts.append('**%s' % self.kwarg)
        return ', '.join(parts)

    def default_value(self, argname):
        """return the default value for an argument

        :raise `NoDefault`: if there is no default value defined
        """
        i = _find_arg(argname, self.args)[0]
        if i is not None:
            # defaults align with the trailing arguments
            idx = i - (len(self.args) - len(self.defaults))
            if idx >= 0:
                return self.defaults[idx]
        raise NoDefault()

    def is_argument(self, name):
        """return True if the name is defined in arguments"""
        if name in (self.vararg, self.kwarg):
            return True
        return self.find_argname(name, True)[1] is not None

    def find_argname(self, argname, rec=False):
        """return index and Name node with given name"""
        if not self.args:
            # self.args may be None in some cases (builtin function)
            return None, None
        return _find_arg(argname, self.args, rec)
+
+
def _find_arg(argname, args, rec=False):
    """return (index, node) of the argument named `argname` in `args`,
    or (None, None) when not found; `rec` searches nested tuple arguments.
    """
    for index, arg in enumerate(args):
        if isinstance(arg, Tuple):
            # tuple-unpacking argument: optionally search inside it
            if rec:
                nested = _find_arg(argname, arg.elts)
                if nested[0] is not None:
                    return nested
        elif arg.name == argname:
            return index, arg
    return None, None
+
+
def _format_args(args, defaults=None):
    """format a list of argument nodes (with optional default values)
    as a comma separated string
    """
    if args is None:
        return ''
    if defaults is None:
        default_offset = None
    else:
        # defaults align with the trailing arguments
        default_offset = len(args) - len(defaults)
    formatted = []
    for i, arg in enumerate(args):
        if isinstance(arg, Tuple):
            # tuple-unpacking argument: format recursively
            part = '(%s)' % _format_args(arg.elts)
        else:
            part = arg.name
        if default_offset is not None and i >= default_offset:
            part += '=' + defaults[i - default_offset].as_string()
        formatted.append(part)
    return ', '.join(formatted)
+
+
class AssAttr(NodeNG, ParentAssignTypeMixin):
    """class representing an AssAttr node (an attribute being assigned to)"""
    _astng_fields = ('expr',)
    expr = None

class Assert(Statement):
    """class representing an Assert node"""
    _astng_fields = ('test', 'fail',)
    test = None
    fail = None

class Assign(Statement, AssignTypeMixin):
    """class representing an Assign node"""
    _astng_fields = ('targets', 'value',)
    targets = None
    value = None

class AugAssign(Statement, AssignTypeMixin):
    """class representing an AugAssign node (augmented assignment, e.g. +=)"""
    _astng_fields = ('target', 'value',)
    target = None
    value = None

class Backquote(NodeNG):
    """class representing a Backquote node (python 2 `expr` repr syntax)"""
    _astng_fields = ('value',)
    value = None

class BinOp(NodeNG):
    """class representing a BinOp node"""
    _astng_fields = ('left', 'right',)
    left = None
    right = None

class BoolOp(NodeNG):
    """class representing a BoolOp node (and/or expression)"""
    _astng_fields = ('values',)
    values = None

class Break(Statement):
    """class representing a Break node"""
+
+
class CallFunc(NodeNG):
    """class representing a CallFunc node"""
    _astng_fields = ('func', 'args', 'starargs', 'kwargs')
    func = None
    args = None
    starargs = None
    kwargs = None

    def __init__(self):
        # *args / **kwargs child nodes, filled in by the builder when present
        self.starargs = None
        self.kwargs = None

class Compare(NodeNG):
    """class representing a Compare node"""
    _astng_fields = ('left', 'ops',)
    left = None
    # list of (operator string, comparator node) pairs
    ops = None

    def get_children(self):
        """override get_children for tuple fields"""
        yield self.left
        for _, comparator in self.ops:
            yield comparator # we don't want the 'op'

    def last_child(self):
        """override last_child"""
        # XXX maybe if self.ops:
        return self.ops[-1][1]
        #return self.left
+
class Comprehension(NodeNG):
    """class representing a Comprehension node (the 'for ... in ... [if ...]'
    part of comprehensions and generator expressions)"""
    _astng_fields = ('target', 'iter' ,'ifs')
    target = None
    iter = None
    ifs = None

    # the target may not be bound if the iterable is empty
    optional_assign = True
    def ass_type(self):
        return self

    def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt):
        """method used in filter_stmts"""
        if self is mystmt:
            if isinstance(lookup_node, (Const, Name)):
                return [lookup_node], True

        elif self.statement() is mystmt:
            # original node's statement is the assignment, only keeps
            # current node (gen exp, list comp)

            return [node], True

        return stmts, False
+
+
class Const(NodeNG, Instance):
    """represent a constant node like num, str, bool, None, bytes"""

    def __init__(self, value=None):
        # the actual python value held by this node
        self.value = value

    def getitem(self, index, context=None):
        # only string constants support subscripting
        if isinstance(self.value, basestring):
            return Const(self.value[index])
        raise TypeError('%r (value=%s)' % (self, self.value))

    def has_dynamic_getattr(self):
        return False

    def itered(self):
        # only string constants are iterable (character by character)
        if isinstance(self.value, basestring):
            return self.value
        raise TypeError()

    def pytype(self):
        # qualified name of the python type proxied by this constant
        return self._proxied.qname()
+
+
class Continue(Statement):
    """class representing a Continue node"""


class Decorators(NodeNG):
    """class representing a Decorators node"""
    _astng_fields = ('nodes',)
    nodes = None

    def __init__(self, nodes=None):
        # list of decorator expression nodes
        self.nodes = nodes

    def scope(self):
        # skip the function node to go directly to the upper level scope
        return self.parent.parent.scope()

class DelAttr(NodeNG, ParentAssignTypeMixin):
    """class representing a DelAttr node"""
    _astng_fields = ('expr',)
    expr = None


class Delete(Statement, AssignTypeMixin):
    """class representing a Delete node"""
    _astng_fields = ('targets',)
    targets = None
+
+
class Dict(NodeNG, Instance):
    """class representing a Dict node

    `items` is a list of (key node, value node) pairs, as built by
    `__init__` and consumed by `get_children` / `last_child`.
    """
    _astng_fields = ('items',)

    def __init__(self, items=None):
        if items is None:
            self.items = []
        else:
            # wrap raw python key/value pairs into constant nodes
            self.items = [(const_factory(k), const_factory(v))
                          for k, v in items.iteritems()]

    def pytype(self):
        return '%s.dict' % BUILTINS_MODULE

    def get_children(self):
        """get children of a Dict node"""
        # overrides get_children
        for key, value in self.items:
            yield key
            yield value

    def last_child(self):
        """override last_child"""
        if self.items:
            return self.items[-1][1]
        return None

    def itered(self):
        # iterating over a dict yields its keys; `items` holds (key, value)
        # pairs (see __init__/get_children), not a flat list, so slicing
        # with [::2] would return every other *pair* instead of the keys
        return [key for key, _ in self.items]

    def getitem(self, key, context=None):
        # walk the (key node, value node) pairs, consistently with the
        # representation built by __init__
        for keynode, value in self.items:
            for inferedkey in keynode.infer(context):
                if inferedkey is YES:
                    continue
                if isinstance(inferedkey, Const) and inferedkey.value == key:
                    return value
        raise IndexError(key)
+
+
class Discard(Statement):
    """class representing a Discard node (an expression used as a statement)"""
    _astng_fields = ('value',)
    value = None


class Ellipsis(NodeNG):
    """class representing an Ellipsis node"""


class EmptyNode(NodeNG):
    """class representing an EmptyNode node (placeholder wrapping objects
    that have no source representation, e.g. living objects)"""


class ExceptHandler(Statement, AssignTypeMixin):
    """class representing an ExceptHandler node"""
    _astng_fields = ('type', 'name', 'body',)
    type = None
    name = None
    body = None

    def _blockstart_toline(self):
        # last line of the 'except <type>, <name>:' header
        if self.name:
            return self.name.tolineno
        elif self.type:
            return self.type.tolineno
        else:
            return self.lineno

    def set_line_info(self, lastchild):
        self.fromlineno = self.lineno
        self.tolineno = lastchild.tolineno
        self.blockstart_tolineno = self._blockstart_toline()

    def catch(self, exceptions):
        # return True when this handler may catch one of `exceptions`
        # (a bare handler, or no filter given, catches everything);
        # NOTE: falls through returning None (falsy) when nothing matches
        if self.type is None or exceptions is None:
            return True
        for node in self.type.nodes_of_class(Name):
            if node.name in exceptions:
                return True


class Exec(Statement):
    """class representing an Exec node"""
    _astng_fields = ('expr', 'globals', 'locals',)
    expr = None
    globals = None
    locals = None


class ExtSlice(NodeNG):
    """class representing an ExtSlice node"""
    _astng_fields = ('dims',)
    dims = None
+
class For(BlockRangeMixIn, AssignTypeMixin, Statement):
    """class representing a For node"""
    _astng_fields = ('target', 'iter', 'body', 'orelse',)
    target = None
    iter = None
    body = None
    orelse = None

    # the loop target is not bound when the iterable is empty
    optional_assign = True
    def _blockstart_toline(self):
        # the loop header ends with the iterable expression
        return self.iter.tolineno
+
+
class From(FromImportMixIn, Statement):
    """class representing a From node ('from <modname> import <names>')"""

    def __init__(self, fromname, names, level=0):
        self.modname = fromname
        # list of (name, asname) pairs
        self.names = names
        # relative import level (number of leading dots)
        self.level = level

class Getattr(NodeNG):
    """class representing a Getattr node"""
    _astng_fields = ('expr',)
    expr = None


class Global(Statement):
    """class representing a Global node"""

    def __init__(self, names):
        # list of declared global names
        self.names = names

    def _infer_name(self, frame, name):
        return name


class If(BlockRangeMixIn, Statement):
    """class representing an If node"""
    _astng_fields = ('test', 'body', 'orelse')
    test = None
    body = None
    orelse = None

    def _blockstart_toline(self):
        # the 'if' header ends with the test expression
        return self.test.tolineno

    def block_range(self, lineno):
        """handle block line numbers range for if statements"""
        if lineno == self.body[0].fromlineno:
            return lineno, lineno
        if lineno <= self.body[-1].tolineno:
            return lineno, self.body[-1].tolineno
        return self._elsed_block_range(lineno, self.orelse,
                                       self.body[0].fromlineno - 1)
+
+
class IfExp(NodeNG):
    """class representing an IfExp node (conditional expression)"""
    _astng_fields = ('test', 'body', 'orelse')
    test = None
    body = None
    orelse = None


class Import(FromImportMixIn, Statement):
    """class representing an Import node"""


class Index(NodeNG):
    """class representing an Index node"""
    _astng_fields = ('value',)
    value = None


class Keyword(NodeNG):
    """class representing a Keyword node (keyword argument in a call)"""
    _astng_fields = ('value',)
    value = None


class List(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a List node"""
    _astng_fields = ('elts',)

    def __init__(self, elts=None):
        if elts is None:
            self.elts = []
        else:
            # wrap raw python values into constant nodes
            self.elts = [const_factory(e) for e in elts]

    def pytype(self):
        return '%s.list' % BUILTINS_MODULE

    def getitem(self, index, context=None):
        return self.elts[index]

    def itered(self):
        return self.elts


class Nonlocal(Statement):
    """class representing a Nonlocal node"""

    def __init__(self, names):
        # list of declared nonlocal names
        self.names = names

    def _infer_name(self, frame, name):
        return name


class Pass(Statement):
    """class representing a Pass node"""


class Print(Statement):
    """class representing a Print node"""
    _astng_fields = ('dest', 'values',)
    dest = None
    values = None
+
+
class Raise(Statement):
    """class representing a Raise node"""
    exc = None
    # child fields differ between python 2 ('raise exc, inst, tback')
    # and python 3 ('raise exc from cause')
    if sys.version_info < (3, 0):
        _astng_fields = ('exc', 'inst', 'tback')
        inst = None
        tback = None
    else:
        _astng_fields = ('exc', 'cause')
        exc = None
        cause = None

    def raises_not_implemented(self):
        # True when the raised expression mentions NotImplementedError;
        # implicitly returns None (falsy) otherwise
        if not self.exc:
            return
        for name in self.exc.nodes_of_class(Name):
            if name.name == 'NotImplementedError':
                return True
+
+
class Return(Statement):
    """class representing a Return node"""
    _astng_fields = ('value',)
    value = None


class Set(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a Set node"""
    _astng_fields = ('elts',)

    def __init__(self, elts=None):
        if elts is None:
            self.elts = []
        else:
            # wrap raw python values into constant nodes
            self.elts = [const_factory(e) for e in elts]

    def pytype(self):
        return '%s.set' % BUILTINS_MODULE

    def itered(self):
        return self.elts


class Slice(NodeNG):
    """class representing a Slice node"""
    _astng_fields = ('lower', 'upper', 'step')
    lower = None
    upper = None
    step = None

class Starred(NodeNG, ParentAssignTypeMixin):
    """class representing a Starred node (python 3 '*target')"""
    _astng_fields = ('value',)
    value = None


class Subscript(NodeNG):
    """class representing a Subscript node"""
    _astng_fields = ('value', 'slice')
    value = None
    slice = None
+
+
class TryExcept(BlockRangeMixIn, Statement):
    """class representing a TryExcept node"""
    _astng_fields = ('body', 'handlers', 'orelse',)
    body = None
    handlers = None
    orelse = None

    def _infer_name(self, frame, name):
        return name

    def _blockstart_toline(self):
        return self.lineno

    def block_range(self, lineno):
        """handle block line numbers range for try/except statements"""
        last = None
        for exhandler in self.handlers:
            if exhandler.type and lineno == exhandler.type.fromlineno:
                return lineno, lineno
            if exhandler.body[0].fromlineno <= lineno <= exhandler.body[-1].tolineno:
                return lineno, exhandler.body[-1].tolineno
            if last is None:
                # the 'try' body ends just before the first handler
                last = exhandler.body[0].fromlineno - 1
        return self._elsed_block_range(lineno, self.orelse, last)


class TryFinally(BlockRangeMixIn, Statement):
    """class representing a TryFinally node"""
    _astng_fields = ('body', 'finalbody',)
    body = None
    finalbody = None

    def _blockstart_toline(self):
        return self.lineno

    def block_range(self, lineno):
        """handle block line numbers range for try/finally statements"""
        child = self.body[0]
        # py2.5 try: except: finally:
        if (isinstance(child, TryExcept) and child.fromlineno == self.fromlineno
            and lineno > self.fromlineno and lineno <= child.tolineno):
            return child.block_range(lineno)
        return self._elsed_block_range(lineno, self.finalbody)
+
+
class Tuple(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a Tuple node"""
    _astng_fields = ('elts',)

    def __init__(self, elts=None):
        if elts is None:
            self.elts = []
        else:
            # wrap raw python values into constant nodes
            self.elts = [const_factory(e) for e in elts]

    def pytype(self):
        return '%s.tuple' % BUILTINS_MODULE

    def getitem(self, index, context=None):
        return self.elts[index]

    def itered(self):
        return self.elts


class UnaryOp(NodeNG):
    """class representing an UnaryOp node"""
    _astng_fields = ('operand',)
    operand = None
+
+
class While(BlockRangeMixIn, Statement):
    """class representing a While node"""
    _astng_fields = ('test', 'body', 'orelse',)
    test = None
    body = None
    orelse = None

    def _blockstart_toline(self):
        # the loop header ends with the test expression
        return self.test.tolineno

    def block_range(self, lineno):
        """handle block line numbers range for while statements"""
        return self._elsed_block_range(lineno, self.orelse)
+
+
class With(BlockRangeMixIn, AssignTypeMixin, Statement):
    """class representing a With node"""
    _astng_fields = ('expr', 'vars', 'body')
    expr = None
    vars = None
    body = None

    def _blockstart_toline(self):
        # the header ends with the 'as' target when present,
        # else with the context expression
        if self.vars:
            return self.vars.tolineno
        else:
            return self.expr.tolineno


class Yield(NodeNG):
    """class representing a Yield node"""
    _astng_fields = ('value',)
    value = None

# constants ##############################################################

# mapping of python value types to their constant node class; scalar
# types are registered by _update_const_classes below
CONST_CLS = {
    list: List,
    tuple: Tuple,
    dict: Dict,
    set: Set,
    type(None): Const,
    }

def _update_const_classes(self):
    """update constant classes, so the keys of CONST_CLS can be reused"""
    klasses = (bool, int, float, complex, str)
    if sys.version_info < (3, 0):
        # python 2 only scalar types
        klasses += (unicode, long)
    if sys.version_info >= (2, 6):
        klasses += (bytes,)
    for kls in klasses:
        CONST_CLS[kls] = Const
_update_const_classes()
+
def const_factory(value):
    """return an astng node for a python value"""
    # since const_factory is called to evaluate content of container (eg list,
    # tuple), it may be called with some node as argument that should be left
    # untouched
    if isinstance(value, NodeNG):
        return value
    try:
        return CONST_CLS[value.__class__](value)
    except (KeyError, AttributeError):
        # some constants (like from gtk._gtk) don't have their class in
        # CONST_CLS, though we can "assert isinstance(value, tuple(CONST_CLS))"
        if isinstance(value, tuple(CONST_CLS)):
            return Const(value)
        # last resort: wrap the raw living object in an empty node
        node = EmptyNode()
        node.object = value
        return node
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/nodes.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/nodes.py
@@ -0,0 +1,75 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""
+on all nodes :
+ .is_statement, returning true if the node should be considered as a
+ statement node
+ .root(), returning the root node of the tree (i.e. a Module)
+ .previous_sibling(), returning previous sibling statement node
+ .next_sibling(), returning next sibling statement node
+ .statement(), returning the first parent node marked as statement node
+ .frame(), returning the first node defining a new local scope (i.e.
+ Module, Function or Class)
+ .set_local(name, node), define an identifier <name> on the first parent frame,
+ with the node defining it. This is used by the astng builder and should not
+ be used from out there.
+
+on From and Import :
+ .real_name(name),
+
+
+"""
+
+__docformat__ = "restructuredtext en"
+
+from .node_classes import Arguments, AssAttr, Assert, Assign, \
+ AssName, AugAssign, Backquote, BinOp, BoolOp, Break, CallFunc, Compare, \
+ Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, \
+ Dict, Discard, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, \
+ From, Getattr, Global, If, IfExp, Import, Index, Keyword, \
+ List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, \
+ TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, \
+ const_factory
+from .scoped_nodes import Module, GenExpr, Lambda, DictComp, \
+ ListComp, SetComp, Function, Class
+
# tuple of every concrete node class exposed by the astng package,
# kept roughly in alphabetical order
ALL_NODE_CLASSES = (
    Arguments, AssAttr, Assert, Assign, AssName, AugAssign,
    Backquote, BinOp, BoolOp, Break,
    CallFunc, Class, Compare, Comprehension, Const, Continue,
    Decorators, DelAttr, DelName, Delete,
    Dict, DictComp, Discard,
    Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice,
    For, From, Function,
    Getattr, GenExpr, Global,
    If, IfExp, Import, Index,
    Keyword,
    Lambda, List, ListComp,
    Name, Nonlocal,
    Module,
    Pass, Print,
    Raise, Return,
    Set, SetComp, Slice, Starred, Subscript,
    TryExcept, TryFinally, Tuple,
    UnaryOp,
    While, With,
    Yield,
    )
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/protocols.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/protocols.py
@@ -0,0 +1,321 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""this module contains a set of functions to handle python protocols for nodes
+where it makes sense.
+"""
+
+__doctype__ = "restructuredtext en"
+
+from .exceptions import InferenceError, NoDefault
+from .node_classes import unpack_infer
+from .bases import copy_context, \
+ raise_if_nothing_infered, yes_if_nothing_infered, Instance, Generator, YES
+from .nodes import const_factory
+from . import nodes
+
+# unary operations ############################################################
+
def tl_infer_unary_op(self, operator):
    """unary operator protocol for Tuple and List nodes: only 'not' is
    computable (an empty sequence is falsy)"""
    if operator != 'not':
        raise TypeError() # XXX log unsupported operation
    return const_factory(not bool(self.elts))
nodes.Tuple.infer_unary_op = tl_infer_unary_op
nodes.List.infer_unary_op = tl_infer_unary_op
+
+
def dict_infer_unary_op(self, operator):
    """unary operator protocol for Dict nodes: only 'not' is computable
    (an empty dict is falsy)"""
    if operator != 'not':
        raise TypeError() # XXX log unsupported operation
    return const_factory(not bool(self.items))
nodes.Dict.infer_unary_op = dict_infer_unary_op
+
+
def const_infer_unary_op(self, operator):
    """unary operator protocol for Const nodes: apply the python operator
    to the wrapped value and wrap the result again"""
    # XXX log potentially raised TypeError
    if operator == 'not':
        result = not self.value
    elif operator == '+':
        result = +self.value
    else: # operator == '-':
        result = -self.value
    return const_factory(result)
nodes.Const.infer_unary_op = const_infer_unary_op
+
+
+# binary operations ###########################################################
+
# python implementation of each supported binary operator, keyed by its
# textual symbol
BIN_OP_IMPL = {'+': lambda a, b: a + b,
               '-': lambda a, b: a - b,
               '/': lambda a, b: a / b,
               '//': lambda a, b: a // b,
               '*': lambda a, b: a * b,
               '**': lambda a, b: a ** b,
               '%': lambda a, b: a % b,
               '&': lambda a, b: a & b,
               '|': lambda a, b: a | b,
               '^': lambda a, b: a ^ b,
               '<<': lambda a, b: a << b,
               '>>': lambda a, b: a >> b,
               }
# derive the augmented forms ('+=', '-=', ...) from the plain operators:
# inference-wise they compute the same value.  Iterate over a snapshot so
# the dict is not mutated while being iterated (required on Python 3,
# harmless on Python 2 where items() already returns a list).
for key, impl in list(BIN_OP_IMPL.items()):
    BIN_OP_IMPL[key+'='] = impl
+
def const_infer_binary_op(self, operator, other, context):
    """binary operator protocol for Const nodes: infer `self <operator> rhs`
    by applying the python operator to the two constant values, yielding
    Const results (or YES when nothing can be computed).
    """
    # NOTE: the loop variable deliberately shadows the `other` parameter;
    # each inferred value of the right-hand side is handled in turn.
    for other in other.infer(context):
        if isinstance(other, nodes.Const):
            try:
                impl = BIN_OP_IMPL[operator]

                try:
                    yield const_factory(impl(self.value, other.value))
                except Exception:
                    # ArithmeticError is not enough: float >> float is a TypeError
                    # TODO : let pylint know about the problem
                    pass
            except TypeError:
                # XXX log TypeError
                # NOTE(review): an operator missing from BIN_OP_IMPL would
                # raise KeyError here, which is *not* caught — presumably
                # callers only pass known operators; confirm upstream.
                continue
        elif other is YES:
            yield other
        else:
            # non-constant rhs: delegate to its own infer_binary_op protocol
            try:
                for val in other.infer_binary_op(operator, self, context):
                    yield val
            except AttributeError:
                yield YES
nodes.Const.infer_binary_op = yes_if_nothing_infered(const_infer_binary_op)
+
+
def tl_infer_binary_op(self, operator, other, context):
    """binary operator protocol for Tuple and List nodes: only
    concatenation with a same-type sequence ('+') and repetition by an
    integer constant ('*') are computed; other combinations yield YES.
    """
    # NOTE: the loop variable deliberately shadows the `other` parameter.
    for other in other.infer(context):
        if isinstance(other, self.__class__) and operator == '+':
            node = self.__class__()
            elts = [n for elt in self.elts for n in elt.infer(context)
                    if not n is YES]
            elts += [n for elt in other.elts for n in elt.infer(context)
                     if not n is YES]
            node.elts = elts
            yield node
        elif isinstance(other, nodes.Const) and operator == '*':
            # NOTE(review): the isinstance(..., int) check rejects py2
            # `long` multipliers (they fall through to YES) — confirm this
            # is intended.
            if not isinstance(other.value, int):
                yield YES
                continue
            node = self.__class__()
            elts = [n for elt in self.elts for n in elt.infer(context)
                    if not n is YES] * other.value
            node.elts = elts
            yield node
        elif isinstance(other, Instance) and not isinstance(other, nodes.Const):
            yield YES
        # XXX else log TypeError
nodes.Tuple.infer_binary_op = yes_if_nothing_infered(tl_infer_binary_op)
nodes.List.infer_binary_op = yes_if_nothing_infered(tl_infer_binary_op)
+
+
def dict_infer_binary_op(self, operator, other, context):
    """binary operator protocol for Dict nodes: no result is computable,
    so yield YES for every inferred class-instance operand"""
    for inferred in other.infer(context):
        is_instance = isinstance(inferred, Instance)
        if is_instance and isinstance(inferred._proxied, nodes.Class):
            yield YES
        # XXX else log TypeError
nodes.Dict.infer_binary_op = yes_if_nothing_infered(dict_infer_binary_op)
+
+
+# assignment ##################################################################
+
+"""the assigned_stmts method is responsible to return the assigned statement
+(e.g. not inferred) according to the assignment type.
+
+The `asspath` argument is used to record the lhs path of the original node.
+For instance if we want assigned statements for 'c' in 'a, (b,c)', asspath
+will be [1, 1] once arrived to the Assign node.
+
+The `context` argument is the current inference context which should be given
+to any intermediary inference necessary.
+"""
+
def _resolve_looppart(parts, asspath, context):
    """recursive function to resolve multiple assignments on loops

    :param parts: inferred values of the iterated expression
    :param asspath: index path leading to the assignment target being
        resolved; one index is consumed per recursion level
    :param context: current inference context
    """
    # work on a copy so pop() does not alter the caller's path
    asspath = asspath[:]
    index = asspath.pop(0)
    for part in parts:
        if part is YES:
            continue
        # XXX handle __iter__ and log potentially detected errors
        if not hasattr(part, 'itered'):
            continue
        try:
            itered = part.itered()
        except TypeError:
            continue # XXX log error
        for stmt in itered:
            try:
                assigned = stmt.getitem(index, context)
            except (AttributeError, IndexError):
                continue
            except TypeError: # stmt is unsubscriptable Const
                # (the exception instance was previously bound to an unused
                # name with py2-only `except E, exc` syntax)
                continue
            if not asspath:
                # we achieved to resolved the assignment path,
                # don't infer the last part
                yield assigned
            elif assigned is YES:
                break
            else:
                # we are not yet on the last part of the path
                # search on each possibly inferred value
                try:
                    for infered in _resolve_looppart(assigned.infer(context),
                                                     asspath, context):
                        yield infered
                except InferenceError:
                    break
+
+
def for_assigned_stmts(self, node, context=None, asspath=None):
    """assigned_stmts protocol for For loops (and comprehension clauses):
    yield the values the loop target may be bound to.

    With no asspath, yield each element of the iterated tuple/list; with an
    asspath (unpacking target), resolve it through _resolve_looppart.
    """
    if asspath is None:
        for lst in self.iter.infer(context):
            if isinstance(lst, (nodes.Tuple, nodes.List)):
                for item in lst.elts:
                    yield item
    else:
        for infered in _resolve_looppart(self.iter.infer(context),
                                         asspath, context):
            yield infered

nodes.For.assigned_stmts = raise_if_nothing_infered(for_assigned_stmts)
nodes.Comprehension.assigned_stmts = raise_if_nothing_infered(for_assigned_stmts)
+
+
def mulass_assigned_stmts(self, node, context=None, asspath=None):
    """assigned_stmts protocol for Tuple/List assignment targets: record
    `node`'s position inside this tuple/list, then delegate to the parent
    (ultimately an Assign, For or Comprehension node).
    """
    if asspath is None:
        asspath = []
    # prepend our index: the path is built leaf-to-root while walking up,
    # deliberately mutating the list shared with recursive calls
    asspath.insert(0, self.elts.index(node))
    return self.parent.assigned_stmts(self, context, asspath)
nodes.Tuple.assigned_stmts = mulass_assigned_stmts
nodes.List.assigned_stmts = mulass_assigned_stmts
+
+
def assend_assigned_stmts(self, context=None):
    """assigned_stmts protocol for assignment "leaves" (AssName / AssAttr):
    simply delegate to the parent node"""
    parent = self.parent
    return parent.assigned_stmts(self, context=context)
nodes.AssName.assigned_stmts = assend_assigned_stmts
nodes.AssAttr.assigned_stmts = assend_assigned_stmts
+
+
def _arguments_infer_argname(self, name, context):
    """infer the possible values of the argument `name` of this Arguments
    node when no call context is available: self/cls for methods, ()/{}
    for vararg/kwarg, else the default value (if any) followed by YES.
    """
    # arguments information may be missing, in which case we can't do anything
    # more
    if not (self.args or self.vararg or self.kwarg):
        yield YES
        return
    # first argument of instance/class method
    if self.args and getattr(self.args[0], 'name', None) == name:
        functype = self.parent.type
        if functype == 'method':
            yield Instance(self.parent.parent.frame())
            return
        if functype == 'classmethod':
            yield self.parent.parent.frame()
            return
    if name == self.vararg:
        # *args is always a (here empty, unknown content) tuple
        yield const_factory(())
        return
    if name == self.kwarg:
        # **kwargs is always a dict
        yield const_factory({})
        return
    # if there is a default value, yield it. And then yield YES to reflect
    # we can't guess given argument value
    try:
        context = copy_context(context)
        for infered in self.default_value(name).infer(context):
            yield infered
        yield YES
    except NoDefault:
        yield YES
+
+
def arguments_assigned_stmts(self, node, context, asspath=None):
    """assigned_stmts protocol for Arguments nodes: when a call context is
    available, infer the actual argument passed for `node`; otherwise fall
    back to _arguments_infer_argname (self/cls, defaults, YES).
    """
    if context.callcontext:
        # reset call context/name
        callcontext = context.callcontext
        context = copy_context(context)
        context.callcontext = None
        for infered in callcontext.infer_argument(self.parent, node.name, context):
            yield infered
        return
    for infered in _arguments_infer_argname(self, node.name, context):
        yield infered
nodes.Arguments.assigned_stmts = arguments_assigned_stmts
+
+
def assign_assigned_stmts(self, node, context=None, asspath=None):
    """assigned_stmts protocol for Assign/AugAssign nodes: yield the
    assigned rhs, or — for an unpacking path — the matching sub-part of it
    (resolved through _resolve_asspart, defined below).
    """
    if not asspath:
        yield self.value
        return
    for infered in _resolve_asspart(self.value.infer(context), asspath, context):
        yield infered
nodes.Assign.assigned_stmts = raise_if_nothing_infered(assign_assigned_stmts)
nodes.AugAssign.assigned_stmts = raise_if_nothing_infered(assign_assigned_stmts)
+
+
def _resolve_asspart(parts, asspath, context):
    """recursive function to resolve multiple assignments

    walk `asspath` (one index per recursion level) into each inferred rhs
    value, using the node's getitem() protocol.
    """
    # work on a copy so pop() does not alter the caller's path
    asspath = asspath[:]
    index = asspath.pop(0)
    for part in parts:
        if hasattr(part, 'getitem'):
            try:
                assigned = part.getitem(index, context)
            # XXX raise a specific exception to avoid potential hiding of
            # unexpected exception ?
            except (TypeError, IndexError):
                return
            if not asspath:
                # we achieved to resolved the assignment path, don't infer the
                # last part
                yield assigned
            elif assigned is YES:
                return
            else:
                # we are not yet on the last part of the path search on each
                # possibly inferred value
                try:
                    for infered in _resolve_asspart(assigned.infer(context),
                                                    asspath, context):
                        yield infered
                except InferenceError:
                    return
+
+
def excepthandler_assigned_stmts(self, node, context=None, asspath=None):
    """assigned_stmts protocol for ExceptHandler nodes: the bound name gets
    an instance of each exception class inferred from the handler's type"""
    for assigned in unpack_infer(self.type):
        if isinstance(assigned, nodes.Class):
            yield Instance(assigned)
        else:
            yield assigned
nodes.ExceptHandler.assigned_stmts = raise_if_nothing_infered(excepthandler_assigned_stmts)
+
+
def with_assigned_stmts(self, node, context=None, asspath=None):
    """assigned_stmts protocol for With nodes: yield the values the
    ``as`` target(s) may be bound to, when the context manager expression
    infers to a tuple or list.
    """
    if asspath is None:
        for lst in self.vars.infer(context):
            if isinstance(lst, (nodes.Tuple, nodes.List)):
                # Tuple/List store their children in `elts` (as used by
                # for_assigned_stmts and tl_infer_* above); the previous
                # `lst.nodes` attribute does not exist on these classes
                # and raised AttributeError here.
                for item in lst.elts:
                    yield item
nodes.With.assigned_stmts = raise_if_nothing_infered(with_assigned_stmts)
+
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/raw_building.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/raw_building.py
@@ -0,0 +1,353 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""this module contains a set of functions to create astng trees from scratch
+(build_* functions) or from living object (object_build_* functions)
+"""
+
+__docformat__ = "restructuredtext en"
+
+import sys
+from os.path import abspath
+from inspect import (getargspec, isdatadescriptor, isfunction, ismethod,
+ ismethoddescriptor, isclass, isbuiltin)
+
+from . import BUILTINS_MODULE
+from .node_classes import CONST_CLS
+from .nodes import (Module, Class, Const, const_factory, From,
+ Function, EmptyNode, Name, Arguments, Dict, List, Set, Tuple)
+from .bases import Generator
+from .manager import ASTNGManager
+MANAGER = ASTNGManager()
+
+_CONSTANTS = tuple(CONST_CLS) # the keys of CONST_CLS eg python builtin types
+
+def _attach_local_node(parent, node, name):
+ node.name = name # needed by add_local_node
+ parent.add_local_node(node)
+
# sentinel telling "no underlying object supplied" apart from a real None
_marker = object()

def attach_dummy_node(node, name, object=_marker):
    """create a dummy node and register it in the locals of the given
    node with the specified name

    note: the `object` parameter intentionally shadows the builtin; it is
    part of the public signature and must not be renamed.
    """
    enode = EmptyNode()
    enode.object = object
    _attach_local_node(node, enode, name)

# True when a real python object was attached to the EmptyNode
EmptyNode.has_underlying_object = lambda self: self.object is not _marker
+
def attach_const_node(node, name, value):
    """create a Const node and register it in the locals of the given
    node with the specified name, unless the name is one of the node's
    special attributes (which must not be shadowed)
    """
    # idiomatic membership test (was `not name in ...`)
    if name not in node.special_attributes:
        _attach_local_node(node, const_factory(value), name)
+
def attach_import_node(node, modname, membername):
    """register `membername` in `node`'s locals as if it came from
    ``from modname import membername``"""
    _attach_local_node(node, From(modname, [(membername, None)]), membername)
+
+
def build_module(name, doc=None):
    """create and initialize an astng Module node for a module built from
    a living object (hence not pure python)"""
    module = Module(name, doc, pure_python=False)
    module.parent = None
    module.package = False
    return module
+
def build_class(name, basenames=(), doc=None):
    """create and initialize an astng Class node, with a Name node built
    for each entry of `basenames`"""
    klass = Class(name, doc)
    for base_name in basenames:
        base_node = Name()
        base_node.name = base_name
        base_node.parent = klass
        klass.bases.append(base_node)
    return klass
+
def build_function(name, args=None, defaults=None, flag=0, doc=None):
    """create and initialize a astng Function node

    :param args: argument names, as a list of strings
    :param defaults: default values, wrapped through const_factory
    :param flag: kept for signature compatibility; not used in this body
    """
    args, defaults = args or [], defaults or []
    # first argument is now a list of decorators
    func = Function(name, doc)
    func.args = argsnode = Arguments()
    argsnode.args = []
    # build a Name node per argument name
    for arg in args:
        argsnode.args.append(Name())
        argsnode.args[-1].name = arg
        argsnode.args[-1].parent = argsnode
    argsnode.defaults = []
    for default in defaults:
        argsnode.defaults.append(const_factory(default))
        argsnode.defaults[-1].parent = argsnode
    argsnode.kwarg = None
    argsnode.vararg = None
    argsnode.parent = func
    if args:
        register_arguments(func)
    return func
+
+
def build_from_import(fromname, names):
    """create and initialize an astng From import statement"""
    name_pairs = [(name, None) for name in names]
    return From(fromname, name_pairs)
+
def register_arguments(func, args=None):
    """add given arguments to local

    args is a list that may contains nested lists
    (i.e. def func(a, (b, c, d)): ...)
    """
    if args is None:
        # top-level call: start from the function's own argument nodes and
        # also register *vararg / **kwarg names
        args = func.args.args
        if func.args.vararg:
            func.set_local(func.args.vararg, func.args)
        if func.args.kwarg:
            func.set_local(func.args.kwarg, func.args)
    for arg in args:
        if isinstance(arg, Name):
            func.set_local(arg.name, arg)
        else:
            # py2 nested argument tuple: recurse into its elements
            register_arguments(func, arg.elts)
+
def object_build_class(node, member, localname):
    """create astng for a living class object"""
    base_names = [base.__name__ for base in member.__bases__]
    return _base_class_object_build(node, member, base_names,
                                    localname=localname)
+
def object_build_function(node, member, localname):
    """create astng for a living function object"""
    args, varargs, varkw, defaults = getargspec(member)
    # vararg/kwarg names are appended to the plain argument list: this
    # raw-built representation does not model them separately
    if varargs is not None:
        args.append(varargs)
    if varkw is not None:
        args.append(varkw)
    # member.func_code is the py2 spelling of __code__
    func = build_function(getattr(member, '__name__', None) or localname, args,
                          defaults, member.func_code.co_flags, member.__doc__)
    node.add_local_node(func, localname)
+
def object_build_datadescriptor(node, member, name):
    """create astng for a living data descriptor object (e.g. a property),
    modelled as a class with no bases"""
    return _base_class_object_build(node, member, [], name)
+
def object_build_methoddescriptor(node, member, localname):
    """create astng for a living method descriptor object"""
    # FIXME get arguments ?
    func = build_function(getattr(member, '__name__', None) or localname,
                          doc=member.__doc__)
    # set node's arguments to None to notice that we have no information, not
    # an empty argument list
    func.args.args = None
    node.add_local_node(func, localname)
+
def _base_class_object_build(node, member, basenames, name=None, localname=None):
    """create astng for a living class object, with a given set of base names
    (e.g. ancestors)
    """
    klass = build_class(name or getattr(member, '__name__', None) or localname,
                        basenames, member.__doc__)
    klass._newstyle = isinstance(member, type)
    node.add_local_node(klass, localname)
    try:
        # limit the instantiation trick since it's too dangerous
        # (such as infinite test execution...)
        # this at least resolves common case such as Exception.args,
        # OSError.errno
        if issubclass(member, Exception):
            instdict = member().__dict__
        else:
            raise TypeError
    except:
        # deliberately bare: instantiating an arbitrary exception class may
        # fail in arbitrary ways; best-effort only
        pass
    else:
        # NOTE: the loop rebinds the `name` parameter, which is not used
        # again afterwards
        for name, obj in instdict.items():
            valnode = EmptyNode()
            valnode.object = obj
            valnode.parent = klass
            valnode.lineno = 1
            klass.instance_attrs[name] = [valnode]
    return klass
+
+
+
+
class InspectBuilder(object):
    """class for building nodes from living object

    this is actually a really minimal representation, including only Module,
    Function and Class nodes and some others as guessed.
    """

    # astng from living objects ###############################################

    def __init__(self):
        # map of already-built objects -> their node, to break cycles
        self._done = {}
        self._module = None

    def inspect_build(self, module, modname=None, path=None):
        """build astng from a living module (i.e. using inspect)
        this is used when there is no python source code available (either
        because it's a built-in module or because the .py is not available)
        """
        self._module = module
        if modname is None:
            modname = module.__name__
        try:
            node = build_module(modname, module.__doc__)
        except AttributeError:
            # in jython, java modules have no __doc__ (see #109562)
            node = build_module(modname)
        node.file = node.path = path and abspath(path) or path
        MANAGER.astng_cache[modname] = node
        node.package = hasattr(module, '__path__')
        self._done = {}
        self.object_build(node, module)
        return node

    def object_build(self, node, obj):
        """recursive method which create a partial ast from real objects
        (only function, class, and method are handled)
        """
        if obj in self._done:
            return self._done[obj]
        self._done[obj] = node
        for name in dir(obj):
            try:
                member = getattr(obj, name)
            except AttributeError:
                # damned ExtensionClass.Base, I know you're there !
                attach_dummy_node(node, name)
                continue
            if ismethod(member):
                # unwrap the underlying function (py2 bound/unbound method)
                member = member.im_func
            if isfunction(member):
                # verify this is not an imported function
                filename = getattr(member.func_code, 'co_filename', None)
                if filename is None:
                    # no code object / no source file: treat as descriptor
                    assert isinstance(member, object)
                    object_build_methoddescriptor(node, member, name)
                elif filename != getattr(self._module, '__file__', None):
                    attach_dummy_node(node, name, member)
                else:
                    object_build_function(node, member, name)
            elif isbuiltin(member):
                if self.imported_member(node, member, name):
                    #if obj is object:
                    #    print 'skippp', obj, name, member
                    continue
                object_build_methoddescriptor(node, member, name)
            elif isclass(member):
                if self.imported_member(node, member, name):
                    continue
                if member in self._done:
                    class_node = self._done[member]
                    if not class_node in node.locals.get(name, ()):
                        node.add_local_node(class_node, name)
                else:
                    class_node = object_build_class(node, member, name)
                    # recursion
                    self.object_build(class_node, member)
                if name == '__class__' and class_node.parent is None:
                    class_node.parent = self._done[self._module]
            elif ismethoddescriptor(member):
                assert isinstance(member, object)
                object_build_methoddescriptor(node, member, name)
            elif isdatadescriptor(member):
                assert isinstance(member, object)
                object_build_datadescriptor(node, member, name)
            elif isinstance(member, _CONSTANTS):
                attach_const_node(node, name, member)
            else:
                # create an empty node so that the name is actually defined
                attach_dummy_node(node, name, member)

    def imported_member(self, node, member, name):
        """verify this is not an imported class or handle it

        returns True when the member was handled as imported (an import or
        dummy node was attached), False when it belongs to this module.
        """
        # /!\ some classes like ExtensionClass doesn't have a __module__
        # attribute ! Also, this may trigger an exception on badly built module
        # (see http://www.logilab.org/ticket/57299 for instance)
        try:
            modname = getattr(member, '__module__', None)
        except:
            # XXX use logging
            print 'unexpected error while building astng from living object'
            import traceback
            traceback.print_exc()
            modname = None
        if modname is None:
            if name in ('__new__', '__subclasshook__'):
                # Python 2.5.1 (r251:54863, Sep 1 2010, 22:03:14)
                # >>> print object.__new__.__module__
                # None
                modname = BUILTINS_MODULE
            else:
                attach_dummy_node(node, name, member)
                return True
        # special-case: gtk members actually live in gtk._gtk
        if {'gtk': 'gtk._gtk'}.get(modname, modname) != self._module.__name__:
            # check if it sounds valid and then add an import node, else use a
            # dummy node
            try:
                getattr(sys.modules[modname], name)
            except (KeyError, AttributeError):
                attach_dummy_node(node, name, member)
            else:
                attach_import_node(node, modname, name)
            return True
        return False
+
+
### astng boot strapping ################################################### ###

# maps builtin constant types (int, str, ...) to the astng proxy class used
# as Const._proxied; filled by astng_boot_strapping() below
_CONST_PROXY = {}
def astng_boot_strapping():
    """astng boot strapping the builtins module"""
    # this boot strapping is necessary since we need the Const nodes to
    # inspect_build builtins, and then we can proxy Const
    builder = InspectBuilder()
    from ..common.compat import builtins
    astng_builtin = builder.inspect_build(builtins)
    for cls, node_cls in CONST_CLS.items():
        if cls is type(None):
            # NoneType is not exposed by the builtins module: fake it
            proxy = build_class('NoneType')
            proxy.parent = astng_builtin
        else:
            proxy = astng_builtin.getattr(cls.__name__)[0] # XXX
        if cls in (dict, list, set, tuple):
            # container nodes get their proxy directly on the node class
            node_cls._proxied = proxy
        else:
            _CONST_PROXY[cls] = proxy

# executed at import time: builds the builtins module astng once
astng_boot_strapping()

# TODO : find a nicer way to handle this situation;
# However __proxied introduced an
# infinite recursion (see https://bugs.launchpad.net/pylint/+bug/456870)
def _set_proxied(const):
    # property getter: pick the proxy matching the constant's runtime type
    return _CONST_PROXY[const.value.__class__]
Const._proxied = property(_set_proxied)

# FIXME : is it alright that Generator._proxied is not a astng node?
Generator._proxied = MANAGER.infer_astng_from_something(type(a for a in ()))
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/rebuilder.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/rebuilder.py
@@ -0,0 +1,886 @@
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""this module contains utilities for rebuilding a _ast tree in
+order to get a single ASTNG representation
+"""
+
+import sys
+from _ast import (Expr as Discard, Str,
+ # binary operators
+ Add, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor,
+ LShift, RShift,
+ # logical operators
+ And, Or,
+ # unary operators
+ UAdd, USub, Not, Invert,
+ # comparison operators
+ Eq, Gt, GtE, In, Is, IsNot, Lt, LtE, NotEq, NotIn,
+ )
+
+from .exceptions import ASTNGBuildingException
+from . import nodes as new
+
+
# maps _ast binary operator classes to their textual symbol
_BIN_OP_CLASSES = {Add: '+',
                   BitAnd: '&',
                   BitOr: '|',
                   BitXor: '^',
                   Div: '/',
                   FloorDiv: '//',
                   Mod: '%',
                   Mult: '*',
                   Pow: '**',
                   Sub: '-',
                   LShift: '<<',
                   RShift: '>>'}

# maps _ast boolean operator classes to their textual form
_BOOL_OP_CLASSES = {And: 'and',
                    Or: 'or'}

# maps _ast unary operator classes to their textual symbol
_UNARY_OP_CLASSES = {UAdd: '+',
                     USub: '-',
                     Not: 'not',
                     Invert: '~'}

# maps _ast comparison operator classes to their textual form
_CMP_OP_CLASSES = {Eq: '==',
                   Gt: '>',
                   GtE: '>=',
                   In: 'in',
                   Is: 'is',
                   IsNot: 'is not',
                   Lt: '<',
                   LtE: '<=',
                   NotEq: '!=',
                   NotIn: 'not in'}

# Name nodes carrying these identifiers are rebuilt as Const nodes
CONST_NAME_TRANSFORMS = {'None': None,
                         'True': True,
                         'False': False}

# maps _ast class names to the astng visit method suffix when they differ
# (used by TreeRebuilder.visit to compute 'visit_<name>')
REDIRECT = {'arguments': 'Arguments',
            'Attribute': 'Getattr',
            'comprehension': 'Comprehension',
            'Call': 'CallFunc',
            'ClassDef': 'Class',
            "ListCompFor": 'Comprehension',
            "GenExprFor": 'Comprehension',
            'excepthandler': 'ExceptHandler',
            'Expr': 'Discard',
            'FunctionDef': 'Function',
            'GeneratorExp': 'GenExpr',
            'ImportFrom': 'From',
            'keyword': 'Keyword',
            'Repr': 'Backquote',
            }
+
def _init_set_doc(node, newnode):
    """extract the docstring (if any) from `node`'s body into `newnode.doc`
    and drop the corresponding statement from the body so it is not
    rebuilt as a regular Discard node"""
    newnode.doc = None
    try:
        if isinstance(node.body[0], Discard) and isinstance(node.body[0].value, Str):
            newnode.tolineno = node.body[0].lineno
            newnode.doc = node.body[0].value.s
            node.body = node.body[1:]

    except IndexError:
        pass # ast built from scratch
+
+def _lineno_parent(oldnode, newnode, parent):
+ newnode.parent = parent
+ if hasattr(oldnode, 'lineno'):
+ newnode.lineno = oldnode.lineno
+ if hasattr(oldnode, 'col_offset'):
+ newnode.col_offset = oldnode.col_offset
+
def _set_infos(oldnode, newnode, parent):
    """like _lineno_parent (parent + position copy), and additionally close
    the node's line info from its last child

    the body previously duplicated _lineno_parent statement-for-statement;
    delegate instead.
    """
    _lineno_parent(oldnode, newnode, parent)
    newnode.set_line_info(newnode.last_child()) # set_line_info accepts None
+
+
+
+
+class TreeRebuilder(object):
+ """Rebuilds the _ast tree to become an ASTNG tree"""
+
    # class-level cache used by visit() to avoid recomputing the dispatch
    # for each _ast node class (shared by all TreeRebuilder instances)
    _visit_meths = {}
    def __init__(self):
        self.init()

    def init(self):
        # (re)set per-rebuild state; may be called again between rebuilds
        self.asscontext = None
        self._metaclass = ['']
        self._global_names = []
        self._from_nodes = []
        self._delayed_assattr = []
+
+ def visit(self, node, parent):
+ cls = node.__class__
+ if cls in self._visit_meths:
+ return self._visit_meths[cls](node, parent)
+ else:
+ cls_name = cls.__name__
+ visit_name = 'visit_' + REDIRECT.get(cls_name, cls_name).lower()
+ visit_method = getattr(self, visit_name)
+ self._visit_meths[cls] = visit_method
+ return visit_method(node, parent)
+
    def _save_assignment(self, node, name=None):
        """save assignment situation since node.parent is not available yet

        `name` is accepted for interface compatibility but unused here:
        the node's own `name` attribute is always what gets bound.
        """
        if self._global_names and node.name in self._global_names[-1]:
            # the name was declared global in this scope: bind it on the
            # module (root) node instead of the enclosing local scope
            node.root().set_local(node.name, node)
        else:
            node.parent.set_local(node.name, node)
+
+
    def visit_arguments(self, node, parent):
        """visit a Arguments node by returning a fresh instance of it"""
        newnode = new.Arguments()
        _lineno_parent(node, newnode, parent)
        # argument names are assignment targets: visit them in "Ass" context
        self.asscontext = "Ass"
        newnode.args = [self.visit(child, newnode) for child in node.args]
        self.asscontext = None
        newnode.defaults = [self.visit(child, newnode) for child in node.defaults]
        newnode.vararg = node.vararg
        newnode.kwarg = node.kwarg
        # save argument names in locals:
        if node.vararg:
            newnode.parent.set_local(newnode.vararg, newnode)
        if node.kwarg:
            newnode.parent.set_local(newnode.kwarg, newnode)
        newnode.set_line_info(newnode.last_child())
        return newnode
+
    def visit_assattr(self, node, parent):
        """visit a AssAttr node by returning a fresh instance of it"""
        # the attribute's expression is a *load*, not an assignment target:
        # temporarily clear the assignment context around it
        assc, self.asscontext = self.asscontext, None
        newnode = new.AssAttr()
        _lineno_parent(node, newnode, parent)
        newnode.expr = self.visit(node.expr, newnode)
        self.asscontext = assc
        # instance-attribute binding is resolved later, once parents exist
        self._delayed_assattr.append(newnode)
        newnode.set_line_info(newnode.last_child())
        return newnode

    def visit_assert(self, node, parent):
        """visit a Assert node by returning a fresh instance of it"""
        newnode = new.Assert()
        _lineno_parent(node, newnode, parent)
        newnode.test = self.visit(node.test, newnode)
        # the optional assert message becomes the `fail` child
        if node.msg is not None:
            newnode.fail = self.visit(node.msg, newnode)
        newnode.set_line_info(newnode.last_child())
        return newnode
+
    def visit_assign(self, node, parent):
        """visit a Assign node by returning a fresh instance of it"""
        newnode = new.Assign()
        _lineno_parent(node, newnode, parent)
        self.asscontext = "Ass"
        newnode.targets = [self.visit(child, newnode) for child in node.targets]
        self.asscontext = None
        newnode.value = self.visit(node.value, newnode)
        # set some function or metaclass infos XXX explain ?
        klass = newnode.parent.frame()
        # detect `meth = classmethod(meth)`-style rebindings inside a class
        # body and record the resulting method type
        if (isinstance(klass, new.Class)
            and isinstance(newnode.value, new.CallFunc)
            and isinstance(newnode.value.func, new.Name)):
            func_name = newnode.value.func.name
            for ass_node in newnode.targets:
                try:
                    meth = klass[ass_node.name]
                    if isinstance(meth, new.Function):
                        if func_name in ('classmethod', 'staticmethod'):
                            meth.type = func_name
                        elif func_name == 'classproperty': # see lgc.decorators
                            meth.type = 'classmethod'
                        meth.extra_decorators.append(newnode.value)
                except (AttributeError, KeyError):
                    continue
        elif getattr(newnode.targets[0], 'name', None) == '__metaclass__':
            # XXX check more...
            self._metaclass[-1] = 'type' # XXX get the actual metaclass
        newnode.set_line_info(newnode.last_child())
        return newnode
+
    def visit_assname(self, node, parent, node_name=None):
        '''visit a node and return a AssName node

        `node_name` is supplied by the caller because the underlying _ast
        node is not always a Name node (e.g. a Name inside a target tuple).
        '''
        newnode = new.AssName()
        _set_infos(node, newnode, parent)
        newnode.name = node_name
        # register the binding in the proper scope (local or global)
        self._save_assignment(newnode)
        return newnode

    def visit_augassign(self, node, parent):
        """visit a AugAssign node by returning a fresh instance of it"""
        newnode = new.AugAssign()
        _lineno_parent(node, newnode, parent)
        # e.g. Add -> '+' -> '+='
        newnode.op = _BIN_OP_CLASSES[node.op.__class__] + "="
        self.asscontext = "Ass"
        newnode.target = self.visit(node.target, newnode)
        self.asscontext = None
        newnode.value = self.visit(node.value, newnode)
        newnode.set_line_info(newnode.last_child())
        return newnode
+
    def visit_backquote(self, node, parent):
        """visit a Backquote (`repr` backticks) node by returning a fresh
        instance of it"""
        newnode = new.Backquote()
        _lineno_parent(node, newnode, parent)
        newnode.value = self.visit(node.value, newnode)
        newnode.set_line_info(newnode.last_child())
        return newnode

    def visit_binop(self, node, parent):
        """visit a BinOp node by returning a fresh instance of it"""
        newnode = new.BinOp()
        _lineno_parent(node, newnode, parent)
        newnode.left = self.visit(node.left, newnode)
        newnode.right = self.visit(node.right, newnode)
        # operator class -> textual symbol ('+', '-', ...)
        newnode.op = _BIN_OP_CLASSES[node.op.__class__]
        newnode.set_line_info(newnode.last_child())
        return newnode

    def visit_boolop(self, node, parent):
        """visit a BoolOp node by returning a fresh instance of it"""
        newnode = new.BoolOp()
        _lineno_parent(node, newnode, parent)
        newnode.values = [self.visit(child, newnode) for child in node.values]
        newnode.op = _BOOL_OP_CLASSES[node.op.__class__]
        newnode.set_line_info(newnode.last_child())
        return newnode

    def visit_break(self, node, parent):
        """visit a Break node by returning a fresh instance of it"""
        newnode = new.Break()
        _set_infos(node, newnode, parent)
        return newnode
+
    def visit_callfunc(self, node, parent):
        """visit a CallFunc node by returning a fresh instance of it"""
        newnode = new.CallFunc()
        _lineno_parent(node, newnode, parent)
        newnode.func = self.visit(node.func, newnode)
        newnode.args = [self.visit(child, newnode) for child in node.args]
        if node.starargs is not None:
            newnode.starargs = self.visit(node.starargs, newnode)
        if node.kwargs is not None:
            newnode.kwargs = self.visit(node.kwargs, newnode)
        # keyword arguments are appended to the positional argument list
        newnode.args.extend(self.visit(child, newnode) for child in node.keywords)
        newnode.set_line_info(newnode.last_child())
        return newnode

    def visit_class(self, node, parent):
        """visit a Class node to become astng"""
        # push the current metaclass: nested class bodies inherit it
        self._metaclass.append(self._metaclass[-1])
        newnode = new.Class(node.name, None)
        _lineno_parent(node, newnode, parent)
        _init_set_doc(node, newnode)
        newnode.bases = [self.visit(child, newnode) for child in node.bases]
        newnode.body = [self.visit(child, newnode) for child in node.body]
        if 'decorator_list' in node._fields and node.decorator_list:# py >= 2.6
            newnode.decorators = self.visit_decorators(node, newnode)
        newnode.set_line_info(newnode.last_child())
        metaclass = self._metaclass.pop()
        if not newnode.bases:
            # no base classes, detect new / style old style according to
            # current scope
            newnode._newstyle = metaclass == 'type'
        newnode.parent.frame().set_local(newnode.name, newnode)
        return newnode
+
+ def visit_const(self, node, parent):
+ """visit a Const node by returning a fresh instance of it"""
+ newnode = new.Const(node.value)
+ _set_infos(node, newnode, parent)
+ return newnode
+
+ def visit_continue(self, node, parent):
+ """visit a Continue node by returning a fresh instance of it"""
+ newnode = new.Continue()
+ _set_infos(node, newnode, parent)
+ return newnode
+
+ def visit_compare(self, node, parent):
+ """visit a Compare node by returning a fresh instance of it"""
+ newnode = new.Compare()
+ _lineno_parent(node, newnode, parent)
+ newnode.left = self.visit(node.left, newnode)
+ newnode.ops = [(_CMP_OP_CLASSES[op.__class__], self.visit(expr, newnode))
+ for (op, expr) in zip(node.ops, node.comparators)]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_comprehension(self, node, parent):
+ """visit a Comprehension node by returning a fresh instance of it"""
+ newnode = new.Comprehension()
+ _lineno_parent(node, newnode, parent)
+ self.asscontext = "Ass"
+ newnode.target = self.visit(node.target, newnode)
+ self.asscontext = None
+ newnode.iter = self.visit(node.iter, newnode)
+ newnode.ifs = [self.visit(child, newnode) for child in node.ifs]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_decorators(self, node, parent):
+ """visit a Decorators node by returning a fresh instance of it"""
+ # /!\ node is actually a _ast.Function node while
+ # parent is a astng.nodes.Function node
+ newnode = new.Decorators()
+ _lineno_parent(node, newnode, parent)
+ if 'decorators' in node._fields: # py < 2.6, i.e. 2.5
+ decorators = node.decorators
+ else:
+ decorators= node.decorator_list
+ newnode.nodes = [self.visit(child, newnode) for child in decorators]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_delete(self, node, parent):
+ """visit a Delete node by returning a fresh instance of it"""
+ newnode = new.Delete()
+ _lineno_parent(node, newnode, parent)
+ self.asscontext = "Del"
+ newnode.targets = [self.visit(child, newnode) for child in node.targets]
+ self.asscontext = None
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_dict(self, node, parent):
+ """visit a Dict node by returning a fresh instance of it"""
+ newnode = new.Dict()
+ _lineno_parent(node, newnode, parent)
+ newnode.items = [(self.visit(key, newnode), self.visit(value, newnode))
+ for key, value in zip(node.keys, node.values)]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_dictcomp(self, node, parent):
+ """visit a DictComp node by returning a fresh instance of it"""
+ newnode = new.DictComp()
+ _lineno_parent(node, newnode, parent)
+ newnode.key = self.visit(node.key, newnode)
+ newnode.value = self.visit(node.value, newnode)
+ newnode.generators = [self.visit(child, newnode)
+ for child in node.generators]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_discard(self, node, parent):
+ """visit a Discard node by returning a fresh instance of it"""
+ newnode = new.Discard()
+ _lineno_parent(node, newnode, parent)
+ newnode.value = self.visit(node.value, newnode)
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_ellipsis(self, node, parent):
+ """visit an Ellipsis node by returning a fresh instance of it"""
+ newnode = new.Ellipsis()
+ _set_infos(node, newnode, parent)
+ return newnode
+
+ def visit_emptynode(self, node, parent):
+ """visit an EmptyNode node by returning a fresh instance of it"""
+ newnode = new.EmptyNode()
+ _set_infos(node, newnode, parent)
+ return newnode
+
+ def visit_excepthandler(self, node, parent):
+ """visit an ExceptHandler node by returning a fresh instance of it"""
+ newnode = new.ExceptHandler()
+ _lineno_parent(node, newnode, parent)
+ if node.type is not None:
+ newnode.type = self.visit(node.type, newnode)
+ if node.name is not None:
+ # /!\ node.name can be a tuple
+ self.asscontext = "Ass"
+ newnode.name = self.visit(node.name, newnode)
+ self.asscontext = None
+ newnode.body = [self.visit(child, newnode) for child in node.body]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_exec(self, node, parent):
+ """visit an Exec node by returning a fresh instance of it"""
+ newnode = new.Exec()
+ _lineno_parent(node, newnode, parent)
+ newnode.expr = self.visit(node.body, newnode)
+ if node.globals is not None:
+ newnode.globals = self.visit(node.globals, newnode)
+ if node.locals is not None:
+ newnode.locals = self.visit(node.locals, newnode)
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_extslice(self, node, parent):
+ """visit an ExtSlice node by returning a fresh instance of it"""
+ newnode = new.ExtSlice()
+ _lineno_parent(node, newnode, parent)
+ newnode.dims = [self.visit(dim, newnode) for dim in node.dims]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_for(self, node, parent):
+ """visit a For node by returning a fresh instance of it"""
+ newnode = new.For()
+ _lineno_parent(node, newnode, parent)
+ self.asscontext = "Ass"
+ newnode.target = self.visit(node.target, newnode)
+ self.asscontext = None
+ newnode.iter = self.visit(node.iter, newnode)
+ newnode.body = [self.visit(child, newnode) for child in node.body]
+ newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_from(self, node, parent):
+ """visit a From node by returning a fresh instance of it"""
+ names = [(alias.name, alias.asname) for alias in node.names]
+ newnode = new.From(node.module or '', names, node.level or None)
+ _set_infos(node, newnode, parent)
+ # store From names to add them to locals after building
+ self._from_nodes.append(newnode)
+ return newnode
+
    def visit_function(self, node, parent):
        """visit an Function node to become astng

        Return a new astng Function node registered in its parent frame's
        locals; when the parent frame is a class, the method type
        (method / classmethod / staticmethod) is derived from the name and
        the decorators.
        """
        # push a fresh mapping collecting 'global' declarations in this body
        self._global_names.append({})
        newnode = new.Function(node.name, None)
        _lineno_parent(node, newnode, parent)
        _init_set_doc(node, newnode)
        newnode.args = self.visit(node.args, newnode)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        if 'decorators' in node._fields: # py < 2.6
            attr = 'decorators'
        else:
            attr = 'decorator_list'
        decorators = getattr(node, attr)
        if decorators:
            newnode.decorators = self.visit_decorators(node, newnode)
        newnode.set_line_info(newnode.last_child())
        # leave this function's 'global' scope
        self._global_names.pop()
        frame = newnode.parent.frame()
        if isinstance(frame, new.Class):
            if newnode.name == '__new__':
                # __new__ is implicitly a class method
                newnode.type = 'classmethod'
            else:
                newnode.type = 'method'
            if newnode.decorators is not None:
                # honour classmethod/staticmethod (and classproperty)
                # decorators, overriding the default type computed above
                for decorator_expr in newnode.decorators.nodes:
                    if isinstance(decorator_expr, new.Name):
                        if decorator_expr.name in ('classmethod', 'staticmethod'):
                            newnode.type = decorator_expr.name
                        elif decorator_expr.name == 'classproperty':
                            newnode.type = 'classmethod'
        frame.set_local(newnode.name, newnode)
        return newnode
+
+ def visit_genexpr(self, node, parent):
+ """visit a GenExpr node by returning a fresh instance of it"""
+ newnode = new.GenExpr()
+ _lineno_parent(node, newnode, parent)
+ newnode.elt = self.visit(node.elt, newnode)
+ newnode.generators = [self.visit(child, newnode) for child in node.generators]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
    def visit_getattr(self, node, parent):
        """visit a Getattr node by returning a fresh instance of it

        Depending on the current assignment context the node becomes a
        DelAttr, an AssAttr or a plain Getattr.
        """
        if self.asscontext == "Del":
            # FIXME : maybe we should reintroduce and visit_delattr ?
            # for instance, deactivating asscontext
            newnode = new.DelAttr()
        elif self.asscontext == "Ass":
            # FIXME : maybe we should call visit_assattr ?
            newnode = new.AssAttr()
            # assignment resolution is delayed until the whole tree is built
            self._delayed_assattr.append(newnode)
        else:
            newnode = new.Getattr()
        _lineno_parent(node, newnode, parent)
        # the attribute's expression is evaluated, not assigned to: clear
        # the assignment context while visiting it, then restore it
        asscontext, self.asscontext = self.asscontext, None
        newnode.expr = self.visit(node.value, newnode)
        self.asscontext = asscontext
        newnode.attrname = node.attr
        newnode.set_line_info(newnode.last_child())
        return newnode
+
+ def visit_global(self, node, parent):
+ """visit an Global node to become astng"""
+ newnode = new.Global(node.names)
+ _set_infos(node, newnode, parent)
+ if self._global_names: # global at the module level, no effect
+ for name in node.names:
+ self._global_names[-1].setdefault(name, []).append(newnode)
+ return newnode
+
+ def visit_if(self, node, parent):
+ """visit a If node by returning a fresh instance of it"""
+ newnode = new.If()
+ _lineno_parent(node, newnode, parent)
+ newnode.test = self.visit(node.test, newnode)
+ newnode.body = [self.visit(child, newnode) for child in node.body]
+ newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_ifexp(self, node, parent):
+ """visit a IfExp node by returning a fresh instance of it"""
+ newnode = new.IfExp()
+ _lineno_parent(node, newnode, parent)
+ newnode.test = self.visit(node.test, newnode)
+ newnode.body = self.visit(node.body, newnode)
+ newnode.orelse = self.visit(node.orelse, newnode)
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_import(self, node, parent):
+ """visit a Import node by returning a fresh instance of it"""
+ newnode = new.Import()
+ _set_infos(node, newnode, parent)
+ newnode.names = [(alias.name, alias.asname) for alias in node.names]
+ # save import names in parent's locals:
+ for (name, asname) in newnode.names:
+ name = asname or name
+ newnode.parent.set_local(name.split('.')[0], newnode)
+ return newnode
+
+ def visit_index(self, node, parent):
+ """visit a Index node by returning a fresh instance of it"""
+ newnode = new.Index()
+ _lineno_parent(node, newnode, parent)
+ newnode.value = self.visit(node.value, newnode)
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_keyword(self, node, parent):
+ """visit a Keyword node by returning a fresh instance of it"""
+ newnode = new.Keyword()
+ _lineno_parent(node, newnode, parent)
+ newnode.arg = node.arg
+ newnode.value = self.visit(node.value, newnode)
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_lambda(self, node, parent):
+ """visit a Lambda node by returning a fresh instance of it"""
+ newnode = new.Lambda()
+ _lineno_parent(node, newnode, parent)
+ newnode.args = self.visit(node.args, newnode)
+ newnode.body = self.visit(node.body, newnode)
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_list(self, node, parent):
+ """visit a List node by returning a fresh instance of it"""
+ newnode = new.List()
+ _lineno_parent(node, newnode, parent)
+ newnode.elts = [self.visit(child, newnode) for child in node.elts]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_listcomp(self, node, parent):
+ """visit a ListComp node by returning a fresh instance of it"""
+ newnode = new.ListComp()
+ _lineno_parent(node, newnode, parent)
+ newnode.elt = self.visit(node.elt, newnode)
+ newnode.generators = [self.visit(child, newnode)
+ for child in node.generators]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_module(self, node, modname, package):
+ """visit a Module node by returning a fresh instance of it"""
+ newnode = new.Module(modname, None)
+ newnode.package = package
+ _lineno_parent(node, newnode, parent=None)
+ _init_set_doc(node, newnode)
+ newnode.body = [self.visit(child, newnode) for child in node.body]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
    def visit_name(self, node, parent):
        """visit a Name node by returning a fresh instance of it

        The result is a DelName/AssName in a deletion/assignment context,
        a Const for identifiers listed in CONST_NAME_TRANSFORMS, or a
        plain Name otherwise.
        """
        # True and False can be assigned to something in py2x, so we have to
        # check first the asscontext
        if self.asscontext == "Del":
            newnode = new.DelName()
        elif self.asscontext is not None: # Ass
            assert self.asscontext == "Ass"
            newnode = new.AssName()
        elif node.id in CONST_NAME_TRANSFORMS:
            # constant-like names (e.g. True/False/None) become Const nodes
            newnode = new.Const(CONST_NAME_TRANSFORMS[node.id])
            _set_infos(node, newnode, parent)
            return newnode
        else:
            newnode = new.Name()
        _lineno_parent(node, newnode, parent)
        newnode.name = node.id
        # XXX REMOVE me :
        if self.asscontext in ('Del', 'Ass'): # 'Aug' ??
            self._save_assignment(newnode)
        newnode.set_line_info(newnode.last_child())
        return newnode
+
+ def visit_bytes(self, node, parent):
+ """visit a Bytes node by returning a fresh instance of Const"""
+ newnode = new.Const(node.s)
+ _set_infos(node, newnode, parent)
+ return newnode
+
+ def visit_num(self, node, parent):
+ """visit a Num node by returning a fresh instance of Const"""
+ newnode = new.Const(node.n)
+ _set_infos(node, newnode, parent)
+ return newnode
+
+ def visit_pass(self, node, parent):
+ """visit a Pass node by returning a fresh instance of it"""
+ newnode = new.Pass()
+ _set_infos(node, newnode, parent)
+ return newnode
+
+ def visit_str(self, node, parent):
+ """visit a Str node by returning a fresh instance of Const"""
+ newnode = new.Const(node.s)
+ _set_infos(node, newnode, parent)
+ return newnode
+
+ def visit_print(self, node, parent):
+ """visit a Print node by returning a fresh instance of it"""
+ newnode = new.Print()
+ _lineno_parent(node, newnode, parent)
+ newnode.nl = node.nl
+ if node.dest is not None:
+ newnode.dest = self.visit(node.dest, newnode)
+ newnode.values = [self.visit(child, newnode) for child in node.values]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_raise(self, node, parent):
+ """visit a Raise node by returning a fresh instance of it"""
+ newnode = new.Raise()
+ _lineno_parent(node, newnode, parent)
+ if node.type is not None:
+ newnode.exc = self.visit(node.type, newnode)
+ if node.inst is not None:
+ newnode.inst = self.visit(node.inst, newnode)
+ if node.tback is not None:
+ newnode.tback = self.visit(node.tback, newnode)
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_return(self, node, parent):
+ """visit a Return node by returning a fresh instance of it"""
+ newnode = new.Return()
+ _lineno_parent(node, newnode, parent)
+ if node.value is not None:
+ newnode.value = self.visit(node.value, newnode)
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_set(self, node, parent):
+ """visit a Tuple node by returning a fresh instance of it"""
+ newnode = new.Set()
+ _lineno_parent(node, newnode, parent)
+ newnode.elts = [self.visit(child, newnode) for child in node.elts]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_setcomp(self, node, parent):
+ """visit a SetComp node by returning a fresh instance of it"""
+ newnode = new.SetComp()
+ _lineno_parent(node, newnode, parent)
+ newnode.elt = self.visit(node.elt, newnode)
+ newnode.generators = [self.visit(child, newnode)
+ for child in node.generators]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_slice(self, node, parent):
+ """visit a Slice node by returning a fresh instance of it"""
+ newnode = new.Slice()
+ _lineno_parent(node, newnode, parent)
+ if node.lower is not None:
+ newnode.lower = self.visit(node.lower, newnode)
+ if node.upper is not None:
+ newnode.upper = self.visit(node.upper, newnode)
+ if node.step is not None:
+ newnode.step = self.visit(node.step, newnode)
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_subscript(self, node, parent):
+ """visit a Subscript node by returning a fresh instance of it"""
+ newnode = new.Subscript()
+ _lineno_parent(node, newnode, parent)
+ subcontext, self.asscontext = self.asscontext, None
+ newnode.value = self.visit(node.value, newnode)
+ newnode.slice = self.visit(node.slice, newnode)
+ self.asscontext = subcontext
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_tryexcept(self, node, parent):
+ """visit a TryExcept node by returning a fresh instance of it"""
+ newnode = new.TryExcept()
+ _lineno_parent(node, newnode, parent)
+ newnode.body = [self.visit(child, newnode) for child in node.body]
+ newnode.handlers = [self.visit(child, newnode) for child in node.handlers]
+ newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_tryfinally(self, node, parent):
+ """visit a TryFinally node by returning a fresh instance of it"""
+ newnode = new.TryFinally()
+ _lineno_parent(node, newnode, parent)
+ newnode.body = [self.visit(child, newnode) for child in node.body]
+ newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_tuple(self, node, parent):
+ """visit a Tuple node by returning a fresh instance of it"""
+ newnode = new.Tuple()
+ _lineno_parent(node, newnode, parent)
+ newnode.elts = [self.visit(child, newnode) for child in node.elts]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_unaryop(self, node, parent):
+ """visit a UnaryOp node by returning a fresh instance of it"""
+ newnode = new.UnaryOp()
+ _lineno_parent(node, newnode, parent)
+ newnode.operand = self.visit(node.operand, newnode)
+ newnode.op = _UNARY_OP_CLASSES[node.op.__class__]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+ def visit_while(self, node, parent):
+ """visit a While node by returning a fresh instance of it"""
+ newnode = new.While()
+ _lineno_parent(node, newnode, parent)
+ newnode.test = self.visit(node.test, newnode)
+ newnode.body = [self.visit(child, newnode) for child in node.body]
+ newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
    def visit_with(self, node, parent):
        """visit a With node by returning a fresh instance of it"""
        newnode = new.With()
        _lineno_parent(node, newnode, parent)
        # python 3.3 moved the context manager into a list of 'withitem'
        # nodes; only the first item is handled here
        _node = getattr(node, 'items', [node])[0] # python 3.3 XXX
        newnode.expr = self.visit(_node.context_expr, newnode)
        # the optional 'as' target is an assignment
        self.asscontext = "Ass"
        if _node.optional_vars is not None:
            newnode.vars = self.visit(_node.optional_vars, newnode)
        self.asscontext = None
        newnode.body = [self.visit(child, newnode) for child in node.body]
        newnode.set_line_info(newnode.last_child())
        return newnode
+
+ def visit_yield(self, node, parent):
+ """visit a Yield node by returning a fresh instance of it"""
+ newnode = new.Yield()
+ _lineno_parent(node, newnode, parent)
+ if node.value is not None:
+ newnode.value = self.visit(node.value, newnode)
+ newnode.set_line_info(newnode.last_child())
+ return newnode
+
+
class TreeRebuilder3k(TreeRebuilder):
    """extend and overwrite TreeRebuilder for python3k"""

    def visit_arg(self, node, parent):
        """visit a arg node by returning a fresh AssName instance"""
        # the <arg> node is coming from py>=3.0, but we use AssName in py2.x
        # XXX or we should instead introduce a Arg node in astng ?
        return self.visit_assname(node, parent, node.arg)

    def visit_excepthandler(self, node, parent):
        """visit an ExceptHandler node by returning a fresh instance of it"""
        newnode = new.ExceptHandler()
        _lineno_parent(node, newnode, parent)
        if node.type is not None:
            newnode.type = self.visit(node.type, newnode)
        if node.name is not None:
            # in python 3 the handler name is a plain identifier string
            newnode.name = self.visit_assname(node, newnode, node.name)
        newnode.body = [self.visit(child, newnode) for child in node.body]
        newnode.set_line_info(newnode.last_child())
        return newnode

    def visit_nonlocal(self, node, parent):
        """visit a Nonlocal node and return a new instance of it"""
        newnode = new.Nonlocal(node.names)
        _set_infos(node, newnode, parent)
        return newnode

    def visit_raise(self, node, parent):
        """visit a Raise node by returning a fresh instance of it"""
        newnode = new.Raise()
        _lineno_parent(node, newnode, parent)
        # no traceback; anyway it is not used in Pylint
        if node.exc is not None:
            newnode.exc = self.visit(node.exc, newnode)
        if node.cause is not None:
            newnode.cause = self.visit(node.cause, newnode)
        newnode.set_line_info(newnode.last_child())
        return newnode

    def visit_starred(self, node, parent):
        """visit a Starred node and return a new instance of it"""
        newnode = new.Starred()
        _lineno_parent(node, newnode, parent)
        newnode.value = self.visit(node.value, newnode)
        newnode.set_line_info(newnode.last_child())
        return newnode

    def visit_try(self, node, parent):
        """Rebuild a python 3.3 Try node as the legacy TryFinally /
        TryExcept pair used by astng.
        """
        # python 3.3 introduce a new Try node replacing TryFinally/TryExcept nodes
        if node.finalbody:
            newnode = new.TryFinally()
            _lineno_parent(node, newnode, parent)
            newnode.finalbody = [self.visit(n, newnode) for n in node.finalbody]
            if node.handlers:
                # the except clauses are wrapped in a nested TryExcept whose
                # parent must be the TryFinally node (not the outer parent),
                # and whose children must be parented to it, so scope lookups
                # walk a consistent tree
                excnode = new.TryExcept()
                _lineno_parent(node, excnode, newnode)
                excnode.body = [self.visit(child, excnode) for child in node.body]
                excnode.handlers = [self.visit(child, excnode) for child in node.handlers]
                excnode.orelse = [self.visit(child, excnode) for child in node.orelse]
                excnode.set_line_info(excnode.last_child())
                newnode.body = [excnode]
            else:
                newnode.body = [self.visit(child, newnode) for child in node.body]
        elif node.handlers:
            newnode = new.TryExcept()
            _lineno_parent(node, newnode, parent)
            newnode.body = [self.visit(child, newnode) for child in node.body]
            newnode.handlers = [self.visit(child, newnode) for child in node.handlers]
            newnode.orelse = [self.visit(child, newnode) for child in node.orelse]
        newnode.set_line_info(newnode.last_child())
        return newnode
+
+
# under python 3, replace the default rebuilder with the py3k variant
if sys.version_info >= (3, 0):
    TreeRebuilder = TreeRebuilder3k
+
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/scoped_nodes.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/scoped_nodes.py
@@ -0,0 +1,981 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""This module contains the classes for "scoped" node, i.e. which are opening a
+new local scope in the language definition : Module, Class, Function (and
+Lambda, GenExpr, DictComp and SetComp to some extent).
+"""
+from __future__ import with_statement
+
+__doctype__ = "restructuredtext en"
+
+import sys
+from itertools import chain
+
+from ..common.compat import builtins
+from ..common.decorators import cached
+
+from . import BUILTINS_MODULE
+from .exceptions import NotFoundError, NoDefault, \
+ ASTNGBuildingException, InferenceError
+from .node_classes import Const, DelName, DelAttr, \
+ Dict, From, List, Name, Pass, Raise, Return, Tuple, Yield, \
+ are_exclusive, LookupMixIn, const_factory as cf, unpack_infer
+from .bases import NodeNG, InferenceContext, Instance,\
+ YES, Generator, UnboundMethod, BoundMethod, _infer_stmts, copy_context, \
+ BUILTINS_NAME
+from .mixins import FilterStmtsMixin
+from .bases import Statement
+from .manager import ASTNGManager
+
+
def remove_nodes(func, cls):
    """Decorate `func` so nodes of type `cls` are filtered out of its
    result; raise NotFoundError when nothing remains.
    """
    def wrapper(*args, **kwargs):
        filtered = [node for node in func(*args, **kwargs)
                    if not isinstance(node, cls)]
        if not filtered:
            raise NotFoundError()
        return filtered
    return wrapper
+
+
def function_to_method(n, klass):
    """Wrap a Function node as a bound or unbound method of `klass`
    according to its type; anything else is returned unchanged.
    """
    if not isinstance(n, Function):
        return n
    if n.type == 'classmethod':
        return BoundMethod(n, klass)
    if n.type == 'staticmethod':
        return n
    return UnboundMethod(n)
+
def std_special_attributes(self, name, add_locals=True):
    """Return the nodes for the standard special attribute `name`
    (__name__, __doc__ or __dict__); raise NotFoundError otherwise.

    When `add_locals` is true, locally defined nodes for that name are
    appended to the result.
    """
    local_defs = self.locals if add_locals else {}
    if name == '__name__':
        return [cf(self.name)] + local_defs.get(name, [])
    if name == '__doc__':
        return [cf(self.doc)] + local_defs.get(name, [])
    if name == '__dict__':
        return [Dict()] + local_defs.get(name, [])
    raise NotFoundError(name)
+
MANAGER = ASTNGManager()

def builtin_lookup(name):
    """lookup a name into the builtin module
    return the list of matching statements and the astng for the builtin
    module
    """
    builtin_astng = MANAGER.astng_from_module(builtins)
    if name == '__dict__':
        return builtin_astng, ()
    # missing names simply yield an empty statement list
    return builtin_astng, builtin_astng.locals.get(name, ())
+
+
+# TODO move this Mixin to mixins.py; problem: 'Function' in _scope_lookup
class LocalsDictNodeNG(LookupMixIn, NodeNG):
    """ this class provides locals handling common to Module, Function
    and Class nodes, including a dict like interface for direct access
    to locals information
    """

    # attributes below are set by the builder module or by raw factories

    # dictionary of locals with name as key and node defining the local as
    # value

    def qname(self):
        """return the 'qualified' name of the node, eg module.name,
        module.class.name ...
        """
        if self.parent is None:
            # no parent frame: this is a top-level node, its own name is
            # already fully qualified
            return self.name
        return '%s.%s' % (self.parent.frame().qname(), self.name)

    def frame(self):
        """return the first parent frame node (i.e. Module, Function or Class)

        a scoped node is its own frame
        """
        return self

    def scope(self):
        """return the first node defining a new scope (i.e. Module,
        Function, Class, Lambda but also GenExpr, DictComp and SetComp)

        a scoped node is its own scope
        """
        return self


    def _scope_lookup(self, node, name, offset=0):
        """XXX method for interfacing the scope lookup

        return a (scope, statements) tuple for `name` as visible from
        `node` in this scope
        """
        try:
            stmts = node._filter_stmts(self.locals[name], self, offset)
        except KeyError:
            stmts = ()
        if stmts:
            return self, stmts
        if self.parent: # i.e. not Module
            # nested scope: if parent scope is a function, that's fine
            # else jump to the module
            pscope = self.parent.scope()
            if not pscope.is_function:
                pscope = pscope.root()
            return pscope.scope_lookup(node, name)
        return builtin_lookup(name) # Module



    def set_local(self, name, stmt):
        """define <name> in locals (<stmt> is the node defining the name)
        if the node is a Module node (i.e. has globals), add the name to
        globals

        if the name is already defined, ignore it
        """
        #assert not stmt in self.locals.get(name, ()), (self, stmt)
        self.locals.setdefault(name, []).append(stmt)

    __setitem__ = set_local

    def _append_node(self, child):
        """append a child, linking it in the tree"""
        self.body.append(child)
        child.parent = self

    def add_local_node(self, child_node, name=None):
        """append a child which should alter locals to the given node"""
        if name != '__class__':
            # add __class__ node as a child will cause infinite recursion later!
            self._append_node(child_node)
        self.set_local(name or child_node.name, child_node)


    def __getitem__(self, item):
        """method from the `dict` interface returning the first node
        associated with the given name in the locals dictionary

        :type item: str
        :param item: the name of the locally defined object
        :raises KeyError: if the name is not defined
        """
        return self.locals[item][0]

    def __iter__(self):
        """method from the `dict` interface returning an iterator on
        `self.keys()`
        """
        return iter(self.keys())

    def keys(self):
        """method from the `dict` interface returning a tuple containing
        locally defined names
        """
        return self.locals.keys()

    def values(self):
        """method from the `dict` interface returning a tuple containing
        locally defined nodes which are instance of `Function` or `Class`
        """
        return [self[key] for key in self.keys()]

    def items(self):
        """method from the `dict` interface returning a list of tuple
        containing each locally defined name with its associated node,
        which is an instance of `Function` or `Class`
        """
        return zip(self.keys(), self.values())


    def __contains__(self, name):
        """return True if `name` is defined in this scope's locals"""
        return name in self.locals
    has_key = __contains__
+
+# Module #####################################################################
+
+class Module(LocalsDictNodeNG):
+ _astng_fields = ('body',)
+
+ fromlineno = 0
+ lineno = 0
+
+ # attributes below are set by the builder module or by raw factories
+
+ # the file from which as been extracted the astng representation. It may
+ # be None if the representation has been built from a built-in module
+ file = None
+ # encoding of python source file, so we can get unicode out of it (python2
+ # only)
+ file_encoding = None
+ # the module name
+ name = None
+ # boolean for astng built from source (i.e. ast)
+ pure_python = None
+ # boolean for package module
+ package = None
+ # dictionary of globals with name as key and node defining the global
+ # as value
+ globals = None
+
+ # names of python special attributes (handled by getattr impl.)
+ special_attributes = set(('__name__', '__doc__', '__file__', '__path__',
+ '__dict__'))
+ # names of module attributes available through the global scope
+ scope_attrs = set(('__name__', '__doc__', '__file__', '__path__'))
+
+ def __init__(self, name, doc, pure_python=True):
+ self.name = name
+ self.doc = doc
+ self.pure_python = pure_python
+ self.locals = self.globals = {}
+ self.body = []
+
+ @property
+ def file_stream(self):
+ if self.file is not None:
+ return open(self.file)
+ return None
+
+ def block_range(self, lineno):
+ """return block line numbers.
+
+ start from the beginning whatever the given lineno
+ """
+ return self.fromlineno, self.tolineno
+
+ def scope_lookup(self, node, name, offset=0):
+ if name in self.scope_attrs and not name in self.locals:
+ try:
+ return self, self.getattr(name)
+ except NotFoundError:
+ return self, ()
+ return self._scope_lookup(node, name, offset)
+
+ def pytype(self):
+ return '%s.module' % BUILTINS_MODULE
+
+ def display_type(self):
+ return 'Module'
+
+ def getattr(self, name, context=None, ignore_locals=False):
+ if name in self.special_attributes:
+ if name == '__file__':
+ return [cf(self.file)] + self.locals.get(name, [])
+ if name == '__path__' and self.package:
+ return [List()] + self.locals.get(name, [])
+ return std_special_attributes(self, name)
+ if not ignore_locals and name in self.locals:
+ return self.locals[name]
+ if self.package:
+ try:
+ return [self.import_module(name, relative_only=True)]
+ except ASTNGBuildingException:
+ raise NotFoundError(name)
+ except Exception:# XXX pylint tests never pass here; do we need it?
+ import traceback
+ traceback.print_exc()
+ raise NotFoundError(name)
+ getattr = remove_nodes(getattr, DelName)
+
+ def igetattr(self, name, context=None):
+ """inferred getattr"""
+ # set lookup name since this is necessary to infer on import nodes for
+ # instance
+ context = copy_context(context)
+ context.lookupname = name
+ try:
+ return _infer_stmts(self.getattr(name, context), context, frame=self)
+ except NotFoundError:
+ raise InferenceError(name)
+
+ def fully_defined(self):
+ """return True if this module has been built from a .py file
+ and so contains a complete representation including the code
+ """
+ return self.file is not None and self.file.endswith('.py')
+
+ def statement(self):
+ """return the first parent node marked as statement node
+ consider a module as a statement...
+ """
+ return self
+
+ def previous_sibling(self):
+ """module has no sibling"""
+ return
+
+ def next_sibling(self):
+ """module has no sibling"""
+ return
+
+ if sys.version_info < (2, 8):
+ def absolute_import_activated(self):
+ for stmt in self.locals.get('absolute_import', ()):
+ if isinstance(stmt, From) and stmt.modname == '__future__':
+ return True
+ return False
+ else:
+ absolute_import_activated = lambda self: True
+
+ def import_module(self, modname, relative_only=False, level=None):
+ """import the given module considering self as context"""
+ if relative_only and level is None:
+ level = 0
+ absmodname = self.relative_to_absolute_name(modname, level)
+ try:
+ return MANAGER.astng_from_module_name(absmodname)
+ except ASTNGBuildingException:
+ # we only want to import a sub module or package of this module,
+ # skip here
+ if relative_only:
+ raise
+ return MANAGER.astng_from_module_name(modname)
+
+ def relative_to_absolute_name(self, modname, level):
+ """return the absolute module name for a relative import.
+
+ The relative import can be implicit or explicit.
+ """
+        # XXX this returns nonsense when called on an absolute import
+        # like 'pylint.checkers.logilab.astng.utils'
+        # XXX doesn't return an absolute name if self.name isn't absolute
+ if self.absolute_import_activated() and level is None:
+ return modname
+ if level:
+ if self.package:
+ level = level - 1
+ package_name = self.name.rsplit('.', level)[0]
+ elif self.package:
+ package_name = self.name
+ else:
+ package_name = self.name.rsplit('.', 1)[0]
+ if package_name:
+ if not modname:
+ return package_name
+ return '%s.%s' % (package_name, modname)
+ return modname
+
+
+ def wildcard_import_names(self):
+ """return the list of imported names when this module is 'wildcard
+ imported'
+
+ It doesn't include the '__builtins__' name which is added by the
+ current CPython implementation of wildcard imports.
+ """
+ # take advantage of a living module if it exists
+ try:
+ living = sys.modules[self.name]
+ except KeyError:
+ pass
+ else:
+ try:
+ return living.__all__
+ except AttributeError:
+ return [name for name in living.__dict__.keys()
+ if not name.startswith('_')]
+ # else lookup the astng
+ #
+ # We separate the different steps of lookup in try/excepts
+ # to avoid catching too many Exceptions
+ # However, we can not analyse dynamically constructed __all__
+ try:
+ all = self['__all__']
+ except KeyError:
+ return [name for name in self.keys() if not name.startswith('_')]
+ try:
+ explicit = all.assigned_stmts().next()
+ except InferenceError:
+ return [name for name in self.keys() if not name.startswith('_')]
+ except AttributeError:
+ # not an assignment node
+ # XXX infer?
+ return [name for name in self.keys() if not name.startswith('_')]
+ try:
+ # should be a Tuple/List of constant string / 1 string not allowed
+ return [const.value for const in explicit.elts]
+ except AttributeError:
+ return [name for name in self.keys() if not name.startswith('_')]
+
+
+class ComprehensionScope(LocalsDictNodeNG):
+ def frame(self):
+ return self.parent.frame()
+
+ scope_lookup = LocalsDictNodeNG._scope_lookup
+
+
+class GenExpr(ComprehensionScope):
+ _astng_fields = ('elt', 'generators')
+
+ def __init__(self):
+ self.locals = {}
+ self.elt = None
+ self.generators = []
+
+
+class DictComp(ComprehensionScope):
+ _astng_fields = ('key', 'value', 'generators')
+
+ def __init__(self):
+ self.locals = {}
+ self.key = None
+ self.value = None
+ self.generators = []
+
+
+class SetComp(ComprehensionScope):
+ _astng_fields = ('elt', 'generators')
+
+ def __init__(self):
+ self.locals = {}
+ self.elt = None
+ self.generators = []
+
+
+class _ListComp(NodeNG):
+ """class representing a ListComp node"""
+ _astng_fields = ('elt', 'generators')
+ elt = None
+ generators = None
+
+if sys.version_info >= (3, 0):
+ class ListComp(_ListComp, ComprehensionScope):
+ """class representing a ListComp node"""
+ def __init__(self):
+ self.locals = {}
+else:
+ class ListComp(_ListComp):
+ """class representing a ListComp node"""
+
+# Function ###################################################################
+
+
+class Lambda(LocalsDictNodeNG, FilterStmtsMixin):
+ _astng_fields = ('args', 'body',)
+ name = '<lambda>'
+
+ # function's type, 'function' | 'method' | 'staticmethod' | 'classmethod'
+ type = 'function'
+
+ def __init__(self):
+ self.locals = {}
+ self.args = []
+ self.body = []
+
+ def pytype(self):
+ if 'method' in self.type:
+ return '%s.instancemethod' % BUILTINS_MODULE
+ return '%s.function' % BUILTINS_MODULE
+
+ def display_type(self):
+ if 'method' in self.type:
+ return 'Method'
+ return 'Function'
+
+ def callable(self):
+ return True
+
+ def argnames(self):
+ """return a list of argument names"""
+ if self.args.args: # maybe None with builtin functions
+ names = _rec_get_names(self.args.args)
+ else:
+ names = []
+ if self.args.vararg:
+ names.append(self.args.vararg)
+ if self.args.kwarg:
+ names.append(self.args.kwarg)
+ return names
+
+ def infer_call_result(self, caller, context=None):
+ """infer what a function is returning when called"""
+ return self.body.infer(context)
+
+ def scope_lookup(self, node, name, offset=0):
+ if node in self.args.defaults:
+ frame = self.parent.frame()
+ # line offset to avoid that def func(f=func) resolve the default
+ # value to the defined function
+ offset = -1
+ else:
+ # check this is not used in function decorators
+ frame = self
+ return frame._scope_lookup(node, name, offset)
+
+
+class Function(Statement, Lambda):
+ _astng_fields = ('decorators', 'args', 'body')
+
+ special_attributes = set(('__name__', '__doc__', '__dict__'))
+ is_function = True
+ # attributes below are set by the builder module or by raw factories
+ blockstart_tolineno = None
+ decorators = None
+
+ def __init__(self, name, doc):
+ self.locals = {}
+ self.args = []
+ self.body = []
+ self.decorators = None
+ self.name = name
+ self.doc = doc
+ self.extra_decorators = []
+ self.instance_attrs = {}
+
+ def set_line_info(self, lastchild):
+ self.fromlineno = self.lineno
+ # lineno is the line number of the first decorator, we want the def statement lineno
+ if self.decorators is not None:
+ self.fromlineno += sum(node.tolineno - node.lineno + 1
+ for node in self.decorators.nodes)
+ self.tolineno = lastchild.tolineno
+ self.blockstart_tolineno = self.args.tolineno
+
+ def block_range(self, lineno):
+ """return block line numbers.
+
+ start from the "def" position whatever the given lineno
+ """
+ return self.fromlineno, self.tolineno
+
+ def getattr(self, name, context=None):
+ """this method doesn't look in the instance_attrs dictionary since it's
+ done by an Instance proxy at inference time.
+ """
+ if name == '__module__':
+ return [cf(self.root().qname())]
+ if name in self.instance_attrs:
+ return self.instance_attrs[name]
+ return std_special_attributes(self, name, False)
+
+ def is_method(self):
+ """return true if the function node should be considered as a method"""
+ # check we are defined in a Class, because this is usually expected
+ # (e.g. pylint...) when is_method() return True
+ return self.type != 'function' and isinstance(self.parent.frame(), Class)
+
+ def decoratornames(self):
+ """return a list of decorator qualified names"""
+ result = set()
+ decoratornodes = []
+ if self.decorators is not None:
+ decoratornodes += self.decorators.nodes
+ decoratornodes += self.extra_decorators
+ for decnode in decoratornodes:
+ for infnode in decnode.infer():
+ result.add(infnode.qname())
+ return result
+ decoratornames = cached(decoratornames)
+
+ def is_bound(self):
+ """return true if the function is bound to an Instance or a class"""
+ return self.type == 'classmethod'
+
+ def is_abstract(self, pass_is_abstract=True):
+        """return true if the method is abstract
+        It is considered abstract if its only statement is a raise of
+        NotImplementedError, or, if pass_is_abstract, a pass statement
+        """
+ for child_node in self.body:
+ if isinstance(child_node, Raise):
+ if child_node.raises_not_implemented():
+ return True
+ if pass_is_abstract and isinstance(child_node, Pass):
+ return True
+ return False
+ # empty function is the same as function with a single "pass" statement
+ if pass_is_abstract:
+ return True
+
+ def is_generator(self):
+ """return true if this is a generator function"""
+ # XXX should be flagged, not computed
+ try:
+ return self.nodes_of_class(Yield, skip_klass=Function).next()
+ except StopIteration:
+ return False
+
+ def infer_call_result(self, caller, context=None):
+ """infer what a function is returning when called"""
+ if self.is_generator():
+ yield Generator(self)
+ return
+ returns = self.nodes_of_class(Return, skip_klass=Function)
+ for returnnode in returns:
+ if returnnode.value is None:
+ yield Const(None)
+ else:
+ try:
+ for infered in returnnode.value.infer(context):
+ yield infered
+ except InferenceError:
+ yield YES
+
+
+def _rec_get_names(args, names=None):
+ """return a list of all argument names"""
+ if names is None:
+ names = []
+ for arg in args:
+ if isinstance(arg, Tuple):
+ _rec_get_names(arg.elts, names)
+ else:
+ names.append(arg.name)
+ return names
+
+
+# Class ######################################################################
+
+def _class_type(klass, ancestors=None):
+ """return a Class node type to differ metaclass, interface and exception
+ from 'regular' classes
+ """
+    # XXX we have to store ancestors in case we have an ancestor loop
+ if klass._type is not None:
+ return klass._type
+ if klass.name == 'type':
+ klass._type = 'metaclass'
+ elif klass.name.endswith('Interface'):
+ klass._type = 'interface'
+ elif klass.name.endswith('Exception'):
+ klass._type = 'exception'
+ else:
+ if ancestors is None:
+ ancestors = set()
+ if klass in ancestors:
+ # XXX we are in loop ancestors, and have found no type
+ klass._type = 'class'
+ return 'class'
+ ancestors.add(klass)
+ # print >> sys.stderr, '_class_type', repr(klass)
+ for base in klass.ancestors(recurs=False):
+ if _class_type(base, ancestors) != 'class':
+ klass._type = base.type
+ break
+ if klass._type is None:
+ klass._type = 'class'
+ return klass._type
+
+def _iface_hdlr(iface_node):
+ """a handler function used by interfaces to handle suspicious
+ interface nodes
+ """
+ return True
+
+
+class Class(Statement, LocalsDictNodeNG, FilterStmtsMixin):
+
+ # some of the attributes below are set by the builder module or
+ # by a raw factories
+
+ # a dictionary of class instances attributes
+ _astng_fields = ('decorators', 'bases', 'body') # name
+
+ decorators = None
+ special_attributes = set(('__name__', '__doc__', '__dict__', '__module__',
+ '__bases__', '__mro__', '__subclasses__'))
+ blockstart_tolineno = None
+
+ _type = None
+ type = property(_class_type,
+                    doc="class' type, possible values are 'class' | "
+ "'metaclass' | 'interface' | 'exception'")
+
+ def __init__(self, name, doc):
+ self.instance_attrs = {}
+ self.locals = {}
+ self.bases = []
+ self.body = []
+ self.name = name
+ self.doc = doc
+
+ def _newstyle_impl(self, context=None):
+ if context is None:
+ context = InferenceContext()
+ if self._newstyle is not None:
+ return self._newstyle
+ for base in self.ancestors(recurs=False, context=context):
+ if base._newstyle_impl(context):
+ self._newstyle = True
+ break
+ if self._newstyle is None:
+ self._newstyle = False
+ return self._newstyle
+
+ _newstyle = None
+ newstyle = property(_newstyle_impl,
+                        doc="boolean indicating if it's a new style class "
+                            "or not")
+
+ def set_line_info(self, lastchild):
+ self.fromlineno = self.lineno
+ self.blockstart_tolineno = self.bases and self.bases[-1].tolineno or self.fromlineno
+ if lastchild is not None:
+ self.tolineno = lastchild.tolineno
+ # else this is a class with only a docstring, then tolineno is (should be) already ok
+
+ def block_range(self, lineno):
+ """return block line numbers.
+
+ start from the "class" position whatever the given lineno
+ """
+ return self.fromlineno, self.tolineno
+
+ def pytype(self):
+ if self.newstyle:
+ return '%s.type' % BUILTINS_MODULE
+ return '%s.classobj' % BUILTINS_MODULE
+
+ def display_type(self):
+ return 'Class'
+
+ def callable(self):
+ return True
+
+ def infer_call_result(self, caller, context=None):
+ """infer what a class is returning when called"""
+ yield Instance(self)
+
+ def scope_lookup(self, node, name, offset=0):
+ if node in self.bases:
+ frame = self.parent.frame()
+ # line offset to avoid that class A(A) resolve the ancestor to
+ # the defined class
+ offset = -1
+ else:
+ frame = self
+ return frame._scope_lookup(node, name, offset)
+
+    # list of parent classes as a list of strings (i.e. names as they appear
+    # in the class definition) XXX bw compat
+ def basenames(self):
+ return [bnode.as_string() for bnode in self.bases]
+ basenames = property(basenames)
+
+ def ancestors(self, recurs=True, context=None):
+ """return an iterator on the node base classes in a prefixed
+ depth first order
+
+ :param recurs:
+ boolean indicating if it should recurse or return direct
+ ancestors only
+ """
+ # FIXME: should be possible to choose the resolution order
+ # XXX inference make infinite loops possible here (see BaseTransformer
+ # manipulation in the builder module for instance)
+ yielded = set([self])
+ if context is None:
+ context = InferenceContext()
+ for stmt in self.bases:
+ with context.restore_path():
+ try:
+ for baseobj in stmt.infer(context):
+ if not isinstance(baseobj, Class):
+ # duh ?
+ continue
+ if baseobj in yielded:
+ continue # cf xxx above
+ yielded.add(baseobj)
+ yield baseobj
+ if recurs:
+ for grandpa in baseobj.ancestors(True, context):
+ if grandpa in yielded:
+ continue # cf xxx above
+ yielded.add(grandpa)
+ yield grandpa
+ except InferenceError:
+ # XXX log error ?
+ continue
+
+ def local_attr_ancestors(self, name, context=None):
+ """return an iterator on astng representation of parent classes
+ which have <name> defined in their locals
+ """
+ for astng in self.ancestors(context=context):
+ if name in astng:
+ yield astng
+
+ def instance_attr_ancestors(self, name, context=None):
+ """return an iterator on astng representation of parent classes
+ which have <name> defined in their instance attribute dictionary
+ """
+ for astng in self.ancestors(context=context):
+ if name in astng.instance_attrs:
+ yield astng
+
+ def has_base(self, node):
+ return node in self.bases
+
+ def local_attr(self, name, context=None):
+ """return the list of assign node associated to name in this class
+ locals or in its parents
+
+ :raises `NotFoundError`:
+            if no attribute with this name has been found in this class or
+ its parent classes
+ """
+ try:
+ return self.locals[name]
+ except KeyError:
+            # get it from the first parent implementing it, if any
+ for class_node in self.local_attr_ancestors(name, context):
+ return class_node.locals[name]
+ raise NotFoundError(name)
+ local_attr = remove_nodes(local_attr, DelAttr)
+
+ def instance_attr(self, name, context=None):
+ """return the astng nodes associated to name in this class instance
+ attributes dictionary and in its parents
+
+ :raises `NotFoundError`:
+            if no attribute with this name has been found in this class or
+ its parent classes
+ """
+ values = self.instance_attrs.get(name, [])
+ # get all values from parents
+ for class_node in self.instance_attr_ancestors(name, context):
+ values += class_node.instance_attrs[name]
+ if not values:
+ raise NotFoundError(name)
+ return values
+ instance_attr = remove_nodes(instance_attr, DelAttr)
+
+ def instanciate_class(self):
+ """return Instance of Class node, else return self"""
+ return Instance(self)
+
+ def getattr(self, name, context=None):
+ """this method doesn't look in the instance_attrs dictionary since it's
+ done by an Instance proxy at inference time.
+
+ It may return a YES object if the attribute has not been actually
+ found but a __getattr__ or __getattribute__ method is defined
+ """
+ values = self.locals.get(name, [])
+ if name in self.special_attributes:
+ if name == '__module__':
+ return [cf(self.root().qname())] + values
+ # FIXME : what is expected by passing the list of ancestors to cf:
+ # you can just do [cf(tuple())] + values without breaking any test
+ # this is ticket http://www.logilab.org/ticket/52785
+ if name == '__bases__':
+ return [cf(tuple(self.ancestors(recurs=False, context=context)))] + values
+ # XXX need proper meta class handling + MRO implementation
+ if name == '__mro__' and self.newstyle:
+ # XXX mro is read-only but that's not our job to detect that
+ return [cf(tuple(self.ancestors(recurs=True, context=context)))] + values
+ return std_special_attributes(self, name)
+ # don't modify the list in self.locals!
+ values = list(values)
+ for classnode in self.ancestors(recurs=True, context=context):
+ values += classnode.locals.get(name, [])
+ if not values:
+ raise NotFoundError(name)
+ return values
+
+ def igetattr(self, name, context=None):
+ """inferred getattr, need special treatment in class to handle
+ descriptors
+ """
+ # set lookup name since this is necessary to infer on import nodes for
+ # instance
+ context = copy_context(context)
+ context.lookupname = name
+ try:
+ for infered in _infer_stmts(self.getattr(name, context), context,
+ frame=self):
+ # yield YES object instead of descriptors when necessary
+ if not isinstance(infered, Const) and isinstance(infered, Instance):
+ try:
+ infered._proxied.getattr('__get__', context)
+ except NotFoundError:
+ yield infered
+ else:
+ yield YES
+ else:
+ yield function_to_method(infered, self)
+ except NotFoundError:
+ if not name.startswith('__') and self.has_dynamic_getattr(context):
+ # class handle some dynamic attributes, return a YES object
+ yield YES
+ else:
+ raise InferenceError(name)
+
+ def has_dynamic_getattr(self, context=None):
+ """return True if the class has a custom __getattr__ or
+ __getattribute__ method
+ """
+ # need to explicitly handle optparse.Values (setattr is not detected)
+ if self.name == 'Values' and self.root().name == 'optparse':
+ return True
+ try:
+ self.getattr('__getattr__', context)
+ return True
+ except NotFoundError:
+ #if self.newstyle: XXX cause an infinite recursion error
+ try:
+ getattribute = self.getattr('__getattribute__', context)[0]
+ if getattribute.root().name != BUILTINS_NAME:
+ # class has a custom __getattribute__ defined
+ return True
+ except NotFoundError:
+ pass
+ return False
+
+ def methods(self):
+ """return an iterator on all methods defined in the class and
+ its ancestors
+ """
+ done = {}
+ for astng in chain(iter((self,)), self.ancestors()):
+ for meth in astng.mymethods():
+ if meth.name in done:
+ continue
+ done[meth.name] = None
+ yield meth
+
+ def mymethods(self):
+ """return an iterator on all methods defined in the class"""
+ for member in self.values():
+ if isinstance(member, Function):
+ yield member
+
+ def interfaces(self, herited=True, handler_func=_iface_hdlr):
+ """return an iterator on interfaces implemented by the given
+ class node
+ """
+ # FIXME: what if __implements__ = (MyIFace, MyParent.__implements__)...
+ try:
+ implements = Instance(self).getattr('__implements__')[0]
+ except NotFoundError:
+ return
+ if not herited and not implements.frame() is self:
+ return
+ found = set()
+ missing = False
+ for iface in unpack_infer(implements):
+ if iface is YES:
+ missing = True
+ continue
+ if not iface in found and handler_func(iface):
+ found.add(iface)
+ yield iface
+ if missing:
+ raise InferenceError()
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/utils.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/astng/utils.py
@@ -0,0 +1,241 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+# copyright 2003-2010 Sylvain Thenault, all rights reserved.
+# contact mailto:thenault@gmail.com
+#
+# This file is part of logilab-astng.
+#
+# logilab-astng is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# logilab-astng is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
+"""this module contains some utilities to navigate in the tree or to
+extract information from it
+"""
+
+__docformat__ = "restructuredtext en"
+
+from .exceptions import ASTNGBuildingException
+
+
+class ASTWalker:
+ """a walker visiting a tree in preorder, calling on the handler:
+
+ * visit_<class name> on entering a node, where class name is the class of
+ the node in lower case
+
+ * leave_<class name> on leaving a node, where class name is the class of
+ the node in lower case
+ """
+
+ def __init__(self, handler):
+ self.handler = handler
+ self._cache = {}
+
+ def walk(self, node, _done=None):
+ """walk on the tree from <node>, getting callbacks from handler"""
+ if _done is None:
+ _done = set()
+ if node in _done:
+ raise AssertionError((id(node), node, node.parent))
+ _done.add(node)
+ self.visit(node)
+ for child_node in node.get_children():
+ self.handler.set_context(node, child_node)
+ assert child_node is not node
+ self.walk(child_node, _done)
+ self.leave(node)
+ assert node.parent is not node
+
+ def get_callbacks(self, node):
+ """get callbacks from handler for the visited node"""
+ klass = node.__class__
+ methods = self._cache.get(klass)
+ if methods is None:
+ handler = self.handler
+ kid = klass.__name__.lower()
+ e_method = getattr(handler, 'visit_%s' % kid,
+ getattr(handler, 'visit_default', None))
+ l_method = getattr(handler, 'leave_%s' % kid,
+ getattr(handler, 'leave_default', None))
+ self._cache[klass] = (e_method, l_method)
+ else:
+ e_method, l_method = methods
+ return e_method, l_method
+
+ def visit(self, node):
+ """walk on the tree from <node>, getting callbacks from handler"""
+ method = self.get_callbacks(node)[0]
+ if method is not None:
+ method(node)
+
+ def leave(self, node):
+ """walk on the tree from <node>, getting callbacks from handler"""
+ method = self.get_callbacks(node)[1]
+ if method is not None:
+ method(node)
+
+
+class LocalsVisitor(ASTWalker):
+ """visit a project by traversing the locals dictionary"""
+ def __init__(self):
+ ASTWalker.__init__(self, self)
+ self._visited = {}
+
+ def visit(self, node):
+ """launch the visit starting from the given node"""
+ if node in self._visited:
+ return
+ self._visited[node] = 1 # FIXME: use set ?
+ methods = self.get_callbacks(node)
+ if methods[0] is not None:
+ methods[0](node)
+ if 'locals' in node.__dict__: # skip Instance and other proxy
+ for name, local_node in node.items():
+ self.visit(local_node)
+ if methods[1] is not None:
+ return methods[1](node)
+
+
+def _check_children(node):
+ """a helper function to check children - parent relations"""
+ for child in node.get_children():
+ ok = False
+ if child is None:
+ print "Hm, child of %s is None" % node
+ continue
+ if not hasattr(child, 'parent'):
+ print " ERROR: %s has child %s %x with no parent" % (node, child, id(child))
+ elif not child.parent:
+ print " ERROR: %s has child %s %x with parent %r" % (node, child, id(child), child.parent)
+ elif child.parent is not node:
+ print " ERROR: %s %x has child %s %x with wrong parent %s" % (node,
+ id(node), child, id(child), child.parent)
+ else:
+ ok = True
+ if not ok:
+ print "lines;", node.lineno, child.lineno
+ print "of module", node.root(), node.root().name
+ raise ASTNGBuildingException
+ _check_children(child)
+
+
+from _ast import PyCF_ONLY_AST
+def parse(string):
+ return compile(string, "<string>", 'exec', PyCF_ONLY_AST)
+
+class TreeTester(object):
+    '''A helper class to inspect an _ast tree and compare it with the astng tree
+
+ indent: string for tree indent representation
+ lineno: bool to tell if we should print the line numbers
+
+ >>> tester = TreeTester('print')
+ >>> print tester.native_tree_repr()
+
+ <Module>
+ . body = [
+ . <Print>
+ . . nl = True
+ . ]
+ >>> print tester.astng_tree_repr()
+ Module()
+ body = [
+ Print()
+ dest =
+ values = [
+ ]
+ ]
+ '''
+
+ indent = '. '
+ lineno = False
+
+ def __init__(self, sourcecode):
+ self._string = ''
+ self.sourcecode = sourcecode
+ self._ast_node = None
+ self.build_ast()
+
+ def build_ast(self):
+ """build the _ast tree from the source code"""
+ self._ast_node = parse(self.sourcecode)
+
+ def native_tree_repr(self, node=None, indent=''):
+ """get a nice representation of the _ast tree"""
+ self._string = ''
+ if node is None:
+ node = self._ast_node
+ self._native_repr_tree(node, indent)
+ return self._string
+
+
+ def _native_repr_tree(self, node, indent, _done=None):
+ """recursive method for the native tree representation"""
+ from _ast import Load as _Load, Store as _Store, Del as _Del
+ from _ast import AST as Node
+ if _done is None:
+ _done = set()
+ if node in _done:
+ self._string += '\nloop in tree: %r (%s)' % (node,
+ getattr(node, 'lineno', None))
+ return
+ _done.add(node)
+ self._string += '\n' + indent + '<%s>' % node.__class__.__name__
+ indent += self.indent
+ if not hasattr(node, '__dict__'):
+ self._string += '\n' + self.indent + " ** node has no __dict__ " + str(node)
+ return
+ node_dict = node.__dict__
+ if hasattr(node, '_attributes'):
+ for a in node._attributes:
+ attr = node_dict[a]
+ if attr is None:
+ continue
+ if a in ("lineno", "col_offset") and not self.lineno:
+ continue
+ self._string +='\n' + indent + a + " = " + repr(attr)
+ for field in node._fields or ():
+ attr = node_dict[field]
+ if attr is None:
+ continue
+ if isinstance(attr, list):
+ if not attr:
+ continue
+ self._string += '\n' + indent + field + ' = ['
+ for elt in attr:
+ self._native_repr_tree(elt, indent, _done)
+ self._string += '\n' + indent + ']'
+ continue
+ if isinstance(attr, (_Load, _Store, _Del)):
+ continue
+ if isinstance(attr, Node):
+ self._string += '\n' + indent + field + " = "
+ self._native_repr_tree(attr, indent, _done)
+ else:
+ self._string += '\n' + indent + field + " = " + repr(attr)
+
+
+ def build_astng_tree(self):
+ """build astng tree from the _ast tree
+ """
+ from logilab.astng.builder import ASTNGBuilder
+ tree = ASTNGBuilder().string_build(self.sourcecode)
+ return tree
+
+ def astng_tree_repr(self, ids=False):
+ """build the astng tree and return a nice tree representation"""
+ mod = self.build_astng_tree()
+ return mod.repr_tree(ids)
+
+
+__all__ = ('LocalsVisitor', 'ASTWalker',)
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/__init__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/__init__.py
@@ -0,0 +1,171 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Logilab common library (aka Logilab's extension to the standard library).
+
+:type STD_BLACKLIST: tuple
+:var STD_BLACKLIST: directories ignored by default by the functions in
+ this package which have to recurse into directories
+
+:type IGNORED_EXTENSIONS: tuple
+:var IGNORED_EXTENSIONS: file extensions that may usually be ignored
+"""
+__docformat__ = "restructuredtext en"
+from .__pkginfo__ import version as __version__
+
+STD_BLACKLIST = ('CVS', '.svn', '.hg', 'debian', 'dist', 'build')
+
+IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~', '.swp', '.orig')
+
+# set this to False if you've mx DateTime installed but you don't want your db
+# adapter to use it (should be set before you got a connection)
+USE_MX_DATETIME = True
+
+
+class attrdict(dict):
+ """A dictionary for which keys are also accessible as attributes."""
+ def __getattr__(self, attr):
+ try:
+ return self[attr]
+ except KeyError:
+ raise AttributeError(attr)
+
+class dictattr(dict):
+ def __init__(self, proxy):
+ self.__proxy = proxy
+
+ def __getitem__(self, attr):
+ try:
+ return getattr(self.__proxy, attr)
+ except AttributeError:
+ raise KeyError(attr)
+
+class nullobject(object):
+ def __repr__(self):
+ return '<nullobject>'
+ def __nonzero__(self):
+ return False
+
+class tempattr(object):
+ def __init__(self, obj, attr, value):
+ self.obj = obj
+ self.attr = attr
+ self.value = value
+
+ def __enter__(self):
+ self.oldvalue = getattr(self.obj, self.attr)
+ setattr(self.obj, self.attr, self.value)
+ return self.obj
+
+ def __exit__(self, exctype, value, traceback):
+ setattr(self.obj, self.attr, self.oldvalue)
+
+
+
+# flatten -----
+# XXX move in a specific module and use yield instead
+# do not mix flatten and translate
+#
+# def iterable(obj):
+# try: iter(obj)
+# except: return False
+# return True
+#
+# def is_string_like(obj):
+# try: obj +''
+# except (TypeError, ValueError): return False
+# return True
+#
+#def is_scalar(obj):
+# return is_string_like(obj) or not iterable(obj)
+#
+#def flatten(seq):
+# for item in seq:
+# if is_scalar(item):
+# yield item
+# else:
+# for subitem in flatten(item):
+# yield subitem
+
+def flatten(iterable, tr_func=None, results=None):
+    """Flatten a list of lists of arbitrary nesting depth.
+
+ If tr_func is not None, it should be a one argument function that'll be called
+ on each final element.
+
+ :rtype: list
+
+ >>> flatten([1, [2, 3]])
+ [1, 2, 3]
+ """
+ if results is None:
+ results = []
+ for val in iterable:
+ if isinstance(val, (list, tuple)):
+ flatten(val, tr_func, results)
+ elif tr_func is None:
+ results.append(val)
+ else:
+ results.append(tr_func(val))
+ return results
+
+
+# XXX is function below still used ?
+
+def make_domains(lists):
+ """
+    Given a list of lists, return a domain for each list, producing all
+    combinations of possible values.
+
+ :rtype: list
+
+ Example:
+
+    >>> make_domains([['a', 'b'], ['c', 'd', 'e']])
+ [['a', 'b', 'a', 'b', 'a', 'b'], ['c', 'c', 'd', 'd', 'e', 'e']]
+ """
+ domains = []
+ for iterable in lists:
+ new_domain = iterable[:]
+ for i in range(len(domains)):
+ domains[i] = domains[i]*len(iterable)
+ if domains:
+ missing = (len(domains[0]) - len(iterable)) / len(iterable)
+ i = 0
+ for j in range(len(iterable)):
+ value = iterable[j]
+ for dummy in range(missing):
+ new_domain.insert(i, value)
+ i += 1
+ i += 1
+ domains.append(new_domain)
+ return domains
+
+
+# private stuff ################################################################
+
+def _handle_blacklist(blacklist, dirnames, filenames):
+ """remove files/directories in the black list
+
+ dirnames/filenames are usually from os.walk
+ """
+ for norecurs in blacklist:
+ if norecurs in dirnames:
+ dirnames.remove(norecurs)
+ elif norecurs in filenames:
+ filenames.remove(norecurs)
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/__pkginfo__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/__pkginfo__.py
@@ -0,0 +1,53 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""logilab.common packaging information"""
+__docformat__ = "restructuredtext en"
+import sys
+import os
+
+# distutils-style distribution metadata consumed by logilab's packaging tools
+distname = 'logilab-common'
+modname = 'common'
+subpackage_of = 'logilab'
+subpackage_master = True
+
+# version string is derived from numversion so the two can never disagree
+numversion = (0, 58, 3)
+version = '.'.join([str(num) for num in numversion])
+
+license = 'LGPL' # 2.1 or later
+description = "collection of low-level Python packages and modules used by Logilab projects"
+web = "http://www.logilab.org/project/%s" % distname
+mailinglist = "mailto://python-projects@lists.logilab.org"
+author = "Logilab"
+author_email = "contact@logilab.fr"
+
+
+from os.path import join
+scripts = [join('bin', 'pytest')]
+include_dirs = [join('test', 'data')]
+
+# conditional dependencies: unittest2 backport before python 2.7,
+# colorama for coloured console output on Windows
+install_requires = []
+if sys.version_info < (2, 7):
+    install_requires.append('unittest2 >= 0.5.1')
+if os.name == 'nt':
+    install_requires.append('colorama')
+
+classifiers = ["Topic :: Utilities",
+               "Programming Language :: Python",
+               "Programming Language :: Python :: 2",
+               "Programming Language :: Python :: 3",
+               ]
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/changelog.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/changelog.py
@@ -0,0 +1,236 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Manipulation of upstream change log files.
+
+The upstream change log files format handled is simpler than the one
+often used such as those generated by the default Emacs changelog mode.
+
+Sample ChangeLog format::
+
+ Change log for project Yoo
+ ==========================
+
+ --
+ * add a new functionality
+
+ 2002-02-01 -- 0.1.1
+ * fix bug #435454
+ * fix bug #434356
+
+ 2002-01-01 -- 0.1
+ * initial release
+
+
+There is 3 entries in this change log, one for each released version and one
+for the next version (i.e. the current entry).
+Each entry contains a set of messages corresponding to changes done in this
+release.
+All the non empty lines before the first entry are considered as the change
+log title.
+"""
+
+__docformat__ = "restructuredtext en"
+
+import sys
+from stat import S_IWRITE
+
+BULLET = '*'
+SUBBULLET = '-'
+INDENT = ' ' * 4
+
+class NoEntry(Exception):
+    """raised when we are unable to find an entry (e.g. get_entry() on a
+    change log with no entries at all)"""
+
+class EntryNotFound(Exception):
+    """raised when we are unable to find a given entry (no entry matches the
+    requested version)"""
+
+class Version(tuple):
+    """simple class to handle soft version number has a tuple while
+    correctly printing it as X.Y.Z
+
+    Python 2 only (relies on `basestring` and py2 `except E, ex` syntax).
+    """
+    def __new__(cls, versionstr):
+        if isinstance(versionstr, basestring):
+            versionstr = versionstr.strip(' :') # XXX (syt) duh?
+            parsed = cls.parse(versionstr)
+        else:
+            # already a sequence of numbers
+            parsed = versionstr
+        return tuple.__new__(cls, parsed)
+
+    @classmethod
+    def parse(cls, versionstr):
+        """parse a 'X.Y.Z' string into a list of ints; raise ValueError on
+        non-numeric components"""
+        versionstr = versionstr.strip(' :')
+        try:
+            return [int(i) for i in versionstr.split('.')]
+        except ValueError, ex:
+            raise ValueError("invalid literal for version '%s' (%s)"%(versionstr, ex))
+
+    def __str__(self):
+        return '.'.join([str(i) for i in self])
+
+# upstream change log #########################################################
+
+class ChangeLogEntry(object):
+    """a change log entry, i.e. a set of messages associated to a version and
+    its release date
+
+    `messages` is a list of (message_lines, sub_messages) pairs, where
+    message_lines is a list of text lines and sub_messages is a list of
+    sub-message line lists.
+    """
+    version_class = Version
+
+    def __init__(self, date=None, version=None, **kwargs):
+        self.__dict__.update(kwargs)
+        if version:
+            self.version = self.version_class(version)
+        else:
+            self.version = None
+        self.date = date
+        self.messages = []
+
+    def add_message(self, msg):
+        """add a new message"""
+        self.messages.append(([msg], []))
+
+    def complete_latest_message(self, msg_suite):
+        """complete the latest added message
+
+        the continuation line goes to the latest sub message when one exists,
+        to the latest top-level message otherwise
+        """
+        if not self.messages:
+            # NOTE(review): stray trailing ')' in this runtime message is
+            # preserved as-is (changing it would alter program output)
+            raise ValueError('unable to complete last message as there is no previous message)')
+        if self.messages[-1][1]: # sub messages
+            self.messages[-1][1][-1].append(msg_suite)
+        else: # message
+            self.messages[-1][0].append(msg_suite)
+
+    def add_sub_message(self, sub_msg, key=None):
+        """add a sub message under the latest added message"""
+        if not self.messages:
+            raise ValueError('unable to complete last message as there is no previous message)')
+        if key is None:
+            self.messages[-1][1].append([sub_msg])
+        else:
+            raise NotImplementedError("sub message to specific key are not implemented yet")
+
+    def write(self, stream=sys.stdout):
+        """write the entry to file """
+        stream.write('%s -- %s\n' % (self.date or '', self.version or ''))
+        for msg, sub_msgs in self.messages:
+            # msg[0] is the first line, msg[1:] are its continuation lines
+            stream.write('%s%s %s\n' % (INDENT, BULLET, msg[0]))
+            stream.write(''.join(msg[1:]))
+            if sub_msgs:
+                stream.write('\n')
+                for sub_msg in sub_msgs:
+                    stream.write('%s%s %s\n' % (INDENT * 2, SUBBULLET, sub_msg[0]))
+                    stream.write(''.join(sub_msg[1:]))
+                stream.write('\n')
+
+        stream.write('\n\n')
+
+class ChangeLog(object):
+    """object representation of a whole ChangeLog file
+
+    The file is parsed on instantiation; `entries` holds ChangeLogEntry
+    objects (most recent first) and `title` accumulates the non-empty lines
+    seen before the first entry.
+    """
+
+    entry_class = ChangeLogEntry
+
+    def __init__(self, changelog_file, title=''):
+        self.file = changelog_file
+        self.title = title
+        self.additional_content = ''
+        self.entries = []
+        self.load()
+
+    def __repr__(self):
+        return '<ChangeLog %s at %s (%s entries)>' % (self.file, id(self),
+                                                      len(self.entries))
+
+    def add_entry(self, entry):
+        """add a new entry to the change log"""
+        self.entries.append(entry)
+
+    def get_entry(self, version='', create=None):
+        """ return a given changelog entry
+        if version is omitted, return the current entry
+        """
+        if not self.entries:
+            if version or not create:
+                raise NoEntry()
+            self.entries.append(self.entry_class())
+        if not version:
+            if self.entries[0].version and create is not None:
+                # current entry is already released: open a fresh one
+                self.entries.insert(0, self.entry_class())
+            return self.entries[0]
+        # NOTE(review): ChangeLog defines no version_class attribute -- an
+        # explicit version argument would raise AttributeError here; this
+        # probably should be self.entry_class.version_class. TODO confirm.
+        version = self.version_class(version)
+        for entry in self.entries:
+            if entry.version == version:
+                return entry
+        raise EntryNotFound()
+
+    def add(self, msg, create=None):
+        """add a new message to the latest opened entry"""
+        entry = self.get_entry(create=create)
+        entry.add_message(msg)
+
+    def load(self):
+        """ read a logilab's ChangeLog from file """
+        try:
+            stream = open(self.file)
+        except IOError:
+            # missing file: keep an empty change log
+            return
+        last = None
+        expect_sub = False
+        for line in stream.readlines():
+            sline = line.strip()
+            words = sline.split()
+            # if new entry
+            if len(words) == 1 and words[0] == '--':
+                expect_sub = False
+                last = self.entry_class()
+                self.add_entry(last)
+            # if old entry
+            elif len(words) == 3 and words[1] == '--':
+                expect_sub = False
+                last = self.entry_class(words[0], words[2])
+                self.add_entry(last)
+            # if title
+            elif sline and last is None:
+                self.title = '%s%s' % (self.title, line)
+            # if message line (bullet)
+            elif sline and sline[0] == BULLET:
+                expect_sub = False
+                last.add_message(sline[1:].strip())
+            # if new sub_entry
+            elif expect_sub and sline and sline[0] == SUBBULLET:
+                last.add_sub_message(sline[1:].strip())
+            # if new line for current entry
+            elif sline and last.messages:
+                last.complete_latest_message(line)
+            else:
+                # blank line: the next bullet-like line may start a sub message
+                expect_sub = True
+                self.additional_content += line
+        stream.close()
+
+    def format_title(self):
+        return '%s\n\n' % self.title.strip()
+
+    def save(self):
+        """write back change log"""
+        # filetutils isn't importable in appengine, so import locally
+        from logilab.common.fileutils import ensure_fs_mode
+        ensure_fs_mode(self.file, S_IWRITE)
+        self.write(open(self.file, 'w'))
+
+    def write(self, stream=sys.stdout):
+        """write changelog to stream"""
+        stream.write(self.format_title())
+        for entry in self.entries:
+            entry.write(stream)
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/compat.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/compat.py
@@ -0,0 +1,243 @@
+# pylint: disable=E0601,W0622,W0611
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Wrappers around some builtins introduced in python 2.3, 2.4 and
+2.5, making them available in for earlier versions of python.
+
+See another compatibility snippets from other projects:
+
+ :mod:`lib2to3.fixes`
+ :mod:`coverage.backward`
+ :mod:`unittest2.compatibility`
+"""
+
+from __future__ import generators
+
+__docformat__ = "restructuredtext en"
+
+import os
+import sys
+import types
+from warnings import warn
+
+import __builtin__ as builtins # 2to3 will tranform '__builtin__' to 'builtins'
+
+if sys.version_info < (3, 0):
+    # py2: bytes and str are the same type; unicode needs explicit encoding
+    str_to_bytes = str
+    def str_encode(string, encoding):
+        if isinstance(string, unicode):
+            return string.encode(encoding)
+        return str(string)
+else:
+    def str_to_bytes(string):
+        return str.encode(string)
+    # we have to ignore the encoding in py3k to be able to write a string into a
+    # TextIOWrapper or like object (which expect an unicode string)
+    def str_encode(string, encoding):
+        return str(string)
+
+# XXX callable built-in seems back in all python versions
+try:
+    callable = builtins.callable
+except AttributeError:
+    # emulate callable() via collections.Callable when the builtin is missing
+    from collections import Callable
+    def callable(something):
+        return isinstance(something, Callable)
+    del Callable
+
+# See also http://bugs.python.org/issue11776
+if sys.version_info[0] == 3:
+    def method_type(callable, instance, klass):
+        # api change. klass is no more considered
+        return types.MethodType(callable, instance)
+else:
+    # alias types otherwise
+    method_type = types.MethodType
+
+if sys.version_info < (3, 0):
+    raw_input = raw_input
+else:
+    raw_input = input
+
+# Pythons 2 and 3 differ on where to get StringIO
+if sys.version_info < (3, 0):
+    from cStringIO import StringIO
+    FileIO = file
+    BytesIO = StringIO
+    reload = reload
+else:
+    from io import FileIO, BytesIO, StringIO
+    from imp import reload
+
+# Where do pickles come from?
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+from .deprecation import deprecated
+
+# NOTE(review): this import runs unconditionally, but izip/imap do not exist
+# in python 3 itertools -- this module relies on 2to3 rewriting it (see the
+# comment below); confirm before using it un-translated on python 3.
+from itertools import izip, chain, imap
+if sys.version_info < (3, 0):# 2to3 will remove the imports
+    izip = deprecated('izip exists in itertools since py2.3')(izip)
+    imap = deprecated('imap exists in itertools since py2.3')(imap)
+chain = deprecated('chain exists in itertools since py2.3')(chain)
+
+# builtin aliases kept only for backward compat; all emit deprecation warnings
+sum = deprecated('sum exists in builtins since py2.3')(sum)
+enumerate = deprecated('enumerate exists in builtins since py2.3')(enumerate)
+frozenset = deprecated('frozenset exists in builtins since py2.4')(frozenset)
+reversed = deprecated('reversed exists in builtins since py2.4')(reversed)
+sorted = deprecated('sorted exists in builtins since py2.4')(sorted)
+max = deprecated('max exists in builtins since py2.4')(max)
+
+
+# Python2.5 builtins (NameError on older pythons triggers the fallbacks)
+try:
+    any = any
+    all = all
+except NameError:
+    def any(iterable):
+        """any(iterable) -> bool
+
+        Return True if bool(x) is True for any x in the iterable.
+        """
+        for elt in iterable:
+            if elt:
+                return True
+        return False
+
+    def all(iterable):
+        """all(iterable) -> bool
+
+        Return True if bool(x) is True for all values x in the iterable.
+        """
+        for elt in iterable:
+            if not elt:
+                return False
+        return True
+
+
+# Python2.5 subprocess added functions and exceptions
+try:
+    from subprocess import Popen
+except ImportError:
+    # gae or python < 2.3
+
+    class CalledProcessError(Exception):
+        """This exception is raised when a process run by check_call() returns
+        a non-zero exit status. The exit status will be stored in the
+        returncode attribute."""
+        def __init__(self, returncode, cmd):
+            self.returncode = returncode
+            self.cmd = cmd
+        def __str__(self):
+            return "Command '%s' returned non-zero exit status %d" % (self.cmd,
+                                                                      self.returncode)
+
+    # NOTE(review): Popen is referenced below although its import just
+    # failed above -- calling this fallback would raise NameError at
+    # runtime; confirm the intent upstream.
+    def call(*popenargs, **kwargs):
+        """Run command with arguments. Wait for command to complete, then
+        return the returncode attribute.
+
+        The arguments are the same as for the Popen constructor. Example:
+
+        retcode = call(["ls", "-l"])
+        """
+        # workaround: subprocess.Popen(cmd, stdout=sys.stdout) fails
+        # see http://bugs.python.org/issue1531862
+        if "stdout" in kwargs:
+            fileno = kwargs.get("stdout").fileno()
+            del kwargs['stdout']
+            return Popen(stdout=os.dup(fileno), *popenargs, **kwargs).wait()
+        return Popen(*popenargs, **kwargs).wait()
+
+    def check_call(*popenargs, **kwargs):
+        """Run command with arguments. Wait for command to complete. If
+        the exit code was zero then return, otherwise raise
+        CalledProcessError. The CalledProcessError object will have the
+        return code in the returncode attribute.
+
+        The arguments are the same as for the Popen constructor. Example:
+
+        check_call(["ls", "-l"])
+        """
+        retcode = call(*popenargs, **kwargs)
+        cmd = kwargs.get("args")
+        if cmd is None:
+            cmd = popenargs[0]
+        if retcode:
+            raise CalledProcessError(retcode, cmd)
+        return retcode
+
+try:
+    from os.path import relpath
+except ImportError: # python < 2.6
+    from os.path import curdir, abspath, sep, commonprefix, pardir, join
+    def relpath(path, start=curdir):
+        """Return a relative version of a path"""
+
+        if not path:
+            raise ValueError("no path specified")
+
+        start_list = abspath(start).split(sep)
+        path_list = abspath(path).split(sep)
+
+        # Work out how much of the filepath is shared by start and path.
+        i = len(commonprefix([start_list, path_list]))
+
+        rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
+        if not rel_list:
+            return curdir
+        return join(*rel_list)
+
+
+# XXX don't know why tests don't pass if I don't do that :
+_real_set, set = set, deprecated('set exists in builtins since py2.4')(set)
+if (2, 5) <= sys.version_info[:2]:
+    InheritableSet = _real_set
+else:
+    class InheritableSet(_real_set):
+        """hacked resolving inheritancy issue from old style class in 2.4"""
+        def __new__(cls, *args, **kwargs):
+            # set.__new__ only accepts a single iterable argument
+            if args:
+                new_args = (args[0], )
+            else:
+                new_args = ()
+            obj = _real_set.__new__(cls, *new_args)
+            obj.__init__(*args, **kwargs)
+            return obj
+
+# XXX shouldn't we remove this and just let 2to3 do his job ?
+# range or xrange?
+try:
+    range = xrange
+except NameError:
+    range = range
+
+# ConfigParser was renamed to the more-standard configparser
+try:
+    import configparser
+except ImportError:
+    import ConfigParser as configparser
+
+try:
+    import json
+except ImportError:
+    try:
+        import simplejson as json
+    except ImportError:
+        # no json support available at all; callers must check for None
+        json = None
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/configuration.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/configuration.py
@@ -0,0 +1,1078 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Classes to handle advanced configuration in simple to complex applications.
+
+Allows to load the configuration from a file or from command line
+options, to generate a sample configuration file or to display
+program's usage. Fills the gap between optik/optparse and ConfigParser
+by adding data types (which are also available as a standalone optik
+extension in the `optik_ext` module).
+
+
+Quick start: simplest usage
+---------------------------
+
+.. python ::
+
+ >>> import sys
+ >>> from logilab.common.configuration import Configuration
+ >>> options = [('dothis', {'type':'yn', 'default': True, 'metavar': '<y or n>'}),
+ ... ('value', {'type': 'string', 'metavar': '<string>'}),
+ ... ('multiple', {'type': 'csv', 'default': ('yop',),
+ ... 'metavar': '<comma separated values>',
+ ... 'help': 'you can also document the option'}),
+ ... ('number', {'type': 'int', 'default':2, 'metavar':'<int>'}),
+ ... ]
+ >>> config = Configuration(options=options, name='My config')
+ >>> print config['dothis']
+ True
+ >>> print config['value']
+ None
+ >>> print config['multiple']
+ ('yop',)
+ >>> print config['number']
+ 2
+ >>> print config.help()
+ Usage: [options]
+
+ Options:
+ -h, --help show this help message and exit
+ --dothis=<y or n>
+ --value=<string>
+ --multiple=<comma separated values>
+ you can also document the option [current: none]
+ --number=<int>
+
+ >>> f = open('myconfig.ini', 'w')
+ >>> f.write('''[MY CONFIG]
+ ... number = 3
+ ... dothis = no
+ ... multiple = 1,2,3
+ ... ''')
+ >>> f.close()
+ >>> config.load_file_configuration('myconfig.ini')
+ >>> print config['dothis']
+ False
+ >>> print config['value']
+ None
+ >>> print config['multiple']
+ ['1', '2', '3']
+ >>> print config['number']
+ 3
+ >>> sys.argv = ['mon prog', '--value', 'bacon', '--multiple', '4,5,6',
+ ... 'nonoptionargument']
+ >>> print config.load_command_line_configuration()
+ ['nonoptionargument']
+ >>> print config['value']
+ bacon
+ >>> config.generate_config()
+ # class for simple configurations which don't need the
+ # manager / providers model and prefer delegation to inheritance
+ #
+ # configuration values are accessible through a dict like interface
+ #
+ [MY CONFIG]
+
+ dothis=no
+
+ value=bacon
+
+ # you can also document the option
+ multiple=4,5,6
+
+ number=3
+ >>>
+"""
+__docformat__ = "restructuredtext en"
+
+__all__ = ('OptionsManagerMixIn', 'OptionsProviderMixIn',
+ 'ConfigurationMixIn', 'Configuration',
+ 'OptionsManager2ConfigurationAdapter')
+
+import os
+import sys
+import re
+from os.path import exists, expanduser
+from copy import copy
+from ConfigParser import ConfigParser, NoOptionError, NoSectionError, \
+ DuplicateSectionError
+from warnings import warn
+
+from .compat import callable, raw_input, str_encode as _encode
+
+from .textutils import normalize_text, unquote
+from . import optik_ext as optparse
+
+OptionError = optparse.OptionError
+
+REQUIRED = []
+
+class UnsupportedAction(Exception):
+    """raised by set_option when it doesn't know what to do for an action"""
+
+
+def _get_encoding(encoding, stream):
+    """return `encoding` if set, else the stream's declared encoding, else
+    the locale's preferred encoding as a last resort"""
+    encoding = encoding or getattr(stream, 'encoding', None)
+    if not encoding:
+        import locale
+        encoding = locale.getpreferredencoding()
+    return encoding
+
+
+# validation functions ########################################################
+
+def choice_validator(optdict, name, value):
+    """validate and return a converted value for option of type 'choice'
+    """
+    if not value in optdict['choices']:
+        msg = "option %s: invalid value: %r, should be in %s"
+        raise optparse.OptionValueError(msg % (name, value, optdict['choices']))
+    return value
+
+def multiple_choice_validator(optdict, name, value):
+    """validate and return a converted value for option of type
+    'multiple_choice': a csv string whose items must each be in 'choices'
+    """
+    choices = optdict['choices']
+    values = optparse.check_csv(None, name, value)
+    for value in values:
+        if not value in choices:
+            msg = "option %s: invalid value: %r, should be in %s"
+            raise optparse.OptionValueError(msg % (name, value, choices))
+    return values
+
+def csv_validator(optdict, name, value):
+    """validate and return a converted value for option of type 'csv'
+    """
+    return optparse.check_csv(None, name, value)
+
+def yn_validator(optdict, name, value):
+    """validate and return a converted value for option of type 'yn'
+    """
+    return optparse.check_yn(None, name, value)
+
+def named_validator(optdict, name, value):
+    """validate and return a converted value for option of type 'named'
+    """
+    return optparse.check_named(None, name, value)
+
+def file_validator(optdict, name, value):
+    """validate and return a filepath for option of type 'file'"""
+    return optparse.check_file(None, name, value)
+
+def color_validator(optdict, name, value):
+    """validate and return a valid color for option of type 'color'"""
+    return optparse.check_color(None, name, value)
+
+def password_validator(optdict, name, value):
+    """validate and return a string for option of type 'password'"""
+    return optparse.check_password(None, name, value)
+
+def date_validator(optdict, name, value):
+    """validate and return a mx DateTime object for option of type 'date'"""
+    return optparse.check_date(None, name, value)
+
+def time_validator(optdict, name, value):
+    """validate and return a time object for option of type 'time'"""
+    return optparse.check_time(None, name, value)
+
+def bytes_validator(optdict, name, value):
+    """validate and return an integer for option of type 'bytes'"""
+    return optparse.check_bytes(None, name, value)
+
+
+# maps option type name -> validation/conversion callable; each callable is
+# tried with (optdict, name, value) and, on TypeError, with (value) alone --
+# see _call_validator below
+VALIDATORS = {'string': unquote,
+              'int': int,
+              'float': float,
+              'file': file_validator,
+              'font': unquote,
+              'color': color_validator,
+              'regexp': re.compile,
+              'csv': csv_validator,
+              'yn': yn_validator,
+              'bool': yn_validator,
+              'named': named_validator,
+              'password': password_validator,
+              'date': date_validator,
+              'time': time_validator,
+              'bytes': bytes_validator,
+              'choice': choice_validator,
+              'multiple_choice': multiple_choice_validator,
+              }
+
+def _call_validator(opttype, optdict, option, value):
+    """validate `value` with the validator registered for `opttype`,
+    converting validation failures to optparse.OptionValueError"""
+    if opttype not in VALIDATORS:
+        raise Exception('Unsupported type "%s"' % opttype)
+    try:
+        return VALIDATORS[opttype](optdict, option, value)
+    except TypeError:
+        # validator only takes the value itself (e.g. int, re.compile)
+        try:
+            return VALIDATORS[opttype](value)
+        except optparse.OptionValueError:
+            raise
+        # NOTE(review): bare except also hides KeyboardInterrupt/SystemExit;
+        # `except Exception` would be safer -- behavior left unchanged.
+        except:
+            raise optparse.OptionValueError('%s value (%r) should be of type %s' %
+                                   (option, value, opttype))
+
+# user input functions ########################################################
+
+def input_password(optdict, question='password:'):
+    """prompt (twice) for a password until both entries match and return the
+    confirmed value (python 2: uses the print statement)"""
+    from getpass import getpass
+    while True:
+        value = getpass(question)
+        value2 = getpass('confirm: ')
+        if value == value2:
+            return value
+        print 'password mismatch, try again'
+
+def input_string(optdict, question):
+    """prompt for a free-form string; empty input is returned as None"""
+    value = raw_input(question).strip()
+    return value or None
+
+def _make_input_function(opttype):
+    """build an input function that keeps prompting until the user input
+    passes the validator registered for `opttype` (empty input -> None)"""
+    def input_validator(optdict, question):
+        while True:
+            value = raw_input(question)
+            if not value.strip():
+                return None
+            try:
+                return _call_validator(opttype, optdict, None, value)
+            except optparse.OptionValueError, ex:
+                msg = str(ex).split(':', 1)[-1].strip()
+                print 'bad value: %s' % msg
+    return input_validator
+
+INPUT_FUNCTIONS = {
+    'string': input_string,
+    'password': input_password,
+    }
+
+# every validated option type gets a generated input function by default
+for opttype in VALIDATORS.keys():
+    INPUT_FUNCTIONS.setdefault(opttype, _make_input_function(opttype))
+
+def expand_default(self, option):
+    """monkey patch OptionParser.expand_default since we have a particular
+    way to handle defaults to avoid overriding values in the configuration
+    file
+    """
+    if self.parser is None or not self.default_tag:
+        return option.help
+    # strip the leading '--' from the long option name
+    optname = option._long_opts[0][2:]
+    try:
+        provider = self.parser.options_manager._all_options[optname]
+    except KeyError:
+        value = None
+    else:
+        # look up the current value on the provider's config object
+        optdict = provider.get_option_def(optname)
+        optname = provider.option_name(optname, optdict)
+        value = getattr(provider.config, optname, optdict)
+        value = format_option_value(optdict, value)
+    if value is optparse.NO_DEFAULT or not value:
+        value = self.NO_DEFAULT_VALUE
+    return option.help.replace(self.default_tag, str(value))
+
+
+def convert(value, optdict, name=''):
+    """return a validated value for an option according to its type
+
+    optional argument name is only used for error message formatting
+    """
+    try:
+        _type = optdict['type']
+    except KeyError:
+        # FIXME: option has no declared type -> value returned unvalidated
+        return value
+    return _call_validator(_type, optdict, name, value)
+
+def comment(string):
+    """return string as a comment"""
+    lines = [line.strip() for line in string.splitlines()]
+    return '# ' + ('%s# ' % os.linesep).join(lines)
+
+def format_time(value):
+    """render a duration in seconds using the largest unit that divides it
+    exactly (s, min, h, d); non-integral values are rendered as '%.2fs'"""
+    if not value:
+        return '0'
+    if value != int(value):
+        return '%.2fs' % value
+    value = int(value)
+    nbmin, nbsec = divmod(value, 60)
+    if nbsec:
+        # not a whole number of minutes: show the total in seconds
+        return '%ss' % value
+    nbhour, nbmin_ = divmod(nbmin, 60)
+    if nbmin_:
+        return '%smin' % nbmin
+    nbday, nbhour_ = divmod(nbhour, 24)
+    if nbhour_:
+        return '%sh' % nbhour
+    return '%sd' % nbday
+
+def format_bytes(value):
+    """render a byte count using the largest 1024-based unit that divides it
+    exactly (B, KB, MB, GB, TB); non-integral values are rendered as '%.2fB'"""
+    if not value:
+        return '0'
+    if value != int(value):
+        return '%.2fB' % value
+    value = int(value)
+    prevunit = 'B'
+    for unit in ('KB', 'MB', 'GB', 'TB'):
+        # `next` shadows the builtin of the same name (python 2 era code)
+        next, remain = divmod(value, 1024)
+        if remain:
+            return '%s%s' % (value, prevunit)
+        prevunit = unit
+        value = next
+    return '%s%s' % (value, unit)
+
+def format_option_value(optdict, value):
+    """return the user input's value from a 'compiled' value
+
+    python 2 only (`unicode`, `long`)
+    """
+    if isinstance(value, (list, tuple)):
+        value = ','.join(value)
+    elif isinstance(value, dict):
+        value = ','.join(['%s:%s' % (k, v) for k, v in value.items()])
+    elif hasattr(value, 'match'): # optdict.get('type') == 'regexp'
+        # compiled regexp
+        value = value.pattern
+    elif optdict.get('type') == 'yn':
+        value = value and 'yes' or 'no'
+    elif isinstance(value, (str, unicode)) and value.isspace():
+        # quote whitespace-only strings so they survive round-tripping
+        value = "'%s'" % value
+    elif optdict.get('type') == 'time' and isinstance(value, (float, int, long)):
+        value = format_time(value)
+    elif optdict.get('type') == 'bytes' and hasattr(value, '__int__'):
+        value = format_bytes(value)
+    return value
+
+def ini_format_section(stream, section, options, encoding=None, doc=None):
+    """format an options section using the INI format"""
+    encoding = _get_encoding(encoding, stream)
+    if doc:
+        print >> stream, _encode(comment(doc), encoding)
+    print >> stream, '[%s]' % section
+    ini_format(stream, options, encoding)
+
+def ini_format(stream, options, encoding):
+    """format options using the INI format
+
+    `options` is an iterable of (optname, optdict, value) triples
+    """
+    for optname, optdict, value in options:
+        value = format_option_value(optdict, value)
+        help = optdict.get('help')
+        if help:
+            help = normalize_text(help, line_len=79, indent='# ')
+            print >> stream
+            print >> stream, _encode(help, encoding)
+        else:
+            print >> stream
+        if value is None:
+            # unset option: emit it commented out as documentation
+            print >> stream, '#%s=' % optname
+        else:
+            value = _encode(value, encoding).strip()
+            print >> stream, '%s=%s' % (optname, value)
+
+format_section = ini_format_section
+
+def rest_format_section(stream, section, options, encoding=None, doc=None):
+    """format an options section using the ReST format"""
+    encoding = _get_encoding(encoding, stream)
+    if section:
+        print >> stream, '%s\n%s' % (section, "'"*len(section))
+    if doc:
+        print >> stream, _encode(normalize_text(doc, line_len=79, indent=''),
+                                 encoding)
+        print >> stream
+    for optname, optdict, value in options:
+        help = optdict.get('help')
+        print >> stream, ':%s:' % optname
+        if help:
+            help = normalize_text(help, line_len=79, indent='  ')
+            print >> stream, _encode(help, encoding)
+        if value:
+            value = _encode(format_option_value(optdict, value), encoding)
+            print >> stream, ''
+            print >> stream, '  Default: ``%s``' % value.replace("`` ", "```` ``")
+
+
+class OptionsManagerMixIn(object):
+ """MixIn to handle a configuration from both a configuration file and
+ command line options
+ """
+
+ def __init__(self, usage, config_file=None, version=None, quiet=0):
+ self.config_file = config_file
+ self.reset_parsers(usage, version=version)
+ # list of registered options providers
+ self.options_providers = []
+ # dictionary associating option name to checker
+ self._all_options = {}
+ self._short_options = {}
+ self._nocallback_options = {}
+ self._mygroups = dict()
+ # verbosity
+ self.quiet = quiet
+ self._maxlevel = 0
+
+ def reset_parsers(self, usage='', version=None):
+ # configuration file parser
+ self.cfgfile_parser = ConfigParser()
+ # command line parser
+ self.cmdline_parser = optparse.OptionParser(usage=usage, version=version)
+ self.cmdline_parser.options_manager = self
+ self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS)
+
+ def register_options_provider(self, provider, own_group=True):
+ """register an options provider"""
+ assert provider.priority <= 0, "provider's priority can't be >= 0"
+ for i in range(len(self.options_providers)):
+ if provider.priority > self.options_providers[i].priority:
+ self.options_providers.insert(i, provider)
+ break
+ else:
+ self.options_providers.append(provider)
+ non_group_spec_options = [option for option in provider.options
+ if 'group' not in option[1]]
+ groups = getattr(provider, 'option_groups', ())
+ if own_group and non_group_spec_options:
+ self.add_option_group(provider.name.upper(), provider.__doc__,
+ non_group_spec_options, provider)
+ else:
+ for opt, optdict in non_group_spec_options:
+ self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
+ for gname, gdoc in groups:
+ gname = gname.upper()
+ goptions = [option for option in provider.options
+ if option[1].get('group', '').upper() == gname]
+ self.add_option_group(gname, gdoc, goptions, provider)
+
+ def add_option_group(self, group_name, doc, options, provider):
+ """add an option group including the listed options
+ """
+ assert options
+ # add option group to the command line parser
+ if group_name in self._mygroups:
+ group = self._mygroups[group_name]
+ else:
+ group = optparse.OptionGroup(self.cmdline_parser,
+ title=group_name.capitalize())
+ self.cmdline_parser.add_option_group(group)
+ group.level = provider.level
+ self._mygroups[group_name] = group
+ # add section to the config file
+ if group_name != "DEFAULT":
+ self.cfgfile_parser.add_section(group_name)
+ # add provider's specific options
+ for opt, optdict in options:
+ self.add_optik_option(provider, group, opt, optdict)
+
+ def add_optik_option(self, provider, optikcontainer, opt, optdict):
+ if 'inputlevel' in optdict:
+ warn('[0.50] "inputlevel" in option dictionary for %s is deprecated,'
+ ' use "level"' % opt, DeprecationWarning)
+ optdict['level'] = optdict.pop('inputlevel')
+ args, optdict = self.optik_option(provider, opt, optdict)
+ option = optikcontainer.add_option(*args, **optdict)
+ self._all_options[opt] = provider
+ self._maxlevel = max(self._maxlevel, option.level or 0)
+
+ def optik_option(self, provider, opt, optdict):
+ """get our personal option definition and return a suitable form for
+ use with optik/optparse
+ """
+ optdict = copy(optdict)
+ others = {}
+ if 'action' in optdict:
+ self._nocallback_options[provider] = opt
+ else:
+ optdict['action'] = 'callback'
+ optdict['callback'] = self.cb_set_provider_option
+ # default is handled here and *must not* be given to optik if you
+ # want the whole machinery to work
+ if 'default' in optdict:
+ if (optparse.OPTPARSE_FORMAT_DEFAULT and 'help' in optdict and
+ optdict.get('default') is not None and
+ not optdict['action'] in ('store_true', 'store_false')):
+ optdict['help'] += ' [current: %default]'
+ del optdict['default']
+ args = ['--' + str(opt)]
+ if 'short' in optdict:
+ self._short_options[optdict['short']] = opt
+ args.append('-' + optdict['short'])
+ del optdict['short']
+ # cleanup option definition dict before giving it to optik
+ for key in optdict.keys():
+ if not key in self._optik_option_attrs:
+ optdict.pop(key)
+ return args, optdict
+
+ def cb_set_provider_option(self, option, opt, value, parser):
+ """optik callback for option setting"""
+ if opt.startswith('--'):
+ # remove -- on long option
+ opt = opt[2:]
+ else:
+ # short option, get its long equivalent
+ opt = self._short_options[opt[1:]]
+ # trick since we can't set action='store_true' on options
+ if value is None:
+ value = 1
+ self.global_set_option(opt, value)
+
+ def global_set_option(self, opt, value):
+ """set option on the correct option provider"""
+ self._all_options[opt].set_option(opt, value)
+
+ def generate_config(self, stream=None, skipsections=(), encoding=None):
+ """write a configuration file according to the current configuration
+ into the given stream or stdout
+ """
+ options_by_section = {}
+ sections = []
+ for provider in self.options_providers:
+ for section, options in provider.options_by_section():
+ if section is None:
+ section = provider.name
+ if section in skipsections:
+ continue
+ options = [(n, d, v) for (n, d, v) in options
+ if d.get('type') is not None]
+ if not options:
+ continue
+ if not section in sections:
+ sections.append(section)
+ alloptions = options_by_section.setdefault(section, [])
+ alloptions += options
+ stream = stream or sys.stdout
+ encoding = _get_encoding(encoding, stream)
+ printed = False
+ for section in sections:
+ if printed:
+ print >> stream, '\n'
+ format_section(stream, section.upper(), options_by_section[section],
+ encoding)
+ printed = True
+
+ def generate_manpage(self, pkginfo, section=1, stream=None):
+ """write a man page for the current configuration into the given
+ stream or stdout
+ """
+ self._monkeypatch_expand_default()
+ try:
+ optparse.generate_manpage(self.cmdline_parser, pkginfo,
+ section, stream=stream or sys.stdout,
+ level=self._maxlevel)
+ finally:
+ self._unmonkeypatch_expand_default()
+
+ # initialization methods ##################################################
+
+ def load_provider_defaults(self):
+ """initialize configuration using default values"""
+ for provider in self.options_providers:
+ provider.load_defaults()
+
+ def load_file_configuration(self, config_file=None):
+ """load the configuration from file"""
+ self.read_config_file(config_file)
+ self.load_config_file()
+
+ def read_config_file(self, config_file=None):
+ """read the configuration file but do not load it (i.e. dispatching
+ values to each options provider)
+ """
+ helplevel = 1
+ while helplevel <= self._maxlevel:
+ opt = '-'.join(['long'] * helplevel) + '-help'
+ if opt in self._all_options:
+ break # already processed
+ def helpfunc(option, opt, val, p, level=helplevel):
+ print self.help(level)
+ sys.exit(0)
+ helpmsg = '%s verbose help.' % ' '.join(['more'] * helplevel)
+ optdict = {'action' : 'callback', 'callback' : helpfunc,
+ 'help' : helpmsg}
+ provider = self.options_providers[0]
+ self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
+ provider.options += ( (opt, optdict), )
+ helplevel += 1
+ if config_file is None:
+ config_file = self.config_file
+ if config_file is not None:
+ config_file = expanduser(config_file)
+ if config_file and exists(config_file):
+ parser = self.cfgfile_parser
+ parser.read([config_file])
+ # normalize sections' titles
+ for sect, values in parser._sections.items():
+ if not sect.isupper() and values:
+ parser._sections[sect.upper()] = values
+ elif not self.quiet:
+ msg = 'No config file found, using default configuration'
+ print >> sys.stderr, msg
+ return
+
+ def input_config(self, onlysection=None, inputlevel=0, stream=None):
+ """interactively get configuration values by asking to the user and generate
+ a configuration file
+ """
+ if onlysection is not None:
+ onlysection = onlysection.upper()
+ for provider in self.options_providers:
+ for section, option, optdict in provider.all_options():
+ if onlysection is not None and section != onlysection:
+ continue
+ if not 'type' in optdict:
+ # ignore action without type (callback, store_true...)
+ continue
+ provider.input_option(option, optdict, inputlevel)
+ # now we can generate the configuration file
+ if stream is not None:
+ self.generate_config(stream)
+
+ def load_config_file(self):
+ """dispatch values previously read from a configuration file to each
+ options provider
+ """
+ parser = self.cfgfile_parser
+ for provider in self.options_providers:
+ for section, option, optdict in provider.all_options():
+ try:
+ value = parser.get(section, option)
+ provider.set_option(option, value, optdict=optdict)
+ except (NoSectionError, NoOptionError), ex:
+ continue
+
+ def load_configuration(self, **kwargs):
+ """override configuration according to given parameters
+ """
+ for opt, opt_value in kwargs.items():
+ opt = opt.replace('_', '-')
+ provider = self._all_options[opt]
+ provider.set_option(opt, opt_value)
+
+ def load_command_line_configuration(self, args=None):
+ """override configuration according to command line parameters
+
+ return additional arguments
+ """
+ self._monkeypatch_expand_default()
+ try:
+ if args is None:
+ args = sys.argv[1:]
+ else:
+ args = list(args)
+ (options, args) = self.cmdline_parser.parse_args(args=args)
+ for provider in self._nocallback_options.keys():
+ config = provider.config
+ for attr in config.__dict__.keys():
+ value = getattr(options, attr, None)
+ if value is None:
+ continue
+ setattr(config, attr, value)
+ return args
+ finally:
+ self._unmonkeypatch_expand_default()
+
+
+ # help methods ############################################################
+
+ def add_help_section(self, title, description, level=0):
+ """add a dummy option section for help purpose """
+ group = optparse.OptionGroup(self.cmdline_parser,
+ title=title.capitalize(),
+ description=description)
+ group.level = level
+ self._maxlevel = max(self._maxlevel, level)
+ self.cmdline_parser.add_option_group(group)
+
+ def _monkeypatch_expand_default(self):
+ # monkey patch optparse to deal with our default values
+ try:
+ self.__expand_default_backup = optparse.HelpFormatter.expand_default
+ optparse.HelpFormatter.expand_default = expand_default
+ except AttributeError:
+ # python < 2.4: nothing to be done
+ pass
+ def _unmonkeypatch_expand_default(self):
+ # remove monkey patch
+ if hasattr(optparse.HelpFormatter, 'expand_default'):
+ # unpatch optparse to avoid side effects
+ optparse.HelpFormatter.expand_default = self.__expand_default_backup
+
+ def help(self, level=0):
+ """return the usage string for available options """
+ self.cmdline_parser.formatter.output_level = level
+ self._monkeypatch_expand_default()
+ try:
+ return self.cmdline_parser.format_help()
+ finally:
+ self._unmonkeypatch_expand_default()
+
+
+class Method(object):
+ """used to ease late binding of default method (so you can define options
+ on the class using default methods on the configuration instance)
+ """
+ def __init__(self, methname):
+ self.method = methname
+ self._inst = None
+
+ def bind(self, instance):
+ """bind the method to its instance"""
+ if self._inst is None:
+ self._inst = instance
+
+ def __call__(self, *args, **kwargs):
+ assert self._inst, 'unbound method'
+ return getattr(self._inst, self.method)(*args, **kwargs)
+
+
+class OptionsProviderMixIn(object):
+ """Mixin to provide options to an OptionsManager"""
+
+ # those attributes should be overridden
+ priority = -1
+ name = 'default'
+ options = ()
+ level = 0
+
+ def __init__(self):
+ self.config = optparse.Values()
+ for option in self.options:
+ try:
+ option, optdict = option
+ except ValueError:
+ raise Exception('Bad option: %r' % option)
+ if isinstance(optdict.get('default'), Method):
+ optdict['default'].bind(self)
+ elif isinstance(optdict.get('callback'), Method):
+ optdict['callback'].bind(self)
+ self.load_defaults()
+
+ def load_defaults(self):
+ """initialize the provider using default values"""
+ for opt, optdict in self.options:
+ action = optdict.get('action')
+ if action != 'callback':
+ # callback actions have no default
+ default = self.option_default(opt, optdict)
+ if default is REQUIRED:
+ continue
+ self.set_option(opt, default, action, optdict)
+
+ def option_default(self, opt, optdict=None):
+ """return the default value for an option"""
+ if optdict is None:
+ optdict = self.get_option_def(opt)
+ default = optdict.get('default')
+ if callable(default):
+ default = default()
+ return default
+
+ def option_name(self, opt, optdict=None):
+ """get the config attribute corresponding to opt
+ """
+ if optdict is None:
+ optdict = self.get_option_def(opt)
+ return optdict.get('dest', opt.replace('-', '_'))
+
+ def option_value(self, opt):
+ """get the current value for the given option"""
+ return getattr(self.config, self.option_name(opt), None)
+
+ def set_option(self, opt, value, action=None, optdict=None):
+ """method called to set an option (registered in the options list)
+ """
+ # print "************ setting option", opt," to value", value
+ if optdict is None:
+ optdict = self.get_option_def(opt)
+ if value is not None:
+ value = convert(value, optdict, opt)
+ if action is None:
+ action = optdict.get('action', 'store')
+ if optdict.get('type') == 'named': # XXX need specific handling
+ optname = self.option_name(opt, optdict)
+ currentvalue = getattr(self.config, optname, None)
+ if currentvalue:
+ currentvalue.update(value)
+ value = currentvalue
+ if action == 'store':
+ setattr(self.config, self.option_name(opt, optdict), value)
+ elif action in ('store_true', 'count'):
+ setattr(self.config, self.option_name(opt, optdict), 0)
+ elif action == 'store_false':
+ setattr(self.config, self.option_name(opt, optdict), 1)
+ elif action == 'append':
+ opt = self.option_name(opt, optdict)
+ _list = getattr(self.config, opt, None)
+ if _list is None:
+ if isinstance(value, (list, tuple)):
+ _list = value
+ elif value is not None:
+ _list = []
+ _list.append(value)
+ setattr(self.config, opt, _list)
+ elif isinstance(_list, tuple):
+ setattr(self.config, opt, _list + (value,))
+ else:
+ _list.append(value)
+ elif action == 'callback':
+ optdict['callback'](None, opt, value, None)
+ else:
+ raise UnsupportedAction(action)
+
+ def input_option(self, option, optdict, inputlevel=99):
+ default = self.option_default(option, optdict)
+ if default is REQUIRED:
+ defaultstr = '(required): '
+ elif optdict.get('level', 0) > inputlevel:
+ return
+ elif optdict['type'] == 'password' or default is None:
+ defaultstr = ': '
+ else:
+ defaultstr = '(default: %s): ' % format_option_value(optdict, default)
+ print ':%s:' % option
+ print optdict.get('help') or option
+ inputfunc = INPUT_FUNCTIONS[optdict['type']]
+ value = inputfunc(optdict, defaultstr)
+ while default is REQUIRED and not value:
+ print 'please specify a value'
+ value = inputfunc(optdict, '%s: ' % option)
+ if value is None and default is not None:
+ value = default
+ self.set_option(option, value, optdict=optdict)
+
+ def get_option_def(self, opt):
+ """return the dictionary defining an option given its name"""
+ assert self.options
+ for option in self.options:
+ if option[0] == opt:
+ return option[1]
+ raise OptionError('no such option %s in section %r'
+ % (opt, self.name), opt)
+
+
+ def all_options(self):
+ """return an iterator on available options for this provider
+ options are actually described by a 3-tuple:
+ (section, option name, option dictionary)
+ """
+ for section, options in self.options_by_section():
+ if section is None:
+ if self.name is None:
+ continue
+ section = self.name.upper()
+ for option, optiondict, value in options:
+ yield section, option, optiondict
+
+ def options_by_section(self):
+ """return an iterator on options grouped by section
+
+ (section, [list of (optname, optdict, optvalue)])
+ """
+ sections = {}
+ for optname, optdict in self.options:
+ sections.setdefault(optdict.get('group'), []).append(
+ (optname, optdict, self.option_value(optname)))
+ if None in sections:
+ yield None, sections.pop(None)
+ for section, options in sections.items():
+ yield section.upper(), options
+
+ def options_and_values(self, options=None):
+ if options is None:
+ options = self.options
+ for optname, optdict in options:
+ yield (optname, optdict, self.option_value(optname))
+
+
+class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
+ """basic mixin for simple configurations which don't need the
+ manager / providers model
+ """
+ def __init__(self, *args, **kwargs):
+ if not args:
+ kwargs.setdefault('usage', '')
+ kwargs.setdefault('quiet', 1)
+ OptionsManagerMixIn.__init__(self, *args, **kwargs)
+ OptionsProviderMixIn.__init__(self)
+ if not getattr(self, 'option_groups', None):
+ self.option_groups = []
+ for option, optdict in self.options:
+ try:
+ gdef = (optdict['group'].upper(), '')
+ except KeyError:
+ continue
+ if not gdef in self.option_groups:
+ self.option_groups.append(gdef)
+ self.register_options_provider(self, own_group=0)
+
+ def register_options(self, options):
+ """add some options to the configuration"""
+ options_by_group = {}
+ for optname, optdict in options:
+ options_by_group.setdefault(optdict.get('group', self.name.upper()), []).append((optname, optdict))
+ for group, options in options_by_group.items():
+ self.add_option_group(group, None, options, self)
+ self.options += tuple(options)
+
+ def load_defaults(self):
+ OptionsProviderMixIn.load_defaults(self)
+
+ def __iter__(self):
+ return iter(self.config.__dict__.iteritems())
+
+ def __getitem__(self, key):
+ try:
+ return getattr(self.config, self.option_name(key))
+ except (optparse.OptionValueError, AttributeError):
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ self.set_option(key, value)
+
+ def get(self, key, default=None):
+ try:
+ return getattr(self.config, self.option_name(key))
+ except (OptionError, AttributeError):
+ return default
+
+
+class Configuration(ConfigurationMixIn):
+ """class for simple configurations which don't need the
+ manager / providers model and prefer delegation to inheritance
+
+ configuration values are accessible through a dict like interface
+ """
+
+ def __init__(self, config_file=None, options=None, name=None,
+ usage=None, doc=None, version=None):
+ if options is not None:
+ self.options = options
+ if name is not None:
+ self.name = name
+ if doc is not None:
+ self.__doc__ = doc
+ super(Configuration, self).__init__(config_file=config_file, usage=usage, version=version)
+
+
+class OptionsManager2ConfigurationAdapter(object):
+ """Adapt an option manager to behave like a
+ `logilab.common.configuration.Configuration` instance
+ """
+ def __init__(self, provider):
+ self.config = provider
+
+ def __getattr__(self, key):
+ return getattr(self.config, key)
+
+ def __getitem__(self, key):
+ provider = self.config._all_options[key]
+ try:
+ return getattr(provider.config, provider.option_name(key))
+ except AttributeError:
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ self.config.global_set_option(self.config.option_name(key), value)
+
+ def get(self, key, default=None):
+ provider = self.config._all_options[key]
+ try:
+ return getattr(provider.config, provider.option_name(key))
+ except AttributeError:
+ return default
+
+
+def read_old_config(newconfig, changes, configfile):
+ """initialize newconfig from a deprecated configuration file
+
+ possible changes:
+ * ('renamed', oldname, newname)
+ * ('moved', option, oldgroup, newgroup)
+ * ('typechanged', option, oldtype, newvalue)
+ """
+ # build an index of changes
+ changesindex = {}
+ for action in changes:
+ if action[0] == 'moved':
+ option, oldgroup, newgroup = action[1:]
+ changesindex.setdefault(option, []).append((action[0], oldgroup, newgroup))
+ continue
+ if action[0] == 'renamed':
+ oldname, newname = action[1:]
+ changesindex.setdefault(newname, []).append((action[0], oldname))
+ continue
+ if action[0] == 'typechanged':
+ option, oldtype, newvalue = action[1:]
+ changesindex.setdefault(option, []).append((action[0], oldtype, newvalue))
+ continue
+ if action[1] in ('added', 'removed'):
+ continue # nothing to do here
+ raise Exception('unknown change %s' % action[0])
+ # build a config object able to read the old config
+ options = []
+ for optname, optdef in newconfig.options:
+ for action in changesindex.pop(optname, ()):
+ if action[0] == 'moved':
+ oldgroup, newgroup = action[1:]
+ optdef = optdef.copy()
+ optdef['group'] = oldgroup
+ elif action[0] == 'renamed':
+ optname = action[1]
+ elif action[0] == 'typechanged':
+ oldtype = action[1]
+ optdef = optdef.copy()
+ optdef['type'] = oldtype
+ options.append((optname, optdef))
+ if changesindex:
+ raise Exception('unapplied changes: %s' % changesindex)
+ oldconfig = Configuration(options=options, name=newconfig.name)
+ # read the old config
+ oldconfig.load_file_configuration(configfile)
+ # apply values reverting changes
+ changes.reverse()
+ done = set()
+ for action in changes:
+ if action[0] == 'renamed':
+ oldname, newname = action[1:]
+ newconfig[newname] = oldconfig[oldname]
+ done.add(newname)
+ elif action[0] == 'typechanged':
+ optname, oldtype, newvalue = action[1:]
+ newconfig[optname] = newvalue
+ done.add(optname)
+ for optname, optdef in newconfig.options:
+ if optdef.get('type') and not optname in done:
+ newconfig.set_option(optname, oldconfig[optname], optdict=optdef)
+
+
+def merge_options(options, optgroup=None):
+ """preprocess a list of options and remove duplicates, returning a new list
+ (tuple actually) of options.
+
+ Options dictionaries are copied to avoid later side-effect. Also, if
+ `optgroup` argument is specified, ensure all options are in the given group.
+ """
+ alloptions = {}
+ options = list(options)
+ for i in range(len(options)-1, -1, -1):
+ optname, optdict = options[i]
+ if optname in alloptions:
+ options.pop(i)
+ alloptions[optname].update(optdict)
+ else:
+ optdict = optdict.copy()
+ options[i] = (optname, optdict)
+ alloptions[optname] = optdict
+ if optgroup is not None:
+ alloptions[optname]['group'] = optgroup
+ return tuple(options)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/decorators.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/decorators.py
@@ -0,0 +1,283 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+""" A few useful function/method decorators. """
+__docformat__ = "restructuredtext en"
+
+import sys
+from time import clock, time
+
+from .compat import callable, method_type
+
+# XXX rewrite so we can use the decorator syntax when keyarg has to be specified
+
+def _is_generator_function(callableobj):
+ return callableobj.func_code.co_flags & 0x20
+
+class cached_decorator(object):
+ def __init__(self, cacheattr=None, keyarg=None):
+ self.cacheattr = cacheattr
+ self.keyarg = keyarg
+ def __call__(self, callableobj=None):
+ assert not _is_generator_function(callableobj), \
+ 'cannot cache generator function: %s' % callableobj
+ if callableobj.func_code.co_argcount == 1 or self.keyarg == 0:
+ cache = _SingleValueCache(callableobj, self.cacheattr)
+ elif self.keyarg:
+ cache = _MultiValuesKeyArgCache(callableobj, self.keyarg, self.cacheattr)
+ else:
+ cache = _MultiValuesCache(callableobj, self.cacheattr)
+ return cache.closure()
+
+class _SingleValueCache(object):
+ def __init__(self, callableobj, cacheattr=None):
+ self.callable = callableobj
+ if cacheattr is None:
+ self.cacheattr = '_%s_cache_' % callableobj.__name__
+ else:
+ assert cacheattr != callableobj.__name__
+ self.cacheattr = cacheattr
+
+ def __call__(__me, self, *args):
+ try:
+ return self.__dict__[__me.cacheattr]
+ except KeyError:
+ value = __me.callable(self, *args)
+ setattr(self, __me.cacheattr, value)
+ return value
+
+ def closure(self):
+ def wrapped(*args, **kwargs):
+ return self.__call__(*args, **kwargs)
+ wrapped.cache_obj = self
+ try:
+ wrapped.__doc__ = self.callable.__doc__
+ wrapped.__name__ = self.callable.__name__
+ wrapped.func_name = self.callable.func_name
+ except:
+ pass
+ return wrapped
+
+ def clear(self, holder):
+ holder.__dict__.pop(self.cacheattr, None)
+
+
+class _MultiValuesCache(_SingleValueCache):
+ def _get_cache(self, holder):
+ try:
+ _cache = holder.__dict__[self.cacheattr]
+ except KeyError:
+ _cache = {}
+ setattr(holder, self.cacheattr, _cache)
+ return _cache
+
+ def __call__(__me, self, *args, **kwargs):
+ _cache = __me._get_cache(self)
+ try:
+ return _cache[args]
+ except KeyError:
+ _cache[args] = __me.callable(self, *args)
+ return _cache[args]
+
+class _MultiValuesKeyArgCache(_MultiValuesCache):
+ def __init__(self, callableobj, keyarg, cacheattr=None):
+ super(_MultiValuesKeyArgCache, self).__init__(callableobj, cacheattr)
+ self.keyarg = keyarg
+
+ def __call__(__me, self, *args, **kwargs):
+ _cache = __me._get_cache(self)
+ key = args[__me.keyarg-1]
+ try:
+ return _cache[key]
+ except KeyError:
+ _cache[key] = __me.callable(self, *args, **kwargs)
+ return _cache[key]
+
+
+def cached(callableobj=None, keyarg=None, **kwargs):
+ """Simple decorator to cache result of method call."""
+ kwargs['keyarg'] = keyarg
+ decorator = cached_decorator(**kwargs)
+ if callableobj is None:
+ return decorator
+ else:
+ return decorator(callableobj)
+
+
+class cachedproperty(object):
+ """ Provides a cached property equivalent to the stacking of
+ @cached and @property, but more efficient.
+
+ After first usage, the <property_name> becomes part of the object's
+ __dict__. Doing:
+
+ del obj.<property_name> empties the cache.
+
+ Idea taken from the pyramid_ framework and the mercurial_ project.
+
+ .. _pyramid: http://pypi.python.org/pypi/pyramid
+ .. _mercurial: http://pypi.python.org/pypi/Mercurial
+ """
+ __slots__ = ('wrapped',)
+
+ def __init__(self, wrapped):
+ try:
+ wrapped.__name__
+ except AttributeError:
+ raise TypeError('%s must have a __name__ attribute' %
+ wrapped)
+ self.wrapped = wrapped
+
+ @property
+ def __doc__(self):
+ doc = getattr(self.wrapped, '__doc__', None)
+ return ('<wrapped by the cachedproperty decorator>%s'
+ % ('\n%s' % doc if doc else ''))
+
+ def __get__(self, inst, objtype=None):
+ if inst is None:
+ return self
+ val = self.wrapped(inst)
+ setattr(inst, self.wrapped.__name__, val)
+ return val
+
+
+def get_cache_impl(obj, funcname):
+ cls = obj.__class__
+ member = getattr(cls, funcname)
+ if isinstance(member, property):
+ member = member.fget
+ return member.cache_obj
+
+def clear_cache(obj, funcname):
+ """Clear a cache handled by the :func:`cached` decorator. If 'x' class has
+ @cached on its method `foo`, type
+
+ >>> clear_cache(x, 'foo')
+
+ to purge this method's cache on the instance.
+ """
+ get_cache_impl(obj, funcname).clear(obj)
+
+def copy_cache(obj, funcname, cacheobj):
+ """Copy cache for <funcname> from cacheobj to obj."""
+ cacheattr = get_cache_impl(obj, funcname).cacheattr
+ try:
+ setattr(obj, cacheattr, cacheobj.__dict__[cacheattr])
+ except KeyError:
+ pass
+
+
+class wproperty(object):
+ """Simple descriptor expecting to take a modifier function as first argument
+ and looking for a _<function name> to retrieve the attribute.
+ """
+ def __init__(self, setfunc):
+ self.setfunc = setfunc
+ self.attrname = '_%s' % setfunc.__name__
+
+ def __set__(self, obj, value):
+ self.setfunc(obj, value)
+
+ def __get__(self, obj, cls):
+ assert obj is not None
+ return getattr(obj, self.attrname)
+
+
+class classproperty(object):
+ """this is a simple property-like class but for class attributes.
+ """
+ def __init__(self, get):
+ self.get = get
+ def __get__(self, inst, cls):
+ return self.get(cls)
+
+
+class iclassmethod(object):
+ '''Descriptor for method which should be available as class method if called
+ on the class or instance method if called on an instance.
+ '''
+ def __init__(self, func):
+ self.func = func
+ def __get__(self, instance, objtype):
+ if instance is None:
+ return method_type(self.func, objtype, objtype.__class__)
+ return method_type(self.func, instance, objtype)
+ def __set__(self, instance, value):
+ raise AttributeError("can't set attribute")
+
+
+def timed(f):
+ def wrap(*args, **kwargs):
+ t = time()
+ c = clock()
+ res = f(*args, **kwargs)
+ print '%s clock: %.9f / time: %.9f' % (f.__name__,
+ clock() - c, time() - t)
+ return res
+ return wrap
+
+
+def locked(acquire, release):
+ """Decorator taking two methods to acquire/release a lock as argument,
+ returning a decorator function which will call the inner method after
+ having called acquire(self) and will call release(self) afterwards.
+ """
+ def decorator(f):
+ def wrapper(self, *args, **kwargs):
+ acquire(self)
+ try:
+ return f(self, *args, **kwargs)
+ finally:
+ release(self)
+ return wrapper
+ return decorator
+
+
+def monkeypatch(klass, methodname=None):
+ """Decorator extending class with the decorated callable
+ >>> class A:
+ ... pass
+ >>> @monkeypatch(A)
+ ... def meth(self):
+ ... return 12
+ ...
+ >>> a = A()
+ >>> a.meth()
+ 12
+ >>> @monkeypatch(A, 'foo')
+ ... def meth(self):
+ ... return 12
+ ...
+ >>> a.foo()
+ 12
+ """
+ def decorator(func):
+ try:
+ name = methodname or func.__name__
+ except AttributeError:
+ raise AttributeError('%s has no __name__ attribute: '
+ 'you should provide an explicit `methodname`'
+ % func)
+ if callable(func) and sys.version_info < (3, 0):
+ setattr(klass, name, method_type(func, None, klass))
+ else:
+ # likely a property
+ # this is quite borderline but usage already in the wild ...
+ setattr(klass, name, func)
+ return func
+ return decorator
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/deprecation.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/deprecation.py
@@ -0,0 +1,188 @@
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Deprecation utilities."""
+
+__docformat__ = "restructuredtext en"
+
+import sys
+from warnings import warn
+
+from .changelog import Version
+
+
+class DeprecationWrapper(object):
+ """proxy to print a warning on access to any attribute of the wrapped object
+ """
+ def __init__(self, proxied, msg=None):
+ self._proxied = proxied
+ self._msg = msg
+
+ def __getattr__(self, attr):
+ warn(self._msg, DeprecationWarning, stacklevel=2)
+ return getattr(self._proxied, attr)
+
+ def __setattr__(self, attr, value):
+ if attr in ('_proxied', '_msg'):
+ self.__dict__[attr] = value
+ else:
+ warn(self._msg, DeprecationWarning, stacklevel=2)
+ setattr(self._proxied, attr, value)
+
+
+class DeprecationManager(object):
+ """Manage the deprecation message handling. Messages are dropped for
+ versions more recent than the 'compatible' version. Example::
+
+ deprecator = deprecation.DeprecationManager("module_name")
+ deprecator.compatibility('1.3')
+
+ deprecator.warn('1.2', "message.")
+
+ @deprecator.deprecated('1.2', 'Message')
+ def any_func():
+ pass
+
+ class AnyClass(object):
+ __metaclass__ = deprecator.class_deprecated('1.2')
+ """
+ def __init__(self, module_name=None):
+ """
+ """
+ self.module_name = module_name
+ self.compatible_version = None
+
+ def compatibility(self, compatible_version):
+ """Set the compatible version.
+ """
+ self.compatible_version = compatible_version
+
+ def deprecated(self, version=None, reason=None, stacklevel=2, name=None, doc=None):
+ """Display a deprecation message only if the version is older than the
+ compatible version.
+ """
+ def decorator(func):
+ message = reason or 'The function "%s" is deprecated'
+ if '%s' in message:
+ message %= func.func_name
+ def wrapped(*args, **kwargs):
+ self.warn(version, message, stacklevel)
+ return func(*args, **kwargs)
+ return wrapped
+ return decorator
+
+ def class_deprecated(self, version=None):
+ class metaclass(type):
+ """metaclass to print a warning on instantiation of a deprecated class"""
+
+ def __call__(cls, *args, **kwargs):
+ msg = getattr(cls, "__deprecation_warning__",
+ "%(cls)s is deprecated") % {'cls': cls.__name__}
+ self.warn(version, msg)
+ return type.__call__(cls, *args, **kwargs)
+ return metaclass
+
+ def moved(self, version, modpath, objname):
+ """use to tell that a callable has been moved to a new module.
+
+ It returns a callable wrapper, so that when it's called a warning is printed
+ telling where the object can be found, import is done (and not before) and
+ the actual object is called.
+
+ NOTE: the usage is somewhat limited on classes since it will fail if the
+ wrapper is used in a class ancestors list, use the `class_moved` function
+ instead (which has no lazy import feature though).
+ """
+ def callnew(*args, **kwargs):
+ from logilab.common.modutils import load_module_from_name
+ message = "object %s has been moved to module %s" % (objname, modpath)
+ self.warn(version, message)
+ m = load_module_from_name(modpath)
+ return getattr(m, objname)(*args, **kwargs)
+ return callnew
+
+ def class_renamed(self, version, old_name, new_class, message=None):
+ clsdict = {}
+ if message is None:
+ message = '%s is deprecated, use %s' % (old_name, new_class.__name__)
+ clsdict['__deprecation_warning__'] = message
+ try:
+ # new-style class
+ return self.class_deprecated(version)(old_name, (new_class,), clsdict)
+ except (NameError, TypeError):
+ # old-style class
+ class DeprecatedClass(new_class):
+ """FIXME: There might be a better way to handle old/new-style class
+ """
+ def __init__(self, *args, **kwargs):
+ self.warn(version, message)
+ new_class.__init__(self, *args, **kwargs)
+ return DeprecatedClass
+
+ def class_moved(self, version, new_class, old_name=None, message=None):
+ """nice wrapper around class_renamed when a class has been moved into
+ another module
+ """
+ if old_name is None:
+ old_name = new_class.__name__
+ if message is None:
+ message = 'class %s is now available as %s.%s' % (
+ old_name, new_class.__module__, new_class.__name__)
+ return self.class_renamed(version, old_name, new_class, message)
+
+ def warn(self, version=None, reason="", stacklevel=2):
+ """Display a deprecation message only if the version is older than the
+ compatible version.
+ """
+ if self.module_name and version:
+ reason = '[%s %s] %s' % (self.module_name, version, reason)
+ elif self.module_name:
+ reason = '[%s] %s' % (self.module_name, reason)
+ elif version:
+ reason = '[%s] %s' % (version, reason)
+ if (self.compatible_version is None
+ or version is None
+ or Version(version) < Version(self.compatible_version)):
+ warn(reason, DeprecationWarning, stacklevel=stacklevel)
+
+_defaultdeprecator = DeprecationManager()
+
+def deprecated(reason=None, stacklevel=2, name=None, doc=None):
+ return _defaultdeprecator.deprecated(None, reason, stacklevel, name, doc)
+
+class_deprecated = _defaultdeprecator.class_deprecated()
+
+def moved(modpath, objname):
+ return _defaultdeprecator.moved(None, modpath, objname)
+moved.__doc__ = _defaultdeprecator.moved.__doc__
+
+def class_renamed(old_name, new_class, message=None):
+ """automatically creates a class which fires a DeprecationWarning
+ when instantiated.
+
+ >>> Set = class_renamed('Set', set, 'Set is now replaced by set')
+ >>> s = Set()
+ sample.py:57: DeprecationWarning: Set is now replaced by set
+ s = Set()
+ >>>
+ """
+ return _defaultdeprecator.class_renamed(None, old_name, new_class, message)
+
+def class_moved(new_class, old_name=None, message=None):
+ return _defaultdeprecator.class_moved(None, new_class, old_name, message)
+class_moved.__doc__ = _defaultdeprecator.class_moved.__doc__
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/graph.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/graph.py
@@ -0,0 +1,273 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Graph manipulation utilities.
+
+(dot generation adapted from pypy/translator/tool/make_dot.py)
+"""
+
+__docformat__ = "restructuredtext en"
+
+__metaclass__ = type
+
+import os.path as osp
+import os
+import sys
+import tempfile
+from .compat import str_encode
+
+def escape(value):
+ """Make <value> usable in a dot file."""
+ lines = [line.replace('"', '\\"') for line in value.split('\n')]
+ data = '\\l'.join(lines)
+ return '\\n' + data
+
+def target_info_from_filename(filename):
+ """Transforms /some/path/foo.png into ('/some/path', 'foo.png', 'png')."""
+ basename = osp.basename(filename)
+ storedir = osp.dirname(osp.abspath(filename))
+ target = filename.split('.')[-1]
+ return storedir, basename, target
+
+
+class DotBackend:
+ """Dot File backend."""
+ def __init__(self, graphname, rankdir=None, size=None, ratio=None,
+ charset='utf-8', renderer='dot', additionnal_param={}):
+ self.graphname = graphname
+ self.renderer = renderer
+ self.lines = []
+ self._source = None
+ self.emit("digraph %s {" % normalize_node_id(graphname))
+ if rankdir:
+ self.emit('rankdir=%s' % rankdir)
+ if ratio:
+ self.emit('ratio=%s' % ratio)
+ if size:
+ self.emit('size="%s"' % size)
+ if charset:
+ assert charset.lower() in ('utf-8', 'iso-8859-1', 'latin1'), \
+ 'unsupported charset %s' % charset
+ self.emit('charset="%s"' % charset)
+ for param in additionnal_param.iteritems():
+ self.emit('='.join(param))
+
+ def get_source(self):
+ """returns self._source"""
+ if self._source is None:
+ self.emit("}\n")
+ self._source = '\n'.join(self.lines)
+ del self.lines
+ return self._source
+
+ source = property(get_source)
+
+ def generate(self, outputfile=None, dotfile=None, mapfile=None):
+ """Generates a graph file.
+
+ :param outputfile: filename and path [defaults to graphname.png]
+ :param dotfile: filename and path [defaults to graphname.dot]
+
+ :rtype: str
+ :return: a path to the generated file
+ """
+ import subprocess # introduced in py 2.4
+ name = self.graphname
+ if not dotfile:
+ # if 'outputfile' is a dot file use it as 'dotfile'
+ if outputfile and outputfile.endswith(".dot"):
+ dotfile = outputfile
+ else:
+ dotfile = '%s.dot' % name
+ if outputfile is not None:
+ storedir, basename, target = target_info_from_filename(outputfile)
+ if target != "dot":
+ pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
+ os.close(pdot)
+ else:
+ dot_sourcepath = osp.join(storedir, dotfile)
+ else:
+ target = 'png'
+ pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
+ ppng, outputfile = tempfile.mkstemp(".png", name)
+ os.close(pdot)
+ os.close(ppng)
+ pdot = open(dot_sourcepath, 'w')
+ pdot.write(str_encode(self.source, 'utf8'))
+ pdot.close()
+ if target != 'dot':
+ if sys.platform == 'win32':
+ use_shell = True
+ else:
+ use_shell = False
+ if mapfile:
+ subprocess.call([self.renderer, '-Tcmapx', '-o', mapfile, '-T', target, dot_sourcepath, '-o', outputfile],
+ shell=use_shell)
+ else:
+ subprocess.call([self.renderer, '-T', target,
+ dot_sourcepath, '-o', outputfile],
+ shell=use_shell)
+ os.unlink(dot_sourcepath)
+ return outputfile
+
+ def emit(self, line):
+ """Adds <line> to final output."""
+ self.lines.append(line)
+
+ def emit_edge(self, name1, name2, **props):
+ """emit an edge from <name1> to <name2>.
+ edge properties: see http://www.graphviz.org/doc/info/attrs.html
+ """
+ attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
+ n_from, n_to = normalize_node_id(name1), normalize_node_id(name2)
+ self.emit('%s -> %s [%s];' % (n_from, n_to, ", ".join(attrs)) )
+
+ def emit_node(self, name, **props):
+ """emit a node with given properties.
+ node properties: see http://www.graphviz.org/doc/info/attrs.html
+ """
+ attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
+ self.emit('%s [%s];' % (normalize_node_id(name), ", ".join(attrs)))
+
+def normalize_node_id(nid):
+ """Returns a suitable DOT node id for `nid`."""
+ return '"%s"' % nid
+
+class GraphGenerator:
+ def __init__(self, backend):
+ # the backend is responsible to output the graph in a particular format
+ self.backend = backend
+
+    # XXX doesn't like space in outputfile / mapfile
+ def generate(self, visitor, propshdlr, outputfile=None, mapfile=None):
+ # the visitor
+ # the property handler is used to get node and edge properties
+ # according to the graph and to the backend
+ self.propshdlr = propshdlr
+ for nodeid, node in visitor.nodes():
+ props = propshdlr.node_properties(node)
+ self.backend.emit_node(nodeid, **props)
+ for subjnode, objnode, edge in visitor.edges():
+ props = propshdlr.edge_properties(edge, subjnode, objnode)
+ self.backend.emit_edge(subjnode, objnode, **props)
+ return self.backend.generate(outputfile=outputfile, mapfile=mapfile)
+
+
+class UnorderableGraph(Exception):
+ pass
+
+def ordered_nodes(graph):
+ """takes a dependency graph dict as arguments and return an ordered tuple of
+ nodes starting with nodes without dependencies and up to the outermost node.
+
+ If there is some cycle in the graph, :exc:`UnorderableGraph` will be raised.
+
+ Also the given graph dict will be emptied.
+ """
+ # check graph consistency
+ cycles = get_cycles(graph)
+ if cycles:
+ cycles = '\n'.join([' -> '.join(cycle) for cycle in cycles])
+ raise UnorderableGraph('cycles in graph: %s' % cycles)
+ vertices = set(graph)
+ to_vertices = set()
+ for edges in graph.values():
+ to_vertices |= set(edges)
+ missing_vertices = to_vertices - vertices
+ if missing_vertices:
+ raise UnorderableGraph('missing vertices: %s' % ', '.join(missing_vertices))
+ # order vertices
+ order = []
+ order_set = set()
+ old_len = None
+ while graph:
+ if old_len == len(graph):
+ raise UnorderableGraph('unknown problem with %s' % graph)
+ old_len = len(graph)
+ deps_ok = []
+ for node, node_deps in graph.items():
+ for dep in node_deps:
+ if dep not in order_set:
+ break
+ else:
+ deps_ok.append(node)
+ order.append(deps_ok)
+ order_set |= set(deps_ok)
+ for node in deps_ok:
+ del graph[node]
+ result = []
+ for grp in reversed(order):
+ result.extend(sorted(grp))
+ return tuple(result)
+
+
+def get_cycles(graph_dict, vertices=None):
+ '''given a dictionary representing an ordered graph (i.e. key are vertices
+ and values is a list of destination vertices representing edges), return a
+ list of detected cycles
+ '''
+ if not graph_dict:
+ return ()
+ result = []
+ if vertices is None:
+ vertices = graph_dict.keys()
+ for vertice in vertices:
+ _get_cycles(graph_dict, vertice, [], result)
+ return result
+
+def _get_cycles(graph_dict, vertice=None, path=None, result=None):
+ """recursive function doing the real work for get_cycles"""
+ if vertice in path:
+ cycle = [vertice]
+ for node in path[::-1]:
+ if node == vertice:
+ break
+ cycle.insert(0, node)
+ # make a canonical representation
+ start_from = min(cycle)
+ index = cycle.index(start_from)
+ cycle = cycle[index:] + cycle[0:index]
+ # append it to result if not already in
+ if not cycle in result:
+ result.append(cycle)
+ return
+ path.append(vertice)
+ try:
+ for node in graph_dict[vertice]:
+ _get_cycles(graph_dict, node, path, result)
+ except KeyError:
+ pass
+ path.pop()
+
+def has_path(graph_dict, fromnode, tonode, path=None):
+ """generic function taking a simple graph definition as a dictionary, with
+    node as key associated to a list of nodes directly reachable from it.
+
+ Return None if no path exists to go from `fromnode` to `tonode`, else the
+ first path found (as a list including the destination node at last)
+ """
+ if path is None:
+ path = []
+ elif fromnode in path:
+ return None
+ path.append(fromnode)
+ for destnode in graph_dict[fromnode]:
+ if destnode == tonode or has_path(graph_dict, destnode, tonode, path):
+ return path[1:] + [tonode]
+ path.pop()
+ return None
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/interface.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/interface.py
@@ -0,0 +1,71 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Bases class for interfaces to provide 'light' interface handling.
+
+ TODO:
+ _ implements a check method which check that an object implements the
+ interface
+ _ Attribute objects
+
+ This module requires at least python 2.2
+"""
+__docformat__ = "restructuredtext en"
+
+
+class Interface(object):
+ """Base class for interfaces."""
+ def is_implemented_by(cls, instance):
+ return implements(instance, cls)
+ is_implemented_by = classmethod(is_implemented_by)
+
+
+def implements(obj, interface):
+ """Return true if the give object (maybe an instance or class) implements
+ the interface.
+ """
+ kimplements = getattr(obj, '__implements__', ())
+ if not isinstance(kimplements, (list, tuple)):
+ kimplements = (kimplements,)
+ for implementedinterface in kimplements:
+ if issubclass(implementedinterface, interface):
+ return True
+ return False
+
+
+def extend(klass, interface, _recurs=False):
+ """Add interface to klass'__implements__ if not already implemented in.
+
+ If klass is subclassed, ensure subclasses __implements__ it as well.
+
+    NOTE: klass should be a new class.
+ """
+ if not implements(klass, interface):
+ try:
+ kimplements = klass.__implements__
+ kimplementsklass = type(kimplements)
+ kimplements = list(kimplements)
+ except AttributeError:
+ kimplementsklass = tuple
+ kimplements = []
+ kimplements.append(interface)
+ klass.__implements__ = kimplementsklass(kimplements)
+ for subklass in klass.__subclasses__():
+ extend(subklass, interface, _recurs=True)
+ elif _recurs:
+ for subklass in klass.__subclasses__():
+ extend(subklass, interface, _recurs=True)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/modutils.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/modutils.py
@@ -0,0 +1,658 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Python modules manipulation utility functions.
+
+:type PY_SOURCE_EXTS: tuple(str)
+:var PY_SOURCE_EXTS: list of possible python source file extension
+
+:type STD_LIB_DIR: str
+:var STD_LIB_DIR: directory where standard modules are located
+
+:type BUILTIN_MODULES: dict
+:var BUILTIN_MODULES: dictionary with builtin module names as key
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import os
+from os.path import splitext, join, abspath, isdir, dirname, exists, basename
+from imp import find_module, load_module, C_BUILTIN, PY_COMPILED, PKG_DIRECTORY
+from distutils.sysconfig import get_config_var, get_python_lib, get_python_version
+from distutils.errors import DistutilsPlatformError
+
+try:
+ import zipimport
+except ImportError:
+ zipimport = None
+
+ZIPFILE = object()
+
+from . import STD_BLACKLIST, _handle_blacklist
+
+# Notes about STD_LIB_DIR
+# Consider arch-specific installation for STD_LIB_DIR definition
+# :mod:`distutils.sysconfig` contains too many hardcoded values to rely on
+#
+# :see: `Problems with /usr/lib64 builds <http://bugs.python.org/issue1294959>`_
+# :see: `FHS <http://www.pathname.com/fhs/pub/fhs-2.3.html#LIBLTQUALGTALTERNATEFORMATESSENTIAL>`_
+if sys.platform.startswith('win'):
+ PY_SOURCE_EXTS = ('py', 'pyw')
+ PY_COMPILED_EXTS = ('dll', 'pyd')
+else:
+ PY_SOURCE_EXTS = ('py',)
+ PY_COMPILED_EXTS = ('so',)
+
+try:
+ STD_LIB_DIR = get_python_lib(standard_lib=1)
+# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
+# non-valid path, see https://bugs.pypy.org/issue1164
+except DistutilsPlatformError:
+ STD_LIB_DIR = '//'
+
+EXT_LIB_DIR = get_python_lib()
+
+BUILTIN_MODULES = dict(zip(sys.builtin_module_names,
+ [1]*len(sys.builtin_module_names)))
+
+
+class NoSourceFile(Exception):
+ """exception raised when we are not able to get a python
+ source file for a precompiled file
+ """
+
+class LazyObject(object):
+ def __init__(self, module, obj):
+ self.module = module
+ self.obj = obj
+ self._imported = None
+
+ def _getobj(self):
+ if self._imported is None:
+ self._imported = getattr(load_module_from_name(self.module),
+ self.obj)
+ return self._imported
+
+ def __getattribute__(self, attr):
+ try:
+ return super(LazyObject, self).__getattribute__(attr)
+ except AttributeError, ex:
+ return getattr(self._getobj(), attr)
+
+ def __call__(self, *args, **kwargs):
+ return self._getobj()(*args, **kwargs)
+
+
+def load_module_from_name(dotted_name, path=None, use_sys=1):
+ """Load a Python module from its name.
+
+ :type dotted_name: str
+ :param dotted_name: python name of a module or package
+
+ :type path: list or None
+ :param path:
+ optional list of path where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type use_sys: bool
+ :param use_sys:
+ boolean indicating whether the sys.modules dictionary should be
+ used or not
+
+
+ :raise ImportError: if the module or package is not found
+
+ :rtype: module
+ :return: the loaded module
+ """
+ return load_module_from_modpath(dotted_name.split('.'), path, use_sys)
+
+
+def load_module_from_modpath(parts, path=None, use_sys=1):
+ """Load a python module from its splitted name.
+
+ :type parts: list(str) or tuple(str)
+ :param parts:
+ python name of a module or package splitted on '.'
+
+ :type path: list or None
+ :param path:
+ optional list of path where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type use_sys: bool
+ :param use_sys:
+ boolean indicating whether the sys.modules dictionary should be used or not
+
+ :raise ImportError: if the module or package is not found
+
+ :rtype: module
+ :return: the loaded module
+ """
+ if use_sys:
+ try:
+ return sys.modules['.'.join(parts)]
+ except KeyError:
+ pass
+ modpath = []
+ prevmodule = None
+ for part in parts:
+ modpath.append(part)
+ curname = '.'.join(modpath)
+ module = None
+ if len(modpath) != len(parts):
+ # even with use_sys=False, should try to get outer packages from sys.modules
+ module = sys.modules.get(curname)
+ elif use_sys:
+ # because it may have been indirectly loaded through a parent
+ module = sys.modules.get(curname)
+ if module is None:
+ mp_file, mp_filename, mp_desc = find_module(part, path)
+ module = load_module(curname, mp_file, mp_filename, mp_desc)
+ if prevmodule:
+ setattr(prevmodule, part, module)
+ _file = getattr(module, '__file__', '')
+ if not _file and len(modpath) != len(parts):
+ raise ImportError('no module in %s' % '.'.join(parts[len(modpath):]) )
+ path = [dirname( _file )]
+ prevmodule = module
+ return module
+
+
+def load_module_from_file(filepath, path=None, use_sys=1, extrapath=None):
+    """Load a Python module from its path.
+
+ :type filepath: str
+ :param filepath: path to the python module or package
+
+ :type path: list or None
+ :param path:
+ optional list of path where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type use_sys: bool
+ :param use_sys:
+ boolean indicating whether the sys.modules dictionary should be
+ used or not
+
+
+ :raise ImportError: if the module or package is not found
+
+ :rtype: module
+ :return: the loaded module
+ """
+ modpath = modpath_from_file(filepath, extrapath)
+ return load_module_from_modpath(modpath, path, use_sys)
+
+
+def _check_init(path, mod_path):
+ """check there are some __init__.py all along the way"""
+ for part in mod_path:
+ path = join(path, part)
+ if not _has_init(path):
+ return False
+ return True
+
+
+def modpath_from_file(filename, extrapath=None):
+ """given a file path return the corresponding splitted module's name
+ (i.e name of a module or package splitted on '.')
+
+ :type filename: str
+ :param filename: file's path for which we want the module's name
+
+ :type extrapath: dict
+ :param extrapath:
+ optional extra search path, with path as key and package name for the path
+ as value. This is usually useful to handle package splitted in multiple
+ directories using __path__ trick.
+
+
+ :raise ImportError:
+ if the corresponding module's name has not been found
+
+ :rtype: list(str)
+ :return: the corresponding splitted module's name
+ """
+ base = splitext(abspath(filename))[0]
+ if extrapath is not None:
+ for path_ in extrapath:
+ path = abspath(path_)
+ if path and base[:len(path)] == path:
+ submodpath = [pkg for pkg in base[len(path):].split(os.sep)
+ if pkg]
+ if _check_init(path, submodpath[:-1]):
+ return extrapath[path_].split('.') + submodpath
+ for path in sys.path:
+ path = abspath(path)
+ if path and base.startswith(path):
+ modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg]
+ if _check_init(path, modpath[:-1]):
+ return modpath
+ raise ImportError('Unable to find module for %s in %s' % (
+ filename, ', \n'.join(sys.path)))
+
+
+
+def file_from_modpath(modpath, path=None, context_file=None):
+ """given a mod path (i.e. splitted module / package name), return the
+ corresponding file, giving priority to source file over precompiled
+ file if it exists
+
+ :type modpath: list or tuple
+ :param modpath:
+ splitted module's name (i.e name of a module or package splitted
+ on '.')
+ (this means explicit relative imports that start with dots have
+ empty strings in this list!)
+
+ :type path: list or None
+ :param path:
+ optional list of path where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type context_file: str or None
+ :param context_file:
+ context file to consider, necessary if the identifier has been
+ introduced using a relative import unresolvable in the actual
+ context (i.e. modutils)
+
+ :raise ImportError: if there is no such module in the directory
+
+ :rtype: str or None
+ :return:
+ the path to the module's file or None if it's an integrated
+ builtin module such as 'sys'
+ """
+ if context_file is not None:
+ context = dirname(context_file)
+ else:
+ context = context_file
+ if modpath[0] == 'xml':
+ # handle _xmlplus
+ try:
+ return _file_from_modpath(['_xmlplus'] + modpath[1:], path, context)
+ except ImportError:
+ return _file_from_modpath(modpath, path, context)
+ elif modpath == ['os', 'path']:
+ # FIXME: currently ignoring search_path...
+ return os.path.__file__
+ return _file_from_modpath(modpath, path, context)
+
+
+
+def get_module_part(dotted_name, context_file=None):
+ """given a dotted name return the module part of the name :
+
+ >>> get_module_part('logilab.common.modutils.get_module_part')
+ 'logilab.common.modutils'
+
+ :type dotted_name: str
+ :param dotted_name: full name of the identifier we are interested in
+
+ :type context_file: str or None
+ :param context_file:
+ context file to consider, necessary if the identifier has been
+ introduced using a relative import unresolvable in the actual
+ context (i.e. modutils)
+
+
+ :raise ImportError: if there is no such module in the directory
+
+ :rtype: str or None
+ :return:
+ the module part of the name or None if we have not been able at
+ all to import the given name
+
+ XXX: deprecated, since it doesn't handle package precedence over module
+ (see #10066)
+ """
+ # os.path trick
+ if dotted_name.startswith('os.path'):
+ return 'os.path'
+ parts = dotted_name.split('.')
+ if context_file is not None:
+ # first check for builtin module which won't be considered latter
+ # in that case (path != None)
+ if parts[0] in BUILTIN_MODULES:
+ if len(parts) > 2:
+ raise ImportError(dotted_name)
+ return parts[0]
+ # don't use += or insert, we want a new list to be created !
+ path = None
+ starti = 0
+ if parts[0] == '':
+ assert context_file is not None, \
+ 'explicit relative import, but no context_file?'
+ path = [] # prevent resolving the import non-relatively
+ starti = 1
+ while parts[starti] == '': # for all further dots: change context
+ starti += 1
+ context_file = dirname(context_file)
+ for i in range(starti, len(parts)):
+ try:
+ file_from_modpath(parts[starti:i+1],
+ path=path, context_file=context_file)
+ except ImportError:
+ if not i >= max(1, len(parts) - 2):
+ raise
+ return '.'.join(parts[:i])
+ return dotted_name
+
+
+def get_modules(package, src_directory, blacklist=STD_BLACKLIST):
+ """given a package directory return a list of all available python
+ modules in the package and its subpackages
+
+ :type package: str
+ :param package: the python name for the package
+
+ :type src_directory: str
+ :param src_directory:
+ path of the directory corresponding to the package
+
+ :type blacklist: list or tuple
+ :param blacklist:
+ optional list of files or directory to ignore, default to
+ the value of `logilab.common.STD_BLACKLIST`
+
+ :rtype: list
+ :return:
+ the list of all available python modules in the package and its
+ subpackages
+ """
+ modules = []
+ for directory, dirnames, filenames in os.walk(src_directory):
+ _handle_blacklist(blacklist, dirnames, filenames)
+ # check for __init__.py
+ if not '__init__.py' in filenames:
+ dirnames[:] = ()
+ continue
+ if directory != src_directory:
+ dir_package = directory[len(src_directory):].replace(os.sep, '.')
+ modules.append(package + dir_package)
+ for filename in filenames:
+ if _is_python_file(filename) and filename != '__init__.py':
+ src = join(directory, filename)
+ module = package + src[len(src_directory):-3]
+ modules.append(module.replace(os.sep, '.'))
+ return modules
+
+
+
+def get_module_files(src_directory, blacklist=STD_BLACKLIST):
+ """given a package directory return a list of all available python
+ module's files in the package and its subpackages
+
+ :type src_directory: str
+ :param src_directory:
+ path of the directory corresponding to the package
+
+ :type blacklist: list or tuple
+ :param blacklist:
+ optional list of files or directory to ignore, default to the value of
+ `logilab.common.STD_BLACKLIST`
+
+ :rtype: list
+ :return:
+ the list of all available python module's files in the package and
+ its subpackages
+ """
+ files = []
+ for directory, dirnames, filenames in os.walk(src_directory):
+ _handle_blacklist(blacklist, dirnames, filenames)
+ # check for __init__.py
+ if not '__init__.py' in filenames:
+ dirnames[:] = ()
+ continue
+ for filename in filenames:
+ if _is_python_file(filename):
+ src = join(directory, filename)
+ files.append(src)
+ return files
+
+
+def get_source_file(filename, include_no_ext=False):
+ """given a python module's file name return the matching source file
+    name (the filename will be returned identically if it's already an
+ absolute path to a python source file...)
+
+ :type filename: str
+ :param filename: python module's file name
+
+
+ :raise NoSourceFile: if no source file exists on the file system
+
+ :rtype: str
+ :return: the absolute path of the source file if it exists
+ """
+ base, orig_ext = splitext(abspath(filename))
+ for ext in PY_SOURCE_EXTS:
+ source_path = '%s.%s' % (base, ext)
+ if exists(source_path):
+ return source_path
+ if include_no_ext and not orig_ext and exists(base):
+ return base
+ raise NoSourceFile(filename)
+
+
+def cleanup_sys_modules(directories):
+ """remove submodules of `directories` from `sys.modules`"""
+ for modname, module in sys.modules.items():
+ modfile = getattr(module, '__file__', None)
+ if modfile:
+ for directory in directories:
+ if modfile.startswith(directory):
+ del sys.modules[modname]
+ break
+
+
+def is_python_source(filename):
+ """
+ rtype: bool
+ return: True if the filename is a python source file
+ """
+ return splitext(filename)[1][1:] in PY_SOURCE_EXTS
+
+
+
+def is_standard_module(modname, std_path=(STD_LIB_DIR,)):
+ """try to guess if a module is a standard python module (by default,
+ see `std_path` parameter's description)
+
+ :type modname: str
+ :param modname: name of the module we are interested in
+
+ :type std_path: list(str) or tuple(str)
+    :param std_path: list of path considered as standard
+
+
+ :rtype: bool
+ :return:
+ true if the module:
+ - is located on the path listed in one of the directory in `std_path`
+ - is a built-in module
+ """
+ modname = modname.split('.')[0]
+ try:
+ filename = file_from_modpath([modname])
+ except ImportError, ex:
+ # import failed, i'm probably not so wrong by supposing it's
+ # not standard...
+ return 0
+ # modules which are not living in a file are considered standard
+ # (sys and __builtin__ for instance)
+ if filename is None:
+ return 1
+ filename = abspath(filename)
+ if filename.startswith(EXT_LIB_DIR):
+ return 0
+ for path in std_path:
+ if filename.startswith(abspath(path)):
+ return 1
+ return False
+
+
+
+def is_relative(modname, from_file):
+ """return true if the given module name is relative to the given
+ file name
+
+ :type modname: str
+ :param modname: name of the module we are interested in
+
+ :type from_file: str
+ :param from_file:
+ path of the module from which modname has been imported
+
+ :rtype: bool
+ :return:
+ true if the module has been imported relatively to `from_file`
+ """
+ if not isdir(from_file):
+ from_file = dirname(from_file)
+ if from_file in sys.path:
+ return False
+ try:
+ find_module(modname.split('.')[0], [from_file])
+ return True
+ except ImportError:
+ return False
+
+
+# internal only functions #####################################################
+
+def _file_from_modpath(modpath, path=None, context=None):
+ """given a mod path (i.e. splitted module / package name), return the
+ corresponding file
+
+ this function is used internally, see `file_from_modpath`'s
+ documentation for more information
+ """
+ assert len(modpath) > 0
+ if context is not None:
+ try:
+ mtype, mp_filename = _module_file(modpath, [context])
+ except ImportError:
+ mtype, mp_filename = _module_file(modpath, path)
+ else:
+ mtype, mp_filename = _module_file(modpath, path)
+ if mtype == PY_COMPILED:
+ try:
+ return get_source_file(mp_filename)
+ except NoSourceFile:
+ return mp_filename
+ elif mtype == C_BUILTIN:
+ # integrated builtin module
+ return None
+ elif mtype == PKG_DIRECTORY:
+ mp_filename = _has_init(mp_filename)
+ return mp_filename
+
+def _search_zip(modpath, pic):
+ for filepath, importer in pic.items():
+ if importer is not None:
+ if importer.find_module(modpath[0]):
+ if not importer.find_module('/'.join(modpath)):
+ raise ImportError('No module named %s in %s/%s' % (
+ '.'.join(modpath[1:]), file, modpath))
+ return ZIPFILE, abspath(filepath) + '/' + '/'.join(modpath), filepath
+ raise ImportError('No module named %s' % '.'.join(modpath))
+
+def _module_file(modpath, path=None):
+ """get a module type / file path
+
+ :type modpath: list or tuple
+ :param modpath:
+ splitted module's name (i.e name of a module or package splitted
+ on '.'), with leading empty strings for explicit relative import
+
+ :type path: list or None
+ :param path:
+ optional list of path where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+
+ :rtype: tuple(int, str)
+ :return: the module type flag and the file path for a module
+ """
+ # egg support compat
+ try:
+ pic = sys.path_importer_cache
+ _path = (path is None and sys.path or path)
+ for __path in _path:
+ if not __path in pic:
+ try:
+ pic[__path] = zipimport.zipimporter(__path)
+ except zipimport.ZipImportError:
+ pic[__path] = None
+ checkeggs = True
+ except AttributeError:
+ checkeggs = False
+ imported = []
+ while modpath:
+ try:
+ _, mp_filename, mp_desc = find_module(modpath[0], path)
+ except ImportError:
+ if checkeggs:
+ return _search_zip(modpath, pic)[:2]
+ raise
+ else:
+ if checkeggs:
+ fullabspath = [abspath(x) for x in _path]
+ try:
+ pathindex = fullabspath.index(dirname(abspath(mp_filename)))
+ emtype, emp_filename, zippath = _search_zip(modpath, pic)
+ if pathindex > _path.index(zippath):
+ # an egg takes priority
+ return emtype, emp_filename
+ except ValueError:
+ # XXX not in _path
+ pass
+ except ImportError:
+ pass
+ checkeggs = False
+ imported.append(modpath.pop(0))
+ mtype = mp_desc[2]
+ if modpath:
+ if mtype != PKG_DIRECTORY:
+ raise ImportError('No module %s in %s' % ('.'.join(modpath),
+ '.'.join(imported)))
+ path = [mp_filename]
+ return mtype, mp_filename
+
+def _is_python_file(filename):
+ """return true if the given filename should be considered as a python file
+
+ .pyc and .pyo are ignored
+ """
+ for ext in ('.py', '.so', '.pyd', '.pyw'):
+ if filename.endswith(ext):
+ return True
+ return False
+
+
+def _has_init(directory):
+ """if the given directory has a valid __init__ file, return its path,
+ else return None
+ """
+ mod_or_pack = join(directory, '__init__')
+ for ext in PY_SOURCE_EXTS + ('pyc', 'pyo'):
+ if exists(mod_or_pack + '.' + ext):
+ return mod_or_pack + '.' + ext
+ return None
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/optik_ext.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/optik_ext.py
@@ -0,0 +1,397 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Add an abstraction level to transparently import optik classes from optparse
+(python >= 2.3) or the optik package.
+
+It also defines several new types for the optik/optparse command line parser :
+
+ * regexp
+ argument of this type will be converted using re.compile
+ * csv
+ argument of this type will be converted using split(',')
+ * yn
+ argument of this type will be true if 'y' or 'yes', false if 'n' or 'no'
+ * named
+ argument of this type are in the form <NAME>=<VALUE> or <NAME>:<VALUE>
+ * password
+ argument of this type wont be converted but this is used by other tools
+ such as interactive prompt for configuration to double check value and
+ use an invisible field
+ * multiple_choice
+ same as default "choice" type but multiple choices allowed
+ * file
+ argument of this type wont be converted but checked that the given file exists
+ * color
+ argument of this type wont be converted but checked its either a
+ named color or a color specified using hexadecimal notation (preceded by a #)
+ * time
+ argument of this type will be converted to a float value in seconds
+ according to time units (ms, s, min, h, d)
+ * bytes
+ argument of this type will be converted to a float value in bytes
+ according to byte units (b, kb, mb, gb, tb)
+"""
+__docformat__ = "restructuredtext en"
+
+import re
+import sys
+import time
+from copy import copy
+from os.path import exists
+
+# python >= 2.3
+from optparse import OptionParser as BaseParser, Option as BaseOption, \
+ OptionGroup, OptionContainer, OptionValueError, OptionError, \
+ Values, HelpFormatter, NO_DEFAULT, SUPPRESS_HELP
+
+try:
+ from mx import DateTime
+ HAS_MX_DATETIME = True
+except ImportError:
+ HAS_MX_DATETIME = False
+
+
+OPTPARSE_FORMAT_DEFAULT = sys.version_info >= (2, 4)
+
+from .textutils import splitstrip
+
+def check_regexp(option, opt, value):
+ """check a regexp value by trying to compile it
+ return the compiled regexp
+ """
+ if hasattr(value, 'pattern'):
+ return value
+ try:
+ return re.compile(value)
+ except ValueError:
+ raise OptionValueError(
+ "option %s: invalid regexp value: %r" % (opt, value))
+
+def check_csv(option, opt, value):
+ """check a csv value by trying to split it
+ return the list of separated values
+ """
+ if isinstance(value, (list, tuple)):
+ return value
+ try:
+ return splitstrip(value)
+ except ValueError:
+ raise OptionValueError(
+ "option %s: invalid csv value: %r" % (opt, value))
+
+def check_yn(option, opt, value):
+ """check a yn value
+ return true for yes and false for no
+ """
+ if isinstance(value, int):
+ return bool(value)
+ if value in ('y', 'yes'):
+ return True
+ if value in ('n', 'no'):
+ return False
+ msg = "option %s: invalid yn value %r, should be in (y, yes, n, no)"
+ raise OptionValueError(msg % (opt, value))
+
+def check_named(option, opt, value):
+ """check a named value
+ return a dictionary containing (name, value) associations
+ """
+ if isinstance(value, dict):
+ return value
+ values = []
+ for value in check_csv(option, opt, value):
+ if value.find('=') != -1:
+ values.append(value.split('=', 1))
+ elif value.find(':') != -1:
+ values.append(value.split(':', 1))
+ if values:
+ return dict(values)
+ msg = "option %s: invalid named value %r, should be <NAME>=<VALUE> or \
+<NAME>:<VALUE>"
+ raise OptionValueError(msg % (opt, value))
+
+def check_password(option, opt, value):
+    """check a password value
+
+    No validation is performed here; the type mostly exists so other tools
+    (e.g. interactive configuration prompts) can treat the value specially
+    (hidden input, double entry).  Monkey-patch this function for stronger
+    checking.
+    """
+    # no actual checking (the empty string is accepted too);
+    # monkey patch if you want more
+    return value
+
+def check_file(option, opt, value):
+ """check a file value
+ return the filepath
+ """
+ if exists(value):
+ return value
+ msg = "option %s: file %r does not exist"
+ raise OptionValueError(msg % (opt, value))
+
+# XXX use python datetime
+def check_date(option, opt, value):
+    """check a date value in yyyy/mm/dd form
+    return it parsed as an mx.DateTime object
+    """
+    # requires the optional mx.DateTime package; this checker is only
+    # registered on Option when HAS_MX_DATETIME is true (see below)
+    try:
+        return DateTime.strptime(value, "%Y/%m/%d")
+    except DateTime.Error :
+        raise OptionValueError(
+            "expected format of %s is yyyy/mm/dd" % opt)
+
+def check_color(option, opt, value):
+    """check a color value and returns it
+    /!\ does *not* check color labels (like 'red', 'green'), only
+    checks hexadecimal forms
+    """
+    # Case (1) : color label, we trust the end-user
+    if re.match('[a-z0-9 ]+$', value, re.I):
+        return value
+    # Case (2) : only accepts hexadecimal forms
+    # NOTE(review): this pattern is not $-anchored, so trailing characters
+    # after the six hex digits (e.g. '#aabbccdd') are accepted -- confirm
+    # whether that is intended
+    if re.match('#[a-f0-9]{6}', value, re.I):
+        return value
+    # Else : not a color label neither a valid hexadecimal form => error
+    msg = "option %s: invalid color : %r, should be either hexadecimal \
+    value or predefined color"
+    raise OptionValueError(msg % (opt, value))
+
+def check_time(option, opt, value):
+ from logilab.common.textutils import TIME_UNITS, apply_units
+ if isinstance(value, (int, long, float)):
+ return value
+ return apply_units(value, TIME_UNITS)
+
+def check_bytes(option, opt, value):
+ from logilab.common.textutils import BYTE_UNITS, apply_units
+ if hasattr(value, '__int__'):
+ return value
+ return apply_units(value, BYTE_UNITS)
+
+import types
+
+class Option(BaseOption):
+    """override optik.Option to add some new option types
+
+    extra types: regexp, csv, yn, named, password, multiple_choice, file,
+    color, time, bytes -- and date when mx.DateTime is installed.
+    extra attributes: 'hide' (suppress the option from --help output) and
+    'level' (minimum formatter output level at which the option is shown).
+    """
+    TYPES = BaseOption.TYPES + ('regexp', 'csv', 'yn', 'named', 'password',
+                                'multiple_choice', 'file', 'color',
+                                'time', 'bytes')
+    ATTRS = BaseOption.ATTRS + ['hide', 'level']
+    # copy the base table so registering our checkers does not mutate
+    # BaseOption.TYPE_CHECKER in place
+    TYPE_CHECKER = copy(BaseOption.TYPE_CHECKER)
+    TYPE_CHECKER['regexp'] = check_regexp
+    TYPE_CHECKER['csv'] = check_csv
+    TYPE_CHECKER['yn'] = check_yn
+    TYPE_CHECKER['named'] = check_named
+    TYPE_CHECKER['multiple_choice'] = check_csv
+    TYPE_CHECKER['file'] = check_file
+    TYPE_CHECKER['color'] = check_color
+    TYPE_CHECKER['password'] = check_password
+    TYPE_CHECKER['time'] = check_time
+    TYPE_CHECKER['bytes'] = check_bytes
+    # the 'date' type is only available when mx.DateTime could be imported
+    if HAS_MX_DATETIME:
+        TYPES += ('date',)
+        TYPE_CHECKER['date'] = check_date
+
+    def __init__(self, *opts, **attrs):
+        BaseOption.__init__(self, *opts, **attrs)
+        # options flagged hide=True are silently removed from help output
+        if hasattr(self, "hide") and self.hide:
+            self.help = SUPPRESS_HELP
+
+    def _check_choice(self):
+        """FIXME: need to override this due to optik misdesign"""
+        # same as optparse's checker, but also accepts 'multiple_choice'
+        if self.type in ("choice", "multiple_choice"):
+            if self.choices is None:
+                raise OptionError(
+                    "must supply a list of choices for type 'choice'", self)
+            elif type(self.choices) not in (types.TupleType, types.ListType):
+                raise OptionError(
+                    "choices must be a list of strings ('%s' supplied)"
+                    % str(type(self.choices)).split("'")[1], self)
+        elif self.choices is not None:
+            raise OptionError(
+                "must not supply choices for type %r" % self.type, self)
+    # NOTE: executed at class-creation time; deliberately monkey-patches the
+    # choice checker into BaseOption.CHECK_METHODS so plain optparse Options
+    # also use the override above
+    BaseOption.CHECK_METHODS[2] = _check_choice
+
+
+    def process(self, opt, value, values, parser):
+        # First, convert the value(s) to the right type.  Howl if any
+        # value(s) are bogus.
+        try:
+            value = self.convert_value(opt, value)
+        except AttributeError: # py < 2.4
+            value = self.check_value(opt, value)
+        # 'named' options accumulate across occurrences: merge the new
+        # name/value pairs into the dict already stored on `values`
+        if self.type == 'named':
+            existant = getattr(values, self.dest)
+            if existant:
+                existant.update(value)
+                value = existant
+        # And then take whatever action is expected of us.
+        # This is a separate method to make life easier for
+        # subclasses to add new actions.
+        return self.take_action(
+            self.action, self.dest, opt, value, values, parser)
+
+
+class OptionParser(BaseParser):
+ """override optik.OptionParser to use our Option class
+ """
+ def __init__(self, option_class=Option, *args, **kwargs):
+ BaseParser.__init__(self, option_class=Option, *args, **kwargs)
+
+ def format_option_help(self, formatter=None):
+ if formatter is None:
+ formatter = self.formatter
+ outputlevel = getattr(formatter, 'output_level', 0)
+ formatter.store_option_strings(self)
+ result = []
+ result.append(formatter.format_heading("Options"))
+ formatter.indent()
+ if self.option_list:
+ result.append(OptionContainer.format_option_help(self, formatter))
+ result.append("\n")
+ for group in self.option_groups:
+ if group.level <= outputlevel and (
+ group.description or level_options(group, outputlevel)):
+ result.append(group.format_help(formatter))
+ result.append("\n")
+ formatter.dedent()
+ # Drop the last "\n", or the header if no options or option groups:
+ return "".join(result[:-1])
+
+
+OptionGroup.level = 0
+
+def level_options(group, outputlevel):
+ return [option for option in group.option_list
+ if (getattr(option, 'level', 0) or 0) <= outputlevel
+ and not option.help is SUPPRESS_HELP]
+
+def format_option_help(self, formatter):
+    """replacement for OptionContainer.format_option_help, filtering the
+    emitted options on the formatter's output_level (see level_options)
+    """
+    result = []
+    outputlevel = getattr(formatter, 'output_level', 0) or 0
+    for option in level_options(self, outputlevel):
+        result.append(formatter.format_option(option))
+    return "".join(result)
+# monkey-patch optparse so every container (parser and groups alike) uses
+# the level-aware help formatting above
+OptionContainer.format_option_help = format_option_help
+
+
+class ManHelpFormatter(HelpFormatter):
+    """Format help using man pages ROFF format
+
+    used by generate_manpage below; emits .TH/.SH/.IP roff macros instead
+    of plain indented text
+    """
+
+    def __init__ (self,
+                  indent_increment=0,
+                  max_help_position=24,
+                  width=79,
+                  short_first=0):
+        HelpFormatter.__init__ (
+            self, indent_increment, max_help_position, width, short_first)
+
+    def format_heading(self, heading):
+        # man section headers are upper-case by convention
+        return '.SH %s\n' % heading.upper()
+
+    def format_description(self, description):
+        return description
+
+    def format_option(self, option):
+        """format one option as a roff indented paragraph (.IP)"""
+        try:
+            optstring = option.option_strings
+        except AttributeError:
+            optstring = self.format_option_strings(option)
+        if option.help:
+            help_text = self.expand_default(option)
+            # collapse the help text onto a single line for roff
+            help = ' '.join([l.strip() for l in help_text.splitlines()])
+        else:
+            help = ''
+        return '''.IP "%s"
+%s
+''' % (optstring, help)
+
+    def format_head(self, optparser, pkginfo, section=1):
+        """format the page header: title, name, synopsis and description"""
+        long_desc = ""
+        try:
+            pgm = optparser._get_prog_name()
+        except AttributeError:
+            # py >= 2.4.X (dunno which X exactly, at least 2)
+            pgm = optparser.get_prog_name()
+        short_desc = self.format_short_description(pgm, pkginfo.description)
+        if hasattr(pkginfo, "long_desc"):
+            long_desc = self.format_long_description(pgm, pkginfo.long_desc)
+        return '%s\n%s\n%s\n%s' % (self.format_title(pgm, section),
+                                   short_desc, self.format_synopsis(pgm),
+                                   long_desc)
+
+    def format_title(self, pgm, section):
+        # current local date in y-m-d form for the .TH title line
+        date = '-'.join([str(num) for num in time.localtime()[:3]])
+        return '.TH %s %s "%s" %s' % (pgm, section, date, pgm)
+
+    def format_short_description(self, pgm, short_desc):
+        return '''.SH NAME
+.B %s
+\- %s
+''' % (pgm, short_desc.strip())
+
+    def format_synopsis(self, pgm):
+        return '''.SH SYNOPSIS
+.B %s
+[
+.I OPTIONS
+] [
+.I <arguments>
+]
+''' % pgm
+
+    def format_long_description(self, pgm, long_desc):
+        # strip per-line indentation and turn lone '.' lines (roff request
+        # lookalikes) into paragraph breaks
+        long_desc = '\n'.join([line.lstrip()
+                               for line in long_desc.splitlines()])
+        long_desc = long_desc.replace('\n.\n', '\n\n')
+        if long_desc.lower().startswith(pgm):
+            long_desc = long_desc[len(pgm):]
+        return '''.SH DESCRIPTION
+.B %s
+%s
+''' % (pgm, long_desc.strip())
+
+    def format_tail(self, pkginfo):
+        """format the page footer: see-also, bugs, author and copyright"""
+        tail = '''.SH SEE ALSO
+/usr/share/doc/pythonX.Y-%s/
+
+.SH BUGS
+Please report bugs on the project\'s mailing list:
+%s
+
+.SH AUTHOR
+%s <%s>
+''' % (getattr(pkginfo, 'debian_name', pkginfo.modname),
+       pkginfo.mailinglist, pkginfo.author, pkginfo.author_email)
+
+        # the copyright section is optional
+        if hasattr(pkginfo, "copyright"):
+            tail += '''
+.SH COPYRIGHT
+%s
+''' % pkginfo.copyright
+
+        return tail
+
+def generate_manpage(optparser, pkginfo, section=1, stream=sys.stdout, level=0):
+    """generate a man page from an optik parser
+
+    `pkginfo` supplies the package metadata (description, author, ...)
+    consumed by ManHelpFormatter; only options whose level is <= `level`
+    appear in the generated page
+    """
+    formatter = ManHelpFormatter()
+    formatter.output_level = level
+    formatter.parser = optparser
+    print >> stream, formatter.format_head(optparser, pkginfo, section)
+    print >> stream, optparser.format_option_help(formatter)
+    print >> stream, formatter.format_tail(pkginfo)
+
+
+__all__ = ('OptionParser', 'Option', 'OptionGroup', 'OptionValueError',
+ 'Values')
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/textutils.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/textutils.py
@@ -0,0 +1,534 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Some text manipulation utility functions.
+
+
+:group text formatting: normalize_text, normalize_paragraph, pretty_match,\
+unquote, colorize_ansi
+:group text manipulation: searchall, splitstrip
+:sort: text formatting, text manipulation
+
+:type ANSI_STYLES: dict(str)
+:var ANSI_STYLES: dictionary mapping style identifier to ANSI terminal code
+
+:type ANSI_COLORS: dict(str)
+:var ANSI_COLORS: dictionary mapping color identifier to ANSI terminal code
+
+:type ANSI_PREFIX: str
+:var ANSI_PREFIX:
+ ANSI terminal code notifying the start of an ANSI escape sequence
+
+:type ANSI_END: str
+:var ANSI_END:
+ ANSI terminal code notifying the end of an ANSI escape sequence
+
+:type ANSI_RESET: str
+:var ANSI_RESET:
+ ANSI terminal code resetting format defined by a previous ANSI escape sequence
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import re
+import os.path as osp
+from warnings import warn
+from unicodedata import normalize as _uninormalize
+try:
+ from os import linesep
+except ImportError:
+ linesep = '\n' # gae
+
+from .deprecation import deprecated
+
+MANUAL_UNICODE_MAP = {
+ u'\xa1': u'!', # INVERTED EXCLAMATION MARK
+ u'\u0142': u'l', # LATIN SMALL LETTER L WITH STROKE
+ u'\u2044': u'/', # FRACTION SLASH
+ u'\xc6': u'AE', # LATIN CAPITAL LETTER AE
+ u'\xa9': u'(c)', # COPYRIGHT SIGN
+ u'\xab': u'"', # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+ u'\xe6': u'ae', # LATIN SMALL LETTER AE
+ u'\xae': u'(r)', # REGISTERED SIGN
+ u'\u0153': u'oe', # LATIN SMALL LIGATURE OE
+ u'\u0152': u'OE', # LATIN CAPITAL LIGATURE OE
+ u'\xd8': u'O', # LATIN CAPITAL LETTER O WITH STROKE
+ u'\xf8': u'o', # LATIN SMALL LETTER O WITH STROKE
+ u'\xbb': u'"', # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+ u'\xdf': u'ss', # LATIN SMALL LETTER SHARP S
+ }
+
+def unormalize(ustring, ignorenonascii=None, substitute=None):
+    """replace diacritical characters with their corresponding ascii characters
+
+    Convert the unicode string to its long normalized form (unicode character
+    will be transform into several characters) and keep the first one only.
+    The normal form KD (NFKD) will apply the compatibility decomposition, i.e.
+    replace all compatibility characters with their equivalents.
+
+    :type substitute: str
+    :param substitute: replacement character to use if decomposition fails
+
+    :raise ValueError: if decomposition fails and no substitute is given
+
+    :see: Another project about ASCII transliterations of Unicode text
+    http://pypi.python.org/pypi/Unidecode
+    """
+    # backward compatibility, ignorenonascii was a boolean
+    if ignorenonascii is not None:
+        warn("ignorenonascii is deprecated, use substitute named parameter instead",
+             DeprecationWarning, stacklevel=2)
+        if ignorenonascii:
+            substitute = ''
+    res = []
+    for letter in ustring[:]:
+        try:
+            # hand-made mapping first: it yields multi-character
+            # replacements (e.g. '(c)') NFKD decomposition cannot produce
+            replacement = MANUAL_UNICODE_MAP[letter]
+        except KeyError:
+            # keep only the base character of the NFKD decomposition
+            replacement = _uninormalize('NFKD', letter)[0]
+            if ord(replacement) >= 2 ** 7:
+                # still not ascii: substitute, or give up
+                if substitute is None:
+                    raise ValueError("can't deal with non-ascii based characters")
+                replacement = substitute
+        res.append(replacement)
+    return u''.join(res)
+
+def unquote(string):
+ """remove optional quotes (simple or double) from the string
+
+ :type string: str or unicode
+ :param string: an optionally quoted string
+
+ :rtype: str or unicode
+ :return: the unquoted string (or the input string if it wasn't quoted)
+ """
+ if not string:
+ return string
+ if string[0] in '"\'':
+ string = string[1:]
+ if string[-1] in '"\'':
+ string = string[:-1]
+ return string
+
+
+_BLANKLINES_RGX = re.compile('\r?\n\r?\n')
+_NORM_SPACES_RGX = re.compile('\s+')
+
+def normalize_text(text, line_len=80, indent='', rest=False):
+ """normalize a text to display it with a maximum line size and
+ optionally arbitrary indentation. Line jumps are normalized but blank
+ lines are kept. The indentation string may be used to insert a
+ comment (#) or a quoting (>) mark for instance.
+
+ :type text: str or unicode
+ :param text: the input text to normalize
+
+ :type line_len: int
+ :param line_len: expected maximum line's length, default to 80
+
+ :type indent: str or unicode
+ :param indent: optional string to use as indentation
+
+ :rtype: str or unicode
+ :return:
+ the input text normalized to fit on lines with a maximized size
+ inferior to `line_len`, and optionally prefixed by an
+ indentation string
+ """
+ if rest:
+ normp = normalize_rest_paragraph
+ else:
+ normp = normalize_paragraph
+ result = []
+ for text in _BLANKLINES_RGX.split(text):
+ result.append(normp(text, line_len, indent))
+ return ('%s%s%s' % (linesep, indent, linesep)).join(result)
+
+
+def normalize_paragraph(text, line_len=80, indent=''):
+ """normalize a text to display it with a maximum line size and
+ optionally arbitrary indentation. Line jumps are normalized. The
+ indentation string may be used top insert a comment mark for
+ instance.
+
+ :type text: str or unicode
+ :param text: the input text to normalize
+
+ :type line_len: int
+ :param line_len: expected maximum line's length, default to 80
+
+ :type indent: str or unicode
+ :param indent: optional string to use as indentation
+
+ :rtype: str or unicode
+ :return:
+ the input text normalized to fit on lines with a maximized size
+ inferior to `line_len`, and optionally prefixed by an
+ indentation string
+ """
+ text = _NORM_SPACES_RGX.sub(' ', text)
+ line_len = line_len - len(indent)
+ lines = []
+ while text:
+ aline, text = splittext(text.strip(), line_len)
+ lines.append(indent + aline)
+ return linesep.join(lines)
+
+def normalize_rest_paragraph(text, line_len=80, indent=''):
+    """normalize a ReST text to display it with a maximum line size and
+    optionally arbitrary indentation. Line jumps are normalized. The
+    indentation string may be used to insert a comment mark for
+    instance.  Unlike normalize_paragraph, original line breaks are kept
+    where possible (ReST is line-sensitive).
+
+    :type text: str or unicode
+    :param text: the input text to normalize
+
+    :type line_len: int
+    :param line_len: expected maximum line's length, default to 80
+
+    :type indent: str or unicode
+    :param indent: optional string to use as indentation
+
+    :rtype: str or unicode
+    :return:
+        the input text normalized to fit on lines with a maximized size
+        inferior to `line_len`, and optionally prefixed by an
+        indentation string
+    """
+    # `toreport` carries the overflow of a split line into the next one
+    toreport = ''
+    lines = []
+    line_len = line_len - len(indent)
+    for line in text.splitlines():
+        line = toreport + _NORM_SPACES_RGX.sub(' ', line.strip())
+        toreport = ''
+        while len(line) > line_len:
+            # too long line, need split
+            line, toreport = splittext(line, line_len)
+            lines.append(indent + line)
+            if toreport:
+                # overflow becomes the start of the next chunk
+                line = toreport + ' '
+                toreport = ''
+            else:
+                line = ''
+        if line:
+            lines.append(indent + line.strip())
+    return linesep.join(lines)
+
+
+def splittext(text, line_len):
+ """split the given text on space according to the given max line size
+
+ return a 2-uple:
+ * a line <= line_len if possible
+ * the rest of the text which has to be reported on another line
+ """
+ if len(text) <= line_len:
+ return text, ''
+ pos = min(len(text)-1, line_len)
+ while pos > 0 and text[pos] != ' ':
+ pos -= 1
+ if pos == 0:
+ pos = min(len(text), line_len)
+ while len(text) > pos and text[pos] != ' ':
+ pos += 1
+ return text[:pos], text[pos+1:].strip()
+
+
+def splitstrip(string, sep=','):
+ """return a list of stripped string by splitting the string given as
+ argument on `sep` (',' by default). Empty string are discarded.
+
+ >>> splitstrip('a, b, c , 4,,')
+ ['a', 'b', 'c', '4']
+ >>> splitstrip('a')
+ ['a']
+ >>>
+
+ :type string: str or unicode
+ :param string: a csv line
+
+ :type sep: str or unicode
+ :param sep: field separator, default to the comma (',')
+
+ :rtype: str or unicode
+ :return: the unquoted string (or the input string if it wasn't quoted)
+ """
+ return [word.strip() for word in string.split(sep) if word.strip()]
+
+get_csv = deprecated('get_csv is deprecated, use splitstrip')(splitstrip)
+
+
+def split_url_or_path(url_or_path):
+ """return the latest component of a string containing either an url of the
+ form <scheme>://<path> or a local file system path
+ """
+ if '://' in url_or_path:
+ return url_or_path.rstrip('/').rsplit('/', 1)
+ return osp.split(url_or_path.rstrip(osp.sep))
+
+
+def text_to_dict(text):
+ """parse multilines text containing simple 'key=value' lines and return a
+ dict of {'key': 'value'}. When the same key is encountered multiple time,
+ value is turned into a list containing all values.
+
+ >>> text_to_dict('''multiple=1
+ ... multiple= 2
+ ... single =3
+ ... ''')
+ {'single': '3', 'multiple': ['1', '2']}
+
+ """
+ res = {}
+ if not text:
+ return res
+ for line in text.splitlines():
+ line = line.strip()
+ if line and not line.startswith('#'):
+ key, value = [w.strip() for w in line.split('=', 1)]
+ if key in res:
+ try:
+ res[key].append(value)
+ except AttributeError:
+ res[key] = [res[key], value]
+ else:
+ res[key] = value
+ return res
+
+
+# unit strings are parsed as a sequence of <number><unit> chunks, optionally
+# separated by blanks or commas (e.g. '1h 30min'); used by apply_units below
+_BLANK_URE = r'(\s|,)+'
+_BLANK_RE = re.compile(_BLANK_URE)
+# a possibly-negative decimal, or a possibly-hexadecimal integer
+__VALUE_URE = r'-?(([0-9]+\.[0-9]*)|((0x?)?[0-9]+))'
+__UNITS_URE = r'[a-zA-Z]+'
+_VALUE_RE = re.compile(r'(?P<value>%s)(?P<unit>%s)?'%(__VALUE_URE, __UNITS_URE))
+# whole-string shape check: value/unit pairs, optionally a bare final value
+_VALIDATION_RE = re.compile(r'^((%s)(%s))*(%s)?$' % (__VALUE_URE, __UNITS_URE,
+                                                     __VALUE_URE))
+
+# multipliers from unit suffix to bytes (binary convention: 1 kb == 1024 b)
+BYTE_UNITS = {
+    "b": 1,
+    "kb": 1024,
+    "mb": 1024 ** 2,
+    "gb": 1024 ** 3,
+    "tb": 1024 ** 4,
+}
+
+TIME_UNITS = {
+ "ms": 0.0001,
+ "s": 1,
+ "min": 60,
+ "h": 60 * 60,
+ "d": 60 * 60 *24,
+}
+
+def apply_units(string, units, inter=None, final=float, blank_reg=_BLANK_RE,
+ value_reg=_VALUE_RE):
+ """Parse the string applying the units defined in units
+ (e.g.: "1.5m",{'m',60} -> 80).
+
+ :type string: str or unicode
+ :param string: the string to parse
+
+ :type units: dict (or any object with __getitem__ using basestring key)
+ :param units: a dict mapping a unit string repr to its value
+
+ :type inter: type
+ :param inter: used to parse every intermediate value (need __sum__)
+
+ :type blank_reg: regexp
+ :param blank_reg: should match every blank char to ignore.
+
+ :type value_reg: regexp with "value" and optional "unit" group
+ :param value_reg: match a value and it's unit into the
+ """
+ if inter is None:
+ inter = final
+ fstring = _BLANK_RE.sub('', string)
+ if not (fstring and _VALIDATION_RE.match(fstring)):
+ raise ValueError("Invalid unit string: %r." % string)
+ values = []
+ for match in value_reg.finditer(fstring):
+ dic = match.groupdict()
+ lit, unit = dic["value"], dic.get("unit")
+ value = inter(lit)
+ if unit is not None:
+ try:
+ value *= units[unit.lower()]
+ except KeyError:
+ raise KeyError('invalid unit %s. valid units are %s' %
+ (unit, units.keys()))
+ values.append(value)
+ return final(sum(values))
+
+
+_LINE_RGX = re.compile('\r\n|\r+|\n')
+
+def pretty_match(match, string, underline_char='^'):
+    """return a string with the match location underlined:
+
+    >>> import re
+    >>> print(pretty_match(re.search('mange', 'il mange du bacon'), 'il mange du bacon'))
+    il mange du bacon
+       ^^^^^
+    >>>
+
+    :type match: _sre.SRE_match
+    :param match: object returned by re.match, re.search or re.finditer
+
+    :type string: str or unicode
+    :param string:
+        the string on which the regular expression has been applied to
+        obtain the `match` object
+
+    :type underline_char: str or unicode
+    :param underline_char:
+        character to use to underline the matched section, default to the
+        caret '^'
+
+    :rtype: str or unicode
+    :return:
+        the original string with an inserted line to underline the match
+        location
+    """
+    start = match.start()
+    end = match.end()
+    # normalize every end-of-line style to `linesep` so offsets line up
+    string = _LINE_RGX.sub(linesep, string)
+    start_line_pos = string.rfind(linesep, 0, start)
+    if start_line_pos == -1:
+        start_line_pos = 0
+        result = []
+    else:
+        result = [string[:start_line_pos]]
+        start_line_pos += len(linesep)
+    # column of the match on its own line
+    offset = start - start_line_pos
+    underline = ' ' * offset + underline_char * (end - start)
+    end_line_pos = string.find(linesep, end)
+    if end_line_pos == -1:
+        string = string[start_line_pos:]
+        result.append(string)
+        result.append(underline)
+    else:
+        # `end` is reused here to hold the text after the matched line
+        end = string[end_line_pos + len(linesep):]
+        string = string[start_line_pos:end_line_pos]
+        result.append(string)
+        result.append(underline)
+        result.append(end)
+    return linesep.join(result).rstrip()
+
+
+# Ansi colorization ###########################################################
+
+# building blocks of an SGR escape sequence: PREFIX + ';'.join(codes) + END
+ANSI_PREFIX = '\033['
+ANSI_END = 'm'
+ANSI_RESET = '\033[0m'
+# style identifier -> SGR attribute code
+ANSI_STYLES = {
+    'reset': "0",
+    'bold': "1",
+    'italic': "3",
+    'underline': "4",
+    'blink': "5",
+    'inverse': "7",
+    'strike': "9",
+}
+# color identifier -> SGR foreground color code
+ANSI_COLORS = {
+    'reset': "0",
+    'black': "30",
+    'red': "31",
+    'green': "32",
+    'yellow': "33",
+    'blue': "34",
+    'magenta': "35",
+    'cyan': "36",
+    'white': "37",
+}
+
+def _get_ansi_code(color=None, style=None):
+ """return ansi escape code corresponding to color and style
+
+ :type color: str or None
+ :param color:
+ the color name (see `ANSI_COLORS` for available values)
+ or the color number when 256 colors are available
+
+ :type style: str or None
+ :param style:
+ style string (see `ANSI_COLORS` for available values). To get
+ several style effects at the same time, use a coma as separator.
+
+ :raise KeyError: if an unexistent color or style identifier is given
+
+ :rtype: str
+ :return: the built escape code
+ """
+ ansi_code = []
+ if style:
+ style_attrs = splitstrip(style)
+ for effect in style_attrs:
+ ansi_code.append(ANSI_STYLES[effect])
+ if color:
+ if color.isdigit():
+ ansi_code.extend(['38', '5'])
+ ansi_code.append(color)
+ else:
+ ansi_code.append(ANSI_COLORS[color])
+ if ansi_code:
+ return ANSI_PREFIX + ';'.join(ansi_code) + ANSI_END
+ return ''
+
+def colorize_ansi(msg, color=None, style=None):
+ """colorize message by wrapping it with ansi escape codes
+
+ :type msg: str or unicode
+ :param msg: the message string to colorize
+
+ :type color: str or None
+ :param color:
+ the color identifier (see `ANSI_COLORS` for available values)
+
+ :type style: str or None
+ :param style:
+ style string (see `ANSI_COLORS` for available values). To get
+ several style effects at the same time, use a coma as separator.
+
+ :raise KeyError: if an unexistent color or style identifier is given
+
+ :rtype: str or unicode
+ :return: the ansi escaped string
+ """
+ # If both color and style are not defined, then leave the text as is
+ if color is None and style is None:
+ return msg
+ escape_code = _get_ansi_code(color, style)
+ # If invalid (or unknown) color, don't wrap msg with ansi codes
+ if escape_code:
+ return '%s%s%s' % (escape_code, msg, ANSI_RESET)
+ return msg
+
+DIFF_STYLE = {'separator': 'cyan', 'remove': 'red', 'add': 'green'}
+
+def diff_colorize_ansi(lines, out=sys.stdout, style=DIFF_STYLE):
+ for line in lines:
+ if line[:4] in ('--- ', '+++ '):
+ out.write(colorize_ansi(line, style['separator']))
+ elif line[0] == '-':
+ out.write(colorize_ansi(line, style['remove']))
+ elif line[0] == '+':
+ out.write(colorize_ansi(line, style['add']))
+ elif line[:4] == '--- ':
+ out.write(colorize_ansi(line, style['separator']))
+ elif line[:4] == '+++ ':
+ out.write(colorize_ansi(line, style['separator']))
+ else:
+ out.write(line)
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/tree.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/tree.py
@@ -0,0 +1,369 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Base class to represent a tree structure.
+
+
+
+
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+
+from . import flatten
+from .visitor import VisitedMixIn, FilteredIterator, no_filter
+
+## Exceptions #################################################################
+
class NodeNotFound(Exception):
    """raised when a node has not been found"""

EX_SIBLING_NOT_FOUND = "No such sibling as '%s'"
EX_CHILD_NOT_FOUND = "No such child as '%s'"
EX_NODE_NOT_FOUND = "No such node as '%s'"


# Base node ###################################################################

class Node(object):
    """a basic tree node, characterized by an id

    attributes:
    * id: the node identifier (any value, may be None)
    * parent: the parent Node, or None for a root node
    * children: ordered list of child Node instances
    """

    def __init__(self, nid=None):
        self.id = nid
        # navigation
        self.parent = None
        self.children = []

    def __iter__(self):
        return iter(self.children)

    def __str__(self, indent=0):
        s = ['%s%s %s' % (' ' * indent, self.__class__.__name__, self.id)]
        indent += 2
        for child in self.children:
            try:
                s.append(child.__str__(indent))
            except TypeError:
                # child's __str__ does not accept an indent argument
                s.append(child.__str__())
        return '\n'.join(s)

    def is_leaf(self):
        """return True if this node has no children"""
        return not self.children

    def append(self, child):
        """add a node to children"""
        self.children.append(child)
        child.parent = self

    def remove(self, child):
        """remove a child node"""
        self.children.remove(child)
        child.parent = None

    def insert(self, index, child):
        """insert a child node at the given index"""
        self.children.insert(index, child)
        child.parent = self

    def replace(self, old_child, new_child):
        """replace a child node with another, keeping its position"""
        i = self.children.index(old_child)
        self.children[i] = new_child
        new_child.parent = self

    def get_sibling(self, nid):
        """return the sibling node that has given id

        :raise NodeNotFound: if there is no such sibling (including the
          case where this node is a root and thus has no sibling at all)
        """
        if self.parent is None:
            # previously this fell through to `None.get_child_by_id` and
            # raised AttributeError; raise the documented exception
            raise NodeNotFound(EX_SIBLING_NOT_FOUND % nid)
        try:
            return self.parent.get_child_by_id(nid)
        except NodeNotFound:
            raise NodeNotFound(EX_SIBLING_NOT_FOUND % nid)

    def next_sibling(self):
        """return the next sibling for this node if any, else None"""
        parent = self.parent
        if parent is None:
            # root node has no sibling
            return None
        index = parent.children.index(self)
        try:
            return parent.children[index + 1]
        except IndexError:
            return None

    def previous_sibling(self):
        """return the previous sibling for this node if any, else None"""
        parent = self.parent
        if parent is None:
            # root node has no sibling
            return None
        index = parent.children.index(self)
        if index > 0:
            return parent.children[index - 1]
        return None

    def get_node_by_id(self, nid):
        """return the node with the given id in the whole hierarchy

        :raise NodeNotFound: if no node in the tree has the given id
        """
        try:
            return self.root().get_child_by_id(nid, 1)
        except NodeNotFound:
            raise NodeNotFound(EX_NODE_NOT_FOUND % nid)

    def get_child_by_id(self, nid, recurse=None):
        """return the child with the given id; when `recurse` is true,
        search the whole subtree (including self)

        :raise NodeNotFound: if there is no such child
        """
        if self.id == nid:
            return self
        for c in self.children:
            if recurse:
                try:
                    return c.get_child_by_id(nid, 1)
                except NodeNotFound:
                    continue
            if c.id == nid:
                return c
        raise NodeNotFound(EX_CHILD_NOT_FOUND % nid)

    def get_child_by_path(self, path):
        """return the descendant matching `path`, a list of ids rooted
        at this node

        :raise NodeNotFound: if no node matches the path
        """
        if path and path[0] == self.id:
            if len(path) == 1:
                return self
            for c in self.children:
                try:
                    return c.get_child_by_path(path[1:])
                except NodeNotFound:
                    pass
        raise NodeNotFound(EX_CHILD_NOT_FOUND % path)

    def depth(self):
        """return the depth of this node in the tree (0 for a root)"""
        if self.parent is None:
            return 0
        return 1 + self.parent.depth()

    def depth_down(self):
        """return the depth of the tree from this node (>= 1)"""
        if self.children:
            return 1 + max(c.depth_down() for c in self.children)
        return 1

    def width(self):
        """return the width (number of leaves) of the tree from this node"""
        return len(self.leaves())

    def root(self):
        """return the root node of the tree"""
        # iterative walk avoids recursion on very deep trees
        node = self
        while node.parent is not None:
            node = node.parent
        return node

    def leaves(self):
        """return a list with all the leaf nodes descendant from this node"""
        if not self.children:
            return [self]
        leaves = []
        for child in self.children:
            leaves += child.leaves()
        return leaves

    def flatten(self, _list=None):
        """return a pre-order list with this node and all its descendants"""
        if _list is None:
            _list = []
        _list.append(self)
        for c in self.children:
            c.flatten(_list)
        return _list

    def lineage(self):
        """return the list [self, parent, ..., root]"""
        lst = [self]
        if self.parent is not None:
            lst.extend(self.parent.lineage())
        return lst
+
class VNode(Node, VisitedMixIn):
    """a tree Node that can also be visited (see `VisitedMixIn`)"""
+
+
class BinaryNode(VNode):
    """a binary node, i.e. a node with exactly two children (or none)"""

    def __init__(self, lhs=None, rhs=None):
        VNode.__init__(self)
        if lhs is not None or rhs is not None:
            # both sides must be supplied together
            assert lhs and rhs
            self.append(lhs)
            self.append(rhs)

    def remove(self, child):
        """remove `child` and replace this node by the remaining child
        in the parent's children
        """
        self.children.remove(child)
        self.parent.replace(self, self.children[0])

    def get_parts(self):
        """return the (left hand side, right hand side) pair of children"""
        return self.children[0], self.children[1]
+
+
+
# base class used by ListNode: the builtin `list` on any modern
# interpreter, UserList on pre-2.2 pythons
if sys.version_info[:2] < (2, 2):
    from UserList import UserList
    list_class = UserList
else:
    list_class = list
+
class ListNode(VNode, list_class):
    """a node whose children are also its own list items, so it can be
    manipulated either as a Node or as a plain list
    """
    def __init__(self):
        list_class.__init__(self)
        VNode.__init__(self)
        # the children list and the list content are the same object
        self.children = self

    def __str__(self, indent=0):
        return '%s%s %s' % (indent * ' ', self.__class__.__name__,
                            ', '.join([str(v) for v in self]))

    def append(self, child):
        """add a node at the end of the children"""
        list_class.append(self, child)
        child.parent = self

    def insert(self, index, child):
        """insert a child node at the given index"""
        list_class.insert(self, index, child)
        child.parent = self

    def remove(self, child):
        """remove a child node"""
        list_class.remove(self, child)
        child.parent = None

    def pop(self, index=-1):
        """remove and return the child at `index` (last by default)

        the previous implementation dropped the return value, breaking
        the list.pop contract, and made `index` mandatory
        """
        child = list_class.pop(self, index)
        child.parent = None
        return child

    def __iter__(self):
        # bypass Node.__iter__, which would iterate self.children == self
        # through the Node protocol; use plain list iteration instead
        return list_class.__iter__(self)
+
+# construct list from tree ####################################################
+
def post_order_list(node, filter_func=no_filter):
    """
    create a list with tree nodes for which the <filter> function returned true
    in a post order fashion
    """
    # Iterative post-order walk: `stack` holds (ancestor, child-index)
    # frames; `poped` flags that we just came back up from a finished
    # subtree and so must emit the node instead of descending again.
    l, stack = [], []
    poped, index = 0, 0
    while node:
        if filter_func(node):
            if node.children and not poped:
                # first visit of an inner node: descend into first child
                stack.append((node, index))
                index = 0
                node = node.children[0]
            else:
                # leaf, or inner node whose subtree is done: emit it,
                # then try the next sibling (IndexError: none left)
                l.append(node)
                index += 1
                try:
                    node = stack[-1][0].children[index]
                except IndexError:
                    node = None
        else:
            # filtered out: prune this subtree
            node = None
        poped = 0
        if node is None and stack:
            # no sibling left: pop back up to the parent frame
            node, index = stack.pop()
            poped = 1
    return l
+
def pre_order_list(node, filter_func=no_filter):
    """
    create a list with tree nodes for which the <filter> function returned true
    in a pre order fashion
    """
    # Iterative pre-order walk, mirror of post_order_list: nodes are
    # emitted on the way down (when `poped` is false) instead of on the
    # way back up.
    l, stack = [], []
    poped, index = 0, 0
    while node:
        if filter_func(node):
            if not poped:
                # first time this node is reached: emit it
                l.append(node)
            if node.children and not poped:
                # descend into the first child
                stack.append((node, index))
                index = 0
                node = node.children[0]
            else:
                # move on to the next sibling if any
                index += 1
                try:
                    node = stack[-1][0].children[index]
                except IndexError:
                    node = None
        else:
            # filtered out: prune this subtree
            node = None
        poped = 0
        # NOTE(review): unlike post_order_list this keeps the bottom
        # stack frame (`len(stack) > 1` instead of `stack`) -- presumably
        # to stop at the start node's frame; confirm before changing.
        if node is None and len(stack) > 1:
            node, index = stack.pop()
            poped = 1
    return l
+
class PostfixedDepthFirstIterator(FilteredIterator):
    """depth-first iterator yielding nodes in post order, designed to
    be used with visitors
    """
    def __init__(self, node, filter_func=None):
        FilteredIterator.__init__(self, node, post_order_list, filter_func)
+
class PrefixedDepthFirstIterator(FilteredIterator):
    """depth-first iterator yielding nodes in pre order, designed to
    be used with visitors
    """
    def __init__(self, node, filter_func=None):
        FilteredIterator.__init__(self, node, pre_order_list, filter_func)
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/ureports/__init__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/ureports/__init__.py
@@ -0,0 +1,174 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Universal report objects and some formatting drivers.
+
+A way to create simple reports using python objects, primarily designed to be
+formatted as text and html.
+"""
+from __future__ import generators
+__docformat__ = "restructuredtext en"
+
+import sys
+from cStringIO import StringIO
+from StringIO import StringIO as UStringIO
+
+from ..textutils import linesep
+
+
def get_nodes(node, klass):
    """return an iterator on all descendants of `node` (pre-order) that
    are instances of `klass`
    """
    for child in node.children:
        if isinstance(child, klass):
            yield child
        # always recurse (FIXME: recursion controlled by an option)
        for matching in get_nodes(child, klass):
            yield matching
+
def layout_title(layout):
    """return the layout's title as a string, or None when the layout
    has no Title child
    """
    for child in layout.children:
        if isinstance(child, Title):
            return ' '.join(node.data for node in get_nodes(child, Text))
+
def build_summary(layout, level=1):
    """make a summary (a List of Links) for the report, descending
    `level` section levels
    """
    assert level > 0
    level -= 1
    summary = List(klass='summary')
    for child in layout.children:
        if not isinstance(child, Section):
            continue
        label = layout_title(child)
        if not label and not child.id:
            # nothing usable to link to
            continue
        if not child.id:
            child.id = label.replace(' ', '-')
        node = Link('#' + child.id, label=label or child.id)
        # FIXME: the Paragraph wrapping below produces not very
        # compliant docbook (useless nested <para>); appending the
        # sub-summary separately instead breaks the html display...
        if level and [n for n in child.children if isinstance(n, Section)]:
            node = Paragraph([node, build_summary(child, level)])
        summary.append(node)
    return summary
+
+
class BaseWriter(object):
    """base class for ureport writers"""

    def format(self, layout, stream=None, encoding=None):
        """format and write the given layout into the stream object

        unicode policy: unicode strings may be found in the layout;
        try to call stream.write with it, but give it back encoded using
        the given encoding if it fails
        """
        if stream is None:
            stream = sys.stdout
        if not encoding:
            encoding = getattr(stream, 'encoding', 'UTF-8')
        self.encoding = encoding or 'UTF-8'
        self.__compute_funcs = []
        self.out = stream
        self.begin_format(layout)
        layout.accept(self)
        self.end_format(layout)

    def format_children(self, layout):
        """recurse on the layout children and call their accept method
        (see the Visitor pattern)
        """
        for child in getattr(layout, 'children', ()):
            child.accept(self)

    def writeln(self, string=''):
        """write a line in the output buffer"""
        self.write(string + linesep)

    def write(self, string):
        """write a string in the output buffer"""
        try:
            self.out.write(string)
        except UnicodeEncodeError:
            # fall back to the negotiated encoding for unicode data
            self.out.write(string.encode(self.encoding))

    def begin_format(self, layout):
        """begin to format a layout"""
        self.section = 0

    def end_format(self, layout):
        """finished to format a layout"""

    def get_table_content(self, table):
        """trick to get table content without actually writing it

        return an aligned list of lists containing table cells values as string
        """
        result = [[]]
        cols = table.cols
        for cell in self.compute_content(table):
            if cols == 0:
                # current row is full: start a new one
                result.append([])
                cols = table.cols
            cols -= 1
            result[-1].append(cell)
        # pad the last row with empty cells so every row has table.cols
        # columns (the previous code compared against the leftover
        # `cols` counter, leaving the last row short and misaligned)
        while len(result[-1]) < table.cols:
            result[-1].append('')
        return result

    def compute_content(self, layout):
        """trick to compute the formatting of children layout before actually
        writing it

        return an iterator on strings (one for each child element)
        """
        # temporarily rebind self.write/self.writeln to closures writing
        # into `stream`, which is re-created for each child below, so
        # children render into memory instead of self.out
        def write(data):
            try:
                stream.write(data)
            except UnicodeEncodeError:
                stream.write(data.encode(self.encoding))
        def writeln(data=''):
            try:
                stream.write(data + linesep)
            except UnicodeEncodeError:
                stream.write(data.encode(self.encoding) + linesep)
        self.write = write
        self.writeln = writeln
        # keep a stack of redirections so nested computations restore
        # the enclosing one when they finish
        self.__compute_funcs.append((write, writeln))
        for child in layout.children:
            stream = UStringIO()
            child.accept(self)
            yield stream.getvalue()
        self.__compute_funcs.pop()
        try:
            self.write, self.writeln = self.__compute_funcs[-1]
        except IndexError:
            # no enclosing computation: restore the class methods
            del self.write
            del self.writeln
+
+
+from .nodes import *
+from .text_writer import TextWriter
+from .html_writer import HTMLWriter
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/ureports/docbook_writer.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/ureports/docbook_writer.py
@@ -0,0 +1,139 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""HTML formatting drivers for ureports"""
+from __future__ import generators
+__docformat__ = "restructuredtext en"
+
+from ..ureports import HTMLWriter
+
class DocbookWriter(HTMLWriter):
    """format layouts as DocBook XML"""

    def begin_format(self, layout):
        """begin to format a layout"""
        # deliberately skip HTMLWriter.begin_format (which would emit an
        # HTML preamble) and call BaseWriter's version instead
        super(HTMLWriter, self).begin_format(layout)
        if self.snippet is None:
            self.writeln('<?xml version="1.0" encoding="ISO-8859-1"?>')
            self.writeln("""
<book xmlns:xi='http://www.w3.org/2001/XInclude'
 lang='fr'>
""")

    def end_format(self, layout):
        """finished to format a layout"""
        if self.snippet is None:
            self.writeln('</book>')

    def visit_section(self, layout):
        """display a section (using <chapter> (level 0) or <section>)"""
        if self.section == 0:
            tag = "chapter"
        else:
            tag = "section"
        self.section += 1
        self.writeln(self._indent('<%s%s>' % (tag, self.handle_attrs(layout))))
        self.format_children(layout)
        self.writeln(self._indent('</%s>' % tag))
        self.section -= 1

    def visit_title(self, layout):
        """display a title using <title>"""
        self.write(self._indent(' <title%s>' % self.handle_attrs(layout)))
        self.format_children(layout)
        self.writeln('</title>')

    def visit_table(self, layout):
        """display a table (using <table>/<tgroup>)"""
        self.writeln(self._indent(' <table%s><title>%s</title>'
                                  % (self.handle_attrs(layout), layout.title)))
        self.writeln(self._indent(' <tgroup cols="%s">' % layout.cols))
        for i in range(layout.cols):
            self.writeln(self._indent(' <colspec colname="c%s" colwidth="1*"/>' % i))
        table_content = self.get_table_content(layout)
        # write headers
        # NOTE(review): cheaders/rcheaders select the first/last *row*
        # here although their names suggest columns -- confirm against
        # callers before changing
        if layout.cheaders:
            self.writeln(self._indent(' <thead>'))
            self._write_row(table_content[0])
            self.writeln(self._indent(' </thead>'))
            table_content = table_content[1:]
        elif layout.rcheaders:
            self.writeln(self._indent(' <thead>'))
            self._write_row(table_content[-1])
            self.writeln(self._indent(' </thead>'))
            table_content = table_content[:-1]
        # write body
        self.writeln(self._indent(' <tbody>'))
        for i in range(len(table_content)):
            row = table_content[i]
            self.writeln(self._indent(' <row>'))
            for j in range(len(row)):
                cell = row[j] or ' '
                self.writeln(self._indent(' <entry>%s</entry>' % cell))
            self.writeln(self._indent(' </row>'))
        self.writeln(self._indent(' </tbody>'))
        self.writeln(self._indent(' </tgroup>'))
        self.writeln(self._indent(' </table>'))

    def _write_row(self, row):
        """write content of row (using <row> <entry>)"""
        self.writeln(' <row>')
        for j in range(len(row)):
            cell = row[j] or ' '
            self.writeln(' <entry>%s</entry>' % cell)
        self.writeln(self._indent(' </row>'))

    def visit_list(self, layout):
        """display a list (using <itemizedlist>)"""
        self.writeln(self._indent(' <itemizedlist%s>' % self.handle_attrs(layout)))
        for row in list(self.compute_content(layout)):
            self.writeln(' <listitem><para>%s</para></listitem>' % row)
        self.writeln(self._indent(' </itemizedlist>'))

    def visit_paragraph(self, layout):
        """display a paragraph (using <para>)"""
        self.write(self._indent(' <para>'))
        self.format_children(layout)
        self.writeln('</para>')

    def visit_span(self, layout):
        """display an in-line span (using <literal>)"""
        #TODO: find a better docbook mapping than <literal>
        self.write('<literal %s>' % self.handle_attrs(layout))
        self.format_children(layout)
        self.write('</literal>')

    def visit_link(self, layout):
        """display links (using <ulink>)"""
        self.write('<ulink url="%s"%s>%s</ulink>' % (layout.url,
                                                     self.handle_attrs(layout),
                                                     layout.label))

    def visit_verbatimtext(self, layout):
        """display verbatim text (using <programlisting>)"""
        self.writeln(self._indent(' <programlisting>'))
        # escape markup characters ('&' first!); the previous
        # replace('&', '&') / replace('<', '<') calls were no-ops and
        # emitted invalid XML for data containing markup
        self.write(layout.data.replace('&', '&amp;').replace('<', '&lt;'))
        self.writeln(self._indent(' </programlisting>'))

    def visit_text(self, layout):
        """add some text, escaped for XML"""
        self.write(layout.data.replace('&', '&amp;').replace('<', '&lt;'))

    def _indent(self, string):
        """correctly indent string according to section"""
        return ' ' * 2*(self.section) + string
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/ureports/html_writer.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/ureports/html_writer.py
@@ -0,0 +1,131 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""HTML formatting drivers for ureports"""
+__docformat__ = "restructuredtext en"
+
+from cgi import escape
+
+from ..ureports import BaseWriter
+
+
class HTMLWriter(BaseWriter):
    """format layouts as HTML"""

    def __init__(self, snippet=None):
        """`snippet`: when true, omit the <html>/<body> envelope"""
        super(HTMLWriter, self).__init__()
        self.snippet = snippet

    def handle_attrs(self, layout):
        """get an attribute string from layout member attributes"""
        attrs = ''
        klass = getattr(layout, 'klass', None)
        if klass:
            attrs += ' class="%s"' % klass
        nid = getattr(layout, 'id', None)
        if nid:
            attrs += ' id="%s"' % nid
        return attrs

    def begin_format(self, layout):
        """begin to format a layout"""
        super(HTMLWriter, self).begin_format(layout)
        if self.snippet is None:
            self.writeln('<html>')
            self.writeln('<body>')

    def end_format(self, layout):
        """finished to format a layout"""
        if self.snippet is None:
            self.writeln('</body>')
            self.writeln('</html>')

    def visit_section(self, layout):
        """display a section as html, using div + h[section level]"""
        self.section += 1
        self.writeln('<div%s>' % self.handle_attrs(layout))
        self.format_children(layout)
        self.writeln('</div>')
        self.section -= 1

    def visit_title(self, layout):
        """display a title using <hX>"""
        self.write('<h%s%s>' % (self.section, self.handle_attrs(layout)))
        self.format_children(layout)
        self.writeln('</h%s>' % self.section)

    def visit_table(self, layout):
        """display a table as html"""
        self.writeln('<table%s>' % self.handle_attrs(layout))
        table_content = self.get_table_content(layout)
        for i in range(len(table_content)):
            row = table_content[i]
            if i == 0 and layout.rheaders:
                self.writeln('<tr class="header">')
            elif i+1 == len(table_content) and layout.rrheaders:
                self.writeln('<tr class="header">')
            else:
                # alternate even/odd row classes for styling
                self.writeln('<tr class="%s">' % (i%2 and 'even' or 'odd'))
            for j in range(len(row)):
                # NOTE(review): empty cells are filled with a plain
                # space; upstream likely used a non-breaking space
                # entity -- confirm against the original source
                cell = row[j] or ' '
                if (layout.rheaders and i == 0) or \
                   (layout.cheaders and j == 0) or \
                   (layout.rrheaders and i+1 == len(table_content)) or \
                   (layout.rcheaders and j+1 == len(row)):
                    self.writeln('<th>%s</th>' % cell)
                else:
                    self.writeln('<td>%s</td>' % cell)
            self.writeln('</tr>')
        self.writeln('</table>')

    def visit_list(self, layout):
        """display a list as html"""
        self.writeln('<ul%s>' % self.handle_attrs(layout))
        for row in list(self.compute_content(layout)):
            self.writeln('<li>%s</li>' % row)
        self.writeln('</ul>')

    def visit_paragraph(self, layout):
        """display a paragraph (using <p>)"""
        self.write('<p>')
        self.format_children(layout)
        self.write('</p>')

    def visit_span(self, layout):
        """display an in-line span (using <span>)"""
        self.write('<span%s>' % self.handle_attrs(layout))
        self.format_children(layout)
        self.write('</span>')

    def visit_link(self, layout):
        """display links (using <a>)"""
        self.write(' <a href="%s"%s>%s</a>' % (layout.url,
                                               self.handle_attrs(layout),
                                               layout.label))

    def visit_verbatimtext(self, layout):
        """display verbatim text (using <pre>)"""
        self.write('<pre>')
        # escape markup-significant characters ('&' first!); the
        # previous replace('&', '&') / replace('<', '<') calls were
        # no-ops and emitted raw, potentially invalid HTML
        self.write(layout.data.replace('&', '&amp;').replace('<', '&lt;'))
        self.write('</pre>')

    def visit_text(self, layout):
        """add some text, HTML-escaped unless layout.escaped is false"""
        data = layout.data
        if layout.escaped:
            # see visit_verbatimtext: proper HTML escaping
            data = data.replace('&', '&amp;').replace('<', '&lt;')
        self.write(data)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/ureports/nodes.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/ureports/nodes.py
@@ -0,0 +1,201 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Micro reports objects.
+
+A micro report is a tree of layout and content objects.
+"""
+__docformat__ = "restructuredtext en"
+
+from ..tree import VNode
+
class BaseComponent(VNode):
    """base class of every report component

    attributes
    * id : the component's optional id
    * klass : the component's optional klass
    """
    def __init__(self, id=None, klass=None):
        VNode.__init__(self, id)
        self.klass = klass
+
class BaseLayout(BaseComponent):
    """base class of container components

    attributes
    * BaseComponent attributes
    * children : components in this layout
    """
    def __init__(self, children=(), **kwargs):
        super(BaseLayout, self).__init__(**kwargs)
        for child in children:
            if isinstance(child, BaseComponent):
                self.append(child)
            else:
                # anything else is wrapped as raw text
                self.add_text(child)

    def append(self, child):
        """overridden to detect cycles easily"""
        assert child not in self.parents()
        VNode.append(self, child)

    def parents(self):
        """return the ancestor nodes"""
        assert self.parent is not self
        if self.parent is None:
            return []
        return [self.parent] + self.parent.parents()

    def add_text(self, text):
        """shortcut to add text data"""
        # NOTE(review): appends directly to children, so the Text node's
        # parent attribute stays unset -- confirm this is intended
        self.children.append(Text(text))
+
+
+# non container nodes #########################################################
+
class Text(BaseComponent):
    """a text portion

    attributes :
    * BaseComponent attributes
    * data : the text value as an encoded or unicode string
    * escaped : whether writers should escape the data for their format
    """
    def __init__(self, data, escaped=True, **kwargs):
        super(Text, self).__init__(**kwargs)
        assert isinstance(data, (str, unicode)), data.__class__
        self.escaped = escaped
        self.data = data
+
class VerbatimText(Text):
    """a text portion whose raw data must be displayed verbatim

    attributes :
    * same as Text
    """
+
class Link(BaseComponent):
    """a labelled hyperlink

    attributes :
    * BaseComponent attributes
    * url : the link's target (REQUIRED)
    * label : the link's label as a string (defaults to the url)
    """
    def __init__(self, url, label=None, **kwargs):
        super(Link, self).__init__(**kwargs)
        assert url
        self.url = url
        self.label = label or url
+
+
class Image(BaseComponent):
    """an embedded or a single image

    attributes :
    * BaseComponent attributes
    * filename : the image's filename (REQUIRED)
    * stream : the stream object containing the image data (REQUIRED)
    * title : the image's optional title
    """
    def __init__(self, filename, stream, title=None, **kwargs):
        super(Image, self).__init__(**kwargs)
        assert filename
        assert stream
        self.filename = filename
        self.stream = stream
        self.title = title
+
+
+# container nodes #############################################################
+
class Section(BaseLayout):
    """a document section

    attributes :
    * BaseLayout attributes

    if given to the constructor, a title is inserted as the first
    child and a description as a first paragraph
    """
    def __init__(self, title=None, description=None, **kwargs):
        super(Section, self).__init__(**kwargs)
        # insert the description first so the title ends up before it
        if description:
            self.insert(0, Paragraph([Text(description)]))
        if title:
            self.insert(0, Title(children=(title,)))
+
class Title(BaseLayout):
    """a section title

    attributes :
    * BaseLayout attributes

    a title must contain neither a section nor a paragraph!
    """
+
class Span(BaseLayout):
    """an in-line container (the original docstring, "a title", was a
    copy/paste error)

    attributes :
    * BaseLayout attributes

    a span should only contain Text and Link nodes (in-line elements)
    """
+
class Paragraph(BaseLayout):
    """a simple text paragraph

    attributes :
    * BaseLayout attributes

    a paragraph must not contain a section!
    """
+
class Table(BaseLayout):
    """some tabular data

    attributes :
    * BaseLayout attributes
    * cols : the number of columns of the table (REQUIRED)
    * rheaders : the first row's elements are table's header
    * cheaders : the first col's elements are table's header
    * rrheaders : the last row's elements are table's header
    * rcheaders : the last col's elements are table's header
    * title : the table's optional title
    """
    def __init__(self, cols, title=None,
                 rheaders=0, cheaders=0, rrheaders=0, rcheaders=0,
                 **kwargs):
        super(Table, self).__init__(**kwargs)
        assert isinstance(cols, int)
        self.cols = cols
        self.title = title
        self.rheaders = rheaders
        self.cheaders = cheaders
        self.rrheaders = rrheaders
        self.rcheaders = rcheaders
+
class List(BaseLayout):
    """a list of components

    attributes :
    * BaseLayout attributes
    """
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/ureports/text_writer.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/ureports/text_writer.py
@@ -0,0 +1,140 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""Text formatting drivers for ureports"""
+__docformat__ = "restructuredtext en"
+
+from ..textutils import linesep
+from ..ureports import BaseWriter
+
+
# rst-style underline characters, indexed by section nesting level
# (index 0 is unused since visit_title is reached with section >= 1)
TITLE_UNDERLINES = ['', '=', '-', '`', '.', '~', '^']
# bullet characters, alternating with the nesting depth of lists
BULLETS = ['*', '-']
+
class TextWriter(BaseWriter):
    """format layouts as text
    (ReStructured inspiration but not totally handled yet)
    """
    def begin_format(self, layout):
        # reset list nesting depth and pending rst link targets
        super(TextWriter, self).begin_format(layout)
        self.list_level = 0
        self.pending_urls = []

    def visit_section(self, layout):
        """display a section as text
        """
        self.section += 1
        self.writeln()
        self.format_children(layout)
        if self.pending_urls:
            # flush the rst link target definitions collected by
            # visit_link while rendering this section
            self.writeln()
            for label, url in self.pending_urls:
                self.writeln('.. _`%s`: %s' % (label, url))
            self.pending_urls = []
        self.section -= 1
        self.writeln()

    def visit_title(self, layout):
        # render the title text, then underline it with the character
        # matching the current section depth
        title = ''.join(list(self.compute_content(layout)))
        self.writeln(title)
        try:
            self.writeln(TITLE_UNDERLINES[self.section] * len(title))
        except IndexError:
            # deeper than the deepest underline style known; the title
            # was already written above as plain text
            print "FIXME TITLE TOO DEEP. TURNING TITLE INTO TEXT"

    def visit_paragraph(self, layout):
        """enter a paragraph"""
        self.format_children(layout)
        self.writeln()

    def visit_span(self, layout):
        """enter a span"""
        self.format_children(layout)

    def visit_table(self, layout):
        """display a table as text"""
        table_content = self.get_table_content(layout)
        # get columns width
        cols_width = [0]*len(table_content[0])
        for row in table_content:
            for index in range(len(row)):
                col = row[index]
                cols_width[index] = max(cols_width[index], len(col))
        if layout.klass == 'field':
            # field lists ("name: value") get a dedicated rendering
            self.field_table(layout, table_content, cols_width)
        else:
            self.default_table(layout, table_content, cols_width)
        self.writeln()

    def default_table(self, layout, table_content, cols_width):
        """format a table"""
        # build one '%-<width>s' format per column, plus the
        # '+--+--+' row separator and '+==+==+' header separator
        cols_width = [size+1 for size in cols_width]
        format_strings = ' '.join(['%%-%ss'] * len(cols_width))
        format_strings = format_strings % tuple(cols_width)
        format_strings = format_strings.split(' ')
        table_linesep = '\n+' + '+'.join(['-'*w for w in cols_width]) + '+\n'
        headsep = '\n+' + '+'.join(['='*w for w in cols_width]) + '+\n'
        # FIXME: layout.cheaders
        self.write(table_linesep)
        for i in range(len(table_content)):
            self.write('|')
            line = table_content[i]
            for j in range(len(line)):
                self.write(format_strings[j] % line[j])
                self.write('|')
            if i == 0 and layout.rheaders:
                # first row is a header: underline it with '='
                self.write(headsep)
            else:
                self.write(table_linesep)

    def field_table(self, layout, table_content, cols_width):
        """special case for field table"""
        assert layout.cols == 2
        format_string = '%s%%-%ss: %%s' % (linesep, cols_width[0])
        for field, value in table_content:
            self.write(format_string % (field, value))


    def visit_list(self, layout):
        """display a list layout as text"""
        # alternate the bullet character with the nesting depth
        bullet = BULLETS[self.list_level % len(BULLETS)]
        indent = ' ' * self.list_level
        self.list_level += 1
        for child in layout.children:
            self.write('%s%s%s ' % (linesep, indent, bullet))
            child.accept(self)
        self.list_level -= 1

    def visit_link(self, layout):
        """add a hyperlink"""
        if layout.label != layout.url:
            # rst named link; the target definition is flushed at the
            # end of the enclosing section (see visit_section)
            self.write('`%s`_' % layout.label)
            self.pending_urls.append( (layout.label, layout.url) )
        else:
            self.write(layout.url)

    def visit_verbatimtext(self, layout):
        """display a verbatim layout as text (so difficult ;)
        """
        self.writeln('::\n')
        for line in layout.data.splitlines():
            self.writeln(' ' + line)
        self.writeln()

    def visit_text(self, layout):
        """add some text"""
        self.write(layout.data)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/visitor.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/logilab/common/visitor.py
@@ -0,0 +1,107 @@
+# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of logilab-common.
+#
+# logilab-common is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# logilab-common is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
+"""A generic visitor abstract implementation.
+
+
+
+
+"""
+__docformat__ = "restructuredtext en"
+
+def no_filter(_):
+ return 1
+
+# Iterators ###################################################################
+class FilteredIterator(object):
+
+ def __init__(self, node, list_func, filter_func=None):
+ self._next = [(node, 0)]
+ if filter_func is None:
+ filter_func = no_filter
+ self._list = list_func(node, filter_func)
+
+ def next(self):
+ try:
+ return self._list.pop(0)
+ except :
+ return None
+
+# Base Visitor ################################################################
+class Visitor(object):
+
+ def __init__(self, iterator_class, filter_func=None):
+ self._iter_class = iterator_class
+ self.filter = filter_func
+
+ def visit(self, node, *args, **kargs):
+ """
+ launch the visit on a given node
+
+ call 'open_visit' before the beginning of the visit, with extra args
+ given
+ when all nodes have been visited, call the 'close_visit' method
+ """
+ self.open_visit(node, *args, **kargs)
+ return self.close_visit(self._visit(node))
+
+ def _visit(self, node):
+ iterator = self._get_iterator(node)
+ n = iterator.next()
+ while n:
+ result = n.accept(self)
+ n = iterator.next()
+ return result
+
+ def _get_iterator(self, node):
+ return self._iter_class(node, self.filter)
+
+ def open_visit(self, *args, **kargs):
+ """
+ method called at the beginning of the visit
+ """
+ pass
+
+ def close_visit(self, result):
+ """
+ method called at the end of the visit
+ """
+ return result
+
+# standard visited mixin ######################################################
+class VisitedMixIn(object):
+ """
+ Visited interface allow node visitors to use the node
+ """
+ def get_visit_name(self):
+ """
+ return the visit name for the mixed class. When calling 'accept', the
+ method <'visit_' + name returned by this method> will be called on the
+ visitor
+ """
+ try:
+ return self.TYPE.replace('-', '_')
+ except:
+ return self.__class__.__name__.lower()
+
+ def accept(self, visitor, *args, **kwargs):
+ func = getattr(visitor, 'visit_%s' % self.get_visit_name())
+ return func(self, *args, **kwargs)
+
+ def leave(self, visitor, *args, **kwargs):
+ func = getattr(visitor, 'leave_%s' % self.get_visit_name())
+ return func(self, *args, **kwargs)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/reporters/__init__.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/reporters/__init__.py
@@ -0,0 +1,113 @@
+# Copyright (c) 2003-2010 Sylvain Thenault (thenault@gmail.com).
+# Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""utilities methods and classes for reporters"""
+
+import sys, locale
+
+CMPS = ['=', '-', '+']
+
+# py3k has no more cmp builtin
+if sys.version_info >= (3, 0):
+ def cmp(a, b):
+ return (a > b) - (a < b)
+
+def diff_string(old, new):
+ """given a old and new int value, return a string representing the
+ difference
+ """
+ diff = abs(old - new)
+ diff_str = "%s%s" % (CMPS[cmp(old, new)], diff and ('%.2f' % diff) or '')
+ return diff_str
+
+
+class EmptyReport(Exception):
+ """raised when a report is empty and so should not be displayed"""
+
+class BaseReporter:
+ """base class for reporters
+
+ symbols: show short symbolic names for messages.
+ """
+
+ extension = ''
+
+ def __init__(self, output=None):
+ self.linter = None
+ self.include_ids = None
+ self.symbols = None
+ self.section = 0
+ self.out = None
+ self.out_encoding = None
+ self.set_output(output)
+
+ def make_sigle(self, msg_id):
+ """generate a short prefix for a message.
+
+ The sigle can include the id, the symbol, or both, or it can just be
+ the message class.
+ """
+ if self.include_ids:
+ sigle = msg_id
+ else:
+ sigle = msg_id[0]
+ if self.symbols:
+ symbol = self.linter.check_message_id(msg_id).symbol
+ if symbol:
+ sigle += '(%s)' % symbol
+ return sigle
+
+ def set_output(self, output=None):
+ """set output stream"""
+ self.out = output or sys.stdout
+ # py3k streams handle their encoding :
+ if sys.version_info >= (3, 0):
+ self.encode = lambda x: x
+ return
+
+ def encode(string):
+ if not isinstance(string, unicode):
+ return string
+ encoding = (getattr(self.out, 'encoding', None) or
+ locale.getdefaultlocale()[1] or
+ sys.getdefaultencoding())
+ return string.encode(encoding)
+ self.encode = encode
+
+ def writeln(self, string=''):
+ """write a line in the output buffer"""
+ print >> self.out, self.encode(string)
+
+ def display_results(self, layout):
+ """display results encapsulated in the layout tree"""
+ self.section = 0
+ if self.include_ids and hasattr(layout, 'report_id'):
+ layout.children[0].children[0].data += ' (%s)' % layout.report_id
+ self._display(layout)
+
+ def _display(self, layout):
+ """display the layout"""
+ raise NotImplementedError()
+
+ # Event callbacks
+
+ def on_set_current_module(self, module, filepath):
+ """starting analyzis of a module"""
+ pass
+
+ def on_close(self, stats, previous_stats):
+ """global end of analyzis"""
+ pass
+
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/reporters/guireporter.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/reporters/guireporter.py
@@ -0,0 +1,32 @@
+""" reporter used by gui.py """
+
+import sys
+
+from ..interfaces import IReporter
+from . import BaseReporter
+from ..logilab.common.ureports import TextWriter
+
+
+class GUIReporter(BaseReporter):
+ """saves messages"""
+
+ __implements__ = IReporter
+ extension = ''
+
+ def __init__(self, gui, output=sys.stdout):
+ """init"""
+ BaseReporter.__init__(self, output)
+ self.msgs = []
+ self.gui = gui
+
+ def add_message(self, msg_id, location, msg):
+ """manage message of different type and in the context of path"""
+ module, obj, line, col_offset = location[1:]
+ sigle = self.make_sigle(msg_id)
+ full_msg = [sigle, module, obj, str(line), msg]
+ self.msgs += [[sigle, module, obj, str(line)]]
+ self.gui.msg_queue.put(full_msg)
+
+ def _display(self, layout):
+ """launch layouts display"""
+ TextWriter().format(layout, self.out)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/reporters/html.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/reporters/html.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2003-2006 Sylvain Thenault (thenault@gmail.com).
+# Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE).
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""HTML reporter"""
+
+import sys
+from cgi import escape
+
+from ..logilab.common.ureports import HTMLWriter, Section, Table
+
+from ..interfaces import IReporter
+from . import BaseReporter
+
+
+class HTMLReporter(BaseReporter):
+ """report messages and layouts in HTML"""
+
+ __implements__ = IReporter
+ extension = 'html'
+
+ def __init__(self, output=sys.stdout):
+ BaseReporter.__init__(self, output)
+ self.msgs = []
+
+ def add_message(self, msg_id, location, msg):
+ """manage message of different type and in the context of path"""
+ module, obj, line, col_offset = location[1:]
+ sigle = self.make_sigle(msg_id)
+ self.msgs += [sigle, module, obj, str(line), str(col_offset), escape(msg)]
+
+ def set_output(self, output=None):
+ """set output stream
+
+ messages buffered for old output is processed first"""
+ if self.out and self.msgs:
+ self._display(Section())
+ BaseReporter.set_output(self, output)
+
+ def _display(self, layout):
+ """launch layouts display
+
+ overridden from BaseReporter to add insert the messages section
+ (in add_message, message is not displayed, just collected so it
+ can be displayed in an html table)
+ """
+ if self.msgs:
+ # add stored messages to the layout
+ msgs = ['type', 'module', 'object', 'line', 'col_offset', 'message']
+ msgs += self.msgs
+ sect = Section('Messages')
+ layout.append(sect)
+ sect.append(Table(cols=6, children=msgs, rheaders=1))
+ self.msgs = []
+ HTMLWriter().format(layout, self.out)
+
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/reporters/text.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/reporters/text.py
@@ -0,0 +1,147 @@
+# Copyright (c) 2003-2007 Sylvain Thenault (thenault@gmail.com).
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""Plain text reporters:
+
+:text: the default one grouping messages by module
+:parseable:
+ standard parseable output with full module path on each message (for
+ editor integration)
+:colorized: an ANSI colorized text reporter
+
+"""
+
+import os
+import sys
+
+from ..logilab.common.ureports import TextWriter
+from ..logilab.common.textutils import colorize_ansi
+
+from ..interfaces import IReporter
+from . import BaseReporter
+
+TITLE_UNDERLINES = ['', '=', '-', '.']
+
+
+class TextReporter(BaseReporter):
+ """reports messages and layouts in plain text
+ """
+
+ __implements__ = IReporter
+ extension = 'txt'
+
+ def __init__(self, output=None):
+ BaseReporter.__init__(self, output)
+ self._modules = {}
+
+ def add_message(self, msg_id, location, msg):
+ """manage message of different type and in the context of path"""
+ module, obj, line, col_offset = location[1:]
+ if module not in self._modules:
+ if module:
+ self.writeln('************* Module %s' % module)
+ self._modules[module] = 1
+ else:
+ self.writeln('************* %s' % module)
+ if obj:
+ obj = ':%s' % obj
+ sigle = self.make_sigle(msg_id)
+ self.writeln('%s:%3s,%s%s: %s' % (sigle, line, col_offset, obj, msg))
+
+ def _display(self, layout):
+ """launch layouts display"""
+ print >> self.out
+ TextWriter().format(layout, self.out)
+
+
+class ParseableTextReporter(TextReporter):
+ """a reporter very similar to TextReporter, but display messages in a form
+ recognized by most text editors :
+
+ <filename>:<linenum>:<msg>
+ """
+ line_format = '%(path)s:%(line)s: [%(sigle)s%(obj)s] %(msg)s'
+
+ def __init__(self, output=None, relative=True):
+ TextReporter.__init__(self, output)
+ if relative:
+ self._prefix = os.getcwd() + os.sep
+ else:
+ self._prefix = ''
+
+ def add_message(self, msg_id, location, msg):
+ """manage message of different type and in the context of path"""
+ path, _, obj, line, _ = location
+ if obj:
+ obj = ', %s' % obj
+ sigle = self.make_sigle(msg_id)
+ if self._prefix:
+ path = path.replace(self._prefix, '')
+ self.writeln(self.line_format % locals())
+
+
+class VSTextReporter(ParseableTextReporter):
+ """Visual studio text reporter"""
+ line_format = '%(path)s(%(line)s): [%(sigle)s%(obj)s] %(msg)s'
+
+class ColorizedTextReporter(TextReporter):
+ """Simple TextReporter that colorizes text output"""
+
+ COLOR_MAPPING = {
+ "I" : ("green", None),
+ 'C' : (None, "bold"),
+ 'R' : ("magenta", "bold, italic"),
+ 'W' : ("blue", None),
+ 'E' : ("red", "bold"),
+ 'F' : ("red", "bold, underline"),
+ 'S' : ("yellow", "inverse"), # S stands for module Separator
+ }
+
+ def __init__(self, output=None, color_mapping=None):
+ TextReporter.__init__(self, output)
+ self.color_mapping = color_mapping or \
+ dict(ColorizedTextReporter.COLOR_MAPPING)
+
+
+ def _get_decoration(self, msg_id):
+ """Returns the tuple color, style associated with msg_id as defined
+ in self.color_mapping
+ """
+ try:
+ return self.color_mapping[msg_id[0]]
+ except KeyError:
+ return None, None
+
+ def add_message(self, msg_id, location, msg):
+ """manage message of different types, and colorize output
+ using ansi escape codes
+ """
+ module, obj, line, _ = location[1:]
+ if module not in self._modules:
+ color, style = self._get_decoration('S')
+ if module:
+ modsep = colorize_ansi('************* Module %s' % module,
+ color, style)
+ else:
+ modsep = colorize_ansi('************* %s' % module,
+ color, style)
+ self.writeln(modsep)
+ self._modules[module] = 1
+ if obj:
+ obj = ':%s' % obj
+ sigle = self.make_sigle(msg_id)
+ color, style = self._get_decoration(sigle)
+ msg = colorize_ansi(msg, color, style)
+ sigle = colorize_ansi(sigle, color, style)
+ self.writeln('%s:%3s%s: %s' % (sigle, line, obj, msg))
diff --git a/.vim/bundle/python-mode/pylibs/pylama/pylint/utils.py b/.vim/bundle/python-mode/pylibs/pylama/pylint/utils.py
@@ -0,0 +1,602 @@
+# Copyright (c) 2003-2010 Sylvain Thenault (thenault@gmail.com).
+# Copyright (c) 2003-2012 LOGILAB S.A. (Paris, FRANCE).
+# http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+"""some various utilities and helper classes, most of them used in the
+main pylint class
+"""
+
+import sys
+from warnings import warn
+from os.path import dirname, basename, splitext, exists, isdir, join, normpath
+
+from .logilab.common.modutils import modpath_from_file, get_module_files, \
+ file_from_modpath
+from .logilab.common.textutils import normalize_text
+from .logilab.common.configuration import rest_format_section
+from .logilab.common.ureports import Section
+
+from .logilab.astng import nodes, Module
+
+from .checkers import EmptyReport
+
+
+class UnknownMessage(Exception):
+ """raised when a unregistered message id is encountered"""
+
+
+MSG_TYPES = {
+ 'I' : 'info',
+ 'C' : 'convention',
+ 'R' : 'refactor',
+ 'W' : 'warning',
+ 'E' : 'error',
+ 'F' : 'fatal'
+ }
+MSG_TYPES_LONG = dict([(v, k) for k, v in MSG_TYPES.iteritems()])
+
+MSG_TYPES_STATUS = {
+ 'I' : 0,
+ 'C' : 16,
+ 'R' : 8,
+ 'W' : 4,
+ 'E' : 2,
+ 'F' : 1
+ }
+
+_MSG_ORDER = 'EWRCIF'
+MSG_STATE_SCOPE_CONFIG = 0
+MSG_STATE_SCOPE_MODULE = 1
+
+def sort_msgs(msgids):
+ """sort message identifiers according to their category first"""
+ msgs = {}
+ for msg in msgids:
+ msgs.setdefault(msg[0], []).append(msg)
+ result = []
+ for m_id in _MSG_ORDER:
+ if m_id in msgs:
+ result.extend( sorted(msgs[m_id]) )
+ return result
+
+def get_module_and_frameid(node):
+ """return the module name and the frame id in the module"""
+ frame = node.frame()
+ module, obj = '', []
+ while frame:
+ if isinstance(frame, Module):
+ module = frame.name
+ else:
+ obj.append(getattr(frame, 'name', '<lambda>'))
+ try:
+ frame = frame.parent.frame()
+ except AttributeError:
+ frame = None
+ obj.reverse()
+ return module, '.'.join(obj)
+
+def category_id(id):
+ id = id.upper()
+ if id in MSG_TYPES:
+ return id
+ return MSG_TYPES_LONG.get(id)
+
+
+class Message:
+ def __init__(self, checker, msgid, msg, descr, symbol):
+ assert len(msgid) == 5, 'Invalid message id %s' % msgid
+ assert msgid[0] in MSG_TYPES, \
+ 'Bad message type %s in %r' % (msgid[0], msgid)
+ self.msgid = msgid
+ self.msg = msg
+ self.descr = descr
+ self.checker = checker
+ self.symbol = symbol
+
+class MessagesHandlerMixIn:
+ """a mix-in class containing all the messages related methods for the main
+ lint class
+ """
+
+ def __init__(self):
+ # dictionary of registered messages
+ self._messages = {}
+ # dictionary from string symbolic id to Message object.
+ self._messages_by_symbol = {}
+ self._msgs_state = {}
+ self._module_msgs_state = {} # None
+ self._raw_module_msgs_state = {}
+ self._msgs_by_category = {}
+ self.msg_status = 0
+ self._ignored_msgs = {}
+
+ def register_messages(self, checker):
+ """register a dictionary of messages
+
+ Keys are message ids, values are a 2-uple with the message type and the
+ message itself
+
+ message ids should be a string of len 4, where the two first characters
+ are the checker id and the two last the message id in this checker
+ """
+ msgs_dict = checker.msgs
+ chkid = None
+ for msgid, msg_tuple in msgs_dict.iteritems():
+ if len(msg_tuple) == 3:
+ (msg, msgsymbol, msgdescr) = msg_tuple
+ assert msgsymbol not in self._messages_by_symbol, \
+ 'Message symbol %r is already defined' % msgsymbol
+ else:
+ # messages should have a symbol, but for backward compatibility
+ # they may not.
+ (msg, msgdescr) = msg_tuple
+ warn("[pylint 0.26] description of message %s doesn't include "
+ "a symbolic name" % msgid, DeprecationWarning)
+ msgsymbol = None
+ # avoid duplicate / malformed ids
+ assert msgid not in self._messages, \
+ 'Message id %r is already defined' % msgid
+ assert chkid is None or chkid == msgid[1:3], \
+ 'Inconsistent checker part in message id %r' % msgid
+ chkid = msgid[1:3]
+ msg = Message(checker, msgid, msg, msgdescr, msgsymbol)
+ self._messages[msgid] = msg
+ self._messages_by_symbol[msgsymbol] = msg
+ self._msgs_by_category.setdefault(msgid[0], []).append(msgid)
+
+ def get_message_help(self, msgid, checkerref=False):
+ """return the help string for the given message id"""
+ msg = self.check_message_id(msgid)
+ desc = normalize_text(' '.join(msg.descr.split()), indent=' ')
+ if checkerref:
+ desc += ' This message belongs to the %s checker.' % \
+ msg.checker.name
+ title = msg.msg
+ if msg.symbol:
+ symbol_part = ' (%s)' % msg.symbol
+ else:
+ symbol_part = ''
+ if title != '%s':
+ title = title.splitlines()[0]
+ return ':%s%s: *%s*\n%s' % (msg.msgid, symbol_part, title, desc)
+ return ':%s%s:\n%s' % (msg.msgid, symbol_part, desc)
+
+ def disable(self, msgid, scope='package', line=None):
+ """don't output message of the given id"""
+ assert scope in ('package', 'module')
+ # handle disable=all by disabling all categories
+ if msgid == 'all':
+ for msgid in MSG_TYPES:
+ self.disable(msgid, scope, line)
+ return
+ # msgid is a category?
+ catid = category_id(msgid)
+ if catid is not None:
+ for _msgid in self._msgs_by_category.get(catid):
+ self.disable(_msgid, scope, line)
+ return
+ # msgid is a checker name?
+ if msgid.lower() in self._checkers:
+ for checker in self._checkers[msgid.lower()]:
+ for _msgid in checker.msgs:
+ self.disable(_msgid, scope, line)
+ return
+ # msgid is report id?
+ if msgid.lower().startswith('rp'):
+ self.disable_report(msgid)
+ return
+ # msgid is a symbolic or numeric msgid.
+ msg = self.check_message_id(msgid)
+ if scope == 'module':
+ assert line > 0
+ try:
+ self._module_msgs_state[msg.msgid][line] = False
+ except KeyError:
+ self._module_msgs_state[msg.msgid] = {line: False}
+ if msgid != 'I0011':
+ self.add_message('I0011', line=line, args=msg.msgid)
+
+ else:
+ msgs = self._msgs_state
+ msgs[msg.msgid] = False
+ # sync configuration object
+ self.config.disable_msg = [mid for mid, val in msgs.iteritems()
+ if not val]
+
+ def enable(self, msgid, scope='package', line=None):
+ """reenable message of the given id"""
+ assert scope in ('package', 'module')
+ catid = category_id(msgid)
+ # msgid is a category?
+ if catid is not None:
+ for msgid in self._msgs_by_category.get(catid):
+ self.enable(msgid, scope, line)
+ return
+ # msgid is a checker name?
+ if msgid.lower() in self._checkers:
+ for checker in self._checkers[msgid.lower()]:
+ for msgid in checker.msgs:
+ self.enable(msgid, scope, line)
+ return
+ # msgid is report id?
+ if msgid.lower().startswith('rp'):
+ self.enable_report(msgid)
+ return
+ # msgid is a symbolic or numeric msgid.
+ msg = self.check_message_id(msgid)
+ if scope == 'module':
+ assert line > 0
+ try:
+ self._module_msgs_state[msg.msgid][line] = True
+ except KeyError:
+ self._module_msgs_state[msg.msgid] = {line: True}
+ self.add_message('I0012', line=line, args=msg.msgid)
+ else:
+ msgs = self._msgs_state
+ msgs[msg.msgid] = True
+ # sync configuration object
+ self.config.enable = [mid for mid, val in msgs.iteritems() if val]
+
+ def check_message_id(self, msgid):
+ """returns the Message object for this message.
+
+ msgid may be either a numeric or symbolic id.
+
+ Raises UnknownMessage if the message id is not defined.
+ """
+ if msgid in self._messages_by_symbol:
+ return self._messages_by_symbol[msgid]
+ msgid = msgid.upper()
+ try:
+ return self._messages[msgid]
+ except KeyError:
+ raise UnknownMessage('No such message id %s' % msgid)
+
+ def get_msg_display_string(self, msgid):
+ """Generates a user-consumable representation of a message.
+
+ Can be just the message ID or the ID and the symbol.
+ """
+ if self.config.symbols:
+ symbol = self.check_message_id(msgid).symbol
+ if symbol:
+ msgid += '(%s)' % symbol
+ return msgid
+
+ def get_message_state_scope(self, msgid, line=None):
+ """Returns the scope at which a message was enabled/disabled."""
+ try:
+ if line in self._module_msgs_state[msgid]:
+ return MSG_STATE_SCOPE_MODULE
+ except (KeyError, TypeError):
+ return MSG_STATE_SCOPE_CONFIG
+
+ def is_message_enabled(self, msgid, line=None):
+ """return true if the message associated to the given message id is
+ enabled
+
+ msgid may be either a numeric or symbolic message id.
+ """
+ if msgid in self._messages_by_symbol:
+ msgid = self._messages_by_symbol[msgid].msgid
+ if line is None:
+ return self._msgs_state.get(msgid, True)
+ try:
+ return self._module_msgs_state[msgid][line]
+ except (KeyError, TypeError):
+ return self._msgs_state.get(msgid, True)
+
+ def handle_ignored_message(self, state_scope, msgid, line, node, args):
+ """Report an ignored message.
+
+ state_scope is either MSG_STATE_SCOPE_MODULE or MSG_STATE_SCOPE_CONFIG,
+ depending on whether the message was disabled locally in the module,
+ or globally. The other arguments are the same as for add_message.
+ """
+ if state_scope == MSG_STATE_SCOPE_MODULE:
+ try:
+ orig_line = self._suppression_mapping[(msgid, line)]
+ self._ignored_msgs.setdefault((msgid, orig_line), set()).add(line)
+ except KeyError:
+ pass
+
+ def add_message(self, msgid, line=None, node=None, args=None):
+ """add the message corresponding to the given id.
+
+ If provided, msg is expanded using args
+
+ astng checkers should provide the node argument, raw checkers should
+ provide the line argument.
+ """
+ if line is None and node is not None:
+ line = node.fromlineno
+ if hasattr(node, 'col_offset'):
+ col_offset = node.col_offset # XXX measured in bytes for utf-8, divide by two for chars?
+ else:
+ col_offset = None
+ # should this message be displayed
+ if not self.is_message_enabled(msgid, line):
+ self.handle_ignored_message(
+ self.get_message_state_scope(msgid, line), msgid, line, node, args)
+ return
+ # update stats
+ msg_cat = MSG_TYPES[msgid[0]]
+ self.msg_status |= MSG_TYPES_STATUS[msgid[0]]
+ self.stats[msg_cat] += 1
+ self.stats['by_module'][self.current_name][msg_cat] += 1
+ try:
+ self.stats['by_msg'][msgid] += 1
+ except KeyError:
+ self.stats['by_msg'][msgid] = 1
+ msg = self._messages[msgid].msg
+ # expand message ?
+ if args:
+ msg %= args
+ # get module and object
+ if node is None:
+ module, obj = self.current_name, ''
+ path = self.current_file
+ else:
+ module, obj = get_module_and_frameid(node)
+ path = node.root().file
+ # add the message
+ self.reporter.add_message(msgid, (path, module, obj, line or 1, col_offset or 0), msg)
+
+ def help_message(self, msgids):
+ """display help messages for the given message identifiers"""
+ for msgid in msgids:
+ try:
+ print self.get_message_help(msgid, True)
+ print
+ except UnknownMessage, ex:
+ print ex
+ print
+ continue
+
+ def print_full_documentation(self):
+ """output a full documentation in ReST format"""
+ by_checker = {}
+ for checker in self.get_checkers():
+ if checker.name == 'master':
+ prefix = 'Main '
+ print "Options"
+ print '-------\n'
+ if checker.options:
+ for section, options in checker.options_by_section():
+ if section is None:
+ title = 'General options'
+ else:
+ title = '%s options' % section.capitalize()
+ print title
+ print '~' * len(title)
+ rest_format_section(sys.stdout, None, options)
+ print
+ else:
+ try:
+ by_checker[checker.name][0] += checker.options_and_values()
+ by_checker[checker.name][1].update(checker.msgs)
+ by_checker[checker.name][2] += checker.reports
+ except KeyError:
+ by_checker[checker.name] = [list(checker.options_and_values()),
+ dict(checker.msgs),
+ list(checker.reports)]
+ for checker, (options, msgs, reports) in by_checker.iteritems():
+ prefix = ''
+ title = '%s checker' % checker
+ print title
+ print '-' * len(title)
+ print
+ if options:
+ title = 'Options'
+ print title
+ print '~' * len(title)
+ rest_format_section(sys.stdout, None, options)
+ print
+ if msgs:
+ title = ('%smessages' % prefix).capitalize()
+ print title
+ print '~' * len(title)
+ for msgid in sort_msgs(msgs.iterkeys()):
+ print self.get_message_help(msgid, False)
+ print
+ if reports:
+ title = ('%sreports' % prefix).capitalize()
+ print title
+ print '~' * len(title)
+ for report in reports:
+ print ':%s: %s' % report[:2]
+ print
+ print
+
+ def list_messages(self):
+ """output full messages list documentation in ReST format"""
+ msgids = []
+ for checker in self.get_checkers():
+ for msgid in checker.msgs.iterkeys():
+ msgids.append(msgid)
+ msgids.sort()
+ for msgid in msgids:
+ print self.get_message_help(msgid, False)
+ print
+
+
+class ReportsHandlerMixIn:
+ """a mix-in class containing all the reports and stats manipulation
+ related methods for the main lint class
+ """
+ def __init__(self):
+ self._reports = {}
+ self._reports_state = {}
+
+ def register_report(self, reportid, r_title, r_cb, checker):
+ """register a report
+
+ reportid is the unique identifier for the report
+ r_title the report's title
+ r_cb the method to call to make the report
+ checker is the checker defining the report
+ """
+ reportid = reportid.upper()
+ self._reports.setdefault(checker, []).append( (reportid, r_title, r_cb) )
+
+ def enable_report(self, reportid):
+ """disable the report of the given id"""
+ reportid = reportid.upper()
+ self._reports_state[reportid] = True
+
+ def disable_report(self, reportid):
+ """disable the report of the given id"""
+ reportid = reportid.upper()
+ self._reports_state[reportid] = False
+
+ def report_is_enabled(self, reportid):
+ """return true if the report associated to the given identifier is
+ enabled
+ """
+ return self._reports_state.get(reportid, True)
+
+ def make_reports(self, stats, old_stats):
+ """render registered reports"""
+ sect = Section('Report',
+ '%s statements analysed.'% (self.stats['statement']))
+ for checker in self._reports:
+ for reportid, r_title, r_cb in self._reports[checker]:
+ if not self.report_is_enabled(reportid):
+ continue
+ report_sect = Section(r_title)
+ try:
+ r_cb(report_sect, stats, old_stats)
+ except EmptyReport:
+ continue
+ report_sect.report_id = reportid
+ sect.append(report_sect)
+ return sect
+
+ def add_stats(self, **kwargs):
+ """add some stats entries to the statistic dictionary
+ raise an AssertionError if there is a key conflict
+ """
+ for key, value in kwargs.iteritems():
+ if key[-1] == '_':
+ key = key[:-1]
+ assert key not in self.stats
+ self.stats[key] = value
+ return self.stats
+
+
+def expand_modules(files_or_modules, black_list):
+ """take a list of files/modules/packages and return the list of tuple
+ (file, module name) which have to be actually checked
+ """
+ result = []
+ errors = []
+ for something in files_or_modules:
+ if exists(something):
+ # this is a file or a directory
+ try:
+ modname = '.'.join(modpath_from_file(something))
+ except ImportError:
+ modname = splitext(basename(something))[0]
+ if isdir(something):
+ filepath = join(something, '__init__.py')
+ else:
+ filepath = something
+ else:
+ # suppose it's a module or package
+ modname = something
+ try:
+ filepath = file_from_modpath(modname.split('.'))
+ if filepath is None:
+ errors.append( {'key' : 'F0003', 'mod': modname} )
+ continue
+ except (ImportError, SyntaxError), ex:
+ # FIXME p3k : the SyntaxError is a Python bug and should be
+ # removed as soon as possible http://bugs.python.org/issue10588
+ errors.append( {'key': 'F0001', 'mod': modname, 'ex': ex} )
+ continue
+ filepath = normpath(filepath)
+ result.append( {'path': filepath, 'name': modname,
+ 'basepath': filepath, 'basename': modname} )
+ if not (modname.endswith('.__init__') or modname == '__init__') \
+ and '__init__.py' in filepath:
+ for subfilepath in get_module_files(dirname(filepath), black_list):
+ if filepath == subfilepath:
+ continue
+ submodname = '.'.join(modpath_from_file(subfilepath))
+ result.append( {'path': subfilepath, 'name': submodname,
+ 'basepath': filepath, 'basename': modname} )
+ return result, errors
+
+
+class PyLintASTWalker(object):
+
+ def __init__(self, linter):
+ # callbacks per node types
+ self.nbstatements = 1
+ self.visit_events = {}
+ self.leave_events = {}
+ self.linter = linter
+
+ def add_checker(self, checker):
+ """walk to the checker's dir and collect visit and leave methods"""
+ # XXX : should be possible to merge needed_checkers and add_checker
+ vcids = set()
+ lcids = set()
+ visits = self.visit_events
+ leaves = self.leave_events
+ msgs = self.linter._msgs_state
+ for member in dir(checker):
+ cid = member[6:]
+ if cid == 'default':
+ continue
+ if member.startswith('visit_'):
+ v_meth = getattr(checker, member)
+ # don't use visit_methods with no activated message:
+ if hasattr(v_meth, 'checks_msgs'):
+ if not any(msgs.get(m, True) for m in v_meth.checks_msgs):
+ continue
+ visits.setdefault(cid, []).append(v_meth)
+ vcids.add(cid)
+ elif member.startswith('leave_'):
+ l_meth = getattr(checker, member)
+ # don't use leave_methods with no activated message:
+ if hasattr(l_meth, 'checks_msgs'):
+ if not any(msgs.get(m, True) for m in l_meth.checks_msgs):
+ continue
+ leaves.setdefault(cid, []).append(l_meth)
+ lcids.add(cid)
+ visit_default = getattr(checker, 'visit_default', None)
+ if visit_default:
+ for cls in nodes.ALL_NODE_CLASSES:
+ cid = cls.__name__.lower()
+ if cid not in vcids:
+ visits.setdefault(cid, []).append(visit_default)
+ # for now we have no "leave_default" method in Pylint
+
+ def walk(self, astng):
+ """call visit events of astng checkers for the given node, recurse on
+ its children, then leave events.
+ """
+ cid = astng.__class__.__name__.lower()
+ if astng.is_statement:
+ self.nbstatements += 1
+ # generate events for this node on each checker
+ for cb in self.visit_events.get(cid, ()):
+ cb(astng)
+ # recurse on children
+ for child in astng.get_children():
+ self.walk(child)
+ for cb in self.leave_events.get(cid, ()):
+ cb(astng)
diff --git a/.vim/bundle/python-mode/pylibs/pylama/utils.py b/.vim/bundle/python-mode/pylibs/pylama/utils.py
@@ -0,0 +1,110 @@
+import _ast
+from os import path as op, environ
+
+from .mccabe import get_code_complexity
+from .pep8 import BaseReport, StyleGuide
+from .pyflakes import checker
+
+
+__all__ = 'pep8', 'mccabe', 'pyflakes', 'pylint'
+
+PYLINT_RC = op.abspath(op.join(op.dirname(__file__), 'pylint.rc'))
+
+
class PEP8Report(BaseReport):
    """pep8 report that collects errors as qf-style dicts instead of printing."""

    def __init__(self, *args, **kwargs):
        super(PEP8Report, self).__init__(*args, **kwargs)
        self.errors = []

    def init_file(self, filename, lines, expected, line_offset):
        # reset the collected errors for each newly checked file
        super(PEP8Report, self).init_file(
            filename, lines, expected, line_offset)
        self.errors = []

    def error(self, line_number, offset, text, check):
        # NOTE(review): the base class may return None for suppressed
        # errors; such entries are still appended here — confirm intended.
        code = super(PEP8Report, self).error(
            line_number, offset, text, check)

        self.errors.append(dict(
            text=text,
            type=code,
            col=offset + 1,  # pep8 columns are 0-based; vim's are 1-based
            lnum=line_number,
        ))

    def get_file_results(self):
        # return the collected dicts rather than the error counter
        return self.errors
+
# Shared StyleGuide instance wired to the collecting reporter above.
P8Style = StyleGuide(reporter=PEP8Report)


def pep8(path, **meta):
    " PEP8 code checking. "

    return P8Style.input_file(path)
+
+
def mccabe(path, code=None, complexity=8, **meta):
    " MCCabe code checking. "

    # `code` is the source text; `path` is only used for reporting
    return get_code_complexity(code, complexity, filename=path)
+
+
def pyflakes(path, code=None, **meta):
    " PyFlakes code checking. "

    # Parse the given source and hand the tree to the pyflakes checker;
    # report messages sorted by line number as qf-style dicts.
    tree = compile(code, path, "exec", _ast.PyCF_ONLY_AST)
    warnings = checker.Checker(tree, path)
    warnings.messages = sorted(warnings.messages, key=lambda m: m.lineno)
    return [dict(lnum=message.lineno,
                 text=message.message % message.message_args)
            for message in warnings.messages]
+
+
def pylint(path, **meta):
    """Pylint code checking; returns a list of qf-style error dicts."""
    from sys import version_info
    # This bundled Pylint only runs on Python 2; (2, 8) compares less
    # than any (3, x) version_info tuple, so 3.x is disabled here.
    if version_info > (2, 8):
        import logging
        logging.warn("Pylint don't supported python3 and will be disabled.")
        return []

    from .pylint.lint import Run
    from .pylint.reporters import BaseReporter

    # Clear the ASTNG cache so stale module info is not reused between
    # successive checks of the same file.
    from .pylint.logilab.astng.builder import MANAGER
    MANAGER.astng_cache.clear()

    class Reporter(BaseReporter):
        # Collects messages in memory instead of writing to a stream.

        def __init__(self):
            self.errors = []
            BaseReporter.__init__(self)

        def _display(self, layout):
            # suppress Pylint's final report output
            pass

        def add_message(self, msg_id, location, msg):
            # location[1:] is presumably (module, obj, line, col) — TODO confirm
            _, _, line, col = location[1:]
            self.errors.append(dict(
                lnum=line,
                col=col,
                text="%s %s" % (msg_id, msg),
                type=msg_id[0]
            ))

    # Prefer the user's ~/.pylintrc; fall back to the bundled pylint.rc.
    pylintrc = op.join(environ.get('HOME', ''), '.pylintrc')
    defattrs = '-r n'
    if not op.exists(pylintrc):
        defattrs += ' --rcfile={0}'.format(PYLINT_RC)
    attrs = meta.get('pylint', defattrs.split())

    runner = Run(
        [path] + attrs, reporter=Reporter(), exit=False)
    return runner.linter.reporter.errors
+
+# pymode:lint_ignore=W0231
diff --git a/.vim/bundle/python-mode/pylibs/pymode/__init__.py b/.vim/bundle/python-mode/pylibs/pymode/__init__.py
diff --git a/.vim/bundle/python-mode/pylibs/pymode/auto.py b/.vim/bundle/python-mode/pylibs/pymode/auto.py
@@ -0,0 +1,18 @@
+import vim
+from autopep8 import fix_file
+
+
class Options():
    """Static option container mimicking autopep8's parsed CLI options."""
    aggressive = 0        # no aggressive (potentially unsafe) fixes
    diff = False          # apply fixes instead of printing a diff
    ignore = ''           # no error codes ignored
    in_place = True       # modify the file on disk directly
    max_line_length = 79
    pep8_passes = 100
    recursive = False
    select = ''           # empty: do not restrict to specific codes
    verbose = 0
+
+
def fix_current_file():
    # Run autopep8 in-place on the file backing the current vim buffer.
    fix_file(vim.current.buffer.name, Options)
diff --git a/.vim/bundle/python-mode/pylibs/pymode/interface.py b/.vim/bundle/python-mode/pylibs/pymode/interface.py
@@ -0,0 +1,25 @@
+import vim
+
+
def get_option(name):
    # Buffer-local b:pymode_<name> wins over global g:pymode_<name>.
    return get_bvar(name) or get_var(name)
+
+
def get_var(name):
    """Return the global vim variable g:pymode_<name>."""
    return vim.eval("g:pymode_%s" % name)
+
+
def get_bvar(name):
    # Return b:pymode_<name>, or None when the buffer variable is unset.
    return (int(vim.eval("exists('b:pymode_%s')" % name)) and vim.eval("b:pymode_%s" % name)) or None
+
+
def get_current_buffer():
    """Return the active vim buffer object."""
    return vim.current.buffer
+
+
def show_message(message):
    # Display `message` on vim's command line, truncated to window width.
    vim.command("call pymode#WideMessage('%s')" % message)
+
+
def command(cmd):
    """Execute an ex-mode command in vim."""
    vim.command(cmd)
diff --git a/.vim/bundle/python-mode/pylibs/pymode/lint.py b/.vim/bundle/python-mode/pylibs/pymode/lint.py
@@ -0,0 +1,64 @@
+import locale
+
+from pylama.main import run
+
+from .interface import get_option, get_var, get_current_buffer, command
+from .queue import add_task
+
+
# Force the C locale for character classification so checker output is
# predictable; tolerate platforms where setlocale is unavailable.
try:
    locale.setlocale(locale.LC_CTYPE, "C")
except AttributeError:
    pass
+
+
def check_file():
    """Queue an asynchronous lint run for the current buffer.

    Reads the checker list and the ignore/select code lists from the
    vim options, then schedules `run_checkers` on the task queue with
    `parse_result` as the completion callback.
    """
    checkers = get_option('lint_checker').split(',')

    # Merge buffer-local and global lists, dropping the empty entries
    # that splitting an empty string produces.
    ignore = set([
        i for i in (
            get_option('lint_ignore').split(',') +
            get_var('lint_ignore').split(','))
        if i
    ])
    # BUG FIX: this comprehension previously filtered on `i` — the
    # variable leaked from the `ignore` comprehension above (Python 2
    # list comps leak their loop variable) — so `select` was kept or
    # emptied based on the last *ignore* entry instead of each `s`.
    select = set([
        s for s in (
            get_option('lint_select').split(',') +
            get_var('lint_select').split(','))
        if s
    ])

    buffer = get_current_buffer()
    complexity = int(get_option('lint_mccabe_complexity') or 0)

    add_task(run_checkers, checkers=checkers, ignore=ignore,
             title='Code checking',
             callback=parse_result,
             buffer=buffer,
             select=select,
             complexity=complexity)
+
+
def run_checkers(task=None, checkers=None, ignore=None,
                 buffer=None, select=None, complexity=None):
    """Run pylama over the buffer's file; store the result on `task` if given."""

    # when scheduled through the queue, prefer the task's buffer
    buffer = (task and task.buffer) or buffer
    filename = buffer.name
    result = []

    pylint_options = '--rcfile={0} -r n'.format(get_var('lint_config')).split()

    result = run(filename, ignore=ignore, select=select, linters=checkers,
                 pylint=pylint_options, complexity=complexity)

    if task:
        # mark the task finished so check_task() delivers the result
        task.result = result
        task.finished = True
        task.done = 100
+
+
def parse_result(result, bnum):
    # Strip u'' prefixes from repr() so the list parses as vimscript,
    # then hand the quickfix list to the vim side for buffer `bnum`.
    command(('let g:qf_list = {0}'.format(repr(result)).replace('\': u', '\': ')))
    command('call pymode#lint#Parse({0})'.format(bnum))
+
+# pymode:lint_ignore=W0622
diff --git a/.vim/bundle/python-mode/pylibs/pymode/queue.py b/.vim/bundle/python-mode/pylibs/pymode/queue.py
@@ -0,0 +1,60 @@
+import threading
+from .interface import show_message
+import time
+
+
class Task(threading.Thread):
    """Background worker thread that runs a checker and keeps its result."""

    def __init__(self, buffer, callback=None, title=None, *args, **kwargs):
        self.buffer = buffer
        self._stop = threading.Event()
        self.result = None
        self.callback = callback
        self.done = 0            # progress percentage shown to the user
        self.finished = False    # set by the target once the result is ready
        self.title = title
        threading.Thread.__init__(self, *args, **kwargs)

    def run(self):
        " Run tasks. "
        # Invoke the target with this task injected as `task=`; the
        # mangled _Thread__* names are Python 2 Thread internals holding
        # the constructor's target/args/kwargs.
        self._Thread__target(task=self, *self._Thread__args, **self._Thread__kwargs)

        # Wait for result parsing
        while not self.stopped():
            time.sleep(.2)

    def stop(self):
        " Stop task. "
        self._stop.set()

    def stopped(self):
        # True once stop() was called (result consumed or task cancelled)
        return self._stop.isSet()
+
+
def stop_queue():
    " Stop all tasks. "
    # Snapshot the running Task threads, then signal each to stop.
    running = [thread for thread in threading.enumerate()
               if isinstance(thread, Task)]
    for task in running:
        task.stop()
        show_message('%s stopped.' % task.title)
+
+
def add_task(target, callback=None, buffer=None, title=None, *args, **kwargs):
    " Add all tasks. "

    # Daemonized so a pending check never blocks vim from exiting.
    task = Task(buffer, title=title, target=target, callback=callback, args=args, kwargs=kwargs)
    task.daemon = True
    task.start()

    show_message('%s started.' % task.title)
+
+
def check_task():
    " Check tasks for result. "
    # Polled from the vim side: deliver finished results through the
    # task callback, otherwise just report progress.
    for thread in threading.enumerate():
        if isinstance(thread, Task):
            if thread.finished:
                thread.stop()
                thread.callback(thread.result, thread.buffer.number)
            else:
                show_message('%s %s%%' % (thread.title, thread.done))
diff --git a/.vim/bundle/python-mode/pylibs/rope/__init__.py b/.vim/bundle/python-mode/pylibs/rope/__init__.py
@@ -0,0 +1,17 @@
+"""rope, a python refactoring library"""
+
+INFO = __doc__
+VERSION = '0.9.4'
+COPYRIGHT = """\
+Copyright (C) 2006-2012 Ali Gholami Rudi
+Copyright (C) 2009-2012 Anton Gritsay
+
+This program is free software; you can redistribute it and/or modify it
+under the terms of GNU General Public License as published by the
+Free Software Foundation; either version 2 of the license, or (at your
+opinion) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details."""
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/__init__.py b/.vim/bundle/python-mode/pylibs/rope/base/__init__.py
@@ -0,0 +1,8 @@
+"""Base rope package
+
+This package contains rope core modules that are used by other modules
+and packages.
+
+"""
+
+__all__ = ['project', 'libutils', 'exceptions']
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/arguments.py b/.vim/bundle/python-mode/pylibs/rope/base/arguments.py
@@ -0,0 +1,109 @@
+import rope.base.evaluate
+from rope.base import ast
+
+
class Arguments(object):
    """A class for evaluating parameters passed to a function

    You can use the `create_arguments` factory. It handles implicit
    first arguments.

    """

    def __init__(self, args, scope):
        self.args = args      # AST argument nodes (may include ast.keyword)
        self.scope = scope    # scope in which the argument nodes are evaluated
        self.instance = None

    def get_arguments(self, parameters):
        """Return the evaluated object for each parameter slot (or None)."""
        result = []
        for pyname in self.get_pynames(parameters):
            if pyname is None:
                result.append(None)
            else:
                result.append(pyname.get_object())
        return result

    def get_pynames(self, parameters):
        """Map call arguments onto `parameters`, honoring keyword arguments."""
        result = [None] * max(len(parameters), len(self.args))
        for index, arg in enumerate(self.args):
            if isinstance(arg, ast.keyword) and arg.arg in parameters:
                # keyword argument: place it at its parameter's slot
                result[parameters.index(arg.arg)] = self._evaluate(arg.value)
            else:
                result[index] = self._evaluate(arg)
        return result

    def get_instance_pyname(self):
        # the implicit `self` is the first argument when present
        if self.args:
            return self._evaluate(self.args[0])

    def _evaluate(self, ast_node):
        return rope.base.evaluate.eval_node(self.scope, ast_node)
+
+
def create_arguments(primary, pyfunction, call_node, scope):
    """A factory for creating `Arguments`"""
    args = list(call_node.args)
    args.extend(call_node.keywords)
    called = call_node.func
    # XXX: Handle constructors
    if _is_method_call(primary, pyfunction) and \
       isinstance(called, ast.Attribute):
        # bound method call: prepend the receiver as the implicit `self`
        args.insert(0, called.value)
    return Arguments(args, scope)
+
+
class ObjectArguments(object):
    """Arguments wrapper around an already-resolved list of pynames."""

    def __init__(self, pynames):
        self.pynames = pynames

    def get_arguments(self, parameters):
        """Return the object behind each pyname; `None` entries stay `None`."""
        return [pyname.get_object() if pyname is not None else None
                for pyname in self.pynames]

    def get_pynames(self, parameters):
        # already resolved; `parameters` is ignored
        return self.pynames

    def get_instance_pyname(self):
        # the receiver is the first pyname
        return self.pynames[0]
class MixedArguments(object):
    """Prepends a known pyname (e.g. the implicit `self`) to wrapped arguments."""

    def __init__(self, pyname, arguments, scope):
        """`arguments` is an instance of `Arguments`"""
        # NOTE(review): `scope` is accepted but never stored or used.
        self.pyname = pyname
        self.args = arguments

    def get_pynames(self, parameters):
        # the first parameter is bound to `self.pyname`; the rest come
        # from the wrapped arguments object
        return [self.pyname] + self.args.get_pynames(parameters[1:])

    def get_arguments(self, parameters):
        result = []
        for pyname in self.get_pynames(parameters):
            if pyname is None:
                result.append(None)
            else:
                result.append(pyname.get_object())
        return result

    def get_instance_pyname(self):
        return self.pyname
+
+
def _is_method_call(primary, pyfunction):
    """Whether calling `pyfunction` on `primary` is a (bound) method call."""
    if primary is None:
        return False
    pyobject = primary.get_object()
    # user-defined method: instance of a PyClass calling a PyFunction
    # that is itself defined inside a PyClass
    if isinstance(pyobject.get_type(), rope.base.pyobjects.PyClass) and \
       isinstance(pyfunction, rope.base.pyobjects.PyFunction) and \
       isinstance(pyfunction.parent, rope.base.pyobjects.PyClass):
        return True
    # builtin method called on an instance of any abstract class
    if isinstance(pyobject.get_type(), rope.base.pyobjects.AbstractClass) and \
       isinstance(pyfunction, rope.base.builtins.BuiltinFunction):
        return True
    return False
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/ast.py b/.vim/bundle/python-mode/pylibs/rope/base/ast.py
@@ -0,0 +1,71 @@
+import _ast
+from _ast import *
+
+from rope.base import fscommands
+
+
def parse(source, filename='<string>'):
    """Parse `source` into an `_ast` tree, normalizing newlines (Python 2)."""
    # NOTE: the raw string should be given to `compile` function
    if isinstance(source, unicode):
        source = fscommands.unicode_to_file_data(source)
    if '\r' in source:
        # normalize CRLF/CR to LF; compile() rejects bare carriage returns
        source = source.replace('\r\n', '\n').replace('\r', '\n')
    if not source.endswith('\n'):
        source += '\n'
    try:
        return compile(source, filename, 'exec', _ast.PyCF_ONLY_AST)
    except (TypeError, ValueError), e:
        # re-raise any parse problem uniformly as a SyntaxError
        error = SyntaxError()
        error.lineno = 1
        error.filename = filename
        error.msg = str(e)
        raise error
+
+
+def walk(node, walker):
+ """Walk the syntax tree"""
+ method_name = '_' + node.__class__.__name__
+ method = getattr(walker, method_name, None)
+ if method is not None:
+ if isinstance(node, _ast.ImportFrom) and node.module is None:
+ # In python < 2.7 ``node.module == ''`` for relative imports
+ # but for python 2.7 it is None. Generalizing it to ''.
+ node.module = ''
+ return method(node)
+ for child in get_child_nodes(node):
+ walk(child, walker)
+
+
+def get_child_nodes(node):
+ if isinstance(node, _ast.Module):
+ return node.body
+ result = []
+ if node._fields is not None:
+ for name in node._fields:
+ child = getattr(node, name)
+ if isinstance(child, list):
+ for entry in child:
+ if isinstance(entry, _ast.AST):
+ result.append(entry)
+ if isinstance(child, _ast.AST):
+ result.append(child)
+ return result
+
+
def call_for_nodes(node, callback, recursive=False):
    """If callback returns `True` the child nodes are skipped"""
    result = callback(node)
    if recursive and not result:
        # descend only while the callback keeps returning a falsy value
        for child in get_child_nodes(node):
            call_for_nodes(child, callback, recursive)
+
+
+def get_children(node):
+ result = []
+ if node._fields is not None:
+ for name in node._fields:
+ if name in ['lineno', 'col_offset']:
+ continue
+ child = getattr(node, name)
+ result.append(child)
+ return result
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/astutils.py b/.vim/bundle/python-mode/pylibs/rope/base/astutils.py
@@ -0,0 +1,61 @@
+from rope.base import ast
+
+
def get_name_levels(node):
    """Return a list of ``(name, level)`` tuples for assigned names

    The `level` is `None` for simple assignments and is a list of
    numbers for tuple assignments for example in::

       a, (b, c) = x

    The levels for `a` is ``[0]``, for `b` is ``[1, 0]`` and for
    `c` is ``[1, 1]``.

    """
    visitor = _NodeNameCollector()
    ast.walk(node, visitor)
    return visitor.names
+
+
class _NodeNameCollector(object):
    """AST walker collecting assigned names with their tuple-nesting levels."""

    def __init__(self, levels=None):
        self.names = []
        self.levels = levels    # index path into nested tuples, or None at top
        self.index = 0          # position of the next child at this level

    def _add_node(self, node):
        new_levels = []
        if self.levels is not None:
            new_levels = list(self.levels)
            new_levels.append(self.index)
        self.index += 1
        self._added(node, new_levels)

    def _added(self, node, levels):
        # only plain names carry an `id`; nodes without one (attributes,
        # subscripts, slices) consume an index slot but add no name
        if hasattr(node, 'id'):
            self.names.append((node.id, levels))

    def _Name(self, node):
        self._add_node(node)

    def _Tuple(self, node):
        # recurse with a nested collector so inner names get deeper levels
        new_levels = []
        if self.levels is not None:
            new_levels = list(self.levels)
            new_levels.append(self.index)
        self.index += 1
        visitor = _NodeNameCollector(new_levels)
        for child in ast.get_child_nodes(node):
            ast.walk(child, visitor)
        self.names.extend(visitor.names)

    def _Subscript(self, node):
        self._add_node(node)

    def _Attribute(self, node):
        self._add_node(node)

    def _Slice(self, node):
        self._add_node(node)
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/builtins.py b/.vim/bundle/python-mode/pylibs/rope/base/builtins.py
@@ -0,0 +1,767 @@
+"""This module trys to support builtin types and functions."""
+import inspect
+
+import rope.base.evaluate
+from rope.base import pynames, pyobjects, arguments, utils, ast
+
+
class BuiltinModule(pyobjects.AbstractModule):
    """A module object backed by a real, importable Python module."""

    def __init__(self, name, pycore=None, initial={}):
        # NOTE(review): `initial` is a shared mutable default; it is only
        # read (via dict.update below), so this is safe as written.
        super(BuiltinModule, self).__init__()
        self.name = name
        self.pycore = pycore
        self.initial = initial

    parent = None

    def get_attributes(self):
        return self.attributes

    def get_doc(self):
        if self.module:
            return self.module.__doc__

    def get_name(self):
        # 'a.b.c' -> 'c'
        return self.name.split('.')[-1]

    @property
    @utils.saveit
    def attributes(self):
        # reflected attributes of the real module, overridden by
        # `initial`, plus any builtin submodules pycore knows about;
        # computed once thanks to utils.saveit
        result = _object_attributes(self.module, self)
        result.update(self.initial)
        if self.pycore is not None:
            submodules = self.pycore._builtin_submodules(self.name)
            for name, module in submodules.iteritems():
                result[name] = rope.base.builtins.BuiltinName(module)
        return result

    @property
    @utils.saveit
    def module(self):
        """The imported module, or None when it cannot be imported."""
        try:
            result = __import__(self.name)
            # __import__('a.b') returns 'a'; walk down to the leaf module
            for token in self.name.split('.')[1:]:
                result = getattr(result, token, None)
            return result
        except ImportError:
            return
+
+
class _BuiltinElement(object):
    """Mixin holding the underlying CPython object for builtin wrappers."""

    def __init__(self, builtin, parent=None):
        self.builtin = builtin
        self._parent = parent

    def get_doc(self):
        if self.builtin:
            return getattr(self.builtin, '__doc__', None)

    def get_name(self):
        if self.builtin:
            return getattr(self.builtin, '__name__', None)

    @property
    def parent(self):
        # fall back to the module-level `builtins` module object when no
        # explicit parent was supplied
        if self._parent is None:
            return builtins
        return self._parent
+
+
class BuiltinClass(_BuiltinElement, pyobjects.AbstractClass):
    """A class wrapper over a real builtin type (list, dict, str, ...)."""

    def __init__(self, builtin, attributes, parent=None):
        _BuiltinElement.__init__(self, builtin, parent)
        pyobjects.AbstractClass.__init__(self)
        self.initial = attributes

    @utils.saveit
    def get_attributes(self):
        # reflected attributes overridden by the explicit ones; cached
        result = _object_attributes(self.builtin, self)
        result.update(self.initial)
        return result
+
+
class BuiltinFunction(_BuiltinElement, pyobjects.AbstractFunction):
    """A builtin function whose result is fixed or computed per call site."""

    def __init__(self, returned=None, function=None, builtin=None,
                 argnames=[], parent=None):
        # NOTE(review): `argnames=[]` is a shared mutable default; it is
        # never mutated here, so it behaves like a constant.
        _BuiltinElement.__init__(self, builtin, parent)
        pyobjects.AbstractFunction.__init__(self)
        self.argnames = argnames
        self.returned = returned
        self.function = function

    def get_returned_object(self, args):
        # `function` (if given) computes the result from the call
        # context; otherwise the static `returned` object is used
        if self.function is not None:
            return self.function(_CallContext(self.argnames, args))
        else:
            return self.returned

    def get_param_names(self, special_args=True):
        return self.argnames
+
+
class BuiltinUnknown(_BuiltinElement, pyobjects.PyObject):
    """Fallback wrapper for builtin objects that are not classes or routines."""

    def __init__(self, builtin):
        # Per the MRO, super() runs _BuiltinElement.__init__, so the
        # "unknown" pyobject lands in self.builtin first and is then
        # replaced by the real builtin on the next line.
        super(BuiltinUnknown, self).__init__(pyobjects.get_unknown())
        self.builtin = builtin
        self.type = pyobjects.get_unknown()

    def get_name(self):
        return getattr(type(self.builtin), '__name__', None)

    @utils.saveit
    def get_attributes(self):
        # cached reflection over the wrapped object
        return _object_attributes(self.builtin, self)
+
+
def _object_attributes(obj, parent):
    """Reflect `obj` into a dict of attribute name -> BuiltinName wrapper."""
    attributes = {}
    for name in dir(obj):
        if name == 'None':
            continue
        try:
            child = getattr(obj, name)
        except AttributeError:
            # descriptors are allowed to raise AttributeError
            # even if they are in dir()
            continue
        pyobject = None
        # wrap according to what the attribute actually is
        if inspect.isclass(child):
            pyobject = BuiltinClass(child, {}, parent=parent)
        elif inspect.isroutine(child):
            pyobject = BuiltinFunction(builtin=child, parent=parent)
        else:
            pyobject = BuiltinUnknown(builtin=child)
        attributes[name] = BuiltinName(pyobject)
    return attributes
+
+
+def _create_builtin_type_getter(cls):
+ def _get_builtin(*args):
+ if not hasattr(cls, '_generated'):
+ cls._generated = {}
+ if args not in cls._generated:
+ cls._generated[args] = cls(*args)
+ return cls._generated[args]
+ return _get_builtin
+
def _create_builtin_getter(cls):
    # Like _create_builtin_type_getter, but wraps the memoized type in a
    # PyObject so the result represents an *instance* of the builtin type.
    type_getter = _create_builtin_type_getter(cls)
    def _get_builtin(*args):
        return pyobjects.PyObject(type_getter(*args))
    return _get_builtin
+
+
class _CallContext(object):
    """Call-site information handed to a BuiltinFunction's `function`."""

    def __init__(self, argnames, args):
        self.argnames = argnames   # declared parameter names
        self.args = args           # Arguments/ObjectArguments of the call

    def _get_scope_and_pyname(self, pyname):
        """Locate the scope and name under which `pyname` is defined."""
        if pyname is not None and isinstance(pyname, pynames.AssignedName):
            pymodule, lineno = pyname.get_definition_location()
            if pymodule is None:
                return None, None
            if lineno is None:
                lineno = 1
            scope = pymodule.get_scope().get_inner_scope_for_line(lineno)
            name = None
            # search enclosing scopes for this exact pyname object
            while name is None and scope is not None:
                for current in scope.get_names():
                    if scope[current] is pyname:
                        name = current
                        break
                else:
                    scope = scope.parent
            return scope, name
        return None, None

    def get_argument(self, name):
        # evaluated object passed for parameter `name` (or None)
        if self.args:
            args = self.args.get_arguments(self.argnames)
            return args[self.argnames.index(name)]

    def get_pyname(self, name):
        # unevaluated pyname passed for parameter `name` (or None)
        if self.args:
            args = self.args.get_pynames(self.argnames)
            if name in self.argnames:
                return args[self.argnames.index(name)]

    def get_arguments(self, argnames):
        if self.args:
            return self.args.get_arguments(argnames)

    def get_pynames(self, argnames):
        if self.args:
            return self.args.get_pynames(argnames)

    def get_per_name(self):
        """Read the remembered per-name object for the call receiver."""
        if self.args is None:
            return None
        pyname = self.args.get_instance_pyname()
        scope, name = self._get_scope_and_pyname(pyname)
        if name is not None:
            pymodule = pyname.get_definition_location()[0]
            return pymodule.pycore.object_info.get_per_name(scope, name)
        return None

    def save_per_name(self, value):
        """Remember `value` as the per-name object for the call receiver."""
        if self.args is None:
            return None
        pyname = self.args.get_instance_pyname()
        scope, name = self._get_scope_and_pyname(pyname)
        if name is not None:
            pymodule = pyname.get_definition_location()[0]
            pymodule.pycore.object_info.save_per_name(scope, name, value)
+
+
class _AttributeCollector(object):
    """Helper building the attribute dict of a BuiltinClass."""

    def __init__(self, type):
        self.attributes = {}
        self.type = type

    def __call__(self, name, returned=None, function=None,
                 argnames=['self'], check_existence=True):
        # Register `name` as a BuiltinFunction attribute; a missing
        # member only raises when check_existence is set.
        try:
            builtin = getattr(self.type, name)
        except AttributeError:
            if check_existence:
                raise
            builtin=None
        self.attributes[name] = BuiltinName(
            BuiltinFunction(returned=returned, function=function,
                            argnames=argnames, builtin=builtin))

    def __setitem__(self, name, value):
        self.attributes[name] = value
+
+
class List(BuiltinClass):
    """Builtin `list`, optionally holding a known element type."""

    def __init__(self, holding=None):
        # `holding` is the inferred pyobject for the list's elements
        self.holding = holding
        collector = _AttributeCollector(list)

        collector('__iter__', function=self._iterator_get)
        collector('__new__', function=self._new_list)

        # Adding methods
        collector('append', function=self._list_add, argnames=['self', 'value'])
        collector('__setitem__', function=self._list_add,
                  argnames=['self', 'index', 'value'])
        collector('insert', function=self._list_add,
                  argnames=['self', 'index', 'value'])
        collector('extend', function=self._self_set,
                  argnames=['self', 'iterable'])

        # Getting methods
        collector('__getitem__', function=self._list_get)
        collector('pop', function=self._list_get)
        collector('__getslice__', function=self._self_get)

        super(List, self).__init__(list, collector.attributes)

    def _new_list(self, args):
        return _create_builtin(args, get_list)

    def _list_add(self, context):
        # remember the element type the first time a value is stored
        if self.holding is not None:
            return
        holding = context.get_argument('value')
        if holding is not None and holding != pyobjects.get_unknown():
            context.save_per_name(holding)

    def _self_set(self, context):
        # extending from an iterable: infer its element type
        if self.holding is not None:
            return
        iterable = context.get_pyname('iterable')
        holding = _infer_sequence_for_pyname(iterable)
        if holding is not None and holding != pyobjects.get_unknown():
            context.save_per_name(holding)

    def _list_get(self, context):
        # element type: statically known, else recalled per variable name
        if self.holding is not None:
            return self.holding
        return context.get_per_name()

    def _iterator_get(self, context):
        return get_iterator(self._list_get(context))

    def _self_get(self, context):
        return get_list(self._list_get(context))
+
+
+get_list = _create_builtin_getter(List)
+get_list_type = _create_builtin_type_getter(List)
+
+
class Dict(BuiltinClass):
    """Builtin `dict`, tracking inferred key and value types."""

    def __init__(self, keys=None, values=None):
        # `keys`/`values` are the inferred pyobjects for the entries
        self.keys = keys
        self.values = values
        # (an unused `item = get_tuple(...)` local was removed here)
        collector = _AttributeCollector(dict)
        collector('__new__', function=self._new_dict)
        collector('__setitem__', function=self._dict_add)
        collector('popitem', function=self._item_get)
        collector('pop', function=self._value_get)
        # BUG FIX: dict.get() returns a *value*; it was wired to _key_get.
        collector('get', function=self._value_get)
        collector('keys', function=self._key_list)
        collector('values', function=self._value_list)
        collector('items', function=self._item_list)
        collector('copy', function=self._self_get)
        collector('__getitem__', function=self._value_get)
        collector('__iter__', function=self._key_iter)
        collector('update', function=self._self_set)
        super(Dict, self).__init__(dict, collector.attributes)

    def _new_dict(self, args):
        def do_create(holding=None):
            # dict(seq) where seq yields (key, value) pairs
            if holding is None:
                return get_dict()
            type = holding.get_type()
            if isinstance(type, Tuple) and len(type.get_holding_objects()) == 2:
                return get_dict(*type.get_holding_objects())
        return _create_builtin(args, do_create)

    def _dict_add(self, context):
        if self.keys is not None:
            return
        # remember the (key, value) pair type on first assignment
        key, value = context.get_arguments(['self', 'key', 'value'])[1:]
        if key is not None and key != pyobjects.get_unknown():
            context.save_per_name(get_tuple(key, value))

    def _item_get(self, context):
        if self.keys is not None:
            return get_tuple(self.keys, self.values)
        item = context.get_per_name()
        if item is None or not isinstance(item.get_type(), Tuple):
            return get_tuple(self.keys, self.values)
        return item

    def _value_get(self, context):
        item = self._item_get(context).get_type()
        return item.get_holding_objects()[1]

    def _key_get(self, context):
        item = self._item_get(context).get_type()
        return item.get_holding_objects()[0]

    def _value_list(self, context):
        return get_list(self._value_get(context))

    def _key_list(self, context):
        return get_list(self._key_get(context))

    def _item_list(self, context):
        return get_list(self._item_get(context))

    def _value_iter(self, context):
        return get_iterator(self._value_get(context))

    def _key_iter(self, context):
        return get_iterator(self._key_get(context))

    def _item_iter(self, context):
        return get_iterator(self._item_get(context))

    def _self_get(self, context):
        item = self._item_get(context).get_type()
        key, value = item.get_holding_objects()[:2]
        return get_dict(key, value)

    def _self_set(self, context):
        if self.keys is not None:
            return
        new_dict = context.get_pynames(['self', 'd'])[1]
        if new_dict and isinstance(new_dict.get_object().get_type(), Dict):
            # updating from another dict: reuse its popitem() pair type
            args = arguments.ObjectArguments([new_dict])
            items = new_dict.get_object()['popitem'].\
                get_object().get_returned_object(args)
            context.save_per_name(items)
        else:
            # updating from an iterable of (key, value) pairs
            holding = _infer_sequence_for_pyname(new_dict)
            if holding is not None and isinstance(holding.get_type(), Tuple):
                context.save_per_name(holding)
+
+
+get_dict = _create_builtin_getter(Dict)
+get_dict_type = _create_builtin_type_getter(Dict)
+
+
class Tuple(BuiltinClass):
    """Builtin `tuple` holding a fixed sequence of known object types."""

    def __init__(self, *objects):
        self.objects = objects
        first = None
        if objects:
            first = objects[0]
        # NOTE(review): __getitem__/__iter__ are typed after the *first*
        # element only; heterogeneous tuples lose precision here.
        attributes = {
            '__getitem__': BuiltinName(BuiltinFunction(first)),
            '__getslice__': BuiltinName(BuiltinFunction(pyobjects.PyObject(self))),
            '__new__': BuiltinName(BuiltinFunction(function=self._new_tuple)),
            '__iter__': BuiltinName(BuiltinFunction(get_iterator(first)))}
        super(Tuple, self).__init__(tuple, attributes)

    def get_holding_objects(self):
        return self.objects

    def _new_tuple(self, args):
        return _create_builtin(args, get_tuple)
+
+
+get_tuple = _create_builtin_getter(Tuple)
+get_tuple_type = _create_builtin_type_getter(Tuple)
+
+
class Set(BuiltinClass):
    """Builtin `set`, tracking the inferred element type."""

    def __init__(self, holding=None):
        # `holding` is the inferred pyobject for the set's elements
        self.holding = holding
        collector = _AttributeCollector(set)
        collector('__new__', function=self._new_set)

        # methods returning a set of the same element type
        self_methods = ['copy', 'difference', 'intersection',
                        'symmetric_difference', 'union']
        for method in self_methods:
            collector(method, function=self._self_get)
        collector('add', function=self._set_add)
        # (a duplicate registration of 'update' was removed here)
        collector('update', function=self._self_set)
        collector('symmetric_difference_update', function=self._self_set)
        collector('difference_update', function=self._self_set)

        collector('pop', function=self._set_get)
        collector('__iter__', function=self._iterator_get)
        super(Set, self).__init__(set, collector.attributes)

    def _new_set(self, args):
        return _create_builtin(args, get_set)

    def _set_add(self, context):
        # remember the element type the first time a value is added
        if self.holding is not None:
            return
        holding = context.get_arguments(['self', 'value'])[1]
        if holding is not None and holding != pyobjects.get_unknown():
            context.save_per_name(holding)

    def _self_set(self, context):
        if self.holding is not None:
            return
        iterable = context.get_pyname('iterable')
        holding = _infer_sequence_for_pyname(iterable)
        if holding is not None and holding != pyobjects.get_unknown():
            context.save_per_name(holding)

    def _set_get(self, context):
        if self.holding is not None:
            return self.holding
        return context.get_per_name()

    def _iterator_get(self, context):
        return get_iterator(self._set_get(context))

    def _self_get(self, context):
        # BUG FIX: this returned get_list(...), a copy-paste from List;
        # `copy`/`union`/`difference`/... of a set yield a *set* holding
        # the same element type.
        return get_set(self._set_get(context))
+
+
+get_set = _create_builtin_getter(Set)
+get_set_type = _create_builtin_type_getter(Set)
+
+
class Str(BuiltinClass):
    """Builtin `str`; most methods simply return another string."""

    def __init__(self):
        self_object = pyobjects.PyObject(self)
        collector = _AttributeCollector(str)
        # check_existence=False: presumably because Python 2 str has no
        # __iter__ attribute to reflect — TODO confirm
        collector('__iter__', get_iterator(self_object), check_existence=False)

        # methods returning a string
        self_methods = ['__getitem__', '__getslice__', 'capitalize', 'center',
                        'decode', 'encode', 'expandtabs', 'join', 'ljust',
                        'lower', 'lstrip', 'replace', 'rjust', 'rstrip', 'strip',
                        'swapcase', 'title', 'translate', 'upper', 'zfill']
        for method in self_methods:
            collector(method, self_object)

        # methods returning a list of strings
        for method in ['rsplit', 'split', 'splitlines']:
            collector(method, get_list(self_object))

        super(Str, self).__init__(str, collector.attributes)

    def get_doc(self):
        return str.__doc__
+
+
+get_str = _create_builtin_getter(Str)
+get_str_type = _create_builtin_type_getter(Str)
+
+
class BuiltinName(pynames.PyName):
    """The pyname of a builtin object; it has no source location."""

    def __init__(self, pyobject):
        self.pyobject = pyobject

    def get_object(self):
        return self.pyobject

    def get_definition_location(self):
        # builtins are not defined in any module we can point at
        return (None, None)
+
class Iterator(pyobjects.AbstractClass):
    """Minimal iterator type: next() yields the held element type."""

    def __init__(self, holding=None):
        super(Iterator, self).__init__()
        self.holding = holding
        self.attributes = {
            'next': BuiltinName(BuiltinFunction(self.holding)),
            '__iter__': BuiltinName(BuiltinFunction(self))}

    def get_attributes(self):
        return self.attributes

    def get_returned_object(self, args):
        return self.holding
+
+get_iterator = _create_builtin_getter(Iterator)
+
+
class Generator(pyobjects.AbstractClass):
    """Generator object type: iterating it yields the held element type."""

    def __init__(self, holding=None):
        super(Generator, self).__init__()
        self.holding = holding
        self.attributes = {
            'next': BuiltinName(BuiltinFunction(self.holding)),
            '__iter__': BuiltinName(BuiltinFunction(get_iterator(self.holding))),
            # generator-protocol methods with unmodeled results
            'close': BuiltinName(BuiltinFunction()),
            'send': BuiltinName(BuiltinFunction()),
            'throw': BuiltinName(BuiltinFunction())}

    def get_attributes(self):
        return self.attributes

    def get_returned_object(self, args):
        return self.holding
+
+get_generator = _create_builtin_getter(Generator)
+
+
class File(BuiltinClass):
    """Builtin `file` type (Python 2) with string-typed read methods."""

    def __init__(self):
        self_object = pyobjects.PyObject(self)
        str_object = get_str()
        str_list = get_list(get_str())
        attributes = {}
        def add(name, returned=None, function=None):
            builtin = getattr(file, name, None)
            attributes[name] = BuiltinName(
                BuiltinFunction(returned=returned, function=function,
                                builtin=builtin))
        add('__iter__', get_iterator(str_object))
        # BUG FIX: `next`/`read`/`readline` each return a single string;
        # they were all wired to a *list* of strings. Only `readlines`
        # returns a list.
        for method in ['next', 'read', 'readline']:
            add(method, str_object)
        add('readlines', str_list)
        for method in ['close', 'flush', 'lineno', 'isatty', 'seek', 'tell',
                       'truncate', 'write', 'writelines']:
            add(method)
        super(File, self).__init__(file, attributes)
+
+
+get_file = _create_builtin_getter(File)
+get_file_type = _create_builtin_type_getter(File)
+
+
class Property(BuiltinClass):
    """The builtin `property` descriptor; only the getter is modeled."""

    def __init__(self, fget=None, fset=None, fdel=None, fdoc=None):
        # NOTE(review): `fset`/`fdel` are accepted but never stored.
        self._fget = fget
        self._fdoc = fdoc
        attributes = {
            'fget': BuiltinName(BuiltinFunction()),
            'fset': BuiltinName(pynames.UnboundName()),
            'fdel': BuiltinName(pynames.UnboundName()),
            '__new__': BuiltinName(BuiltinFunction(function=_property_function))}
        super(Property, self).__init__(property, attributes)

    def get_property_object(self, args):
        # the property's value is whatever the getter returns
        if isinstance(self._fget, pyobjects.AbstractFunction):
            return self._fget.get_returned_object(args)
+
+
def _property_function(args):
    # property(fget, fset, fdel, doc): only the getter is modeled
    parameters = args.get_arguments(['fget', 'fset', 'fdel', 'fdoc'])
    return pyobjects.PyObject(Property(parameters[0]))
+
+
class Lambda(pyobjects.AbstractFunction):
    """A lambda expression modeled as an anonymous function object."""

    def __init__(self, node, scope):
        super(Lambda, self).__init__()
        self.node = node
        self.arguments = node.args
        self.scope = scope

    def get_returned_object(self, args):
        # a lambda's body is a single expression: just evaluate it
        result = rope.base.evaluate.eval_node(self.scope, self.node.body)
        if result is not None:
            return result.get_object()
        else:
            return pyobjects.get_unknown()

    def get_module(self):
        return self.parent.get_module()

    def get_scope(self):
        return self.scope

    def get_kind(self):
        return 'lambda'

    def get_ast(self):
        return self.node

    def get_attributes(self):
        return {}

    def get_name(self):
        return 'lambda'

    def get_param_names(self, special_args=True):
        # Python 2 AST: plain parameters are Name nodes; vararg/kwarg
        # are stored as plain strings on the arguments node
        result = [node.id for node in self.arguments.args
                  if isinstance(node, ast.Name)]
        if self.arguments.vararg:
            result.append('*' + self.arguments.vararg)
        if self.arguments.kwarg:
            result.append('**' + self.arguments.kwarg)
        return result

    @property
    def parent(self):
        return self.scope.pyobject
+
+
class BuiltinObject(BuiltinClass):
    """Static model of the builtin `object` type (no extra attributes)"""

    def __init__(self):
        super(BuiltinObject, self).__init__(object, {})
+
+
class BuiltinType(BuiltinClass):
    """Static model of the builtin `type` type (no extra attributes)"""

    def __init__(self):
        super(BuiltinType, self).__init__(type, {})
+
+
def _infer_sequence_for_pyname(pyname):
    """Guess the element type held by the sequence `pyname` refers to

    Statically "calls" ``__iter__`` on the object and then ``next`` on
    the returned iterator.  Returns `None` when the element type cannot
    be inferred.
    """
    if pyname is None:
        return None
    seq = pyname.get_object()
    args = arguments.ObjectArguments([pyname])
    if '__iter__' in seq:
        obj = seq['__iter__'].get_object()
        if not isinstance(obj, pyobjects.AbstractFunction):
            return None
        # Renamed from `iter` to avoid shadowing the builtin of the
        # same name.
        iterator = obj.get_returned_object(args)
        if iterator is not None and 'next' in iterator:
            holding = iterator['next'].get_object().\
                get_returned_object(args)
            return holding
+
+
def _create_builtin(args, creator):
    """Call `creator` with the inferred element type of the first argument

    Falls back to a zero-argument `creator()` call when nothing can be
    inferred about the passed sequence.
    """
    passed = args.get_pynames(['sequence'])[0]
    holding = None if passed is None else _infer_sequence_for_pyname(passed)
    if holding is None:
        return creator()
    return creator(holding)
+
+
def _range_function(args):
    # `range()` returns a plain list (of unknown ints) on Python 2.
    return get_list()

def _reversed_function(args):
    # `reversed(seq)` iterates over `seq`'s inferred element type.
    return _create_builtin(args, get_iterator)

def _sorted_function(args):
    # `sorted(seq)` produces a list of `seq`'s inferred element type.
    return _create_builtin(args, get_list)
+
def _super_function(args):
    """Model ``super(type, self)``: the first superclass, or the instance"""
    passed_class, passed_self = args.get_arguments(['type', 'self'])
    if passed_self is None:
        # One-argument form: just hand back the class.
        return passed_class
    else:
        #pyclass = passed_self.get_type()
        pyclass = passed_class
        if isinstance(pyclass, pyobjects.AbstractClass):
            supers = pyclass.get_superclasses()
            if supers:
                return pyobjects.PyObject(supers[0])
        return passed_self
+
def _zip_function(args):
    """Model ``zip(seq)``: a list of tuples of the element types"""
    # Don't rebind the `args` parameter; keep the call-arguments object
    # distinct from the extracted pynames.
    pynames = args.get_pynames(['sequence'])
    objects = []
    for seq in pynames:
        if seq is None:
            holding = None
        else:
            holding = _infer_sequence_for_pyname(seq)
        objects.append(holding)
    # Renamed from `tuple` to avoid shadowing the builtin.
    holding_tuple = get_tuple(*objects)
    return get_list(holding_tuple)
+
def _enumerate_function(args):
    """Model ``enumerate(seq)``: an iterator of (index, element) tuples"""
    passed = args.get_pynames(['sequence'])[0]
    if passed is None:
        holding = None
    else:
        holding = _infer_sequence_for_pyname(passed)
    # `None` stands for the unknown index type.  Renamed from `tuple`
    # to avoid shadowing the builtin.
    index_tuple = get_tuple(None, holding)
    return get_iterator(index_tuple)
+
def _iter_function(args):
    """Model ``iter(seq)``: an iterator over `seq`'s element type"""
    passed = args.get_pynames(['sequence'])[0]
    holding = None if passed is None else _infer_sequence_for_pyname(passed)
    return get_iterator(holding)
+
def _input_function(args):
    # `raw_input()` always returns a string.
    return get_str()
+
+
# Names preloaded into the static `__builtin__` module model.  Note that
# `file`, `raw_input` and `unicode` exist on Python 2 only, which is the
# interpreter this module targets.
_initial_builtins = {
    'list': BuiltinName(get_list_type()),
    'dict': BuiltinName(get_dict_type()),
    'tuple': BuiltinName(get_tuple_type()),
    'set': BuiltinName(get_set_type()),
    'str': BuiltinName(get_str_type()),
    'file': BuiltinName(get_file_type()),
    'open': BuiltinName(get_file_type()),
    'unicode': BuiltinName(get_str_type()),
    'range': BuiltinName(BuiltinFunction(function=_range_function, builtin=range)),
    'reversed': BuiltinName(BuiltinFunction(function=_reversed_function, builtin=reversed)),
    'sorted': BuiltinName(BuiltinFunction(function=_sorted_function, builtin=sorted)),
    'super': BuiltinName(BuiltinFunction(function=_super_function, builtin=super)),
    'property': BuiltinName(BuiltinFunction(function=_property_function, builtin=property)),
    'zip': BuiltinName(BuiltinFunction(function=_zip_function, builtin=zip)),
    'enumerate': BuiltinName(BuiltinFunction(function=_enumerate_function, builtin=enumerate)),
    'object': BuiltinName(BuiltinObject()),
    'type': BuiltinName(BuiltinType()),
    'iter': BuiltinName(BuiltinFunction(function=_iter_function, builtin=iter)),
    'raw_input': BuiltinName(BuiltinFunction(function=_input_function, builtin=raw_input)),
    }

builtins = BuiltinModule('__builtin__', initial=_initial_builtins)
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/change.py b/.vim/bundle/python-mode/pylibs/rope/base/change.py
@@ -0,0 +1,448 @@
+import datetime
+import difflib
+import os
+import time
+import warnings
+
+import rope.base.fscommands
+from rope.base import taskhandle, exceptions, utils
+
+
class Change(object):
    """The base class for changes

    Rope refactorings return `Change` objects.  They can be previewed,
    committed or undone.
    """

    def do(self, job_set=None):
        """Perform the change

        .. note:: Do not use this directly.  Use `Project.do()` instead.
        """

    def undo(self, job_set=None):
        """Undo the change

        .. note:: Do not use this directly.  Use `History.undo()` instead.
        """

    def get_description(self):
        """Return the description of this change

        This can be used for previewing the changes.
        """
        return str(self)

    def get_changed_resources(self):
        """Return the list of resources that will be changed"""
        return []

    @property
    @utils.saveit
    def _operations(self):
        # Cached per-change helper for file-system operations; relies on
        # subclasses defining a `resource` attribute.
        return _ResourceOperations(self.resource.project)
+
+
class ChangeSet(Change):
    """A collection of `Change` objects

    This class holds a collection of changes.  This class provides
    these fields:

    * `changes`: the list of changes
    * `description`: the goal of these changes
    """

    def __init__(self, description, timestamp=None):
        self.changes = []
        self.description = description
        self.time = timestamp

    def do(self, job_set=taskhandle.NullJobSet()):
        """Perform all changes; roll back completed ones on failure"""
        try:
            done = []
            for change in self.changes:
                change.do(job_set)
                done.append(change)
            self.time = time.time()
        except Exception:
            # Undo whatever succeeded before re-raising.
            for change in done:
                change.undo()
            raise

    def undo(self, job_set=taskhandle.NullJobSet()):
        """Undo all changes in reverse order; redo completed on failure"""
        try:
            done = []
            for change in reversed(self.changes):
                change.undo(job_set)
                done.append(change)
        except Exception:
            for change in done:
                change.do()
            raise

    def add_change(self, change):
        """Append `change` to this set"""
        self.changes.append(change)

    def get_description(self):
        """Concatenate the descriptions of all contained changes"""
        result = [str(self) + ':\n\n\n']
        for change in self.changes:
            result.append(change.get_description())
            result.append('\n')
        return ''.join(result)

    def __str__(self):
        # Description plus a human-friendly timestamp ("today",
        # "yesterday", month/day, or full date) once performed.
        if self.time is not None:
            date = datetime.datetime.fromtimestamp(self.time)
            if date.date() == datetime.date.today():
                string_date = 'today'
            elif date.date() == (datetime.date.today() - datetime.timedelta(1)):
                string_date = 'yesterday'
            elif date.year == datetime.date.today().year:
                string_date = date.strftime('%b %d')
            else:
                string_date = date.strftime('%d %b, %Y')
            string_time = date.strftime('%H:%M:%S')
            string_time = '%s %s ' % (string_date, string_time)
            return self.description + ' - ' + string_time
        return self.description

    def get_changed_resources(self):
        result = set()
        for change in self.changes:
            result.update(change.get_changed_resources())
        return result
+
+
def _handle_job_set(function):
    """A decorator for handling `taskhandle.JobSet`\s

    A decorator for handling `taskhandle.JobSet`\s for `do` and `undo`
    methods of `Change`\s.
    """
    def call(self, job_set=taskhandle.NullJobSet()):
        # Report start/finish of this change on the job set; the wrapped
        # method itself takes no `job_set` argument.
        job_set.started_job(str(self))
        function(self)
        job_set.finished_job()
    return call
+
+
class ChangeContents(Change):
    """A class to change the contents of a file

    Fields:

    * `resource`: The `rope.base.resources.File` to change
    * `new_contents`: What to write in the file
    """

    def __init__(self, resource, new_contents, old_contents=None):
        self.resource = resource
        # IDEA: Only saving diffs; possible problems when undo/redoing
        self.new_contents = new_contents
        self.old_contents = old_contents

    @_handle_job_set
    def do(self):
        # Capture the current contents lazily so `undo` can restore them.
        if self.old_contents is None:
            self.old_contents = self.resource.read()
        self._operations.write_file(self.resource, self.new_contents)

    @_handle_job_set
    def undo(self):
        if self.old_contents is None:
            raise exceptions.HistoryError(
                'Undoing a change that is not performed yet!')
        self._operations.write_file(self.resource, self.old_contents)

    def __str__(self):
        return 'Change <%s>' % self.resource.path

    def get_description(self):
        """Return a unified diff between the old and new contents"""
        new = self.new_contents
        old = self.old_contents
        if old is None:
            if self.resource.exists():
                old = self.resource.read()
            else:
                old = ''
        result = difflib.unified_diff(
            old.splitlines(True), new.splitlines(True),
            'a/' + self.resource.path, 'b/' + self.resource.path)
        return ''.join(list(result))

    def get_changed_resources(self):
        return [self.resource]
+
+
class MoveResource(Change):
    """Move a resource to a new location

    Fields:

    * `resource`: The `rope.base.resources.Resource` to move
    * `new_resource`: The destination for move; It is the moved
      resource not the folder containing that resource.
    """

    def __init__(self, resource, new_location, exact=False):
        self.project = resource.project
        self.resource = resource
        # Unless `exact`, moving into an existing directory keeps the
        # resource's own name under that directory.
        if not exact:
            new_location = _get_destination_for_move(resource, new_location)
        if resource.is_folder():
            self.new_resource = self.project.get_folder(new_location)
        else:
            self.new_resource = self.project.get_file(new_location)

    @_handle_job_set
    def do(self):
        self._operations.move(self.resource, self.new_resource)

    @_handle_job_set
    def undo(self):
        # Moving back is the exact inverse of `do`.
        self._operations.move(self.new_resource, self.resource)

    def __str__(self):
        return 'Move <%s>' % self.resource.path

    def get_description(self):
        """Describe the move in git-style rename syntax"""
        return 'rename from %s\nrename to %s' % (self.resource.path,
                                                 self.new_resource.path)

    def get_changed_resources(self):
        return [self.resource, self.new_resource]
+
+
class CreateResource(Change):
    """A class to create a resource

    Fields:

    * `resource`: The resource to create
    """

    def __init__(self, resource):
        self.resource = resource

    @_handle_job_set
    def do(self):
        self._operations.create(self.resource)

    @_handle_job_set
    def undo(self):
        # Creation is undone by removing the created resource.
        self._operations.remove(self.resource)

    def __str__(self):
        return 'Create Resource <%s>' % (self.resource.path)

    def get_description(self):
        return 'new file %s' % (self.resource.path)

    def get_changed_resources(self):
        return [self.resource]

    def _get_child_path(self, parent, name):
        # Helper shared by the `CreateFile`/`CreateFolder` subclasses.
        if parent.path == '':
            return name
        else:
            return parent.path + '/' + name
+
+
class CreateFolder(CreateResource):
    """A class to create a folder

    See docs for `CreateResource`.
    """

    def __init__(self, parent, name):
        path = self._get_child_path(parent, name)
        folder = parent.project.get_folder(path)
        super(CreateFolder, self).__init__(folder)
+
+
class CreateFile(CreateResource):
    """A class to create a file

    See docs for `CreateResource`.
    """

    def __init__(self, parent, name):
        path = self._get_child_path(parent, name)
        new_file = parent.project.get_file(path)
        super(CreateFile, self).__init__(new_file)
+
+
class RemoveResource(Change):
    """A class to remove a resource

    Fields:

    * `resource`: The resource to be removed
    """

    def __init__(self, resource):
        self.resource = resource

    @_handle_job_set
    def do(self):
        self._operations.remove(self.resource)

    # TODO: Undoing remove operations
    @_handle_job_set
    def undo(self):
        raise NotImplementedError(
            'Undoing `RemoveResource` is not implemented yet.')

    def __str__(self):
        return 'Remove <%s>' % (self.resource.path)

    def get_changed_resources(self):
        return [self.resource]
+
+
def count_changes(change):
    """Counts the number of basic changes a `Change` will make

    `ChangeSet`\s are counted recursively; every other change counts
    as a single basic change.
    """
    if isinstance(change, ChangeSet):
        return sum(count_changes(child) for child in change.changes)
    return 1
+
def create_job_set(task_handle, change):
    """Create a job set sized to the number of basic changes in `change`"""
    job_count = count_changes(change)
    return task_handle.create_jobset(str(change), job_count)
+
+
class _ResourceOperations(object):
    """Perform raw file-system operations and notify project observers

    Resources that the project ignores bypass the project's (possibly
    VCS-aware) `fscommands` and use plain file-system commands instead.
    """

    def __init__(self, project):
        self.project = project
        self.fscommands = project.fscommands
        self.direct_commands = rope.base.fscommands.FileSystemCommands()

    def _get_fscommands(self, resource):
        # Ignored resources must not be registered with a VCS.
        if self.project.is_ignored(resource):
            return self.direct_commands
        return self.fscommands

    def write_file(self, resource, contents):
        """Write unicode `contents` to `resource` and notify observers"""
        data = rope.base.fscommands.unicode_to_file_data(contents)
        fscommands = self._get_fscommands(resource)
        fscommands.write(resource.real_path, data)
        for observer in list(self.project.observers):
            observer.resource_changed(resource)

    def move(self, resource, new_resource):
        """Move `resource` to `new_resource` and notify observers"""
        fscommands = self._get_fscommands(resource)
        fscommands.move(resource.real_path, new_resource.real_path)
        for observer in list(self.project.observers):
            observer.resource_moved(resource, new_resource)

    def create(self, resource):
        """Create the file or folder for `resource` and notify observers"""
        if resource.is_folder():
            self._create_resource(resource.path, kind='folder')
        else:
            self._create_resource(resource.path)
        for observer in list(self.project.observers):
            observer.resource_created(resource)

    def remove(self, resource):
        """Remove `resource` from disk and notify observers"""
        fscommands = self._get_fscommands(resource)
        fscommands.remove(resource.real_path)
        for observer in list(self.project.observers):
            observer.resource_removed(resource)

    def _create_resource(self, file_name, kind='file'):
        # Validate that the target does not already exist and that its
        # parent folder does, before delegating to fscommands.
        resource_path = self.project._get_resource_path(file_name)
        if os.path.exists(resource_path):
            raise exceptions.RopeError('Resource <%s> already exists'
                                       % resource_path)
        resource = self.project.get_file(file_name)
        if not resource.parent.exists():
            raise exceptions.ResourceNotFoundError(
                'Parent folder of <%s> does not exist' % resource.path)
        fscommands = self._get_fscommands(resource)
        try:
            if kind == 'file':
                fscommands.create_file(resource_path)
            else:
                fscommands.create_folder(resource_path)
        except IOError as e:
            # `except IOError, e` was Python-2-only syntax; `as` works on
            # Python 2.6+ and 3.x alike.
            raise exceptions.RopeError(e)
+
+
def _get_destination_for_move(resource, destination):
    """Resolve `destination`: moving onto an existing directory moves into it"""
    dest_path = resource.project._get_resource_path(destination)
    if not os.path.isdir(dest_path):
        return destination
    if destination == '':
        return resource.name
    return destination + '/' + resource.name
+
+
class ChangeToData(object):
    """Convert `Change` objects into plain picklable tuples

    The inverse of `DataToChange`; dispatch is based on the change's
    class name.
    """

    def convertChangeSet(self, change):
        description = change.description
        changes = []
        for child in change.changes:
            changes.append(self(child))
        return (description, changes, change.time)

    def convertChangeContents(self, change):
        return (change.resource.path, change.new_contents, change.old_contents)

    def convertMoveResource(self, change):
        return (change.resource.path, change.new_resource.path)

    def convertCreateResource(self, change):
        return (change.resource.path, change.resource.is_folder())

    def convertRemoveResource(self, change):
        return (change.resource.path, change.resource.is_folder())

    def __call__(self, change):
        change_type = type(change)
        # `CreateFolder`/`CreateFile` serialize like their base class.
        if change_type in (CreateFolder, CreateFile):
            change_type = CreateResource
        method = getattr(self, 'convert' + change_type.__name__)
        return (change_type.__name__, method(change))
+
+
class DataToChange(object):
    """Rebuild `Change` objects from tuples produced by `ChangeToData`"""

    def __init__(self, project):
        self.project = project

    def makeChangeSet(self, description, changes, time=None):
        result = ChangeSet(description, time)
        for child in changes:
            result.add_change(self(child))
        return result

    def makeChangeContents(self, path, new_contents, old_contents):
        resource = self.project.get_file(path)
        return ChangeContents(resource, new_contents, old_contents)

    def makeMoveResource(self, old_path, new_path):
        # `exact=True`: the stored path is already the final destination.
        resource = self.project.get_file(old_path)
        return MoveResource(resource, new_path, exact=True)

    def makeCreateResource(self, path, is_folder):
        if is_folder:
            resource = self.project.get_folder(path)
        else:
            resource = self.project.get_file(path)
        return CreateResource(resource)

    def makeRemoveResource(self, path, is_folder):
        if is_folder:
            resource = self.project.get_folder(path)
        else:
            resource = self.project.get_file(path)
        return RemoveResource(resource)

    def __call__(self, data):
        # `data` is a ('ClassName', args_tuple) pair.
        method = getattr(self, 'make' + data[0])
        return method(*data[1])
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/codeanalyze.py b/.vim/bundle/python-mode/pylibs/rope/base/codeanalyze.py
@@ -0,0 +1,358 @@
+import bisect
+import re
+import token
+import tokenize
+
+
class ChangeCollector(object):
    """Collect (start, end, new_text) edits on `text` and apply them at once

    Each change replaces the slice ``text[start:end]`` with `new_text`.
    Changes are applied in offset order; overlapping changes are not
    supported.
    """

    def __init__(self, text):
        self.text = text
        self.changes = []

    def add_change(self, start, end, new_text=None):
        """Queue replacement of ``text[start:end]`` with `new_text`

        When `new_text` is `None` the original slice is kept (useful
        for marking a region without altering it).
        """
        if new_text is None:
            new_text = self.text[start:end]
        self.changes.append((start, end, new_text))

    def get_changed(self):
        """Return the edited text, or `None` if nothing would change"""
        if not self.changes:
            return None
        # Sort by (start, end).  A `key=` sort works on both Python 2
        # and 3, unlike the old `cmp`-based comparator, and orders the
        # changes identically.
        self.changes.sort(key=lambda change: change[:2])
        pieces = []
        last_changed = 0
        for start, end, text in self.changes:
            pieces.append(self.text[last_changed:start] + text)
            last_changed = end
        if last_changed < len(self.text):
            pieces.append(self.text[last_changed:])
        result = ''.join(pieces)
        if result != self.text:
            return result
+
+
class SourceLinesAdapter(object):
    """Present a source string through the ``Lines`` interface

    Line numbers are 1-based.  Building the line-start table is the
    expensive part, so create instances sparingly.
    """

    def __init__(self, source_code):
        self.code = source_code
        self.starts = None
        self._initialize_line_starts()

    def _initialize_line_starts(self):
        # starts[i] is the offset of line i + 1; a sentinel one past the
        # end of the code terminates the table.
        starts = [0]
        newline = self.code.find('\n')
        while newline != -1:
            starts.append(newline + 1)
            newline = self.code.find('\n', newline + 1)
        starts.append(len(self.code) + 1)
        self.starts = starts

    def get_line(self, lineno):
        """Return the text of 1-based `lineno` without its newline"""
        begin = self.starts[lineno - 1]
        end = self.starts[lineno] - 1
        return self.code[begin:end]

    def length(self):
        """Number of lines in the source"""
        return len(self.starts) - 1

    def get_line_number(self, offset):
        """Return the 1-based line number containing character `offset`"""
        return bisect.bisect(self.starts, offset)

    def get_line_start(self, lineno):
        """Offset of the first character of `lineno`"""
        return self.starts[lineno - 1]

    def get_line_end(self, lineno):
        """Offset of the newline (or end of file) terminating `lineno`"""
        return self.starts[lineno] - 1
+
+
class ArrayLinesAdapter(object):
    """Expose a plain list of strings through the ``Lines`` interface"""

    def __init__(self, lines):
        self.lines = lines

    def get_line(self, line_number):
        # Lines are numbered from 1, as everywhere in this module.
        return self.lines[line_number - 1]

    def length(self):
        return len(self.lines)
+
+
class LinesToReadline(object):
    """Adapt a ``Lines`` object to the ``readline`` callable protocol

    Useful for feeding tokenize-style consumers, starting at 1-based
    line `start`.
    """

    def __init__(self, lines, start):
        self.lines = lines
        self.current = start

    def readline(self):
        """Return the next line with a trailing newline, or '' at EOF"""
        if self.current > self.lines.length():
            return ''
        line = self.lines.get_line(self.current)
        self.current += 1
        return line + '\n'

    def __call__(self):
        return self.readline()
+
+
class _CustomGenerator(object):
    """Hand-written scanner that finds logical-line regions

    Tracks string literals, bracket depth and backslash continuations to
    decide where each statement ends, without running the tokenizer.
    """

    def __init__(self, lines):
        self.lines = lines
        self.in_string = ''       # current quote delimiter ('' when outside a string)
        self.open_count = 0       # net count of open ([{ brackets
        self.continuation = False # last scanned line ended with a backslash

    def __call__(self):
        """Return a list of 1-based (start, end) logical-line regions"""
        size = self.lines.length()
        result = []
        i = 1
        while i <= size:
            # Skip blank lines between statements.
            while i <= size and not self.lines.get_line(i).strip():
                i += 1
            if i <= size:
                start = i
                # Consume lines until the statement is syntactically closed.
                while True:
                    line = self.lines.get_line(i)
                    self._analyze_line(line)
                    if not (self.continuation or self.open_count or
                            self.in_string) or i == size:
                        break
                    i += 1
                result.append((start, i))
            i += 1
        return result

    # The only characters that can affect statement continuation.
    _main_chars = re.compile(r'[\'|"|#|\\|\[|\]|\{|\}|\(|\)]')
    def _analyze_line(self, line):
        char = None
        for match in self._main_chars.finditer(line):
            char = match.group()
            i = match.start()
            if char in '\'"':
                # Enter or leave a (possibly triple-quoted) string,
                # ignoring backslash-escaped quotes.
                if not self.in_string:
                    self.in_string = char
                    if char * 3 == line[i:i + 3]:
                        self.in_string = char * 3
                elif self.in_string == line[i:i + len(self.in_string)] and \
                     not (i > 0 and line[i - 1] == '\\' and
                          not (i > 1 and line[i - 2] == '\\')):
                    self.in_string = ''
            if self.in_string:
                continue
            if char == '#':
                # Rest of the line is a comment.
                break
            if char in '([{':
                self.open_count += 1
            elif char in ')]}':
                self.open_count -= 1
        if line and char != '#' and line.endswith('\\'):
            self.continuation = True
        else:
            self.continuation = False
+
def custom_generator(lines):
    """Compute logical-line regions with the hand-written scanner"""
    generator = _CustomGenerator(lines)
    return generator()
+
+
class LogicalLineFinder(object):
    """Compute logical (possibly multi-line) statement regions

    Uses the `tokenize` module, restarted from an approximate block
    start, to map a physical line number to the logical line containing
    it.
    """

    def __init__(self, lines):
        self.lines = lines

    def logical_line_in(self, line_number):
        """Return the (start, end) logical line containing `line_number`"""
        indents = count_line_indents(self.lines.get_line(line_number))
        tries = 0
        while True:
            block_start = get_block_start(self.lines, line_number, indents)
            try:
                return self._block_logical_line(block_start, line_number)
            except IndentationError as e:
                # Tokenizing started mid-construct; retry from a
                # shallower block start, giving up after a few attempts.
                tries += 1
                if tries == 5:
                    raise e
                lineno = e.lineno + block_start - 1
                indents = count_line_indents(self.lines.get_line(lineno))

    def generate_starts(self, start_line=1, end_line=None):
        """Generate the start lines of logical lines in the given range"""
        for start, end in self.generate_regions(start_line, end_line):
            yield start

    def generate_regions(self, start_line=1, end_line=None):
        """Generate (start, end) logical-line regions

        Only regions starting within [start_line, end_line) are yielded.
        Tokenize errors (e.g. unbalanced brackets at EOF) silently end
        the generation.
        """
        # XXX: `block_start` should be at a better position!
        block_start = 1
        readline = LinesToReadline(self.lines, block_start)
        try:
            for start, end in self._logical_lines(readline):
                real_start = start + block_start - 1
                real_start = self._first_non_blank(real_start)
                if end_line is not None and real_start >= end_line:
                    break
                real_end = end + block_start - 1
                if real_start >= start_line:
                    yield (real_start, real_end)
        except tokenize.TokenError:
            pass

    def _block_logical_line(self, block_start, line_number):
        # Tokenize from `block_start` and translate the resulting region
        # back into absolute line numbers.
        readline = LinesToReadline(self.lines, block_start)
        shifted = line_number - block_start + 1
        region = self._calculate_logical(readline, shifted)
        start = self._first_non_blank(region[0] + block_start - 1)
        if region[1] is None:
            end = self.lines.length()
        else:
            end = region[1] + block_start - 1
        return start, end

    def _calculate_logical(self, readline, line_number):
        last_end = 1
        try:
            for start, end in self._logical_lines(readline):
                if line_number <= end:
                    return (start, end)
                last_end = end + 1
        except tokenize.TokenError as e:
            # The error position tells how far tokenizing got.
            current = e.args[1][0]
            return (last_end, max(last_end, current - 1))
        return (last_end, None)

    def _logical_lines(self, readline):
        """Yield (start, end) pairs relative to `readline`'s first line"""
        last_end = 1
        for current_token in tokenize.generate_tokens(readline):
            current = current_token[2][0]
            if current_token[0] == token.NEWLINE:
                yield (last_end, current)
                last_end = current + 1

    def _first_non_blank(self, line_number):
        # Skip blank and comment-only lines.
        current = line_number
        while current < self.lines.length():
            line = self.lines.get_line(current).strip()
            if line and not line.startswith('#'):
                return current
            current += 1
        return current
+
+
def tokenizer_generator(lines):
    """Compute logical-line regions using the tokenize-based finder"""
    finder = LogicalLineFinder(lines)
    return finder.generate_regions()
+
+
class CachingLogicalLineFinder(object):
    """Logical-line lookup backed by lazily built start/end tables

    ``starts[i]``/``ends[i]`` are `True` when 1-based line ``i`` begins
    or ends a logical line; `None` otherwise.
    """

    def __init__(self, lines, generate=custom_generator):
        self.lines = lines
        self._generate = generate  # region generator; defaults to the fast scanner

    _starts = None
    @property
    def starts(self):
        if self._starts is None:
            self._init_logicals()
        return self._starts

    _ends = None
    @property
    def ends(self):
        if self._ends is None:
            self._init_logicals()
        return self._ends

    def _init_logicals(self):
        """Should initialize _starts and _ends attributes"""
        size = self.lines.length() + 1
        self._starts = [None] * size
        self._ends = [None] * size
        for start, end in self._generate(self.lines):
            self._starts[start] = True
            self._ends[end] = True

    def logical_line_in(self, line_number):
        """Return the (start, end) logical line containing `line_number`"""
        start = line_number
        while start > 0 and not self.starts[start]:
            start -= 1
        if start == 0:
            # No start at or before this line; fall forward to the next.
            try:
                start = self.starts.index(True, line_number)
            except ValueError:
                return (line_number, line_number)
        return (start, self.ends.index(True, start))

    def generate_starts(self, start_line=1, end_line=None):
        """Yield start lines of logical lines in [start_line, end_line)"""
        if end_line is None:
            end_line = self.lines.length()
        for index in range(start_line, end_line):
            if self.starts[index]:
                yield index
+
+
def get_block_start(lines, lineno, maximum_indents=80):
    """Approximate block start

    Scan backwards from `lineno` for a suite-opening line whose
    indentation does not exceed `maximum_indents`.
    """
    pattern = get_block_start_patterns()
    for i in range(lineno, 0, -1):
        match = pattern.search(lines.get_line(i))
        if match is not None and \
           count_line_indents(lines.get_line(i)) <= maximum_indents:
            striped = match.string.lstrip()
            # Maybe we're in a list comprehension or generator expression
            # NOTE(review): precedence here is `(i > 1 and
            # startswith('if')) or startswith('for')`; a parenthesized
            # `or` of the two startswith calls may have been intended --
            # verify against upstream rope before changing.
            if i > 1 and striped.startswith('if') or striped.startswith('for'):
                # Look a few lines ahead for an unbalanced closing
                # bracket, which would mean this `if`/`for` is part of a
                # comprehension rather than a real block start.
                bracs = 0
                for j in range(i, min(i + 5, lines.length() + 1)):
                    for c in lines.get_line(j):
                        if c == '#':
                            break
                        if c in '[(':
                            bracs += 1
                        if c in ')]':
                            bracs -= 1
                        if bracs < 0:
                            break
                    if bracs < 0:
                        break
                if bracs < 0:
                    continue
            return i
    return 1
+
+
# Lazily compiled, module-wide cache for `get_block_start_patterns`.
_block_start_pattern = None

def get_block_start_patterns():
    """Return a cached multiline regex matching suite-opening lines

    Matches ``def``/``class``/``if``/``for``/... headers and bare
    ``try:``/``else:``/``finally:`` lines.
    """
    global _block_start_pattern
    if not _block_start_pattern:
        pattern = '^\\s*(((def|class|if|elif|except|for|while|with)\\s)|'\
                  '((try|else|finally|except)\\s*:))'
        _block_start_pattern = re.compile(pattern, re.M)
    return _block_start_pattern
+
+
def count_line_indents(line):
    """Return the indentation width of `line`

    Spaces count as one column and tabs as eight.  Lines containing
    nothing but whitespace report an indentation of zero.
    """
    width = 0
    for char in line:
        if char not in ' \t':
            return width
        width += 8 if char == '\t' else 1
    return 0
+
+
def get_string_pattern():
    """Return a regex source matching any python string literal

    Covers short and triple-quoted strings in both quote styles, with
    optional ``u``/``r`` prefixes.
    """
    prefix = r'(\b[uU]?[rR]?)?'
    triple = r'%s"""(\\.|"(?!"")|\\\n|[^"\\])*"""' % prefix
    single = r'%s"(\\.|[^"\\\n])*"' % prefix
    alternatives = [triple, triple.replace('"', "'"),
                    single, single.replace('"', "'")]
    return '|'.join(alternatives)
+
def get_comment_pattern():
    """Return a regex source matching a ``#`` comment to end of line"""
    return r'#[^\n]*'
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/default_config.py b/.vim/bundle/python-mode/pylibs/rope/base/default_config.py
@@ -0,0 +1,85 @@
+# The default ``config.py``
+
+
def set_prefs(prefs):
    """This function is called before opening the project

    `prefs` is mutated in place; each assignment below sets a default
    rope preference.
    """

    # Specify which files and folders to ignore in the project.
    # Changes to ignored resources are not added to the history and
    # VCSs. Also they are not returned in `Project.get_files()`.
    # Note that ``?`` and ``*`` match all characters but slashes.
    # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc'
    # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc'
    # '.svn': matches 'pkg/.svn' and all of its children
    # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o'
    # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o'
    prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject',
                                  '.hg', '.svn', '_svn', '.git']

    # Specifies which files should be considered python files. It is
    # useful when you have scripts inside your project. Only files
    # ending with ``.py`` are considered to be python files by
    # default.
    #prefs['python_files'] = ['*.py']

    # Custom source folders: By default rope searches the project
    # for finding source folders (folders that should be searched
    # for finding modules). You can add paths to that list. Note
    # that rope guesses project source folders correctly most of the
    # time; use this if you have any problems.
    # The folders should be relative to project root and use '/' for
    # separating folders regardless of the platform rope is running on.
    # 'src/my_source_folder' for instance.
    #prefs.add('source_folders', 'src')

    # You can extend python path for looking up modules
    #prefs.add('python_path', '~/python/')

    # Should rope save object information or not.
    prefs['save_objectdb'] = True
    prefs['compress_objectdb'] = False

    # If `True`, rope analyzes each module when it is being saved.
    prefs['automatic_soa'] = True
    # The depth of calls to follow in static object analysis
    prefs['soa_followed_calls'] = 0

    # If `False` when running modules or unit tests "dynamic object
    # analysis" is turned off. This makes them much faster.
    prefs['perform_doa'] = True

    # Rope can check the validity of its object DB when running.
    prefs['validate_objectdb'] = True

    # How many undos to hold?
    prefs['max_history_items'] = 32

    # Shows whether to save history across sessions.
    prefs['save_history'] = True
    prefs['compress_history'] = False

    # Set the number spaces used for indenting. According to
    # :PEP:`8`, it is best to use 4 spaces. Since most of rope's
    # unit-tests use 4 spaces it is more reliable, too.
    prefs['indent_size'] = 4

    # Builtin and c-extension modules that are allowed to be imported
    # and inspected by rope.
    prefs['extension_modules'] = []

    # Add all standard c-extensions to extension_modules list.
    prefs['import_dynload_stdmods'] = True

    # If `True` modules with syntax errors are considered to be empty.
    # The default value is `False`; When `False` syntax errors raise
    # `rope.base.exceptions.ModuleSyntaxError` exception.
    prefs['ignore_syntax_errors'] = False

    # If `True`, rope ignores unresolvable imports. Otherwise, they
    # appear in the importing namespace.
    prefs['ignore_bad_imports'] = False
+
+
def project_opened(project):
    """This function is called after opening the project

    Hook for user customization; the default implementation does
    nothing.
    """
    # Do whatever you like here!
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/evaluate.py b/.vim/bundle/python-mode/pylibs/rope/base/evaluate.py
@@ -0,0 +1,321 @@
+import rope.base.builtins
+import rope.base.pynames
+import rope.base.pyobjects
+from rope.base import ast, astutils, exceptions, pyobjects, arguments, worder
+
+
+BadIdentifierError = exceptions.BadIdentifierError
+
def eval_location(pymodule, offset):
    """Find the pyname at the offset"""
    primary, pyname = eval_location2(pymodule, offset)
    return pyname
+
+
def eval_location2(pymodule, offset):
    """Find the primary and pyname at offset"""
    finder = ScopeNameFinder(pymodule)
    return finder.get_primary_and_pyname_at(offset)
+
+
def eval_node(scope, node):
    """Evaluate a `ast.AST` node and return a PyName

    Return `None` if the expression cannot be evaluated.
    """
    primary, pyname = eval_node2(scope, node)
    return pyname
+
+
def eval_node2(scope, node):
    """Evaluate `node` in `scope`; return a (primary, pyname) pair."""
    visitor = StatementEvaluator(scope)
    ast.walk(node, visitor)
    return visitor.old_result, visitor.result
+
+
def eval_str(holding_scope, name):
    """Evaluate the string `name` in `holding_scope`; return its pyname."""
    primary, pyname = eval_str2(holding_scope, name)
    return pyname
+
+
def eval_str2(holding_scope, name):
    """Parse and evaluate `name`; return a (primary, pyname) pair.

    Raises `BadIdentifierError` when `name` cannot be parsed.
    """
    try:
        # Parenthesize so identifiers broken across lines
        # (e.g. 'a_var.\nattr') still parse as one expression.
        node = ast.parse('(%s)' % name)
    except SyntaxError:
        raise BadIdentifierError(
            'Not a resolvable python identifier selected.')
    return eval_node2(holding_scope, node)
+
+
class ScopeNameFinder(object):
    """Locate the (primary, pyname) referenced at a character offset."""

    def __init__(self, pymodule):
        self.module_scope = pymodule.get_scope()
        self.lines = pymodule.lines
        # Second argument enables extra handling in the worder --
        # TODO confirm its exact meaning against `worder.Worder`.
        self.worder = worder.Worder(pymodule.source_code, True)

    def _is_defined_in_class_body(self, holding_scope, offset, lineno):
        # Case 1: the name sits on a class header line (the class name
        # itself or a base in the header).
        if lineno == holding_scope.get_start() and \
                holding_scope.parent is not None and \
                holding_scope.parent.get_kind() == 'Class' and \
                self.worder.is_a_class_or_function_name_in_header(offset):
            return True
        # Case 2: the name is assigned directly in the class body
        # (i.e. a class attribute).
        if lineno != holding_scope.get_start() and \
                holding_scope.get_kind() == 'Class' and \
                self.worder.is_name_assigned_in_class_body(offset):
            return True
        return False

    def _is_function_name_in_function_header(self, scope, offset, lineno):
        # True when `offset` is the function's own name in its `def` line.
        if scope.get_start() <= lineno <= scope.get_body_start() and \
                scope.get_kind() == 'Function' and \
                self.worder.is_a_class_or_function_name_in_header(offset):
            return True
        return False

    def get_pyname_at(self, offset):
        """Return only the pyname at `offset` (the primary is dropped)."""
        return self.get_primary_and_pyname_at(offset)[1]

    def get_primary_and_pyname_at(self, offset):
        """Resolve the name at `offset`; return a (primary, pyname) pair.

        Special syntactic contexts (keyword arguments, class bodies,
        function headers, `from ...` imports) are handled explicitly
        before falling back to evaluating the primary expression in the
        scope that holds the offset.
        """
        lineno = self.lines.get_line_number(offset)
        holding_scope = self.module_scope.get_inner_scope_for_line(lineno)
        # function keyword parameter
        if self.worder.is_function_keyword_parameter(offset):
            keyword_name = self.worder.get_word_at(offset)
            pyobject = self.get_enclosing_function(offset)
            if isinstance(pyobject, pyobjects.PyFunction):
                return (None, pyobject.get_parameters().get(keyword_name, None))
        # class body
        if self._is_defined_in_class_body(holding_scope, offset, lineno):
            class_scope = holding_scope
            # On the header line the class itself is the holding scope's
            # parent's attribute, not its own.
            if lineno == holding_scope.get_start():
                class_scope = holding_scope.parent
            name = self.worder.get_primary_at(offset).strip()
            try:
                return (None, class_scope.pyobject[name])
            except rope.base.exceptions.AttributeNotFoundError:
                return (None, None)
        # function header
        if self._is_function_name_in_function_header(holding_scope, offset, lineno):
            name = self.worder.get_primary_at(offset).strip()
            return (None, holding_scope.parent[name])
        # from statement module
        if self.worder.is_from_statement_module(offset):
            module = self.worder.get_primary_at(offset)
            module_pyname = self._find_module(module)
            return (None, module_pyname)
        # `from mod import name as alias`: resolve the original name.
        if self.worder.is_from_aliased(offset):
            name = self.worder.get_from_aliased(offset)
        else:
            name = self.worder.get_primary_at(offset)
        return eval_str2(holding_scope, name)

    def get_enclosing_function(self, offset):
        """Return the function object of the call surrounding `offset`.

        Classes resolve to their `__init__`, other callables to their
        `__call__`; `None` when nothing resolvable is found.
        """
        function_parens = self.worder.find_parens_start_from_inside(offset)
        try:
            function_pyname = self.get_pyname_at(function_parens - 1)
        except BadIdentifierError:
            function_pyname = None
        if function_pyname is not None:
            pyobject = function_pyname.get_object()
            if isinstance(pyobject, pyobjects.AbstractFunction):
                return pyobject
            elif isinstance(pyobject, pyobjects.AbstractClass) and \
                    '__init__' in pyobject:
                return pyobject['__init__'].get_object()
            elif '__call__' in pyobject:
                return pyobject['__call__'].get_object()
        return None

    def _find_module(self, module_name):
        """Build an `ImportedModule` for a possibly relative module name."""
        # Leading dots encode the relative-import level.
        # NOTE(review): a bare relative name such as '.' would index past
        # the end of the string here -- presumably the worder never
        # yields one; verify.
        dots = 0
        while module_name[dots] == '.':
            dots += 1
        return rope.base.pynames.ImportedModule(
            self.module_scope.pyobject, module_name[dots:], dots)
+
+
class StatementEvaluator(object):
    """AST visitor that infers the PyName an expression evaluates to.

    `ast.walk` dispatches each node to the matching `_NodeType` method.
    The outcome is left in `self.result`; for attribute accesses the
    receiver (the object left of the dot) is additionally recorded in
    `self.old_result` and serves as the "primary".
    """

    def __init__(self, scope):
        self.scope = scope
        # PyName the walked expression evaluates to; None when unknown.
        self.result = None
        # PyName of the most recent attribute access' receiver, if any.
        self.old_result = None

    def _Name(self, node):
        # Plain identifier: a straight lookup in the holding scope.
        self.result = self.scope.lookup(node.id)

    def _Attribute(self, node):
        # Evaluate the receiver first; substitute an unbound name so
        # `old_result` always carries something for the caller.
        pyname = eval_node(self.scope, node.value)
        if pyname is None:
            pyname = rope.base.pynames.UnboundName()
        self.old_result = pyname
        if pyname.get_object() != rope.base.pyobjects.get_unknown():
            try:
                self.result = pyname.get_object()[node.attr]
            except exceptions.AttributeNotFoundError:
                self.result = None

    def _Call(self, node):
        primary, pyobject = self._get_primary_and_object_for_node(node.func)
        if pyobject is None:
            return
        def _get_returned(pyobject):
            args = arguments.create_arguments(primary, pyobject,
                                              node, self.scope)
            return pyobject.get_returned_object(args)
        if isinstance(pyobject, rope.base.pyobjects.AbstractClass):
            # Calling a class constructs an instance; honor a custom
            # `__new__` when it yields something more specific.
            result = None
            if '__new__' in pyobject:
                new_function = pyobject['__new__'].get_object()
                result = _get_returned(new_function)
            if result is None or \
                    result == rope.base.pyobjects.get_unknown():
                result = rope.base.pyobjects.PyObject(pyobject)
            self.result = rope.base.pynames.UnboundName(pyobject=result)
            return

        pyfunction = None
        if isinstance(pyobject, rope.base.pyobjects.AbstractFunction):
            pyfunction = pyobject
        elif '__call__' in pyobject:
            # Arbitrary callable object: delegate to its `__call__`.
            pyfunction = pyobject['__call__'].get_object()
        if pyfunction is not None:
            self.result = rope.base.pynames.UnboundName(
                pyobject=_get_returned(pyfunction))

    def _Str(self, node):
        # String literals are always `str`.
        self.result = rope.base.pynames.UnboundName(
            pyobject=rope.base.builtins.get_str())

    def _Num(self, node):
        # Map the literal to the builtin type of its value (int, float, ...).
        type_name = type(node.n).__name__
        self.result = self._get_builtin_name(type_name)

    def _get_builtin_name(self, type_name):
        # Wrap the named builtin type in a fresh instance-level PyObject.
        pytype = rope.base.builtins.builtins[type_name].get_object()
        return rope.base.pynames.UnboundName(
            rope.base.pyobjects.PyObject(pytype))

    def _BinOp(self, node):
        # Approximation: assume the result has the left operand's type.
        self.result = rope.base.pynames.UnboundName(
            self._get_object_for_node(node.left))

    def _BoolOp(self, node):
        # `a and b` / `a or b`: prefer the first operand's type, falling
        # back to the second when the first is unknown.
        pyobject = self._get_object_for_node(node.values[0])
        if pyobject is None:
            pyobject = self._get_object_for_node(node.values[1])
        self.result = rope.base.pynames.UnboundName(pyobject)

    def _Repr(self, node):
        # Backquote repr expression (Python 2) always yields a string.
        self.result = self._get_builtin_name('str')

    def _UnaryOp(self, node):
        # Unary operators preserve the operand's type.
        self.result = rope.base.pynames.UnboundName(
            self._get_object_for_node(node.operand))

    def _Compare(self, node):
        self.result = self._get_builtin_name('bool')

    def _Dict(self, node):
        # Key/value types are inferred from the first entry only.
        keys = None
        values = None
        if node.keys:
            keys = self._get_object_for_node(node.keys[0])
            values = self._get_object_for_node(node.values[0])
        self.result = rope.base.pynames.UnboundName(
            pyobject=rope.base.builtins.get_dict(keys, values))

    def _List(self, node):
        # Element type is inferred from the first element only.
        holding = None
        if node.elts:
            holding = self._get_object_for_node(node.elts[0])
        self.result = rope.base.pynames.UnboundName(
            pyobject=rope.base.builtins.get_list(holding))

    def _ListComp(self, node):
        pyobject = self._what_does_comprehension_hold(node)
        self.result = rope.base.pynames.UnboundName(
            pyobject=rope.base.builtins.get_list(pyobject))

    def _GeneratorExp(self, node):
        pyobject = self._what_does_comprehension_hold(node)
        self.result = rope.base.pynames.UnboundName(
            pyobject=rope.base.builtins.get_iterator(pyobject))

    def _what_does_comprehension_hold(self, node):
        # Evaluate the element expression inside a temporary scope that
        # binds the comprehension targets.
        scope = self._make_comprehension_scope(node)
        pyname = eval_node(scope, node.elt)
        return pyname.get_object() if pyname is not None else None

    def _make_comprehension_scope(self, node):
        scope = self.scope
        module = scope.pyobject.get_module()
        names = {}
        for comp in node.generators:
            # Each target is bound as though assigned from
            # `iterable.__iter__().next()` (Python 2 iterator protocol).
            new_names = _get_evaluated_names(comp.target, comp.iter, module,
                                             '.__iter__().next()', node.lineno)
            names.update(new_names)
        # NOTE(review): `rope.base.pyscopes` is not imported in this
        # module; this relies on it being importable as an attribute of
        # the already-imported `rope.base` package -- verify.
        return rope.base.pyscopes.TemporaryScope(scope.pycore, scope, names)

    def _Tuple(self, node):
        # Tuples of up to three elements keep per-slot types; longer
        # tuples are approximated by their first element's type.
        objects = []
        if len(node.elts) < 4:
            for stmt in node.elts:
                pyobject = self._get_object_for_node(stmt)
                objects.append(pyobject)
        else:
            objects.append(self._get_object_for_node(node.elts[0]))
        self.result = rope.base.pynames.UnboundName(
            pyobject=rope.base.builtins.get_tuple(*objects))

    def _get_object_for_node(self, stmt):
        # Convenience: evaluate `stmt` and unwrap its object (or None).
        pyname = eval_node(self.scope, stmt)
        pyobject = None
        if pyname is not None:
            pyobject = pyname.get_object()
        return pyobject

    def _get_primary_and_object_for_node(self, stmt):
        primary, pyname = eval_node2(self.scope, stmt)
        pyobject = None
        if pyname is not None:
            pyobject = pyname.get_object()
        return primary, pyobject

    def _Subscript(self, node):
        # Model `x[i]` as `x.__getitem__(i)` and `x[a:b]` as
        # `x.__getslice__()` (old-style slicing protocol).
        if isinstance(node.slice, ast.Index):
            self._call_function(node.value, '__getitem__',
                                [node.slice.value])
        elif isinstance(node.slice, ast.Slice):
            self._call_function(node.value, '__getslice__')

    def _call_function(self, node, function_name, other_args=None):
        """Set `result` to the return of `node.function_name(...)`."""
        pyname = eval_node(self.scope, node)
        if pyname is not None:
            pyobject = pyname.get_object()
        else:
            return
        if function_name in pyobject:
            called = pyobject[function_name].get_object()
            if not called or not isinstance(called, pyobjects.AbstractFunction):
                return
            args = [node]
            if other_args:
                args += other_args
            arguments_ = arguments.Arguments(args, self.scope)
            self.result = rope.base.pynames.UnboundName(
                pyobject=called.get_returned_object(arguments_))

    def _Lambda(self, node):
        self.result = rope.base.pynames.UnboundName(
            pyobject=rope.base.builtins.Lambda(node, self.scope))
+
+
def _get_evaluated_names(targets, assigned, module, evaluation, lineno):
    """Map every name bound in `targets` to an `AssignedName` pyname."""
    names = {}
    for name, levels in astutils.get_name_levels(targets):
        value = rope.base.pynames.AssignmentValue(assigned, levels,
                                                  evaluation)
        # XXX: this module should not access `rope.base.pynamesdef`!
        pyname = rope.base.pynamesdef.AssignedName(lineno, module)
        pyname.assignments.append(value)
        names[name] = pyname
    return names
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/exceptions.py b/.vim/bundle/python-mode/pylibs/rope/base/exceptions.py
@@ -0,0 +1,61 @@
class RopeError(Exception):
    """Base class for every exception raised by rope"""
+
+
class ResourceNotFoundError(RopeError):
    """Raised when a project file or folder resource cannot be found"""
+
+
class RefactoringError(RopeError):
    """Raised when a refactoring cannot be performed"""
+
+
class InterruptedTaskError(RopeError):
    """Raised when a running task has been interrupted"""
+
+
class HistoryError(RopeError):
    """Raised for invalid history undo/redo operations"""
+
+
class ModuleNotFoundError(RopeError):
    """Raised when a python module cannot be found"""
+
+
class AttributeNotFoundError(RopeError):
    """Raised when an object has no attribute of the requested name"""
+
+
class NameNotFoundError(RopeError):
    """Raised when a name cannot be found in a scope"""
+
+
class BadIdentifierError(RopeError):
    """Raised when a selected name cannot be resolved as an identifier"""
+
+
class ModuleSyntaxError(RopeError):
    """Module has syntax errors

    The `filename` and `lineno` fields indicate where the error has
    occurred.

    """

    def __init__(self, filename, lineno, message):
        # Path of the offending module.
        self.filename = filename
        # 1-based line on which the syntax error was reported.
        self.lineno = lineno
        # Trailing underscore avoids clashing with the deprecated
        # `BaseException.message` attribute of Python 2.6+.
        self.message_ = message
        super(ModuleSyntaxError, self).__init__(
            'Syntax error in file <%s> line <%s>: %s' %
            (filename, lineno, message))
+
+
class ModuleDecodeError(RopeError):
    """Cannot decode module"""

    def __init__(self, filename, message):
        # Path of the module whose bytes could not be decoded.
        self.filename = filename
        # Underscore suffix mirrors `ModuleSyntaxError.message_` and
        # avoids the deprecated `BaseException.message` of Python 2.6+.
        self.message_ = message
        super(ModuleDecodeError, self).__init__(
            'Cannot decode file <%s>: %s' % (filename, message))
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/fscommands.py b/.vim/bundle/python-mode/pylibs/rope/base/fscommands.py
@@ -0,0 +1,267 @@
+"""Project file system commands.
+
This module implements file system operations used by rope. Different
+version control systems can be supported by implementing the interface
+provided by `FileSystemCommands` class. See `SubversionCommands` and
+`MercurialCommands` for example.
+
+"""
+import os
+import shutil
+import subprocess
+
+
def create_fscommands(root):
    """Pick a commands implementation matching the VCS marker in `root`."""
    markers = {'.hg': MercurialCommands,
               '.svn': SubversionCommands,
               '.git': GITCommands,
               '_svn': SubversionCommands,
               '_darcs': DarcsCommands}
    entries = os.listdir(root)
    for marker, factory in markers.items():
        if marker in entries:
            try:
                return factory(root)
            except (ImportError, OSError):
                # VCS bindings/binary unavailable; try the next marker.
                pass
    return FileSystemCommands()
+
+
class FileSystemCommands(object):
    """Version-control-agnostic file system operations."""

    def create_file(self, path):
        """Create an empty file at `path`."""
        new_file = open(path, 'w')
        new_file.close()

    def create_folder(self, path):
        """Create a single directory at `path` (its parent must exist)."""
        os.mkdir(path)

    def move(self, path, new_location):
        """Move or rename a file or folder."""
        shutil.move(path, new_location)

    def remove(self, path):
        """Delete `path`: plain files directly, anything else recursively."""
        remover = os.remove if os.path.isfile(path) else shutil.rmtree
        remover(path)

    def write(self, path, data):
        """Replace the contents of `path` with the byte string `data`."""
        output = open(path, 'wb')
        try:
            output.write(data)
        finally:
            output.close()
+
+
class SubversionCommands(object):
    """File system operations that also update a Subversion working copy."""

    def __init__(self, *args):
        self.normal_actions = FileSystemCommands()
        # Imported lazily so `create_fscommands` can catch ImportError
        # and fall back to plain commands when pysvn is absent.
        import pysvn
        self.client = pysvn.Client()

    def create_file(self, path):
        self.normal_actions.create_file(path)
        self.client.add(path, force=True)

    def create_folder(self, path):
        self.normal_actions.create_folder(path)
        self.client.add(path, force=True)

    def move(self, path, new_location):
        self.client.move(path, new_location, force=True)

    def remove(self, path):
        self.client.remove(path, force=True)

    def write(self, path, data):
        # Content edits need no explicit svn command; a plain write is
        # enough for already-versioned files.
        self.normal_actions.write(path, data)
+
+
class MercurialCommands(object):
    """File system operations that also update a Mercurial repository."""

    def __init__(self, root):
        self.hg = self._import_mercurial()
        self.normal_actions = FileSystemCommands()
        try:
            # Some mercurial versions accept configuration as keyword
            # arguments to `ui()` -- TODO confirm which versions.
            self.ui = self.hg.ui.ui(
                verbose=False, debug=False, quiet=True,
                interactive=False, traceback=False, report_untrusted=False)
        except:
            # Fallback API: build a default ui and configure it
            # explicitly.  (Bare except is deliberate here: any failure
            # of the keyword form selects the fallback.)
            self.ui = self.hg.ui.ui()
            self.ui.setconfig('ui', 'interactive', 'no')
            self.ui.setconfig('ui', 'debug', 'no')
            self.ui.setconfig('ui', 'traceback', 'no')
            self.ui.setconfig('ui', 'verbose', 'no')
            self.ui.setconfig('ui', 'report_untrusted', 'no')
            self.ui.setconfig('ui', 'quiet', 'yes')

        self.repo = self.hg.hg.repository(self.ui, root)

    def _import_mercurial(self):
        # Imported lazily so `create_fscommands` can catch ImportError.
        import mercurial.commands
        import mercurial.hg
        import mercurial.ui
        return mercurial

    def create_file(self, path):
        self.normal_actions.create_file(path)
        self.hg.commands.add(self.ui, self.repo, path)

    def create_folder(self, path):
        # Mercurial tracks files only; no hg call is made for folders.
        self.normal_actions.create_folder(path)

    def move(self, path, new_location):
        self.hg.commands.rename(self.ui, self.repo, path,
                                new_location, after=False)

    def remove(self, path):
        self.hg.commands.remove(self.ui, self.repo, path)

    def write(self, path, data):
        # Content edits need no explicit hg command for tracked files.
        self.normal_actions.write(path, data)
+
+
class GITCommands(object):
    """File system operations that also update a git repository."""

    def __init__(self, root):
        self.root = root
        # Probe the `git` executable early; an OSError here makes
        # `create_fscommands` fall back to plain file system commands.
        self._do(['version'])
        self.normal_actions = FileSystemCommands()

    def create_file(self, path):
        self.normal_actions.create_file(path)
        self._do(['add', self._in_dir(path)])

    def create_folder(self, path):
        # git tracks files only; creating a directory needs no git call.
        self.normal_actions.create_folder(path)

    def move(self, path, new_location):
        self._do(['mv', self._in_dir(path), self._in_dir(new_location)])

    def remove(self, path):
        self._do(['rm', self._in_dir(path)])

    def write(self, path, data):
        # XXX: should we use ``git add``?
        self.normal_actions.write(path, data)

    def _do(self, args):
        # Run `git <args>` with the repository root as working directory.
        _execute(['git'] + args, cwd=self.root)

    def _in_dir(self, path):
        # Convert an absolute path into a repository-relative one.
        if path.startswith(self.root):
            return path[len(self.root) + 1:]
        # NOTE(review): returning the root for paths outside the repo
        # looks suspicious -- presumably callers never pass such paths;
        # verify before relying on this branch.
        return self.root
+
+
class DarcsCommands(object):
    """File system operations that also update a darcs repository."""

    def __init__(self, root):
        self.root = root
        self.normal_actions = FileSystemCommands()

    def create_file(self, path):
        self.normal_actions.create_file(path)
        self._do(['add', path])

    def create_folder(self, path):
        # Unlike git/hg, directories are registered with darcs as well.
        self.normal_actions.create_folder(path)
        self._do(['add', path])

    def move(self, path, new_location):
        self._do(['mv', path, new_location])

    def remove(self, path):
        # NOTE(review): only the working tree is touched; no
        # `darcs remove` is issued -- confirm whether that is intended.
        self.normal_actions.remove(path)

    def write(self, path, data):
        self.normal_actions.write(path, data)

    def _do(self, args):
        # Run `darcs <args>` with the repository root as working directory.
        _execute(['darcs'] + args, cwd=self.root)
+
+
+def _execute(args, cwd=None):
+ process = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE)
+ process.wait()
+ return process.returncode
+
+
def unicode_to_file_data(contents, encoding=None):
    # Encode `contents` for writing to disk.  Byte strings pass through
    # unchanged (Python 2 codebase: `unicode` is the text type here).
    if not isinstance(contents, unicode):
        return contents
    # Without an explicit encoding, honor the module's own PEP 263
    # coding declaration, if any.
    if encoding is None:
        encoding = read_str_coding(contents)
    if encoding is not None:
        return contents.encode(encoding)
    # No declaration found: try the interpreter default and fall back
    # to UTF-8 for text the default cannot represent.
    try:
        return contents.encode()
    except UnicodeEncodeError:
        return contents.encode('utf-8')
+
def file_data_to_unicode(data, encoding=None):
    """Decode raw file `data` and normalize line endings to LF."""
    result = _decode_data(data, encoding)
    # Normalize Windows (CRLF) and old Mac (CR) endings to Unix (LF).
    if '\r' in result:
        result = result.replace('\r\n', '\n').replace('\r', '\n')
    return result
+
def _decode_data(data, encoding):
    # Already-decoded text passes straight through (Python 2 `unicode`).
    if isinstance(data, unicode):
        return data
    # Prefer the module's own PEP 263 coding declaration when no
    # explicit encoding was supplied.
    if encoding is None:
        encoding = read_str_coding(data)
    if encoding is None:
        # there is no encoding tip, we need to guess.
        # PEP263 says that "encoding not explicitly defined" means it is ascii,
        # but we will use utf8 instead since utf8 fully covers ascii and btw is
        # the only non-latin sane encoding.
        encoding = 'utf-8'
    try:
        return data.decode(encoding)
    except (UnicodeError, LookupError):
        # fallback to latin1: it should never fail
        return data.decode('latin1')
+
+
def read_file_coding(path):
    """Return the PEP 263 coding declaration of the file at `path`.

    Per PEP 263 the declaration may only appear on the first or second
    line, so reading stops once two newlines have been seen.  Returns
    `None` when the file declares no coding.
    """
    # 'rb' fixes the original invalid mode string: 'b' alone is not a
    # valid mode, which made every call raise ValueError at open().
    file_ = open(path, 'rb')
    try:
        count = 0
        result = []
        buffsize = 1024
        # Stop after the first two lines: a declaration further down is
        # not a coding declaration per PEP 263.
        while count < 2:
            current = file_.read(buffsize)
            if not current:
                break
            count += current.count('\n')
            result.append(current)
    finally:
        # Close even when reading fails (the original leaked on error).
        file_.close()
    return _find_coding(''.join(result))
+
+
def read_str_coding(source):
    """Return the coding declared in the first two lines of `source`."""
    # Find the end of the second line; fall back to the whole string
    # when there are fewer than two newlines.
    end = len(source)
    try:
        first_newline = source.index('\n')
        end = source.index('\n', first_newline + 1) + 1
    except ValueError:
        pass
    return _find_coding(source[:end])
+
+
+def _find_coding(text):
+ coding = 'coding'
+ try:
+ start = text.index(coding) + len(coding)
+ if text[start] not in '=:':
+ return
+ start += 1
+ while start < len(text) and text[start].isspace():
+ start += 1
+ end = start
+ while end < len(text):
+ c = text[end]
+ if not c.isalnum() and c not in '-_':
+ break
+ end += 1
+ return text[start:end]
+ except ValueError:
+ pass
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/history.py b/.vim/bundle/python-mode/pylibs/rope/base/history.py
@@ -0,0 +1,235 @@
+from rope.base import exceptions, change, taskhandle
+
+
class History(object):
    """A class that holds project history

    Performed changes are kept on an undo list, undone changes on a
    redo list; both can optionally be persisted across sessions via the
    project's data files (see the `save` preference).
    """

    def __init__(self, project, maxundos=None):
        self.project = project
        self._undo_list = []
        self._redo_list = []
        # `None` means: read the limit from project prefs (see max_undos).
        self._maxundos = maxundos
        self._load_history()
        # Persist history whenever project data files are written.
        self.project.data_files.add_write_hook(self.write)
        # The change currently being (un/re)done, if any; consulted by
        # `contents_before_current_change`.
        self.current_change = None

    def _load_history(self):
        # Restore the undo/redo lists persisted by `write`, when enabled.
        if self.save:
            result = self.project.data_files.read_data(
                'history', compress=self.compress, import_=True)
            if result is not None:
                to_change = change.DataToChange(self.project)
                for data in result[0]:
                    self._undo_list.append(to_change(data))
                for data in result[1]:
                    self._redo_list.append(to_change(data))

    def do(self, changes, task_handle=taskhandle.NullTaskHandle()):
        """Perform the change and add it to the `self.undo_list`

        Note that uninteresting changes (changes to ignored files)
        will not be appended to `self.undo_list`.

        """
        try:
            self.current_change = changes
            changes.do(change.create_job_set(task_handle, changes))
        finally:
            self.current_change = None
        if self._is_change_interesting(changes):
            self.undo_list.append(changes)
            self._remove_extra_items()
            # Performing a fresh change invalidates everything undone.
            del self.redo_list[:]

    def _remove_extra_items(self):
        # Trim the oldest entries beyond the configured limit.
        if len(self.undo_list) > self.max_undos:
            del self.undo_list[0:len(self.undo_list) - self.max_undos]

    def _is_change_interesting(self, changes):
        # A change matters if it touches at least one non-ignored resource.
        for resource in changes.get_changed_resources():
            if not self.project.is_ignored(resource):
                return True
        return False

    def undo(self, change=None, drop=False,
             task_handle=taskhandle.NullTaskHandle()):
        """Undo done changes from the history

        When `change` is `None`, the last done change will be undone.
        If change is not `None` it should be an item from
        `self.undo_list`; this change and all changes that depend on
        it will be undone. In both cases the list of undone changes
        will be returned.

        If `drop` is `True`, the undone change will not be appended to
        the redo list.

        """
        if not self._undo_list:
            raise exceptions.HistoryError('Undo list is empty')
        if change is None:
            change = self.undo_list[-1]
        # Changes touching the same resources must be undone together,
        # newest first, to keep file contents consistent.
        dependencies = self._find_dependencies(self.undo_list, change)
        self._move_front(self.undo_list, dependencies)
        self._perform_undos(len(dependencies), task_handle)
        result = self.redo_list[-len(dependencies):]
        if drop:
            del self.redo_list[-len(dependencies):]
        return result

    def redo(self, change=None, task_handle=taskhandle.NullTaskHandle()):
        """Redo undone changes from the history

        When `change` is `None`, the last undone change will be
        redone. If change is not `None` it should be an item from
        `self.redo_list`; this change and all changes that depend on
        it will be redone. In both cases the list of redone changes
        will be returned.

        """
        if not self.redo_list:
            raise exceptions.HistoryError('Redo list is empty')
        if change is None:
            change = self.redo_list[-1]
        dependencies = self._find_dependencies(self.redo_list, change)
        self._move_front(self.redo_list, dependencies)
        self._perform_redos(len(dependencies), task_handle)
        return self.undo_list[-len(dependencies):]

    def _move_front(self, change_list, changes):
        # Reorder so the given changes sit at the end of the list
        # (i.e. at the front of the history).
        for change in changes:
            change_list.remove(change)
            change_list.append(change)

    def _find_dependencies(self, change_list, change):
        # All changes from `change` onward that share resources with it.
        index = change_list.index(change)
        return _FindChangeDependencies(change_list[index:])()

    def _perform_undos(self, count, task_handle):
        for i in range(count):
            self.current_change = self.undo_list[-1]
            try:
                job_set = change.create_job_set(task_handle,
                                                self.current_change)
                self.current_change.undo(job_set)
            finally:
                self.current_change = None
            # Undone change becomes redoable.
            self.redo_list.append(self.undo_list.pop())

    def _perform_redos(self, count, task_handle):
        for i in range(count):
            self.current_change = self.redo_list[-1]
            try:
                job_set = change.create_job_set(task_handle,
                                                self.current_change)
                self.current_change.do(job_set)
            finally:
                self.current_change = None
            # Redone change becomes undoable again.
            self.undo_list.append(self.redo_list.pop())

    def contents_before_current_change(self, file):
        """Return `file`'s contents prior to the in-progress change.

        `None` when no change is being performed or the file had no
        previous contents.
        """
        if self.current_change is None:
            return None
        result = self._search_for_change_contents([self.current_change], file)
        if result is not None:
            return result
        if file.exists() and not file.is_folder():
            return file.read()
        else:
            return None

    def _search_for_change_contents(self, change_list, file):
        # Scan newest-first, recursing into change sets, for the last
        # recorded old contents of `file`.
        for change_ in reversed(change_list):
            if isinstance(change_, change.ChangeSet):
                result = self._search_for_change_contents(change_.changes,
                                                          file)
                if result is not None:
                    return result
            if isinstance(change_, change.ChangeContents) and \
                    change_.resource == file:
                return change_.old_contents

    def write(self):
        # Persist both lists through the project data files; registered
        # as a write hook in `__init__`.
        if self.save:
            data = []
            to_data = change.ChangeToData()
            self._remove_extra_items()
            data.append([to_data(change_) for change_ in self.undo_list])
            data.append([to_data(change_) for change_ in self.redo_list])
            self.project.data_files.write_data('history', data,
                                               compress=self.compress)

    def get_file_undo_list(self, resource):
        """Return the undoable changes that touch `resource`."""
        result = []
        for change in self.undo_list:
            if resource in change.get_changed_resources():
                result.append(change)
        return result

    def __str__(self):
        return 'History holds %s changes in memory' % \
            (len(self.undo_list) + len(self.redo_list))

    # Property views (no setters) over the internal lists.
    undo_list = property(lambda self: self._undo_list)
    redo_list = property(lambda self: self._redo_list)

    @property
    def tobe_undone(self):
        """The last done change if available, `None` otherwise"""
        if self.undo_list:
            return self.undo_list[-1]

    @property
    def tobe_redone(self):
        """The last undone change if available, `None` otherwise"""
        if self.redo_list:
            return self.redo_list[-1]

    @property
    def max_undos(self):
        # An explicit constructor argument wins over the project pref.
        if self._maxundos is None:
            return self.project.prefs.get('max_history_items', 100)
        else:
            return self._maxundos

    @property
    def save(self):
        # Whether history should be persisted across sessions.
        return self.project.prefs.get('save_history', False)

    @property
    def compress(self):
        return self.project.prefs.get('compress_history', False)

    def clear(self):
        """Forget all undo and redo information"""
        del self.undo_list[:]
        del self.redo_list[:]
+
+
+class _FindChangeDependencies(object):
+
+ def __init__(self, change_list):
+ self.change = change_list[0]
+ self.change_list = change_list
+ self.changed_resources = set(self.change.get_changed_resources())
+
+ def __call__(self):
+ result = [self.change]
+ for change in self.change_list[1:]:
+ if self._depends_on(change, result):
+ result.append(change)
+ self.changed_resources.update(change.get_changed_resources())
+ return result
+
+ def _depends_on(self, changes, result):
+ for resource in changes.get_changed_resources():
+ if resource is None:
+ continue
+ if resource in self.changed_resources:
+ return True
+ for changed in self.changed_resources:
+ if resource.is_folder() and resource.contains(changed):
+ return True
+ if changed.is_folder() and changed.contains(resource):
+ return True
+ return False
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/libutils.py b/.vim/bundle/python-mode/pylibs/rope/base/libutils.py
@@ -0,0 +1,65 @@
+"""A few useful functions for using rope as a library"""
+import os.path
+
+import rope.base.project
+import rope.base.pycore
+from rope.base import taskhandle
+
+
def path_to_resource(project, path, type=None):
    """Get the resource at path

    You only need to specify `type` if `path` does not exist. It can
    be either 'file' or 'folder'. If the type is `None` it is assumed
    that the resource already exists.

    Note that this function uses `Project.get_resource()`,
    `Project.get_file()`, and `Project.get_folder()` methods.

    """
    project_path = relative(project.address, path)
    if project_path is None:
        # `path` lies outside the project: resolve it through the
        # special "no project" singleton instead.
        project_path = rope.base.project._realpath(path)
        project = rope.base.project.get_no_project()
    if type is None:
        return project.get_resource(project_path)
    getters = {'file': project.get_file, 'folder': project.get_folder}
    getter = getters.get(type)
    return getter(project_path) if getter is not None else None
+
def relative(root, path):
    """Return `path` relative to `root`, or `None` when it is outside.

    Both paths are canonicalized and compared with '/' separators; the
    empty string is returned when `path` equals `root`.
    """
    def canonical(p):
        return rope.base.project._realpath(p).replace(os.path.sep, '/')
    root = canonical(root)
    path = canonical(path)
    if path == root:
        return ''
    prefix = root + '/'
    if path.startswith(prefix):
        return path[len(prefix):]
+
def report_change(project, path, old_content):
    """Report that the contents of file at `path` was changed

    The new contents of file is retrieved by reading the file.

    """
    resource = path_to_resource(project, path)
    if resource is None:
        return
    # Iterate over a copy so observers may (un)register while notified.
    for observer in list(project.observers):
        observer.resource_changed(resource)
    # Re-run static object analysis on the scopes whose text changed.
    if project.pycore.automatic_soa:
        rope.base.pycore.perform_soa_on_changed_scopes(project, resource,
                                                       old_content)
+
def analyze_modules(project, task_handle=taskhandle.NullTaskHandle()):
    """Perform static object analysis on all python files in the project

    Note that this might be really time consuming.
    """
    python_files = project.pycore.get_python_files()
    job_set = task_handle.create_jobset('Analyzing Modules',
                                        len(python_files))
    for module in python_files:
        job_set.started_job(module.path)
        project.pycore.analyze_module(module)
        job_set.finished_job()
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/oi/__init__.py b/.vim/bundle/python-mode/pylibs/rope/base/oi/__init__.py
@@ -0,0 +1,38 @@
+"""Rope object analysis and inference package
+
+Rope makes some simplifying assumptions about a python program. It
+assumes that a program only performs assignments and function calls.
+Tracking assignments is simple and `PyName` objects handle that. The
+main problem is function calls. Rope uses these two approaches for
+obtaining call information:
+
+* Static object analysis: `rope.base.pycore.PyCore.analyze_module()`
+
+ It can analyze modules to obtain information about functions. This
+ is done by analyzing function calls in a module or scope. Currently
+ SOA analyzes the scopes that are changed while saving or when the
+ user asks to analyze a module. That is mainly because static
+ analysis is time-consuming.
+
+* Dynamic object analysis: `rope.base.pycore.PyCore.run_module()`
+
+ When you run a module or your testsuite, when DOA is enabled, it
+ collects information about parameters passed to and objects returned
+ from functions. The main problem with this approach is that it is
  quite slow — not when looking up the information, but when collecting
  it.
+
+An instance of `rope.base.oi.objectinfo.ObjectInfoManager` can be used
for accessing this information. It saves the data in a
+`rope.base.oi.objectdb.ObjectDB` internally.
+
+Now if our objectdb does not know anything about a function and we
+need the value returned by it, static object inference, SOI, comes
+into play. It analyzes function body and tries to infer the object
+that is returned from it (we usually need the returned value for the
+given parameter objects).
+
+Rope might collect and store information for other `PyName`\s, too.
+For instance rope stores the object builtin containers hold.
+
+"""
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/oi/doa.py b/.vim/bundle/python-mode/pylibs/rope/base/oi/doa.py
@@ -0,0 +1,162 @@
+import cPickle as pickle
+import marshal
+import os
+import socket
+import subprocess
+import sys
+import tempfile
+import threading
+
+
class PythonFileRunner(object):
    """A class for running python project files

    Runs the file in a subprocess.  When `analyze_data` is given, the
    child is started through the ``rope.base.oi.runmod`` wrapper so
    that dynamic object analysis (DOA) records are streamed back via a
    `_MessageReceiver` and fed to the `analyze_data` callback.
    """

    def __init__(self, pycore, file_, args=None, stdin=None,
                 stdout=None, analyze_data=None):
        # pycore: the project's PyCore, used for path/module lookup.
        # file_: the resource (file) to execute.
        # args: extra command-line arguments appended for the script.
        # stdin/stdout: file objects wired to the child's streams.
        # analyze_data: DOA record callback; `None` disables collection.
        self.pycore = pycore
        self.file = file_
        self.analyze_data = analyze_data
        self.observers = []
        self.args = args
        self.stdin = stdin
        self.stdout = stdout

    def run(self):
        """Execute the process"""
        env = dict(os.environ)
        file_path = self.file.real_path
        # Make project sources importable from the child process.
        path_folders = self.pycore.get_source_folders() + \
            self.pycore.get_python_path_folders()
        env['PYTHONPATH'] = os.pathsep.join(folder.real_path
                                           for folder in path_folders)
        runmod_path = self.pycore.find_module('rope.base.oi.runmod').real_path
        self.receiver = None
        self._init_data_receiving()
        send_info = '-'
        if self.receiver:
            send_info = self.receiver.get_send_info()
        args = [sys.executable, runmod_path, send_info,
                self.pycore.project.address, self.file.real_path]
        if self.analyze_data is None:
            # No analysis requested: drop the runmod wrapper and its
            # two extra arguments so the file runs directly.
            del args[1:4]
        if self.args is not None:
            args.extend(self.args)
        self.process = subprocess.Popen(
            executable=sys.executable, args=args, env=env,
            cwd=os.path.split(file_path)[0], stdin=self.stdin,
            stdout=self.stdout, stderr=self.stdout, close_fds=os.name != 'nt')

    def _init_data_receiving(self):
        # Sets up `self.receiver` and a daemon thread that consumes DOA
        # records while the child runs; no-op without `analyze_data`.
        if self.analyze_data is None:
            return
        # Disabling FIFO data transfer due to blocking when running
        # unittests in the GUI.
        # XXX: Handle FIFO data transfer for `rope.ui.testview`
        if True or os.name == 'nt':
            self.receiver = _SocketReceiver()
        else:
            self.receiver = _FIFOReceiver()
        self.receiving_thread = threading.Thread(target=self._receive_information)
        # Daemon thread: must not keep the interpreter alive on its own.
        self.receiving_thread.setDaemon(True)
        self.receiving_thread.start()

    def _receive_information(self):
        # Pump records from the receiver into the callback, then notify
        # finishing observers.
        #temp = open('/dev/shm/info', 'w')
        for data in self.receiver.receive_data():
            self.analyze_data(data)
            #temp.write(str(data) + '\n')
        #temp.close()
        for observer in self.observers:
            observer()

    def wait_process(self):
        """Wait for the process to finish"""
        self.process.wait()
        if self.analyze_data:
            # Also wait until all pending DOA records are consumed.
            self.receiving_thread.join()

    def kill_process(self):
        """Stop the process"""
        if self.process.poll() is not None:
            # Already exited; nothing to do.
            return
        try:
            if hasattr(self.process, 'terminate'):
                self.process.terminate()
            elif os.name != 'nt':
                os.kill(self.process.pid, 9)
            else:
                # Old Windows/Python without Popen.terminate(): fall
                # back to the Win32 API.
                import ctypes
                handle = int(self.process._handle)
                ctypes.windll.kernel32.TerminateProcess(handle, -1)
        except OSError:
            pass

    def add_finishing_observer(self, observer):
        """Notify this observer when execution finishes"""
        self.observers.append(observer)
+
+
+class _MessageReceiver(object):
+
+ def receive_data(self):
+ pass
+
+ def get_send_info(self):
+ pass
+
+
class _SocketReceiver(_MessageReceiver):
    """Receives DOA records over a local TCP socket.

    Binds the first free port in ``[3037, 4000)``; the child process
    connects back to that port and streams pickled records.
    """

    def __init__(self):
        self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.data_port = 3037
        while self.data_port < 4000:
            try:
                self.server_socket.bind(('', self.data_port))
                break
            except socket.error:
                # Fixed: `except socket.error, e:` is Python-2-only
                # syntax and the bound `e` was never used.
                # Port in use; try the next one.
                self.data_port += 1
        self.server_socket.listen(1)

    def get_send_info(self):
        """Return the port number (as a string) the child connects to."""
        return str(self.data_port)

    def receive_data(self):
        """Accept one connection and yield unpickled records until EOF."""
        conn, addr = self.server_socket.accept()
        # Only a single client is expected; stop listening right away.
        self.server_socket.close()
        # Binary mode: pickle streams are byte-oriented; text mode could
        # corrupt the stream on Windows.
        my_file = conn.makefile('rb')
        while True:
            try:
                yield pickle.load(my_file)
            except EOFError:
                break
        my_file.close()
        conn.close()
+
+
class _FIFOReceiver(_MessageReceiver):
    """Receives DOA records through a named FIFO (POSIX only)."""

    def __init__(self):
        # XXX: this is insecure and might cause race conditions
        self.file_name = self._get_file_name()
        os.mkfifo(self.file_name)

    def _get_file_name(self):
        # Pick the first unused <tmpdir>/__rope_NNNN name.  NOTE(review):
        # exists/mkfifo is not atomic — two runners could race here.
        prefix = tempfile.gettempdir() + '/__rope_'
        i = 0
        while os.path.exists(prefix + str(i).rjust(4, '0')):
            i += 1
        return prefix + str(i).rjust(4, '0')

    def get_send_info(self):
        # The child writes marshalled records to this path.
        return self.file_name

    def receive_data(self):
        # Yield unmarshalled records until EOF, then remove the FIFO.
        my_file = open(self.file_name, 'rb')
        while True:
            try:
                yield marshal.load(my_file)
            except EOFError:
                break
        my_file.close()
        os.remove(self.file_name)
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/oi/memorydb.py b/.vim/bundle/python-mode/pylibs/rope/base/oi/memorydb.py
@@ -0,0 +1,106 @@
+from rope.base.oi import objectdb
+
+
class MemoryDB(objectdb.FileDict):
    """Objectdb backend that keeps everything in a plain dict.

    When persistence is enabled, the whole mapping is restored from and
    saved to the project's ``objectdb`` data file.
    """

    def __init__(self, project, persist=None):
        self.project = project
        self._persist = persist
        self.files = self
        self._load_files()
        self.project.data_files.add_write_hook(self.write)

    def _load_files(self):
        # Start empty and overlay persisted data, if any.
        self._files = {}
        if not self.persist:
            return
        stored = self.project.data_files.read_data(
            'objectdb', compress=self.compress, import_=True)
        if stored is not None:
            self._files = stored

    def keys(self):
        return self._files.keys()

    def __contains__(self, key):
        return key in self._files

    def __getitem__(self, key):
        return FileInfo(self._files[key])

    def create(self, path):
        self._files[path] = {}

    def rename(self, file, newfile):
        if file not in self._files:
            return
        self._files[newfile] = self._files[file]
        del self._files[file]

    def __delitem__(self, file):
        del self._files[file]

    def write(self):
        # Persist only when configured to do so.
        if self.persist:
            self.project.data_files.write_data('objectdb', self._files,
                                               self.compress)

    @property
    def compress(self):
        return self.project.prefs.get('compress_objectdb', False)

    @property
    def persist(self):
        # An explicit constructor argument wins over the project preference.
        if self._persist is None:
            return self.project.prefs.get('save_objectdb', False)
        return self._persist
+
+
class FileInfo(objectdb.FileInfo):
    """Dict-like view over the per-scope information of one file."""

    def __init__(self, scopes):
        # scopes: plain dict mapping scope key -> `ScopeInfo`.
        self.scopes = scopes

    def create_scope(self, key):
        # Start tracking a new, empty scope under `key`.
        self.scopes[key] = ScopeInfo()

    def keys(self):
        return self.scopes.keys()

    def __contains__(self, key):
        return key in self.scopes

    def __getitem__(self, key):
        return self.scopes[key]

    def __delitem__(self, key):
        del self.scopes[key]
+
+
class ScopeInfo(objectdb.ScopeInfo):
    """Stores call records and per-name object data for a single scope."""

    def __init__(self):
        # call_info: args tuple -> returned textual form
        # per_name: local name -> textual form
        self.call_info = {}
        self.per_name = {}

    def get_per_name(self, name):
        return self.per_name.get(name)

    def save_per_name(self, name, value):
        self.per_name[name] = value

    def get_returned(self, parameters):
        return self.call_info.get(parameters)

    def get_call_infos(self):
        for parameters, returned in self.call_info.items():
            yield objectdb.CallInfo(parameters, returned)

    def add_call(self, parameters, returned):
        self.call_info[parameters] = returned

    def __getstate__(self):
        # Keep the pickled form a plain two-tuple for compatibility with
        # previously persisted objectdbs.
        return (self.call_info, self.per_name)

    def __setstate__(self, data):
        self.call_info, self.per_name = data
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/oi/objectdb.py b/.vim/bundle/python-mode/pylibs/rope/base/oi/objectdb.py
@@ -0,0 +1,175 @@
+import UserDict
+
+
class ObjectDB(object):
    """Object information database.

    Thin layer over a backend (`db`) storing per-file, per-scope call
    and per-name information.  Reads and writes are filtered through a
    `validation` policy, and `FileListObserver`s are notified about
    file additions/removals.
    """

    def __init__(self, db, validation):
        self.db = db
        self.validation = validation
        self.observers = []
        self.files = db.files

    def validate_files(self):
        # Drop files the validation policy no longer considers valid.
        for file in list(self.files):
            if not self.validation.is_file_valid(file):
                del self.files[file]
                self._file_removed(file)

    def validate_file(self, file):
        # Drop stale scopes of a single file.
        if file not in self.files:
            return
        for key in list(self.files[file]):
            if not self.validation.is_scope_valid(file, key):
                del self.files[file][key]

    def file_moved(self, file, newfile):
        # Move stored data and tell observers about the rename.
        if file not in self.files:
            return
        self.files.rename(file, newfile)
        self._file_removed(file)
        self._file_added(newfile)

    def get_files(self):
        return self.files.keys()

    def get_returned(self, path, key, args):
        # Recorded return value for calling scope `key` with `args`.
        scope_info = self._get_scope_info(path, key, readonly=True)
        result = scope_info.get_returned(args)
        if self.validation.is_value_valid(result):
            return result

    def get_pername(self, path, key, name):
        # Recorded object for local `name` inside scope `key`.
        scope_info = self._get_scope_info(path, key, readonly=True)
        result = scope_info.get_per_name(name)
        if self.validation.is_value_valid(result):
            return result

    def get_callinfos(self, path, key):
        scope_info = self._get_scope_info(path, key, readonly=True)
        return scope_info.get_call_infos()

    def add_callinfo(self, path, key, args, returned):
        # Keep the more informative of the old and new return values.
        scope_info = self._get_scope_info(path, key, readonly=False)
        old_returned = scope_info.get_returned(args)
        if self.validation.is_more_valid(returned, old_returned):
            scope_info.add_call(args, returned)

    def add_pername(self, path, key, name, value):
        scope_info = self._get_scope_info(path, key, readonly=False)
        old_value = scope_info.get_per_name(name)
        if self.validation.is_more_valid(value, old_value):
            scope_info.save_per_name(name, value)

    def add_file_list_observer(self, observer):
        self.observers.append(observer)

    def write(self):
        self.db.write()

    def _get_scope_info(self, path, key, readonly=True):
        # Read-only lookups get a null object instead of creating entries.
        if path not in self.files:
            if readonly:
                return _NullScopeInfo()
            self.files.create(path)
            self._file_added(path)
        if key not in self.files[path]:
            if readonly:
                return _NullScopeInfo()
            self.files[path].create_scope(key)
        result = self.files[path][key]
        if isinstance(result, dict):
            # NOTE(review): looks like a leftover debugging aid (Python 2
            # `print` statement); a dict here would mean the backend
            # returned raw data instead of a `ScopeInfo` — confirm
            # whether this can still happen.
            print self.files, self.files[path], self.files[path][key]
        return result

    def _file_removed(self, path):
        for observer in self.observers:
            observer.removed(path)

    def _file_added(self, path):
        for observer in self.observers:
            observer.added(path)

    def __str__(self):
        scope_count = 0
        for file_dict in self.files.values():
            scope_count += len(file_dict)
        return 'ObjectDB holds %s file and %s scope infos' % \
            (len(self.files), scope_count)
+
+
+class _NullScopeInfo(object):
+
+ def __init__(self, error_on_write=True):
+ self.error_on_write = error_on_write
+
+ def get_per_name(self, name):
+ pass
+
+ def save_per_name(self, name, value):
+ if self.error_on_write:
+ raise NotImplementedError()
+
+ def get_returned(self, parameters):
+ pass
+
+ def get_call_infos(self):
+ return []
+
+ def add_call(self, parameters, returned):
+ if self.error_on_write:
+ raise NotImplementedError()
+
+
class FileInfo(UserDict.DictMixin):
    """Abstract mapping of scope key -> `ScopeInfo` for a single file."""

    def create_scope(self, key):
        # Concrete backends create an empty scope entry for `key`.
        pass
+
+
class FileDict(UserDict.DictMixin):
    """Abstract mapping of file path -> `FileInfo`."""

    def create(self, key):
        # Concrete backends create an empty entry for file `key`.
        pass

    def rename(self, key, new_key):
        # Concrete backends move the data stored for `key` to `new_key`.
        pass
+
+
class ScopeInfo(object):
    """Abstract per-scope storage for call records and per-name objects."""

    def get_per_name(self, name):
        """Return the stored textual object for local `name`, if any."""
        return None

    def save_per_name(self, name, value):
        """Store `value` as the textual object for local `name`."""
        return None

    def get_returned(self, parameters):
        """Return the stored result for a call with `parameters`, if any."""
        return None

    def get_call_infos(self):
        """Iterate over stored `CallInfo` records."""
        return None

    def add_call(self, parameters, returned):
        """Record one observed call."""
        return None
+
+
class CallInfo(object):
    """Immutable record of one observed call: arguments and return value."""

    def __init__(self, args, returned):
        self.args = args
        self.returned = returned

    def get_parameters(self):
        """Return the textual forms of the passed arguments."""
        return self.args

    def get_returned(self):
        """Return the textual form of the returned object."""
        return self.returned
+
+
class FileListObserver(object):
    """Callback interface: files being added to / removed from the db."""

    def added(self, path):
        pass

    def removed(self, path):
        pass
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/oi/objectinfo.py b/.vim/bundle/python-mode/pylibs/rope/base/oi/objectinfo.py
@@ -0,0 +1,232 @@
+import warnings
+
+from rope.base import exceptions, resourceobserver
+from rope.base.oi import objectdb, memorydb, transform
+
+
class ObjectInfoManager(object):
    """Stores object information

    It uses an instance of `objectdb.ObjectDB` for storing
    information.

    """

    def __init__(self, project):
        self.project = project
        # Converters between `PyObject`s and their persistable textual form.
        self.to_textual = transform.PyObjectToTextual(project)
        self.to_pyobject = transform.TextualToPyObject(project)
        self.doi_to_pyobject = transform.DOITextualToPyObject(project)
        self._init_objectdb()
        if project.prefs.get('validate_objectdb', False):
            self._init_validation()

    def _init_objectdb(self):
        # `objectdb_type` is honored only for backward compatibility.
        dbtype = self.project.get_prefs().get('objectdb_type', None)
        persist = None
        if dbtype is not None:
            warnings.warn(
                '"objectdb_type" project config is deprecated;\n'
                'Use "save_objectdb" instead in your project '
                'config file.\n(".ropeproject/config.py" by default)\n',
                DeprecationWarning)
            if dbtype != 'memory' and self.project.ropefolder is not None:
                persist = True
        self.validation = TextualValidation(self.to_pyobject)
        db = memorydb.MemoryDB(self.project, persist=persist)
        self.objectdb = objectdb.ObjectDB(db, self.validation)

    def _init_validation(self):
        # Drop stale entries once, then keep watching the files the
        # objectdb knows about for changes/moves/removals.
        self.objectdb.validate_files()
        observer = resourceobserver.ResourceObserver(
            changed=self._resource_changed, moved=self._resource_moved,
            removed=self._resource_moved)
        files = []
        for path in self.objectdb.get_files():
            resource = self.to_pyobject.path_to_resource(path)
            if resource is not None and resource.project == self.project:
                files.append(resource)
        self.observer = resourceobserver.FilteredResourceObserver(observer,
                                                                  files)
        self.objectdb.add_file_list_observer(_FileListObserver(self))
        self.project.add_observer(self.observer)

    def _resource_changed(self, resource):
        # Re-validate the data stored for a changed resource.
        try:
            self.objectdb.validate_file(
                self.to_textual.resource_to_path(resource))
        except exceptions.ModuleSyntaxError:
            # The module cannot be parsed right now; keep the old data.
            pass

    def _resource_moved(self, resource, new_resource=None):
        # Track moves and removals; `new_resource` is `None` on removal.
        self.observer.remove_resource(resource)
        if new_resource is not None:
            old = self.to_textual.resource_to_path(resource)
            new = self.to_textual.resource_to_path(new_resource)
            self.objectdb.file_moved(old, new)
            self.observer.add_resource(new_resource)

    def get_returned(self, pyobject, args):
        """Return the object `pyobject` returns for `args`, or `None`.

        Prefers a result recorded for exactly `args`; otherwise falls
        back to the best return value recorded for any call.
        """
        result = self.get_exact_returned(pyobject, args)
        if result is not None:
            return result
        path, key = self._get_scope(pyobject)
        if path is None:
            return None
        for call_info in self.objectdb.get_callinfos(path, key):
            returned = call_info.get_returned()
            if returned and returned[0] not in ('unknown', 'none'):
                result = returned
                break
            # Remember the first recorded value as a fallback.  BUG FIX:
            # this check must stay inside the loop; previously it sat
            # after the loop, so with zero recorded call-infos `returned`
            # was referenced while unbound (NameError).
            if result is None:
                result = returned
        if result is not None:
            return self.to_pyobject(result)

    def get_exact_returned(self, pyobject, args):
        """Return the recorded result for exactly `args`, or `None`."""
        path, key = self._get_scope(pyobject)
        if path is not None:
            returned = self.objectdb.get_returned(
                path, key, self._args_to_textual(pyobject, args))
            if returned is not None:
                return self.to_pyobject(returned)

    def _args_to_textual(self, pyfunction, args):
        # Textual forms of the arguments matched to the declared
        # parameters (special *args/**kwargs excluded).
        parameters = list(pyfunction.get_param_names(special_args=False))
        arguments = args.get_arguments(parameters)[:len(parameters)]
        textual_args = tuple([self.to_textual(arg)
                              for arg in arguments])
        return textual_args

    def get_parameter_objects(self, pyobject):
        """Guess parameter objects from recorded calls; `None` if unknown."""
        path, key = self._get_scope(pyobject)
        if path is None:
            return None
        arg_count = len(pyobject.get_param_names(special_args=False))
        unknowns = arg_count
        parameters = [None] * arg_count
        for call_info in self.objectdb.get_callinfos(path, key):
            args = call_info.get_parameters()
            for index, arg in enumerate(args[:arg_count]):
                old = parameters[index]
                if self.validation.is_more_valid(arg, old):
                    parameters[index] = arg
                    if self.validation.is_value_valid(arg):
                        unknowns -= 1
            if unknowns == 0:
                # Every parameter has a valid value already.
                break
        if unknowns < arg_count:
            return [self.to_pyobject(parameter)
                    for parameter in parameters]

    def get_passed_objects(self, pyfunction, parameter_index):
        """Return all recorded objects passed at `parameter_index`."""
        path, key = self._get_scope(pyfunction)
        if path is None:
            return []
        result = []
        for call_info in self.objectdb.get_callinfos(path, key):
            args = call_info.get_parameters()
            if len(args) > parameter_index:
                parameter = self.to_pyobject(args[parameter_index])
                if parameter is not None:
                    result.append(parameter)
        return result

    def doa_data_received(self, data):
        """Store one (function, args, returned) record from dynamic analysis."""
        def doi_to_normal(textual):
            # DOA textual forms differ slightly; normalize through a PyObject.
            pyobject = self.doi_to_pyobject(textual)
            return self.to_textual(pyobject)
        function = doi_to_normal(data[0])
        args = tuple([doi_to_normal(textual) for textual in data[1]])
        returned = doi_to_normal(data[2])
        if function[0] == 'defined' and len(function) == 3:
            self._save_data(function, args, returned)

    def function_called(self, pyfunction, params, returned=None):
        """Record a statically observed call of `pyfunction`."""
        function_text = self.to_textual(pyfunction)
        params_text = tuple([self.to_textual(param)
                             for param in params])
        returned_text = ('unknown',)
        if returned is not None:
            returned_text = self.to_textual(returned)
        self._save_data(function_text, params_text, returned_text)

    def save_per_name(self, scope, name, data):
        path, key = self._get_scope(scope.pyobject)
        if path is not None:
            self.objectdb.add_pername(path, key, name, self.to_textual(data))

    def get_per_name(self, scope, name):
        path, key = self._get_scope(scope.pyobject)
        if path is not None:
            result = self.objectdb.get_pername(path, key, name)
            if result is not None:
                return self.to_pyobject(result)

    def _save_data(self, function, args, returned=('unknown',)):
        # function is a ('defined', path, key) triple.
        self.objectdb.add_callinfo(function[1], function[2], args, returned)

    def _get_scope(self, pyobject):
        """Return a (path, key) pair naming `pyobject`'s scope, or (None, None)."""
        resource = pyobject.get_module().get_resource()
        if resource is None:
            return None, None
        textual = self.to_textual(pyobject)
        if textual[0] == 'defined':
            path = textual[1]
            if len(textual) == 3:
                key = textual[2]
            else:
                # Module-level scope.
                key = ''
            return path, key
        return None, None

    def sync(self):
        """Flush the objectdb to disk."""
        # BUG FIX: `ObjectDB` only provides `write()` (see objectdb.py);
        # the previous `self.objectdb.sync()` raised `AttributeError`.
        self.objectdb.write()

    def __str__(self):
        return str(self.objectdb)
+
+
class TextualValidation(object):
    """Decides whether stored textual object data is still meaningful.

    `to_pyobject` converts textual forms back to objects; a form that
    no longer converts is considered invalid.
    """

    def __init__(self, to_pyobject):
        self.to_pyobject = to_pyobject

    def is_value_valid(self, value):
        # ???: Should none and unknown be considered valid?
        if value is None:
            return False
        if value[0] in ('none', 'unknown'):
            return False
        return self.to_pyobject(value) is not None

    def is_more_valid(self, new, old):
        # Anything beats no data; otherwise a known value beats unknown.
        return old is None or new[0] not in ('unknown', 'none')

    def is_file_valid(self, path):
        return self.to_pyobject.path_to_resource(path) is not None

    def is_scope_valid(self, path, key):
        if key == '':
            # Module-level scope.
            textual = ('defined', path)
        else:
            textual = ('defined', path, key)
        return self.to_pyobject(textual) is not None
+
+
+class _FileListObserver(object):
+
+ def __init__(self, object_info):
+ self.object_info = object_info
+ self.observer = self.object_info.observer
+ self.to_pyobject = self.object_info.to_pyobject
+
+ def removed(self, path):
+ resource = self.to_pyobject.path_to_resource(path)
+ if resource is not None:
+ self.observer.remove_resource(resource)
+
+ def added(self, path):
+ resource = self.to_pyobject.path_to_resource(path)
+ if resource is not None:
+ self.observer.add_resource(resource)
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/oi/runmod.py b/.vim/bundle/python-mode/pylibs/rope/base/oi/runmod.py
@@ -0,0 +1,211 @@
+
def __rope_start_everything():
    """Run the script given on the command line, optionally tracing calls.

    Executed (as ``__main__``) by `rope.base.oi.doa.PythonFileRunner`
    with ``sys.argv = [runmod, send_info, project_root, file, ...]``.
    When `send_info` is not ``'-'``, a trace function is installed that
    streams (function, args, returned) records back to the IDE process
    for dynamic object analysis (DOA).
    """
    import os
    import sys
    import socket
    import cPickle as pickle
    import marshal
    import inspect
    import types
    import threading

    class _MessageSender(object):
        # Interface: serialize one DOA record to the parent process.

        def send_data(self, data):
            pass

    class _SocketSender(_MessageSender):
        # Sends pickled records over TCP to 127.0.0.1:port.

        def __init__(self, port):
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect(('127.0.0.1', port))
            self.my_file = s.makefile('w')

        def send_data(self, data):
            if not self.my_file.closed:
                pickle.dump(data, self.my_file)

        def close(self):
            self.my_file.close()

    class _FileSender(_MessageSender):
        # Writes marshalled records to a file/FIFO path.

        def __init__(self, file_name):
            self.my_file = open(file_name, 'wb')

        def send_data(self, data):
            if not self.my_file.closed:
                marshal.dump(data, self.my_file)

        def close(self):
            self.my_file.close()

    def _cached(func):
        # Memoize a one-argument method by its argument.
        cache = {}
        def newfunc(self, arg):
            if arg in cache:
                return cache[arg]
            result = func(self, arg)
            cache[arg] = result
            return result
        return newfunc

    class _FunctionCallDataSender(object):
        """Installs a trace function and streams call records to `sender`."""

        def __init__(self, send_info, project_root):
            self.project_root = project_root
            if send_info.isdigit():
                # A digit string means "connect back on this TCP port".
                self.sender = _SocketSender(int(send_info))
            else:
                self.sender = _FileSender(send_info)

            def global_trace(frame, event, arg):
                # HACK: Ignoring out->in calls
                # This might lose some information
                if self._is_an_interesting_call(frame):
                    return self.on_function_call
            # BUG FIX: `global_trace` and the two settrace calls
            # previously sat at class-body level, installing the tracer
            # at class-definition time with `self` unbound when it
            # fired.  They belong here, inside `__init__`.
            sys.settrace(global_trace)
            threading.settrace(global_trace)

        def on_function_call(self, frame, event, arg):
            # Record argument and return values when a frame returns.
            if event != 'return':
                return
            args = []
            returned = ('unknown',)
            code = frame.f_code
            for argname in code.co_varnames[:code.co_argcount]:
                try:
                    args.append(self._object_to_persisted_form(frame.f_locals[argname]))
                except (TypeError, AttributeError):
                    args.append(('unknown',))
            try:
                returned = self._object_to_persisted_form(arg)
            except (TypeError, AttributeError):
                pass
            try:
                data = (self._object_to_persisted_form(frame.f_code),
                        tuple(args), returned)
                self.sender.send_data(data)
            except TypeError:
                pass
            return self.on_function_call

        def _is_an_interesting_call(self, frame):
            #if frame.f_code.co_name in ['?', '<module>']:
            #    return False
            #return not frame.f_back or not self._is_code_inside_project(frame.f_back.f_code)

            # Interesting: the frame or its caller belongs to the project.
            if not self._is_code_inside_project(frame.f_code) and \
               (not frame.f_back or not self._is_code_inside_project(frame.f_back.f_code)):
                return False
            return True

        def _is_code_inside_project(self, code):
            source = self._path(code.co_filename)
            return source is not None and os.path.exists(source) and \
                _realpath(source).startswith(self.project_root)

        @_cached
        def _get_persisted_code(self, object_):
            source = self._path(object_.co_filename)
            if not os.path.exists(source):
                # Raising TypeError makes callers record ('unknown',).
                raise TypeError('no source')
            return ('defined', _realpath(source), str(object_.co_firstlineno))

        @_cached
        def _get_persisted_class(self, object_):
            try:
                return ('defined', _realpath(inspect.getsourcefile(object_)),
                        object_.__name__)
            except (TypeError, AttributeError):
                return ('unknown',)

        def _get_persisted_builtin(self, object_):
            # Map builtin containers to a textual form, sampling one
            # element to describe what they hold.
            if isinstance(object_, (str, unicode)):
                return ('builtin', 'str')
            if isinstance(object_, list):
                holding = None
                if len(object_) > 0:
                    holding = object_[0]
                return ('builtin', 'list', self._object_to_persisted_form(holding))
            if isinstance(object_, dict):
                keys = None
                values = None
                if len(object_) > 0:
                    keys = object_.keys()[0]
                    values = object_[keys]
                return ('builtin', 'dict',
                        self._object_to_persisted_form(keys),
                        self._object_to_persisted_form(values))
            if isinstance(object_, tuple):
                objects = []
                if len(object_) < 3:
                    # Short tuples keep every element's form.
                    for holding in object_:
                        objects.append(self._object_to_persisted_form(holding))
                else:
                    objects.append(self._object_to_persisted_form(object_[0]))
                return tuple(['builtin', 'tuple'] + objects)
            if isinstance(object_, set):
                holding = None
                if len(object_) > 0:
                    for o in object_:
                        holding = o
                        break
                return ('builtin', 'set', self._object_to_persisted_form(holding))
            return ('unknown',)

        def _object_to_persisted_form(self, object_):
            if object_ is None:
                return ('none',)
            if isinstance(object_, types.CodeType):
                return self._get_persisted_code(object_)
            if isinstance(object_, types.FunctionType):
                return self._get_persisted_code(object_.func_code)
            if isinstance(object_, types.MethodType):
                return self._get_persisted_code(object_.im_func.func_code)
            if isinstance(object_, types.ModuleType):
                return self._get_persisted_module(object_)
            if isinstance(object_, (str, unicode, list, dict, tuple, set)):
                return self._get_persisted_builtin(object_)
            if isinstance(object_, (types.TypeType, types.ClassType)):
                return self._get_persisted_class(object_)
            return ('instance', self._get_persisted_class(type(object_)))

        @_cached
        def _get_persisted_module(self, object_):
            path = self._path(object_.__file__)
            if path and os.path.exists(path):
                return ('defined', _realpath(path))
            return ('unknown',)

        def _path(self, path):
            # Map compiled file names back to sources; non-.py paths
            # map to None.
            if path.endswith('.pyc'):
                path = path[:-1]
            if path.endswith('.py'):
                return path

        def close(self):
            self.sender.close()
            sys.settrace(None)

    def _realpath(path):
        # Canonical absolute path for project-membership checks.
        return os.path.realpath(os.path.abspath(os.path.expanduser(path)))

    send_info = sys.argv[1]
    project_root = sys.argv[2]
    file_to_run = sys.argv[3]
    run_globals = globals()
    run_globals.update({'__name__': '__main__',
                        '__builtins__': __builtins__,
                        '__file__': file_to_run})
    if send_info != '-':
        # Install DOA tracing before running the target script.
        data_sender = _FunctionCallDataSender(send_info, project_root)
    del sys.argv[1:4]
    execfile(file_to_run, run_globals)
    if send_info != '-':
        data_sender.close()
+
+
# Entry point: this module is executed as a script (never imported) by
# `rope.base.oi.doa.PythonFileRunner`.
if __name__ == '__main__':
    __rope_start_everything()
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/oi/soa.py b/.vim/bundle/python-mode/pylibs/rope/base/oi/soa.py
@@ -0,0 +1,136 @@
+import rope.base.ast
+import rope.base.oi.soi
+import rope.base.pynames
+from rope.base import pyobjects, evaluate, astutils, arguments
+
+
def analyze_module(pycore, pymodule, should_analyze,
                   search_subscopes, followed_calls):
    """Analyze `pymodule` for static object inference

    Collects object information by analyzing the module's scopes; the
    traversal works inside-out, starting from the innermost scopes.

    """
    _analyze_node(pycore, pymodule, should_analyze,
                  search_subscopes, followed_calls)
+
+
def _analyze_node(pycore, pydefined, should_analyze,
                  search_subscopes, followed_calls):
    # Recurse into child scopes first so analysis runs inside-out.
    if search_subscopes(pydefined):
        for scope in pydefined.get_scope().get_scopes():
            _analyze_node(pycore, scope.pyobject, should_analyze,
                          search_subscopes, followed_calls)
    if should_analyze(pydefined):
        # `followed_calls` is the remaining budget for following calls
        # into other functions; it decreases by one per hop.
        new_followed_calls = max(0, followed_calls - 1)
        return_true = lambda pydefined: True
        return_false = lambda pydefined: False
        def _follow(pyfunction):
            # Analyze a called function itself, without searching its
            # subscopes again.
            _analyze_node(pycore, pyfunction, return_true,
                          return_false, new_followed_calls)
        if not followed_calls:
            _follow = None
        visitor = SOAVisitor(pycore, pydefined, _follow)
        for child in rope.base.ast.get_child_nodes(pydefined.get_ast()):
            rope.base.ast.walk(child, visitor)
+
+
class SOAVisitor(object):
    """AST visitor that feeds observed calls and subscript assignments
    into the project's object information.

    Used by `_analyze_node` to analyze a single scope's body; nested
    function/class bodies are analyzed separately.
    """

    def __init__(self, pycore, pydefined, follow_callback=None):
        self.pycore = pycore
        self.pymodule = pydefined.get_module()
        self.scope = pydefined.get_scope()
        # Called with a PyFunction whose inferred parameters changed.
        self.follow = follow_callback

    def _FunctionDef(self, node):
        # Nested scopes are handled by `_analyze_node`; do not descend.
        pass

    def _ClassDef(self, node):
        pass

    def _Call(self, node):
        for child in rope.base.ast.get_child_nodes(node):
            rope.base.ast.walk(child, self)
        primary, pyname = evaluate.eval_node2(self.scope, node.func)
        if pyname is None:
            return
        pyfunction = pyname.get_object()
        if isinstance(pyfunction, pyobjects.AbstractFunction):
            args = arguments.create_arguments(primary, pyfunction,
                                              node, self.scope)
        elif isinstance(pyfunction, pyobjects.PyClass):
            # Constructor call: record as a call of `__init__` with the
            # new instance as the implicit first argument.
            pyclass = pyfunction
            if '__init__' in pyfunction:
                pyfunction = pyfunction['__init__'].get_object()
            pyname = rope.base.pynames.UnboundName(pyobjects.PyObject(pyclass))
            args = self._args_with_self(primary, pyname, pyfunction, node)
        elif '__call__' in pyfunction:
            # Callable instance: record as a call of `__call__`.
            pyfunction = pyfunction['__call__'].get_object()
            args = self._args_with_self(primary, pyname, pyfunction, node)
        else:
            return
        self._call(pyfunction, args)

    def _args_with_self(self, primary, self_pyname, pyfunction, node):
        # Prepend the instance/class object as the implicit first argument.
        base_args = arguments.create_arguments(primary, pyfunction,
                                               node, self.scope)
        return arguments.MixedArguments(self_pyname, base_args, self.scope)

    def _call(self, pyfunction, args):
        if isinstance(pyfunction, pyobjects.PyFunction):
            if self.follow is not None:
                before = self._parameter_objects(pyfunction)
            self.pycore.object_info.function_called(
                pyfunction, args.get_arguments(pyfunction.get_param_names()))
            # Invalidate cached parameter objects so they are re-inferred.
            pyfunction._set_parameter_pyobjects(None)
            if self.follow is not None:
                after = self._parameter_objects(pyfunction)
                if after != before:
                    self.follow(pyfunction)
        # XXX: Maybe we should not call every builtin function
        # NOTE(review): `rope.base.builtins` is not imported by this
        # module; this relies on it having been imported elsewhere in
        # the `rope.base` package — confirm.
        if isinstance(pyfunction, rope.base.builtins.BuiltinFunction):
            pyfunction.get_returned_object(args)

    def _parameter_objects(self, pyfunction):
        # Snapshot of the currently inferred parameter objects.
        result = []
        for i in range(len(pyfunction.get_param_names(False))):
            result.append(pyfunction.get_parameter(i))
        return result

    def _Assign(self, node):
        for child in rope.base.ast.get_child_nodes(node):
            rope.base.ast.walk(child, self)
        visitor = _SOAAssignVisitor()
        nodes = []
        for child in node.targets:
            rope.base.ast.walk(child, visitor)
            nodes.extend(visitor.nodes)
        # Record `x[i] = v` assignments as calls of `x.__setitem__(i, v)`.
        for subscript, levels in nodes:
            instance = evaluate.eval_node(self.scope, subscript.value)
            args_pynames = []
            args_pynames.append(evaluate.eval_node(self.scope,
                                                   subscript.slice.value))
            value = rope.base.oi.soi._infer_assignment(
                rope.base.pynames.AssignmentValue(node.value, levels), self.pymodule)
            args_pynames.append(rope.base.pynames.UnboundName(value))
            if instance is not None and value is not None:
                pyobject = instance.get_object()
                if '__setitem__' in pyobject:
                    pyfunction = pyobject['__setitem__'].get_object()
                    args = arguments.ObjectArguments([instance] + args_pynames)
                    self._call(pyfunction, args)
            # IDEA: handle `__setslice__`, too
+
+
class _SOAAssignVisitor(astutils._NodeNameCollector):
    """Collects `x[i] = v`-style subscript targets of an assignment."""

    def __init__(self):
        super(_SOAAssignVisitor, self).__init__()
        self.nodes = []

    def _added(self, node, levels):
        # Only plain index subscripts are interesting (no slices).
        is_index_subscript = (isinstance(node, rope.base.ast.Subscript) and
                              isinstance(node.slice, rope.base.ast.Index))
        if is_index_subscript:
            self.nodes.append((node, levels))
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/oi/soi.py b/.vim/bundle/python-mode/pylibs/rope/base/oi/soi.py
@@ -0,0 +1,186 @@
+"""A module for inferring objects
+
+For more information see the documentation in `rope.base.oi`
+package.
+
+"""
+import rope.base.builtins
+import rope.base.pynames
+import rope.base.pyobjects
+from rope.base import evaluate, utils, arguments
+
+
+_ignore_inferred = utils.ignore_exception(
+ rope.base.pyobjects.IsBeingInferredError)
+
+
@_ignore_inferred
def infer_returned_object(pyfunction, args):
    """Infer the `PyObject` this `PyFunction` returns after calling"""
    object_info = pyfunction.pycore.object_info
    exact = object_info.get_exact_returned(pyfunction, args)
    if exact is not None:
        return exact
    inferred = _infer_returned(pyfunction, args)
    if inferred is None:
        return object_info.get_returned(pyfunction, args)
    if args and pyfunction.get_module().get_resource() is not None:
        # Remember what this call site produced for later queries.
        params = args.get_arguments(
            pyfunction.get_param_names(special_args=False))
        object_info.function_called(pyfunction, params, inferred)
    return inferred
+
@_ignore_inferred
def infer_parameter_objects(pyfunction):
    """Infer the `PyObject`\s of parameters of this `PyFunction`"""
    object_info = pyfunction.pycore.object_info
    parameters = object_info.get_parameter_objects(pyfunction)
    if parameters is None:
        # No recorded call information; fall back to static inference.
        parameters = _parameter_objects(pyfunction)
    _handle_first_parameter(pyfunction, parameters)
    return parameters
+
+def _handle_first_parameter(pyobject, parameters):
+ kind = pyobject.get_kind()
+ if parameters is None or kind not in ['method', 'classmethod']:
+ pass
+ if not parameters:
+ if not pyobject.get_param_names(special_args=False):
+ return
+ parameters.append(rope.base.pyobjects.get_unknown())
+ if kind == 'method':
+ parameters[0] = rope.base.pyobjects.PyObject(pyobject.parent)
+ if kind == 'classmethod':
+ parameters[0] = pyobject.parent
+
@_ignore_inferred
def infer_assigned_object(pyname):
    """Infer the object assigned to `pyname`, newest assignment first."""
    for assignment in reversed(pyname.assignments):
        inferred = _infer_assignment(assignment, pyname.module)
        if inferred is not None:
            return inferred
+
def get_passed_objects(pyfunction, parameter_index):
    """Objects observed being passed at `parameter_index` of `pyfunction`.

    Falls back to statically inferred parameter objects when no calls
    have been recorded.
    """
    object_info = pyfunction.pycore.object_info
    result = object_info.get_passed_objects(pyfunction, parameter_index)
    if result:
        return result
    statically_inferred = _parameter_objects(pyfunction)
    if parameter_index < len(statically_inferred):
        result.append(statically_inferred[parameter_index])
    return result
+
def _infer_returned(pyobject, args):
    """Statically infer the object returned by function `pyobject`.

    When `args` is given, the function's parameter objects are forced
    to the call-site argument objects before evaluating its return
    expressions.  At most the last three `return` statements are tried,
    newest first; for generators the result is wrapped in a generator
    object.  Returns `None` when nothing can be inferred.
    """
    if args:
        # HACK: Setting parameter objects manually
        # This is not thread safe and might cause problems if `args`
        # does not come from a good call site
        pyobject.get_scope().invalidate_data()
        pyobject._set_parameter_pyobjects(
            args.get_arguments(pyobject.get_param_names(special_args=False)))
    scope = pyobject.get_scope()
    if not scope._get_returned_asts():
        return
    # Only try the last few return statements to bound the work.
    maxtries = 3
    for returned_node in reversed(scope._get_returned_asts()[-maxtries:]):
        try:
            resulting_pyname = evaluate.eval_node(scope, returned_node)
            if resulting_pyname is None:
                continue
            pyobject = resulting_pyname.get_object()
            if pyobject == rope.base.pyobjects.get_unknown():
                continue
            if not scope._is_generator():
                return pyobject
            else:
                return rope.base.builtins.get_generator(pyobject)
        except rope.base.pyobjects.IsBeingInferredError:
            # Recursive inference; try an earlier return statement.
            pass
+
def _parameter_objects(pyobject):
    """One unknown object per (non-special) parameter of `pyobject`."""
    return [rope.base.pyobjects.get_unknown()
            for _ in pyobject.get_param_names(special_args=False)]
+
+# handling `rope.base.pynames.AssignmentValue`
+
@_ignore_inferred
def _infer_assignment(assignment, pymodule):
    """Infer the object produced by `assignment` within `pymodule`."""
    followed = _follow_pyname(assignment, pymodule)
    if followed is None:
        return None
    pyname, pyobject = followed
    evaluated = _follow_evaluations(assignment, pyname, pyobject)
    if evaluated is None:
        return None
    return _follow_levels(assignment, evaluated)
+
def _follow_levels(assignment, pyobject):
    """Descend into tuple/list holdings per the assignment's unpacking levels."""
    for index in assignment.levels:
        pytype = pyobject.get_type()
        if isinstance(pytype, rope.base.builtins.Tuple):
            holdings = pytype.get_holding_objects()
            # Clamp the index so star-like trailing elements still map.
            pyobject = (holdings[min(len(holdings) - 1, index)]
                        if holdings else None)
        elif isinstance(pytype, rope.base.builtins.List):
            pyobject = pytype.holding
        else:
            pyobject = None
        if pyobject is None:
            break
    return pyobject
+
@_ignore_inferred
def _follow_pyname(assignment, pymodule, lineno=None):
    """Evaluate the assigned expression and return ``(pyname, pyobject)``.

    Returns `None` when the expression cannot be evaluated.  When the
    result is a `Property` accessed in a class body, the property's
    computed object is returned in place of the property itself.
    """
    assign_node = assignment.ast_node
    if lineno is None:
        lineno = _get_lineno_for_node(assign_node)
    holding_scope = pymodule.get_scope().get_inner_scope_for_line(lineno)
    pyname = evaluate.eval_node(holding_scope, assign_node)
    if pyname is not None:
        result = pyname.get_object()
        if isinstance(result.get_type(), rope.base.builtins.Property) and \
           holding_scope.get_kind() == 'Class':
            # Accessing a property on the class: evaluate the getter
            # with an instance of the class as its argument.
            arg = rope.base.pynames.UnboundName(
                rope.base.pyobjects.PyObject(holding_scope.pyobject))
            return pyname, result.get_type().get_property_object(
                arguments.ObjectArguments([arg]))
        return pyname, result
+
@_ignore_inferred
def _follow_evaluations(assignment, pyname, pyobject):
    """Apply the assignment's ``evaluation`` string to `pyobject`.

    The evaluation is a dotted path such as ``'attr.method()'``; each
    token is an attribute access and a trailing ``()`` means calling the
    resulting object.  Returns the final object, wrapped in a fresh
    instance when the assignment's `assign_type` flag is set; `None`
    when any step cannot be resolved.
    """
    new_pyname = pyname
    tokens = assignment.evaluation.split('.')
    for token in tokens:
        call = token.endswith('()')
        if call:
            token = token[:-2]
        if token:
            # `pyname` lags one step behind: it is the object the
            # attribute was accessed on, used as the call's self below.
            pyname = new_pyname
            new_pyname = _get_attribute(pyobject, token)
            if new_pyname is not None:
                pyobject = new_pyname.get_object()
        if pyobject is not None and call:
            if isinstance(pyobject, rope.base.pyobjects.AbstractFunction):
                args = arguments.ObjectArguments([pyname])
                pyobject = pyobject.get_returned_object(args)
            else:
                # Calling something that is not a function: give up.
                pyobject = None
        if pyobject is None:
            break
    if pyobject is not None and assignment.assign_type:
        return rope.base.pyobjects.PyObject(pyobject)
    return pyobject
+
+
+def _get_lineno_for_node(assign_node):
+ if hasattr(assign_node, 'lineno') and \
+ assign_node.lineno is not None:
+ return assign_node.lineno
+ return 1
+
+def _get_attribute(pyobject, name):
+ if pyobject is not None and name in pyobject:
+ return pyobject[name]
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/oi/transform.py b/.vim/bundle/python-mode/pylibs/rope/base/oi/transform.py
@@ -0,0 +1,285 @@
+"""Provides classes for persisting `PyObject`\s"""
+import os
+import re
+
+import rope.base.builtins
+from rope.base import exceptions
+
+
class PyObjectToTextual(object):
    """For transforming `PyObject` to textual form

    This can be used for storing `PyObjects` in files. Use
    `TextualToPyObject` for converting back.

    """

    def __init__(self, project):
        self.project = project

    def transform(self, pyobject):
        """Transform a `PyObject` to textual form"""
        if pyobject is None:
            return ('none',)
        handler_name = type(pyobject).__name__ + '_to_textual'
        try:
            # An AttributeError raised inside a handler is also treated
            # as unknown, not only a missing handler.
            handler = getattr(self, handler_name)
            return handler(pyobject)
        except AttributeError:
            return ('unknown',)

    def __call__(self, pyobject):
        return self.transform(pyobject)

    def PyObject_to_textual(self, pyobject):
        pytype = pyobject.get_type()
        if not isinstance(pytype, rope.base.pyobjects.AbstractClass):
            return ('unknown',)
        transformed = self.transform(pytype)
        if transformed[0] == 'defined':
            return ('instance', transformed)
        return transformed

    def PyFunction_to_textual(self, pyobject):
        return self._defined_to_textual(pyobject)

    def PyClass_to_textual(self, pyobject):
        return self._defined_to_textual(pyobject)

    def _defined_to_textual(self, pyobject):
        # ('defined', module_path, 'dotted.name.inside.module')
        names = []
        current = pyobject
        while current.parent is not None:
            names.append(current.get_name())
            current = current.parent
        names.reverse()
        return ('defined', self._get_pymodule_path(current.get_module()),
                '.'.join(names))

    def PyModule_to_textual(self, pyobject):
        return ('defined', self._get_pymodule_path(pyobject))

    def PyPackage_to_textual(self, pyobject):
        return ('defined', self._get_pymodule_path(pyobject))

    def List_to_textual(self, pyobject):
        return ('builtin', 'list', self.transform(pyobject.holding))

    def Dict_to_textual(self, pyobject):
        return ('builtin', 'dict', self.transform(pyobject.keys),
                self.transform(pyobject.values))

    def Tuple_to_textual(self, pyobject):
        transformed = [self.transform(holding)
                       for holding in pyobject.get_holding_objects()]
        return tuple(['builtin', 'tuple'] + transformed)

    def Set_to_textual(self, pyobject):
        return ('builtin', 'set', self.transform(pyobject.holding))

    def Iterator_to_textual(self, pyobject):
        return ('builtin', 'iter', self.transform(pyobject.holding))

    def Generator_to_textual(self, pyobject):
        return ('builtin', 'generator', self.transform(pyobject.holding))

    def Str_to_textual(self, pyobject):
        return ('builtin', 'str')

    def File_to_textual(self, pyobject):
        return ('builtin', 'file')

    def BuiltinFunction_to_textual(self, pyobject):
        return ('builtin', 'function', pyobject.get_name())

    def _get_pymodule_path(self, pymodule):
        return self.resource_to_path(pymodule.get_resource())

    def resource_to_path(self, resource):
        # In-project resources are stored project-relative; foreign
        # resources keep their absolute path.
        if resource.project == self.project:
            return resource.path
        return resource.real_path
+
+
class TextualToPyObject(object):
    """For transforming textual form to `PyObject`

    The inverse of `PyObjectToTextual`.
    """

    def __init__(self, project, allow_in_project_absolutes=False):
        # `allow_in_project_absolutes` is accepted for interface
        # compatibility but is not used by this implementation.
        self.project = project

    def __call__(self, textual):
        return self.transform(textual)

    def transform(self, textual):
        """Transform an object from textual form to `PyObject`"""
        if textual is None:
            return None
        # Renamed from `type` to avoid shadowing the builtin.
        kind = textual[0]
        try:
            method = getattr(self, kind + '_to_pyobject')
            return method(textual)
        except AttributeError:
            # No handler for this kind of textual form.
            return None

    def builtin_to_pyobject(self, textual):
        """Dispatch ('builtin', name, ...) to ``builtin_<name>_to_pyobject``."""
        # Removed an unused local (`name = textual[1]`).
        method = getattr(self, 'builtin_%s_to_pyobject' % textual[1], None)
        if method is not None:
            return method(textual)

    def builtin_str_to_pyobject(self, textual):
        return rope.base.builtins.get_str()

    def builtin_list_to_pyobject(self, textual):
        holding = self.transform(textual[2])
        return rope.base.builtins.get_list(holding)

    def builtin_dict_to_pyobject(self, textual):
        keys = self.transform(textual[2])
        values = self.transform(textual[3])
        return rope.base.builtins.get_dict(keys, values)

    def builtin_tuple_to_pyobject(self, textual):
        objects = []
        for holding in textual[2:]:
            objects.append(self.transform(holding))
        return rope.base.builtins.get_tuple(*objects)

    def builtin_set_to_pyobject(self, textual):
        holding = self.transform(textual[2])
        return rope.base.builtins.get_set(holding)

    def builtin_iter_to_pyobject(self, textual):
        holding = self.transform(textual[2])
        return rope.base.builtins.get_iterator(holding)

    def builtin_generator_to_pyobject(self, textual):
        holding = self.transform(textual[2])
        return rope.base.builtins.get_generator(holding)

    def builtin_file_to_pyobject(self, textual):
        return rope.base.builtins.get_file()

    def builtin_function_to_pyobject(self, textual):
        # Only builtins known by name can be resurrected.
        if textual[2] in rope.base.builtins.builtins:
            return rope.base.builtins.builtins[textual[2]].get_object()

    def unknown_to_pyobject(self, textual):
        return None

    def none_to_pyobject(self, textual):
        return None

    def _module_to_pyobject(self, textual):
        path = textual[1]
        return self._get_pymodule(path)

    def _hierarchical_defined_to_pyobject(self, textual):
        """Resolve ('defined', path, 'a.b.c') by walking nested scopes."""
        path = textual[1]
        names = textual[2].split('.')
        pymodule = self._get_pymodule(path)
        pyobject = pymodule
        for name in names:
            if pyobject is None:
                return None
            if isinstance(pyobject, rope.base.pyobjects.PyDefinedObject):
                try:
                    pyobject = pyobject.get_scope()[name].get_object()
                except exceptions.NameNotFoundError:
                    return None
            else:
                # Cannot descend into non-defined objects.
                return None
        return pyobject

    def defined_to_pyobject(self, textual):
        if len(textual) == 2 or textual[2] == '':
            return self._module_to_pyobject(textual)
        else:
            return self._hierarchical_defined_to_pyobject(textual)

    def instance_to_pyobject(self, textual):
        instance_type = self.transform(textual[1])
        if instance_type is not None:
            return rope.base.pyobjects.PyObject(instance_type)

    def _get_pymodule(self, path):
        resource = self.path_to_resource(path)
        if resource is not None:
            return self.project.pycore.resource_to_pyobject(resource)

    def path_to_resource(self, path):
        """Map a stored path back to a resource, or `None` on failure."""
        try:
            root = self.project.address
            if not os.path.isabs(path):
                return self.project.get_resource(path)
            if path == root or path.startswith(root + os.sep):
                # INFO: This is a project file; should not be absolute
                return None
            import rope.base.project
            return rope.base.project.get_no_project().get_resource(path)
        except exceptions.ResourceNotFoundError:
            return None
+
+
class DOITextualToPyObject(TextualToPyObject):
    """For transforming textual form to `PyObject`

    The textual form DOI uses is different from rope's standard
    textual form. The reason is that we cannot find the needed
    information by analyzing live objects. This class can be
    used to transform DOI textual form to `PyObject` and later
    we can convert it to standard textual form using
    `TextualToPyObject` class.

    """

    def _function_to_pyobject(self, textual):
        # ('defined', path, lineno): locate the function by line number.
        path = textual[1]
        lineno = int(textual[2])
        pymodule = self._get_pymodule(path)
        if pymodule is None:
            return None
        inner_scope = pymodule.get_scope().get_inner_scope_for_line(lineno)
        return inner_scope.pyobject

    def _class_to_pyobject(self, textual):
        path, name = textual[1:]
        pymodule = self._get_pymodule(path)
        if pymodule is None:
            return None
        module_scope = pymodule.get_scope()
        suspected = None
        if name in module_scope.get_names():
            suspected = module_scope[name].get_object()
        if suspected is not None and isinstance(
                suspected, rope.base.pyobjects.PyClass):
            return suspected
        # Fall back to scanning the module source for the class
        # definition line.
        lineno = self._find_occurrence(name, pymodule.get_resource().read())
        if lineno is None:
            return None
        return module_scope.get_inner_scope_for_line(lineno).pyobject

    def defined_to_pyobject(self, textual):
        if len(textual) == 2:
            return self._module_to_pyobject(textual)
        # A numeric third element means a function line number,
        # otherwise it is a class name.
        if textual[2].isdigit():
            result = self._function_to_pyobject(textual)
        else:
            result = self._class_to_pyobject(textual)
        if not isinstance(result, rope.base.pyobjects.PyModule):
            return result

    def _find_occurrence(self, name, source):
        """Return the 1-based line of the first ``class <name>`` match."""
        pattern = re.compile(r'^\s*class\s*' + name + r'\b')
        for lineno, line in enumerate(source.split('\n'), 1):
            if pattern.match(line):
                return lineno

    def path_to_resource(self, path):
        import rope.base.libutils
        # DOI may hand us absolute paths for project files; make them
        # project-relative before delegating.
        relpath = rope.base.libutils.relative(self.project.address, path)
        if relpath is not None:
            path = relpath
        return super(DOITextualToPyObject, self).path_to_resource(path)
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/prefs.py b/.vim/bundle/python-mode/pylibs/rope/base/prefs.py
@@ -0,0 +1,41 @@
class Prefs(object):
    """A simple preference store mapping keys to values.

    Keys may be registered with a callback via `add_callback`; setting
    such a key invokes the callback instead of storing the value.
    """

    def __init__(self):
        self.prefs = {}      # plain key -> value storage
        self.callbacks = {}  # key -> callable invoked by `set`

    def set(self, key, value):
        """Set the value of `key` preference to `value`."""
        if key in self.callbacks:
            # Callback-managed keys are never stored here; the callback
            # owns the value.
            self.callbacks[key](value)
        else:
            self.prefs[key] = value

    def add(self, key, value):
        """Add an entry to a list preference

        Add `value` to the list of entries for the `key` preference.

        """
        # `setdefault` replaces the old `if not key in self.prefs` check.
        self.prefs.setdefault(key, []).append(value)

    def get(self, key, default=None):
        """Get the value of the `key` preference, or `default`."""
        return self.prefs.get(key, default)

    def add_callback(self, key, callback):
        """Add `key` preference with `callback` function

        Whenever `key` is set the callback is called with the
        given `value` as parameter.

        """
        self.callbacks[key] = callback

    def __setitem__(self, key, value):
        self.set(key, value)

    def __getitem__(self, key):
        return self.get(key)
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/project.py b/.vim/bundle/python-mode/pylibs/rope/base/project.py
@@ -0,0 +1,376 @@
+import cPickle as pickle
+import os
+import shutil
+import sys
+import warnings
+
+import rope.base.fscommands
+from rope.base import exceptions, taskhandle, prefs, history, pycore, utils
+from rope.base.resourceobserver import *
+from rope.base.resources import File, Folder, _ResourceMatcher
+
+
class _Project(object):
    """Common base for `Project` and `NoProject`.

    Holds observers, preferences and lazily created subsystems
    (`history`, `pycore`).  Subclasses must implement
    `_get_resource_path`.
    """

    def __init__(self, fscommands):
        self.observers = []
        self.fscommands = fscommands
        self.prefs = prefs.Prefs()
        self.data_files = _DataFiles(self)

    def get_resource(self, resource_name):
        """Get a resource in a project.

        `resource_name` is the path of a resource in a project. It is
        the path of a resource relative to project root. Project root
        folder address is an empty string. If the resource does not
        exist a `exceptions.ResourceNotFound` exception would be
        raised. Use `get_file()` and `get_folder()` when you need to
        get nonexistent `Resource`\s.

        """
        path = self._get_resource_path(resource_name)
        if not os.path.exists(path):
            raise exceptions.ResourceNotFoundError(
                'Resource <%s> does not exist' % resource_name)
        elif os.path.isfile(path):
            return File(self, resource_name)
        elif os.path.isdir(path):
            return Folder(self, resource_name)
        else:
            # Exists but is neither a regular file nor a directory
            # (e.g. a special file).
            raise exceptions.ResourceNotFoundError('Unknown resource '
                                                   + resource_name)

    def validate(self, folder):
        """Validate files and folders contained in this folder

        It validates all of the files and folders contained in this
        folder if some observers are interested in them.

        """
        # Copy the list: observers may change during validation.
        for observer in list(self.observers):
            observer.validate(folder)

    def add_observer(self, observer):
        """Register a `ResourceObserver`

        See `FilteredResourceObserver`.
        """
        self.observers.append(observer)

    def remove_observer(self, observer):
        """Remove a registered `ResourceObserver`"""
        if observer in self.observers:
            self.observers.remove(observer)

    def do(self, changes, task_handle=taskhandle.NullTaskHandle()):
        """Apply the changes in a `ChangeSet`

        Most of the time you call this function for committing the
        changes for a refactoring.
        """
        self.history.do(changes, task_handle=task_handle)

    def get_pycore(self):
        """Return the project's `PyCore` (see also the `pycore` property)."""
        return self.pycore

    def get_file(self, path):
        """Get the file with `path` (it may not exist)"""
        return File(self, path)

    def get_folder(self, path):
        """Get the folder with `path` (it may not exist)"""
        return Folder(self, path)

    def is_ignored(self, resource):
        # Base projects ignore nothing; `Project` overrides this.
        return False

    def get_prefs(self):
        """Return the project's `Prefs` object."""
        return self.prefs

    def _get_resource_path(self, name):
        # Abstract: subclasses map a project-relative name to an OS path.
        pass

    @property
    @utils.saveit
    def history(self):
        # Created lazily and cached by `utils.saveit`.
        return history.History(self)

    @property
    @utils.saveit
    def pycore(self):
        # Created lazily and cached by `utils.saveit`.
        return pycore.PyCore(self)

    def close(self):
        warnings.warn('Cannot close a NoProject',
                      DeprecationWarning, stacklevel=2)

    # Subclasses that have a data folder override this.
    ropefolder = None
+
+
class Project(_Project):
    """A Project containing files and folders"""

    def __init__(self, projectroot, fscommands=None,
                 ropefolder='.ropeproject', **prefs):
        """A rope project

        :parameters:
            - `projectroot`: The address of the root folder of the project
            - `fscommands`: Implements the file system operations used
              by rope; have a look at `rope.base.fscommands`
            - `ropefolder`: The name of the folder in which rope stores
              project configurations and data. Pass `None` for not using
              such a folder at all.
            - `prefs`: Specify project preferences. These values
              overwrite config file preferences.

        """
        if projectroot != '/':
            # The '/' root must not be stripped or it would become ''.
            projectroot = _realpath(projectroot).rstrip('/\\')
        self._address = projectroot
        self._ropefolder_name = ropefolder
        if not os.path.exists(self._address):
            os.mkdir(self._address)
        elif not os.path.isdir(self._address):
            raise exceptions.RopeError('Project root exists and'
                                       ' is not a directory')
        if fscommands is None:
            fscommands = rope.base.fscommands.create_fscommands(self._address)
        super(Project, self).__init__(fscommands)
        self.ignored = _ResourceMatcher()
        self.file_list = _FileListCacher(self)
        self.prefs.add_callback('ignored_resources', self.ignored.set_patterns)
        if ropefolder is not None:
            # Never report the rope data folder as a project file.
            self.prefs['ignored_resources'] = [ropefolder]
        self._init_prefs(prefs)

    def get_files(self):
        """Return the cached set of non-ignored files in the project."""
        return self.file_list.get_files()

    def _get_resource_path(self, name):
        # Map a project-relative, '/'-separated name to an OS path.
        return os.path.join(self._address, *name.split('/'))

    def _init_ropefolder(self):
        """Create the rope data folder and a default ``config.py`` if missing."""
        if self.ropefolder is not None:
            if not self.ropefolder.exists():
                self._create_recursively(self.ropefolder)
            if not self.ropefolder.has_child('config.py'):
                config = self.ropefolder.create_file('config.py')
                config.write(self._default_config())

    def _create_recursively(self, folder):
        # Create missing parent folders first.
        if folder.parent != self.root and not folder.parent.exists():
            self._create_recursively(folder.parent)
        folder.create()

    def _init_prefs(self, prefs):
        """Load preferences from ``config.py`` (or defaults), then `prefs`.

        The config module's ``set_prefs`` is called before explicit
        `prefs` so that keyword preferences win; ``project_opened`` is
        called last, after the project is fully initialized.
        """
        run_globals = {}
        if self.ropefolder is not None:
            config = self.get_file(self.ropefolder.path + '/config.py')
            run_globals.update({'__name__': '__main__',
                                '__builtins__': __builtins__,
                                '__file__': config.real_path})
            if config.exists():
                config = self.ropefolder.get_child('config.py')
                # NOTE(review): execfile is Python 2 only.
                execfile(config.real_path, run_globals)
            else:
                exec(self._default_config(), run_globals)
            if 'set_prefs' in run_globals:
                run_globals['set_prefs'](self.prefs)
        for key, value in prefs.items():
            self.prefs[key] = value
        self._init_other_parts()
        self._init_ropefolder()
        if 'project_opened' in run_globals:
            run_globals['project_opened'](self)

    def _default_config(self):
        # The default config is the source of `rope.base.default_config`.
        import rope.base.default_config
        import inspect
        return inspect.getsource(rope.base.default_config)

    def _init_other_parts(self):
        # Forcing the creation of `self.pycore` to register observers
        self.pycore

    def is_ignored(self, resource):
        return self.ignored.does_match(resource)

    def sync(self):
        """Closes project open resources"""
        self.close()

    def close(self):
        """Closes project open resources"""
        self.data_files.write()

    def set(self, key, value):
        """Set the `key` preference to `value`"""
        self.prefs.set(key, value)

    @property
    def ropefolder(self):
        # `None` when the project was created with ``ropefolder=None``.
        if self._ropefolder_name is not None:
            return self.get_folder(self._ropefolder_name)

    def validate(self, folder=None):
        if folder is None:
            folder = self.root
        super(Project, self).validate(folder)

    root = property(lambda self: self.get_resource(''))
    address = property(lambda self: self._address)
+
+
class NoProject(_Project):
    """A null object for holding out of project files.

    This class is a singleton; use the `get_no_project` global function.
    """

    def __init__(self):
        super(NoProject, self).__init__(
            rope.base.fscommands.FileSystemCommands())

    def _get_resource_path(self, name):
        # Out-of-project resources are addressed by absolute OS paths.
        return _realpath(name.replace('/', os.path.sep))

    def get_resource(self, name):
        universal = _realpath(name).replace(os.path.sep, '/')
        return super(NoProject, self).get_resource(universal)

    def get_files(self):
        return []

    # Lazily created singleton instance; see `get_no_project`.
    _no_project = None
+
+
def get_no_project():
    """Return the shared `NoProject` singleton (created lazily)."""
    instance = NoProject._no_project
    if instance is None:
        instance = NoProject._no_project = NoProject()
    return instance
+
+
class _FileListCacher(object):
    """Caches the project's file set, invalidated by resource events."""

    def __init__(self, project):
        self.project = project
        self.files = None  # None means the cache is stale
        rawobserver = ResourceObserver(
            self._changed, self._invalid, self._invalid,
            self._invalid, self._invalid)
        self.project.add_observer(rawobserver)

    def get_files(self):
        """Return the cached set of files, rebuilding it when stale."""
        if self.files is None:
            self.files = set()
            self._add_files(self.project.root)
        return self.files

    def _add_files(self, folder):
        # Walk the tree, collecting non-ignored plain files.
        for child in folder.get_children():
            if child.is_folder():
                self._add_files(child)
                continue
            if not self.project.is_ignored(child):
                self.files.add(child)

    def _changed(self, resource):
        # Only folder changes can add or remove files.
        if resource.is_folder():
            self.files = None

    def _invalid(self, resource, new_resource=None):
        self.files = None
+
+
class _DataFiles(object):
    """Reads and writes pickled data files in the project's rope folder."""

    def __init__(self, project):
        self.project = project
        # Callables invoked by `write()`; used to flush cached data.
        self.hooks = []

    def read_data(self, name, compress=False, import_=False):
        """Unpickle `name` from the rope folder.

        Returns the single pickled object, a list when the file holds
        several objects pickled back to back, or `None` when there is
        no rope folder or no such file.  With `import_` set, a legacy
        ``<name>.pickle`` file is migrated first.
        """
        if self.project.ropefolder is None:
            return None
        compress = compress and self._can_compress()
        opener = self._get_opener(compress)
        file = self._get_file(name, compress)
        if not compress and import_:
            self._import_old_files(name)
        if file.exists():
            input = opener(file.real_path, 'rb')
            try:
                result = []
                try:
                    # Read every pickled object until end of stream.
                    while True:
                        result.append(pickle.load(input))
                except EOFError:
                    pass
                if len(result) == 1:
                    return result[0]
                if len(result) > 1:
                    return result
            finally:
                input.close()

    def write_data(self, name, data, compress=False):
        """Pickle `data` into the rope folder under `name` (protocol 2)."""
        if self.project.ropefolder is not None:
            compress = compress and self._can_compress()
            file = self._get_file(name, compress)
            opener = self._get_opener(compress)
            output = opener(file.real_path, 'wb')
            try:
                pickle.dump(data, output, 2)
            finally:
                output.close()

    def add_write_hook(self, hook):
        """Register a callable to be run by `write()`."""
        self.hooks.append(hook)

    def write(self):
        """Run all registered write hooks."""
        for hook in self.hooks:
            hook()

    def _can_compress(self):
        # gzip may be missing in stripped-down interpreters.
        try:
            import gzip
            return True
        except ImportError:
            return False

    def _import_old_files(self, name):
        # Migrate data written by older rope versions ('<name>.pickle').
        old = self._get_file(name + '.pickle', False)
        new = self._get_file(name, False)
        if old.exists() and not new.exists():
            shutil.move(old.real_path, new.real_path)

    def _get_opener(self, compress):
        if compress:
            try:
                import gzip
                return gzip.open
            except ImportError:
                pass
        return open

    def _get_file(self, name, compress):
        # Compressed files get a '.gz' suffix.
        path = self.project.ropefolder.path + '/' + name
        if compress:
            path += '.gz'
        return self.project.get_file(path)
+
+
+def _realpath(path):
+ """Return the real path of `path`
+
+ Is equivalent to ``realpath(abspath(expanduser(path)))``.
+
+ """
+ path = path or ''
+ # there is a bug in cygwin for os.path.abspath() for abs paths
+ if sys.platform == 'cygwin':
+ if path[1:3] == ':\\':
+ return path
+ return os.path.abspath(os.path.expanduser(path))
+ return os.path.realpath(os.path.abspath(os.path.expanduser(path)))
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/pycore.py b/.vim/bundle/python-mode/pylibs/rope/base/pycore.py
@@ -0,0 +1,410 @@
+import bisect
+import difflib
+import sys
+import warnings
+
+import rope.base.oi.doa
+import rope.base.oi.objectinfo
+import rope.base.oi.soa
+from rope.base import ast, exceptions, taskhandle, utils, stdmods
+from rope.base.exceptions import ModuleNotFoundError
+from rope.base.pyobjectsdef import PyModule, PyPackage, PyClass
+import rope.base.resources
+import rope.base.resourceobserver
+from rope.base import builtins
+
+
+class PyCore(object):
+    """Python-specific services of a rope project
+
+    Owns the module/extension caches, the object inference manager and
+    the resource observers that keep cached data up to date.
+    """
+
+    def __init__(self, project):
+        self.project = project
+        self._init_resource_observer()
+        # callables invoked with a resource when its cached data goes stale
+        self.cache_observers = []
+        self.module_cache = _ModuleCache(self)
+        self.extension_cache = _ExtensionCache(self)
+        self.object_info = rope.base.oi.objectinfo.ObjectInfoManager(project)
+        self._init_python_files()
+        self._init_automatic_soa()
+        self._init_source_folders()
+
+    def _init_python_files(self):
+        # without a `python_files` pref, is_python_file() falls back to
+        # a plain '.py' suffix check
+        self.python_matcher = None
+        patterns = self.project.prefs.get('python_files', None)
+        if patterns is not None:
+            self.python_matcher = rope.base.resources._ResourceMatcher()
+            self.python_matcher.set_patterns(patterns)
+
+    def _init_resource_observer(self):
+        callback = self._invalidate_resource_cache
+        observer = rope.base.resourceobserver.ResourceObserver(
+            changed=callback, moved=callback, removed=callback)
+        self.observer = rope.base.resourceobserver.FilteredResourceObserver(observer)
+        self.project.add_observer(self.observer)
+
+    def _init_source_folders(self):
+        # folders listed in the `source_folders` pref, resolved eagerly
+        self._custom_source_folders = []
+        for path in self.project.prefs.get('source_folders', []):
+            folder = self.project.get_resource(path)
+            self._custom_source_folders.append(folder)
+
+    def _init_automatic_soa(self):
+        if not self.automatic_soa:
+            return
+        callback = self._file_changed_for_soa
+        observer = rope.base.resourceobserver.ResourceObserver(
+            changed=callback, moved=callback, removed=callback)
+        self.project.add_observer(observer)
+
+    @property
+    def automatic_soa(self):
+        # `automatic_soi` is the older spelling of this pref; it is
+        # honored as the default for backward compatibility
+        auto_soa = self.project.prefs.get('automatic_soi', None)
+        return self.project.prefs.get('automatic_soa', auto_soa)
+
+    def _file_changed_for_soa(self, resource, new_resource=None):
+        old_contents = self.project.history.\
+            contents_before_current_change(resource)
+        if old_contents is not None:
+            perform_soa_on_changed_scopes(self.project, resource, old_contents)
+
+    def is_python_file(self, resource):
+        if resource.is_folder():
+            return False
+        if self.python_matcher is None:
+            return resource.name.endswith('.py')
+        return self.python_matcher.does_match(resource)
+
+    def get_module(self, name, folder=None):
+        """Returns a `PyObject` if the module was found."""
+        # check if this is a builtin module
+        pymod = self._builtin_module(name)
+        if pymod is not None:
+            return pymod
+        module = self.find_module(name, folder)
+        if module is None:
+            raise ModuleNotFoundError('Module %s not found' % name)
+        return self.resource_to_pyobject(module)
+
+    def _builtin_submodules(self, modname):
+        # collect the direct submodules of `modname` among the
+        # configured extension modules
+        result = {}
+        for extension in self.extension_modules:
+            if extension.startswith(modname + '.'):
+                name = extension[len(modname) + 1:]
+                if '.' not in name:
+                    result[name] = self._builtin_module(extension)
+        return result
+
+    def _builtin_module(self, name):
+        # returns None when `name` is not a known extension module
+        return self.extension_cache.get_pymodule(name)
+
+    def get_relative_module(self, name, folder, level):
+        module = self.find_relative_module(name, folder, level)
+        if module is None:
+            raise ModuleNotFoundError('Module %s not found' % name)
+        return self.resource_to_pyobject(module)
+
+    def get_string_module(self, code, resource=None, force_errors=False):
+        """Returns a `PyObject` object for the given code
+
+        If `force_errors` is `True`, `exceptions.ModuleSyntaxError` is
+        raised if module has syntax errors. This overrides
+        ``ignore_syntax_errors`` project config.
+
+        """
+        return PyModule(self, code, resource, force_errors=force_errors)
+
+    def get_string_scope(self, code, resource=None):
+        """Returns a `Scope` object for the given code"""
+        return self.get_string_module(code, resource).get_scope()
+
+    def _invalidate_resource_cache(self, resource, new_resource=None):
+        for observer in self.cache_observers:
+            observer(resource)
+
+    def _find_module_in_folder(self, folder, modname):
+        # walk the dotted path: packages are folders, leaf modules are
+        # '<name>.py' files; returns None when any part is missing
+        module = folder
+        packages = modname.split('.')
+        for pkg in packages[:-1]:
+            if module.is_folder() and module.has_child(pkg):
+                module = module.get_child(pkg)
+            else:
+                return None
+        if module.is_folder():
+            if module.has_child(packages[-1]) and \
+                    module.get_child(packages[-1]).is_folder():
+                return module.get_child(packages[-1])
+            elif module.has_child(packages[-1] + '.py') and \
+                    not module.get_child(packages[-1] + '.py').is_folder():
+                return module.get_child(packages[-1] + '.py')
+
+    def get_python_path_folders(self):
+        import rope.base.project
+        result = []
+        for src in self.project.prefs.get('python_path', []) + sys.path:
+            try:
+                src_folder = rope.base.project.get_no_project().get_resource(src)
+                result.append(src_folder)
+            except rope.base.exceptions.ResourceNotFoundError:
+                # sys.path entries need not exist on disk; skip them
+                pass
+        return result
+
+    def find_module(self, modname, folder=None):
+        """Returns a resource corresponding to the given module
+
+        returns None if it can not be found
+        """
+        return self._find_module(modname, folder)
+
+    def find_relative_module(self, modname, folder, level):
+        # `level` follows the relative-import convention: 1 means the
+        # current package, each extra level goes one parent up
+        for i in range(level - 1):
+            folder = folder.parent
+        if modname == '':
+            return folder
+        else:
+            return self._find_module_in_folder(folder, modname)
+
+    def _find_module(self, modname, folder=None):
+        """Return `modname` module resource"""
+        # search order: project source folders, then python path, then
+        # the folder of the importing module
+        for src in self.get_source_folders():
+            module = self._find_module_in_folder(src, modname)
+            if module is not None:
+                return module
+        for src in self.get_python_path_folders():
+            module = self._find_module_in_folder(src, modname)
+            if module is not None:
+                return module
+        if folder is not None:
+            module = self._find_module_in_folder(folder, modname)
+            if module is not None:
+                return module
+        return None
+
+    # INFO: It was decided not to cache source folders, since:
+    #  - Does not take much time when the root folder contains
+    #    packages, that is most of the time
+    #  - We need a separate resource observer; `self.observer`
+    #    does not get notified about module and folder creations
+    def get_source_folders(self):
+        """Returns project source folders"""
+        if self.project.root is None:
+            return []
+        result = list(self._custom_source_folders)
+        result.extend(self._find_source_folders(self.project.root))
+        return result
+
+    def resource_to_pyobject(self, resource, force_errors=False):
+        return self.module_cache.get_pymodule(resource, force_errors)
+
+    def get_python_files(self):
+        """Returns all python files available in the project"""
+        return [resource for resource in self.project.get_files()
+                if self.is_python_file(resource)]
+
+    def _is_package(self, folder):
+        # a package is a folder with an '__init__.py' file child
+        if folder.has_child('__init__.py') and \
+                not folder.get_child('__init__.py').is_folder():
+            return True
+        else:
+            return False
+
+    def _find_source_folders(self, folder):
+        # a folder directly containing a package is itself the source
+        # folder; its subfolders are not searched further in that case
+        for resource in folder.get_folders():
+            if self._is_package(resource):
+                return [folder]
+        result = []
+        for resource in folder.get_files():
+            if resource.name.endswith('.py'):
+                result.append(folder)
+                break
+        for resource in folder.get_folders():
+            result.extend(self._find_source_folders(resource))
+        return result
+
+    def run_module(self, resource, args=None, stdin=None, stdout=None):
+        """Run `resource` module
+
+        Returns a `rope.base.oi.doa.PythonFileRunner` object for
+        controlling the process.
+
+        """
+        # `perform_doi` is the older spelling of the `perform_doa` pref
+        perform_doa = self.project.prefs.get('perform_doi', True)
+        perform_doa = self.project.prefs.get('perform_doa', perform_doa)
+        receiver = self.object_info.doa_data_received
+        if not perform_doa:
+            receiver = None
+        runner = rope.base.oi.doa.PythonFileRunner(
+            self, resource, args, stdin, stdout, receiver)
+        runner.add_finishing_observer(self.module_cache.forget_all_data)
+        runner.run()
+        return runner
+
+    def analyze_module(self, resource, should_analyze=lambda py: True,
+                       search_subscopes=lambda py: True, followed_calls=None):
+        """Analyze `resource` module for static object inference
+
+        This function forces rope to analyze this module to collect
+        information about function calls. `should_analyze` is a
+        function that is called with a `PyDefinedObject` argument. If
+        it returns `True` the element is analyzed. If it is `None` or
+        returns `False` the element is not analyzed.
+
+        `search_subscopes` is like `should_analyze`; The difference is
+        that if it returns `False` the sub-scopes are all ignored.
+        That is it is assumed that `should_analyze` returns `False`
+        for all of its subscopes.
+
+        `followed_calls` override the value of ``soa_followed_calls``
+        project config.
+        """
+        if followed_calls is None:
+            followed_calls = self.project.prefs.get('soa_followed_calls', 0)
+        pymodule = self.resource_to_pyobject(resource)
+        # analysis conclusions may change; drop all cached module data
+        self.module_cache.forget_all_data()
+        rope.base.oi.soa.analyze_module(
+            self, pymodule, should_analyze, search_subscopes, followed_calls)
+
+    def get_classes(self, task_handle=taskhandle.NullTaskHandle()):
+        # deprecated stub; always returns an empty list
+        warnings.warn('`PyCore.get_classes()` is deprecated',
+                      DeprecationWarning, stacklevel=2)
+        return []
+
+    def __str__(self):
+        return str(self.module_cache) + str(self.object_info)
+
+    def modname(self, resource):
+        """Return the dotted module name of `resource`"""
+        if resource.is_folder():
+            module_name = resource.name
+            source_folder = resource.parent
+        elif resource.name == '__init__.py':
+            module_name = resource.parent.name
+            source_folder = resource.parent.parent
+        else:
+            module_name = resource.name[:-3]
+            source_folder = resource.parent
+
+        # climb through enclosing packages to build the dotted name
+        while source_folder != source_folder.parent and \
+                source_folder.has_child('__init__.py'):
+            module_name = source_folder.name + '.' + module_name
+            source_folder = source_folder.parent
+        return module_name
+
+    @property
+    @utils.cacheit
+    def extension_modules(self):
+        # names from the `extension_modules` pref, optionally extended
+        # with dynamically loaded stdlib modules
+        result = set(self.project.prefs.get('extension_modules', []))
+        if self.project.prefs.get('import_dynload_stdmods', False):
+            result.update(stdmods.dynload_modules())
+        return result
+
+
+class _ModuleCache(object):
+    """Cache mapping resources to `PyModule`/`PyPackage` objects
+
+    Registers itself with the pycore cache observers so entries are
+    dropped when the underlying resource changes.
+    """
+
+    def __init__(self, pycore):
+        self.pycore = pycore
+        self.module_map = {}
+        self.pycore.cache_observers.append(self._invalidate_resource)
+        self.observer = self.pycore.observer
+
+    def _invalidate_resource(self, resource):
+        if resource in self.module_map:
+            # a change may invalidate conclusions in other modules too,
+            # so all concluded data is forgotten, not just this entry
+            self.forget_all_data()
+            self.observer.remove_resource(resource)
+            del self.module_map[resource]
+
+    def get_pymodule(self, resource, force_errors=False):
+        if resource in self.module_map:
+            return self.module_map[resource]
+        if resource.is_folder():
+            result = PyPackage(self.pycore, resource,
+                               force_errors=force_errors)
+        else:
+            result = PyModule(self.pycore, resource=resource,
+                              force_errors=force_errors)
+            if result.has_errors:
+                # modules with syntax errors are neither cached nor observed
+                return result
+        self.module_map[resource] = result
+        self.observer.add_resource(resource)
+        return result
+
+    def forget_all_data(self):
+        for pymodule in self.module_map.values():
+            pymodule._forget_concluded_data()
+
+    def __str__(self):
+        return 'PyCore caches %d PyModules\n' % len(self.module_map)
+
+
+class _ExtensionCache(object):
+    """Lazily built cache of `BuiltinModule` objects for extension modules"""
+
+    def __init__(self, pycore):
+        self.pycore = pycore
+        self.extensions = {}
+
+    def get_pymodule(self, name):
+        # returns None for names not listed in `extension_modules`
+        if name == '__builtin__':
+            return builtins.builtins
+        allowed = self.pycore.extension_modules
+        if name not in self.extensions and name in allowed:
+            self.extensions[name] = builtins.BuiltinModule(name, self.pycore)
+        return self.extensions.get(name)
+
+
+def perform_soa_on_changed_scopes(project, resource, old_contents):
+    """Run static object analysis only on scopes whose text changed
+
+    Modules with syntax errors silently abort the analysis.
+    """
+    pycore = project.pycore
+    if resource.exists() and pycore.is_python_file(resource):
+        try:
+            new_contents = resource.read()
+            # detecting changes in new_contents relative to old_contents
+            detector = _TextChangeDetector(new_contents, old_contents)
+            def search_subscopes(pydefined):
+                # descend only into scopes overlapping a changed line
+                scope = pydefined.get_scope()
+                return detector.is_changed(scope.get_start(), scope.get_end())
+            def should_analyze(pydefined):
+                # consuming marks the lines handled, so each change set
+                # triggers analysis of a scope at most once
+                scope = pydefined.get_scope()
+                start = scope.get_start()
+                end = scope.get_end()
+                return detector.consume_changes(start, end)
+            pycore.analyze_module(resource, should_analyze, search_subscopes)
+        except exceptions.ModuleSyntaxError:
+            pass
+
+
+class _TextChangeDetector(object):
+    """Line-level change detector between two versions of a text
+
+    NOTE(review): the only caller in this module,
+    `perform_soa_on_changed_scopes`, passes ``(new_contents,
+    old_contents)`` into ``(old, new)``; the line numbers collected
+    below are positions in the *first* argument -- confirm before
+    renaming the parameters.
+    """
+
+    def __init__(self, old, new):
+        self.old = old
+        self.new = new
+        self._set_diffs()
+
+    def _set_diffs(self):
+        differ = difflib.Differ()
+        # sorted 1-based line numbers (in `self.old`) that differ
+        self.lines = []
+        lineno = 0
+        for line in differ.compare(self.old.splitlines(True),
+                                   self.new.splitlines(True)):
+            if line.startswith(' '):
+                # line common to both texts
+                lineno += 1
+            elif line.startswith('-'):
+                # line unique to `self.old`: record it as changed
+                lineno += 1
+                self.lines.append(lineno)
+
+    def is_changed(self, start, end):
+        """Tell whether any of start till end lines have changed
+
+        The end points are inclusive and indices start from 1.
+        """
+        left, right = self._get_changed(start, end)
+        if left < right:
+            return True
+        return False
+
+    def consume_changes(self, start, end):
+        """Clear the changed status of lines from start till end"""
+        left, right = self._get_changed(start, end)
+        if left < right:
+            del self.lines[left:right]
+        return left < right
+
+    def _get_changed(self, start, end):
+        # bisect over sorted `self.lines`; half-open [left, right)
+        left = bisect.bisect_left(self.lines, start)
+        right = bisect.bisect_right(self.lines, end)
+        return left, right
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/pynames.py b/.vim/bundle/python-mode/pylibs/rope/base/pynames.py
@@ -0,0 +1,199 @@
+import rope.base.pyobjects
+from rope.base import exceptions, utils
+
+
+class PyName(object):
+    """Reference to a `PyObject` inside python programs"""
+
+    def get_object(self):
+        """Return the `PyObject` object referenced by this `PyName`"""
+
+    def get_definition_location(self):
+        """Return a (module, lineno) tuple"""
+
+
+class DefinedName(PyName):
+    """A name bound to a defined object (class, function, ...)"""
+
+    def __init__(self, pyobject):
+        self.pyobject = pyobject
+
+    def get_object(self):
+        return self.pyobject
+
+    def get_definition_location(self):
+        return (self.pyobject.get_module(), self.pyobject.get_ast().lineno)
+
+
+class AssignedName(PyName):
+    """Only a placeholder; the concrete subclass lives in `pynamesdef`"""
+
+
+class UnboundName(PyName):
+    """A name whose object is not known; defaults to the unknown object"""
+
+    def __init__(self, pyobject=None):
+        self.pyobject = pyobject
+        if self.pyobject is None:
+            self.pyobject = rope.base.pyobjects.get_unknown()
+
+    def get_object(self):
+        return self.pyobject
+
+    def get_definition_location(self):
+        # unbound names have no known definition
+        return (None, None)
+
+
+class AssignmentValue(object):
+ """An assigned expression"""
+
+ def __init__(self, ast_node, levels=None, evaluation='',
+ assign_type=False):
+ """The `level` is `None` for simple assignments and is
+ a list of numbers for tuple assignments for example in::
+
+ a, (b, c) = x
+
+ The levels for for `a` is ``[0]``, for `b` is ``[1, 0]`` and for
+ `c` is ``[1, 1]``.
+
+ """
+ self.ast_node = ast_node
+ if levels == None:
+ self.levels = []
+ else:
+ self.levels = levels
+ self.evaluation = evaluation
+ self.assign_type = assign_type
+
+ def get_lineno(self):
+ return self.ast_node.lineno
+
+
+class EvaluatedName(PyName):
+    """A name whose object will be evaluated later"""
+
+    def __init__(self, callback, module=None, lineno=None):
+        self.module = module
+        self.lineno = lineno
+        self.callback = callback
+        # `callback` runs lazily the first time the object is requested
+        self.pyobject = _Inferred(callback, _get_concluded_data(module))
+
+    def get_object(self):
+        return self.pyobject.get()
+
+    def get_definition_location(self):
+        return (self.module, self.lineno)
+
+    def invalidate(self):
+        """Forget the `PyObject` this `PyName` holds"""
+        self.pyobject.set(None)
+
+
+class ParameterName(PyName):
+    """Only a placeholder; the concrete subclass lives in `pynamesdef`"""
+
+
+class ImportedModule(PyName):
+    """A name bound to a module by an import
+
+    The target module is resolved lazily and memoized in the importing
+    module's concluded data.
+    """
+
+    def __init__(self, importing_module, module_name=None,
+                 level=0, resource=None):
+        self.importing_module = importing_module
+        self.module_name = module_name
+        # a non-zero `level` marks a relative import
+        self.level = level
+        self.resource = resource
+        self.pymodule = _get_concluded_data(self.importing_module)
+
+    def _current_folder(self):
+        resource = self.importing_module.get_module().get_resource()
+        if resource is None:
+            return None
+        return resource.parent
+
+    def _get_pymodule(self):
+        # resolve once; unresolved imports stay None and are retried
+        if self.pymodule.get() is None:
+            pycore = self.importing_module.pycore
+            if self.resource is not None:
+                self.pymodule.set(pycore.resource_to_pyobject(self.resource))
+            elif self.module_name is not None:
+                try:
+                    if self.level == 0:
+                        pymodule = pycore.get_module(self.module_name,
+                                                     self._current_folder())
+                    else:
+                        pymodule = pycore.get_relative_module(
+                            self.module_name, self._current_folder(), self.level)
+                    self.pymodule.set(pymodule)
+                except exceptions.ModuleNotFoundError:
+                    # leave unset; get_object() reports unknown instead
+                    pass
+        return self.pymodule.get()
+
+    def get_object(self):
+        if self._get_pymodule() is None:
+            return rope.base.pyobjects.get_unknown()
+        return self._get_pymodule()
+
+    def get_definition_location(self):
+        pymodule = self._get_pymodule()
+        if not isinstance(pymodule, rope.base.pyobjects.PyDefinedObject):
+            return (None, None)
+        return (pymodule.get_module(), 1)
+
+
+class ImportedName(PyName):
+    """A name imported from another module"""
+
+    def __init__(self, imported_module, imported_name):
+        self.imported_module = imported_module
+        self.imported_name = imported_name
+
+    def _get_imported_pyname(self):
+        try:
+            result = self.imported_module.get_object()[self.imported_name]
+            if result != self:
+                return result
+        except exceptions.AttributeNotFoundError:
+            pass
+        # unresolvable or self-referential import
+        return UnboundName()
+
+    @utils.prevent_recursion(rope.base.pyobjects.get_unknown)
+    def get_object(self):
+        return self._get_imported_pyname().get_object()
+
+    @utils.prevent_recursion(lambda: (None, None))
+    def get_definition_location(self):
+        return self._get_imported_pyname().get_definition_location()
+
+
+def _get_concluded_data(module):
+    # modules own concluded-data slots; without a module, hand back a
+    # stand-alone holder that never gets invalidated by the module
+    if module is None:
+        return rope.base.pyobjects._ConcludedData()
+    return module._get_concluded_data()
+
+
+def _circular_inference():
+    # used as the recursion guard for `_Inferred.get`
+    raise rope.base.pyobjects.IsBeingInferredError(
+        'Circular Object Inference')
+
+class _Inferred(object):
+    """Lazily evaluated object holder with optional concluded-data cache"""
+
+    def __init__(self, get_inferred, concluded=None):
+        self.get_inferred = get_inferred
+        self.concluded = concluded
+        if self.concluded is None:
+            # no per-module cache available; keep the value locally
+            self.temp = None
+
+    @utils.prevent_recursion(_circular_inference)
+    def get(self, *args, **kwds):
+        if self.concluded is None or self.concluded.get() is None:
+            self.set(self.get_inferred(*args, **kwds))
+        if self._get() is None:
+            # normalize "could not infer" to the unknown object
+            self.set(rope.base.pyobjects.get_unknown())
+        return self._get()
+
+    def set(self, pyobject):
+        if self.concluded is not None:
+            self.concluded.set(pyobject)
+        self.temp = pyobject
+
+    def _get(self):
+        if self.concluded is not None:
+            return self.concluded.get()
+        return self.temp
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/pynamesdef.py b/.vim/bundle/python-mode/pylibs/rope/base/pynamesdef.py
@@ -0,0 +1,55 @@
+import rope.base.oi.soi
+from rope.base import pynames
+from rope.base.pynames import *
+
+
+class AssignedName(pynames.AssignedName):
+    """A name bound by assignment; its object is inferred on demand"""
+
+    def __init__(self, lineno=None, module=None, pyobject=None):
+        self.lineno = lineno
+        self.module = module
+        # list of `AssignmentValue` objects this name was assigned from
+        self.assignments = []
+        self.pyobject = _Inferred(self._get_inferred,
+                                  pynames._get_concluded_data(module))
+        self.pyobject.set(pyobject)
+
+    @utils.prevent_recursion(lambda: None)
+    def _get_inferred(self):
+        if self.module is not None:
+            return rope.base.oi.soi.infer_assigned_object(self)
+
+    def get_object(self):
+        return self.pyobject.get()
+
+    def get_definition_location(self):
+        """Returns a (module, lineno) tuple"""
+        if self.lineno is None and self.assignments:
+            # lazily take the line of the first recorded assignment
+            self.lineno = self.assignments[0].get_lineno()
+        return (self.module, self.lineno)
+
+    def invalidate(self):
+        """Forget the `PyObject` this `PyName` holds"""
+        self.pyobject.set(None)
+
+
+class ParameterName(pynames.ParameterName):
+    """The name of a function parameter, resolved through its function"""
+
+    def __init__(self, pyfunction, index):
+        self.pyfunction = pyfunction
+        # zero-based position of the parameter in the signature
+        self.index = index
+
+    def get_object(self):
+        result = self.pyfunction.get_parameter(self.index)
+        if result is None:
+            result = rope.base.pyobjects.get_unknown()
+        return result
+
+    def get_objects(self):
+        """Returns the list of objects passed as this parameter"""
+        return rope.base.oi.soi.get_passed_objects(
+            self.pyfunction, self.index)
+
+    def get_definition_location(self):
+        return (self.pyfunction.get_module(), self.pyfunction.get_ast().lineno)
+
+# shared with `pynames`; re-exported here for the classes above
+_Inferred = pynames._Inferred
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/pyobjects.py b/.vim/bundle/python-mode/pylibs/rope/base/pyobjects.py
@@ -0,0 +1,311 @@
+from rope.base.fscommands import _decode_data
+from rope.base import ast, exceptions, utils
+
+
+class PyObject(object):
+    """Base class for the objects rope infers about python programs"""
+
+    def __init__(self, type_):
+        # a `None` type marks the base 'Type' object, its own type
+        if type_ is None:
+            type_ = self
+        self.type = type_
+
+    def get_attributes(self):
+        if self.type is self:
+            return {}
+        return self.type.get_attributes()
+
+    def get_attribute(self, name):
+        if name not in self.get_attributes():
+            raise exceptions.AttributeNotFoundError(
+                'Attribute %s not found' % name)
+        return self.get_attributes()[name]
+
+    def get_type(self):
+        return self.type
+
+    def __getitem__(self, key):
+        """The same as ``get_attribute(key)``"""
+        return self.get_attribute(key)
+
+    def __contains__(self, key):
+        """The same as ``key in self.get_attributes()``"""
+        return key in self.get_attributes()
+
+    def __eq__(self, obj):
+        """Check the equality of two `PyObject` objects
+
+        Currently it is assumed that instances (the direct instances
+        of `PyObject`, not the instances of its subclasses) are equal
+        if their types are equal. For every other object like
+        defineds or builtins rope assumes objects are reference
+        objects and their identities should match.
+
+        """
+        if self.__class__ != obj.__class__:
+            return False
+        if type(self) == PyObject:
+            if self is not self.type:
+                return self.type == obj.type
+            else:
+                return self.type is obj.type
+        return self is obj
+
+    def __ne__(self, obj):
+        return not self.__eq__(obj)
+
+    def __hash__(self):
+        """See docs for `__eq__()` method"""
+        if type(self) == PyObject and self != self.type:
+            return hash(self.type) + 1
+        else:
+            return super(PyObject, self).__hash__()
+
+    def __iter__(self):
+        """The same as ``iter(self.get_attributes())``"""
+        return iter(self.get_attributes())
+
+    # lazily created singletons shared by all instances
+    _types = None
+    _unknown = None
+
+    @staticmethod
+    def _get_base_type(name):
+        # create the four base types on first use; 'Type' is its own type
+        if PyObject._types is None:
+            PyObject._types = {}
+            base_type = PyObject(None)
+            PyObject._types['Type'] = base_type
+            PyObject._types['Module'] = PyObject(base_type)
+            PyObject._types['Function'] = PyObject(base_type)
+            PyObject._types['Unknown'] = PyObject(base_type)
+        return PyObject._types[name]
+
+
+def get_base_type(name):
+    """Return the base type with name `name`.
+
+    The base types are 'Type', 'Function', 'Module' and 'Unknown'. It
+    was used to check the type of a `PyObject` but currently its use
+    is discouraged. Use classes defined in this module instead.
+    For example instead of
+    ``pyobject.get_type() == get_base_type('Function')`` use
+    ``isinstance(pyobject, AbstractFunction)``.
+
+    You can use `AbstractClass` for classes, `AbstractFunction` for
+    functions, and `AbstractModule` for modules. You can also use
+    `PyFunction` and `PyClass` for testing if an object is
+    defined somewhere and rope can access its source. These classes
+    provide more methods.
+
+    """
+    # see PyObject._get_base_type for the lazy construction
+    return PyObject._get_base_type(name)
+
+
+def get_unknown():
+    """Return a pyobject whose type is unknown
+
+    Note that two unknown objects are equal. So for example you can
+    write::
+
+        if pyname.get_object() == get_unknown():
+            print 'cannot determine what this pyname holds'
+
+    Rope could have used `None` for indicating unknown objects but
+    we had to check that in many places. So actually this method
+    returns a null object.
+
+    """
+    # singleton; created on first use
+    if PyObject._unknown is None:
+        PyObject._unknown = PyObject(get_base_type('Unknown'))
+    return PyObject._unknown
+
+
+class AbstractClass(PyObject):
+    """Base interface for class-like objects"""
+
+    def __init__(self):
+        super(AbstractClass, self).__init__(get_base_type('Type'))
+
+    def get_name(self):
+        pass
+
+    def get_doc(self):
+        pass
+
+    def get_superclasses(self):
+        return []
+
+
+class AbstractFunction(PyObject):
+    """Base interface for function-like objects"""
+
+    def __init__(self):
+        super(AbstractFunction, self).__init__(get_base_type('Function'))
+
+    def get_name(self):
+        pass
+
+    def get_doc(self):
+        pass
+
+    def get_param_names(self, special_args=True):
+        return []
+
+    def get_returned_object(self, args):
+        # default: nothing is known about the return value
+        return get_unknown()
+
+
+class AbstractModule(PyObject):
+    """Base interface for module-like objects"""
+
+    def __init__(self, doc=None):
+        # NOTE(review): `doc` is accepted but never stored -- presumably
+        # kept for interface compatibility; confirm before removing
+        super(AbstractModule, self).__init__(get_base_type('Module'))
+
+    def get_doc(self):
+        pass
+
+    def get_resource(self):
+        pass
+
+
+class PyDefinedObject(object):
+    """Python defined names that rope can access their sources"""
+
+    def __init__(self, pycore, ast_node, parent):
+        self.pycore = pycore
+        self.ast_node = ast_node
+        self.scope = None
+        self.parent = parent
+        self.structural_attributes = None
+        self.concluded_attributes = self.get_module()._get_concluded_data()
+        self.attributes = self.get_module()._get_concluded_data()
+        self.defineds = None
+
+    # subclasses set this to an AST visitor class that collects names
+    visitor_class = None
+
+    @utils.prevent_recursion(lambda: {})
+    def _get_structural_attributes(self):
+        if self.structural_attributes is None:
+            self.structural_attributes = self._create_structural_attributes()
+        return self.structural_attributes
+
+    @utils.prevent_recursion(lambda: {})
+    def _get_concluded_attributes(self):
+        if self.concluded_attributes.get() is None:
+            # structural names must exist before conclusions are drawn
+            self._get_structural_attributes()
+            self.concluded_attributes.set(self._create_concluded_attributes())
+        return self.concluded_attributes.get()
+
+    def get_attributes(self):
+        if self.attributes.get() is None:
+            # structural attributes win over concluded ones on clashes
+            result = dict(self._get_concluded_attributes())
+            result.update(self._get_structural_attributes())
+            self.attributes.set(result)
+        return self.attributes.get()
+
+    def get_attribute(self, name):
+        if name in self._get_structural_attributes():
+            return self._get_structural_attributes()[name]
+        if name in self._get_concluded_attributes():
+            return self._get_concluded_attributes()[name]
+        raise exceptions.AttributeNotFoundError('Attribute %s not found' %
+                                                name)
+
+    def get_scope(self):
+        if self.scope is None:
+            self.scope = self._create_scope()
+        return self.scope
+
+    def get_module(self):
+        # walk up the parents; the module is the root of the chain
+        current_object = self
+        while current_object.parent is not None:
+            current_object = current_object.parent
+        return current_object
+
+    def get_doc(self):
+        # returns None when there is no leading docstring expression
+        if len(self.get_ast().body) > 0:
+            expr = self.get_ast().body[0]
+            if isinstance(expr, ast.Expr) and \
+               isinstance(expr.value, ast.Str):
+                docstring = expr.value.s
+                # decode with the module's declared source encoding
+                coding = self.get_module().coding
+                return _decode_data(docstring, coding)
+
+    def _get_defined_objects(self):
+        if self.defineds is None:
+            # building structural attributes fills `self.defineds`
+            self._get_structural_attributes()
+        return self.defineds
+
+    def _create_structural_attributes(self):
+        if self.visitor_class is None:
+            return {}
+        new_visitor = self.visitor_class(self.pycore, self)
+        for child in ast.get_child_nodes(self.ast_node):
+            ast.walk(child, new_visitor)
+        self.defineds = new_visitor.defineds
+        return new_visitor.names
+
+    def _create_concluded_attributes(self):
+        return {}
+
+    def get_ast(self):
+        return self.ast_node
+
+    def _create_scope(self):
+        pass
+
+
+class PyFunction(PyDefinedObject, AbstractFunction):
+    """Only a placeholder; the concrete subclass lives in `pyobjectsdef`"""
+
+
+class PyClass(PyDefinedObject, AbstractClass):
+    """Only a placeholder; the concrete subclass lives in `pyobjectsdef`"""
+
+
+class _ConcludedData(object):
+    """A simple invalidatable holder for cached inference conclusions"""
+
+    def __init__(self):
+        self.data_ = None
+
+    def set(self, data):
+        self.data_ = data
+
+    def get(self):
+        return self.data_
+
+    data = property(get, set)
+
+    def _invalidate(self):
+        self.data = None
+
+    def __str__(self):
+        return '<' + str(self.data) + '>'
+
+
+class _PyModule(PyDefinedObject, AbstractModule):
+    """Common base of `PyModule` and `PyPackage`
+
+    Tracks every `_ConcludedData` handed out so cached conclusions can
+    be dropped together via `_forget_concluded_data`.
+    """
+
+    def __init__(self, pycore, ast_node, resource):
+        self.resource = resource
+        self.concluded_data = []
+        AbstractModule.__init__(self)
+        PyDefinedObject.__init__(self, pycore, ast_node, None)
+
+    def _get_concluded_data(self):
+        new_data = _ConcludedData()
+        self.concluded_data.append(new_data)
+        return new_data
+
+    def _forget_concluded_data(self):
+        for data in self.concluded_data:
+            data._invalidate()
+
+    def get_resource(self):
+        return self.resource
+
+
+class PyModule(_PyModule):
+    """Only a placeholder; the concrete subclass lives in `pyobjectsdef`"""
+
+
+class PyPackage(_PyModule):
+    """Only a placeholder; the concrete subclass lives in `pyobjectsdef`"""
+
+
+class IsBeingInferredError(exceptions.RopeError):
+    """Raised when inferring an object recursively requires itself"""
+    pass
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/pyobjectsdef.py b/.vim/bundle/python-mode/pylibs/rope/base/pyobjectsdef.py
@@ -0,0 +1,537 @@
+import rope.base.codeanalyze
+import rope.base.evaluate
+import rope.base.builtins
+import rope.base.oi.soi
+import rope.base.pyscopes
+from rope.base import (pynamesdef as pynames, exceptions, ast,
+ astutils, pyobjects, fscommands, arguments, utils)
+from rope.base.pyobjects import *
+
+
+class PyFunction(pyobjects.PyFunction):
+    """Concrete function object built from a function's AST node"""
+
+    def __init__(self, pycore, ast_node, parent):
+        AbstractFunction.__init__(self)
+        PyDefinedObject.__init__(self, pycore, ast_node, parent)
+        self.arguments = self.ast_node.args
+        # Parameter and return types are inferred lazily and cached in
+        # module-level concluded-data cells.
+        self.parameter_pyobjects = pynames._Inferred(
+            self._infer_parameters, self.get_module()._get_concluded_data())
+        self.returned = pynames._Inferred(self._infer_returned)
+        self.parameter_pynames = None
+
+    def _create_structural_attributes(self):
+        # Functions expose no structural attributes
+        return {}
+
+    def _create_concluded_attributes(self):
+        return {}
+
+    def _create_scope(self):
+        return rope.base.pyscopes.FunctionScope(self.pycore, self,
+                                                _FunctionVisitor)
+
+    def _infer_parameters(self):
+        # NOTE(review): the local name `pyobjects` shadows the imported
+        # `pyobjects` module for the rest of this method body.
+        pyobjects = rope.base.oi.soi.infer_parameter_objects(self)
+        self._handle_special_args(pyobjects)
+        return pyobjects
+
+    def _infer_returned(self, args=None):
+        return rope.base.oi.soi.infer_returned_object(self, args)
+
+    def _handle_special_args(self, pyobjects):
+        # Append inferred objects for *args/**kwargs when present
+        if len(pyobjects) == len(self.arguments.args):
+            if self.arguments.vararg:
+                pyobjects.append(rope.base.builtins.get_list())
+            if self.arguments.kwarg:
+                pyobjects.append(rope.base.builtins.get_dict())
+
+    def _set_parameter_pyobjects(self, pyobjects):
+        if pyobjects is not None:
+            self._handle_special_args(pyobjects)
+        self.parameter_pyobjects.set(pyobjects)
+
+    def get_parameters(self):
+        # Built lazily and cached in `self.parameter_pynames`
+        if self.parameter_pynames is None:
+            result = {}
+            for index, name in enumerate(self.get_param_names()):
+                # TODO: handle tuple parameters
+                result[name] = pynames.ParameterName(self, index)
+            self.parameter_pynames = result
+        return self.parameter_pynames
+
+    def get_parameter(self, index):
+        # Returns None implicitly for out-of-range indices
+        if index < len(self.parameter_pyobjects.get()):
+            return self.parameter_pyobjects.get()[index]
+
+    def get_returned_object(self, args):
+        return self.returned.get(args)
+
+    def get_name(self):
+        return self.get_ast().name
+
+    def get_param_names(self, special_args=True):
+        # TODO: handle tuple parameters
+        result = [node.id for node in self.arguments.args
+                  if isinstance(node, ast.Name)]
+        if special_args:
+            if self.arguments.vararg:
+                result.append(self.arguments.vararg)
+            if self.arguments.kwarg:
+                result.append(self.arguments.kwarg)
+        return result
+
+    def get_kind(self):
+        """Get function type
+
+        It returns one of 'function', 'method', 'staticmethod' or
+        'classmethod' strs.
+
+        """
+        scope = self.parent.get_scope()
+        if isinstance(self.parent, PyClass):
+            # Evaluate each decorator; staticmethod/classmethod win
+            for decorator in self.decorators:
+                pyname = rope.base.evaluate.eval_node(scope, decorator)
+                if pyname == rope.base.builtins.builtins['staticmethod']:
+                    return 'staticmethod'
+                if pyname == rope.base.builtins.builtins['classmethod']:
+                    return 'classmethod'
+            return 'method'
+        return 'function'
+
+    @property
+    def decorators(self):
+        # `decorator_list` on newer ASTs, `decorators` on older ones
+        try:
+            return getattr(self.ast_node, 'decorator_list')
+        except AttributeError:
+            return getattr(self.ast_node, 'decorators', None)
+
+
+class PyClass(pyobjects.PyClass):
+    """Concrete class object built from a class's AST node"""
+
+    def __init__(self, pycore, ast_node, parent):
+        self.visitor_class = _ClassVisitor
+        AbstractClass.__init__(self)
+        PyDefinedObject.__init__(self, pycore, ast_node, parent)
+        self.parent = parent
+        # Superclasses are cached in a module-level concluded-data cell
+        self._superclasses = self.get_module()._get_concluded_data()
+
+    def get_superclasses(self):
+        if self._superclasses.get() is None:
+            self._superclasses.set(self._get_bases())
+        return self._superclasses.get()
+
+    def get_name(self):
+        return self.get_ast().name
+
+    def _create_concluded_attributes(self):
+        # Inherit attributes from base classes; iterate in reverse so
+        # that earlier bases win when names collide.
+        result = {}
+        for base in reversed(self.get_superclasses()):
+            result.update(base.get_attributes())
+        return result
+
+    def _get_bases(self):
+        # Evaluate each base expression in the enclosing scope and keep
+        # only those that resolve to class ('Type') objects.
+        result = []
+        for base_name in self.ast_node.bases:
+            base = rope.base.evaluate.eval_node(self.parent.get_scope(),
+                                                base_name)
+            if base is not None and \
+               base.get_object().get_type() == get_base_type('Type'):
+                result.append(base.get_object())
+        return result
+
+    def _create_scope(self):
+        return rope.base.pyscopes.ClassScope(self.pycore, self)
+
+
+class PyModule(pyobjects.PyModule):
+    """Concrete module object parsed from source code or a resource"""
+
+    def __init__(self, pycore, source=None,
+                 resource=None, force_errors=False):
+        # `force_errors` makes syntax errors propagate even when the
+        # project is configured to ignore them.
+        ignore = pycore.project.prefs.get('ignore_syntax_errors', False)
+        syntax_errors = force_errors or not ignore
+        self.has_errors = False
+        try:
+            source, node = self._init_source(pycore, source, resource)
+        except exceptions.ModuleSyntaxError:
+            self.has_errors = True
+            if syntax_errors:
+                raise
+            else:
+                # Errors ignored: fall back to an empty module
+                source = '\n'
+                node = ast.parse('\n')
+        self.source_code = source
+        self.star_imports = []
+        self.visitor_class = _GlobalVisitor
+        self.coding = fscommands.read_str_coding(self.source_code)
+        super(PyModule, self).__init__(pycore, node, resource)
+
+    def _init_source(self, pycore, source_code, resource):
+        # Read the source (from the resource when not given) and parse
+        # it, translating parse failures into ModuleSyntaxError.
+        filename = 'string'
+        if resource:
+            filename = resource.path
+        try:
+            if source_code is None:
+                source_bytes = resource.read_bytes()
+                source_code = fscommands.file_data_to_unicode(source_bytes)
+            else:
+                if isinstance(source_code, unicode):
+                    source_bytes = fscommands.unicode_to_file_data(source_code)
+                else:
+                    source_bytes = source_code
+            ast_node = ast.parse(source_bytes, filename=filename)
+        except SyntaxError, e:
+            raise exceptions.ModuleSyntaxError(filename, e.lineno, e.msg)
+        except UnicodeDecodeError, e:
+            raise exceptions.ModuleSyntaxError(filename, 1, '%s' % (e.reason))
+        return source_code, ast_node
+
+    @utils.prevent_recursion(lambda: {})
+    def _create_concluded_attributes(self):
+        # Names brought in by `from x import *` statements
+        result = {}
+        for star_import in self.star_imports:
+            result.update(star_import.get_names())
+        return result
+
+    def _create_scope(self):
+        return rope.base.pyscopes.GlobalScope(self.pycore, self)
+
+    @property
+    @utils.saveit
+    def lines(self):
+        """A `SourceLinesAdapter`"""
+        return rope.base.codeanalyze.SourceLinesAdapter(self.source_code)
+
+    @property
+    @utils.saveit
+    def logical_lines(self):
+        """A `LogicalLinesFinder`"""
+        return rope.base.codeanalyze.CachingLogicalLineFinder(self.lines)
+
+
+class PyPackage(pyobjects.PyPackage):
+    """Concrete package object; its AST comes from `__init__.py` if any"""
+
+    def __init__(self, pycore, resource=None, force_errors=False):
+        self.resource = resource
+        init_dot_py = self._get_init_dot_py()
+        if init_dot_py is not None:
+            ast_node = pycore.resource_to_pyobject(
+                init_dot_py, force_errors=force_errors).get_ast()
+        else:
+            ast_node = ast.parse('\n')
+        super(PyPackage, self).__init__(pycore, ast_node, resource)
+
+    def _create_structural_attributes(self):
+        # Builtin (extension) submodules first, then child files/folders
+        result = {}
+        modname = self.pycore.modname(self.resource)
+        extension_submodules = self.pycore._builtin_submodules(modname)
+        for name, module in extension_submodules.iteritems():
+            result[name] = rope.base.builtins.BuiltinName(module)
+        if self.resource is None:
+            return result
+        for name, resource in self._get_child_resources().items():
+            result[name] = pynames.ImportedModule(self, resource=resource)
+        return result
+
+    def _create_concluded_attributes(self):
+        # A package also exposes the attributes of its `__init__.py`
+        result = {}
+        init_dot_py = self._get_init_dot_py()
+        if init_dot_py:
+            init_object = self.pycore.resource_to_pyobject(init_dot_py)
+            result.update(init_object.get_attributes())
+        return result
+
+    def _get_child_resources(self):
+        # Subfolders and `*.py` files (except `__init__.py`) become
+        # candidate submodules, keyed by their module name.
+        result = {}
+        for child in self.resource.get_children():
+            if child.is_folder():
+                result[child.name] = child
+            elif child.name.endswith('.py') and \
+                 child.name != '__init__.py':
+                name = child.name[:-3]
+                result[name] = child
+        return result
+
+    def _get_init_dot_py(self):
+        if self.resource is not None and self.resource.has_child('__init__.py'):
+            return self.resource.get_child('__init__.py')
+        else:
+            return None
+
+    def _create_scope(self):
+        return self.get_module().get_scope()
+
+    def get_module(self):
+        # A package delegates to its `__init__.py` module when present
+        init_dot_py = self._get_init_dot_py()
+        if init_dot_py:
+            return self.pycore.resource_to_pyobject(init_dot_py)
+        return self
+
+
+class _AssignVisitor(object):
+    """AST visitor that records the names bound by assignment targets"""
+
+    def __init__(self, scope_visitor):
+        self.scope_visitor = scope_visitor
+        # The assigned value's AST, set by `_Assign` before walking targets
+        self.assigned_ast = None
+
+    def _Assign(self, node):
+        self.assigned_ast = node.value
+        for child_node in node.targets:
+            ast.walk(child_node, self)
+
+    def _assigned(self, name, assignment=None):
+        self.scope_visitor._assigned(name, assignment)
+
+    def _Name(self, node):
+        assignment = None
+        if self.assigned_ast is not None:
+            assignment = pynames.AssignmentValue(self.assigned_ast)
+        self._assigned(node.id, assignment)
+
+    def _Tuple(self, node):
+        # Tuple unpacking: record each name with its nesting levels
+        names = astutils.get_name_levels(node)
+        for name, levels in names:
+            assignment = None
+            if self.assigned_ast is not None:
+                assignment = pynames.AssignmentValue(self.assigned_ast, levels)
+            self._assigned(name, assignment)
+
+    def _Attribute(self, node):
+        # Attribute/subscript/slice targets do not bind scope names here
+        pass
+
+    def _Subscript(self, node):
+        pass
+
+    def _Slice(self, node):
+        pass
+
+
+class _ScopeVisitor(object):
+    """Collects the names and defined objects of one scope's body"""
+
+    def __init__(self, pycore, owner_object):
+        self.pycore = pycore
+        self.owner_object = owner_object
+        # name -> PyName mapping and the list of defined pyobjects
+        self.names = {}
+        self.defineds = []
+
+    def get_module(self):
+        if self.owner_object is not None:
+            return self.owner_object.get_module()
+        else:
+            return None
+
+    def _ClassDef(self, node):
+        pyclass = PyClass(self.pycore, node, self.owner_object)
+        self.names[node.name] = pynames.DefinedName(pyclass)
+        self.defineds.append(pyclass)
+
+    def _FunctionDef(self, node):
+        pyfunction = PyFunction(self.pycore, node, self.owner_object)
+        for decorator in pyfunction.decorators:
+            # `@property`-decorated methods inside a class body are
+            # recorded as evaluated names instead of plain functions.
+            if isinstance(decorator, ast.Name) and decorator.id == 'property':
+                if isinstance(self, _ClassVisitor):
+                    type_ = rope.base.builtins.Property(pyfunction)
+                    arg = pynames.UnboundName(PyObject(self.owner_object))
+                    def _eval(type_=type_, arg=arg):
+                        return type_.get_property_object(
+                            arguments.ObjectArguments([arg]))
+                    self.names[node.name] = pynames.EvaluatedName(
+                        _eval, module=self.get_module(), lineno=node.lineno)
+                    break
+        else:
+            # for/else: no property decorator found
+            self.names[node.name] = pynames.DefinedName(pyfunction)
+        self.defineds.append(pyfunction)
+
+    def _Assign(self, node):
+        ast.walk(node, _AssignVisitor(self))
+
+    def _AugAssign(self, node):
+        pass
+
+    def _For(self, node):
+        # The loop variable gets the type of `iter.__iter__().next()`.
+        # NOTE(review): `names` is unused and `_update_evaluated` always
+        # returns an empty dict.
+        names = self._update_evaluated(node.target, node.iter,
+                                       '.__iter__().next()')
+        for child in node.body + node.orelse:
+            ast.walk(child, self)
+
+    def _assigned(self, name, assignment):
+        # Create or extend the AssignedName for `name`
+        pyname = self.names.get(name, None)
+        if pyname is None:
+            pyname = pynames.AssignedName(module=self.get_module())
+        if isinstance(pyname, pynames.AssignedName):
+            if assignment is not None:
+                pyname.assignments.append(assignment)
+        self.names[name] = pyname
+
+    def _update_evaluated(self, targets, assigned,
+                          evaluation= '', eval_type=False):
+        result = {}
+        names = astutils.get_name_levels(targets)
+        for name, levels in names:
+            assignment = pynames.AssignmentValue(assigned, levels,
+                                                 evaluation, eval_type)
+            self._assigned(name, assignment)
+        return result
+
+    def _With(self, node):
+        if node.optional_vars:
+            # `with x as y`: y gets the type of `x.__enter__()`
+            self._update_evaluated(node.optional_vars,
+                                   node.context_expr, '.__enter__()')
+        for child in node.body:
+            ast.walk(child, self)
+
+    def _excepthandler(self, node):
+        # `except E, name`: bind `name` using the exception type (first
+        # element when a tuple of types is given).
+        if node.name is not None and isinstance(node.name, ast.Name):
+            type_node = node.type
+            if isinstance(node.type, ast.Tuple) and type_node.elts:
+                type_node = type_node.elts[0]
+            self._update_evaluated(node.name, type_node, eval_type=True)
+        for child in node.body:
+            ast.walk(child, self)
+
+    def _ExceptHandler(self, node):
+        self._excepthandler(node)
+
+    def _Import(self, node):
+        for import_pair in node.names:
+            module_name = import_pair.name
+            alias = import_pair.asname
+            first_package = module_name.split('.')[0]
+            if alias is not None:
+                imported = pynames.ImportedModule(self.get_module(),
+                                                  module_name)
+                if not self._is_ignored_import(imported):
+                    self.names[alias] = imported
+            else:
+                # `import a.b` binds the top-level package name `a`
+                imported = pynames.ImportedModule(self.get_module(),
+                                                  first_package)
+                if not self._is_ignored_import(imported):
+                    self.names[first_package] = imported
+
+    def _ImportFrom(self, node):
+        level = 0
+        if node.level:
+            level = node.level
+        imported_module = pynames.ImportedModule(self.get_module(),
+                                                 node.module, level)
+        if self._is_ignored_import(imported_module):
+            return
+        if len(node.names) == 1 and node.names[0].name == '*':
+            # Star imports are recorded on the module for later expansion
+            if isinstance(self.owner_object, PyModule):
+                self.owner_object.star_imports.append(
+                    StarImport(imported_module))
+        else:
+            for imported_name in node.names:
+                imported = imported_name.name
+                alias = imported_name.asname
+                if alias is not None:
+                    imported = alias
+                self.names[imported] = pynames.ImportedName(imported_module,
+                                                            imported_name.name)
+
+    def _is_ignored_import(self, imported_module):
+        # Honors the 'ignore_bad_imports' project preference
+        if not self.pycore.project.prefs.get('ignore_bad_imports', False):
+            return False
+        return not isinstance(imported_module.get_object(), AbstractModule)
+
+    def _Global(self, node):
+        # Bind global names to the module-level pyname when available
+        module = self.get_module()
+        for name in node.names:
+            if module is not None:
+                try:
+                    pyname = module[name]
+                except exceptions.AttributeNotFoundError:
+                    pyname = pynames.AssignedName(node.lineno)
+                self.names[name] = pyname
+
+
+class _GlobalVisitor(_ScopeVisitor):
+    """Scope visitor for module (global) bodies"""
+
+    def __init__(self, pycore, owner_object):
+        super(_GlobalVisitor, self).__init__(pycore, owner_object)
+
+
+class _ClassVisitor(_ScopeVisitor):
+    """Scope visitor for class bodies"""
+
+    def __init__(self, pycore, owner_object):
+        super(_ClassVisitor, self).__init__(pycore, owner_object)
+
+    def _FunctionDef(self, node):
+        _ScopeVisitor._FunctionDef(self, node)
+        # Also scan the method body for assignments on its first
+        # parameter (usually `self`) so those attributes get recorded
+        # among the class's names.
+        if len(node.args.args) > 0:
+            first = node.args.args[0]
+            if isinstance(first, ast.Name):
+                new_visitor = _ClassInitVisitor(self, first.id)
+                for child in ast.get_child_nodes(node):
+                    ast.walk(child, new_visitor)
+
+
+class _FunctionVisitor(_ScopeVisitor):
+    """Scope visitor for function bodies; tracks returns and yields"""
+
+    def __init__(self, pycore, owner_object):
+        super(_FunctionVisitor, self).__init__(pycore, owner_object)
+        self.returned_asts = []
+        self.generator = False
+
+    def _Return(self, node):
+        if node.value is not None:
+            self.returned_asts.append(node.value)
+
+    def _Yield(self, node):
+        if node.value is not None:
+            self.returned_asts.append(node.value)
+        # Any yield marks the function as a generator
+        self.generator = True
+
+
+class _ClassInitVisitor(_AssignVisitor):
+    """Records `self.attr = ...` style assignments inside a method"""
+
+    def __init__(self, scope_visitor, self_name):
+        super(_ClassInitVisitor, self).__init__(scope_visitor)
+        # Name of the method's first parameter (usually 'self')
+        self.self_name = self_name
+
+    def _Attribute(self, node):
+        # Only attribute *stores* on the `self` name are interesting
+        if not isinstance(node.ctx, ast.Store):
+            return
+        if isinstance(node.value, ast.Name) and \
+           node.value.id == self.self_name:
+            if node.attr not in self.scope_visitor.names:
+                self.scope_visitor.names[node.attr] = pynames.AssignedName(
+                    lineno=node.lineno, module=self.scope_visitor.get_module())
+            if self.assigned_ast is not None:
+                pyname = self.scope_visitor.names[node.attr]
+                if isinstance(pyname, pynames.AssignedName):
+                    pyname.assignments.append(
+                        pynames.AssignmentValue(self.assigned_ast))
+
+    def _Tuple(self, node):
+        if not isinstance(node.ctx, ast.Store):
+            return
+        for child in ast.get_child_nodes(node):
+            ast.walk(child, self)
+
+    def _Name(self, node):
+        # Plain names are locals of the method, not attributes
+        pass
+
+    def _FunctionDef(self, node):
+        # Do not descend into nested scopes
+        pass
+
+    def _ClassDef(self, node):
+        pass
+
+    def _For(self, node):
+        pass
+
+    def _With(self, node):
+        pass
+
+
+class StarImport(object):
+    """Expands the names brought in by a `from module import *`"""
+
+    def __init__(self, imported_module):
+        self.imported_module = imported_module
+
+    def get_names(self):
+        # Underscore-prefixed names are skipped; note this does not
+        # consult the module's `__all__`.
+        result = {}
+        imported = self.imported_module.get_object()
+        for name in imported:
+            if not name.startswith('_'):
+                result[name] = pynames.ImportedName(self.imported_module, name)
+        return result
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/pyscopes.py b/.vim/bundle/python-mode/pylibs/rope/base/pyscopes.py
@@ -0,0 +1,313 @@
+import rope.base.builtins
+import rope.base.codeanalyze
+import rope.base.pynames
+from rope.base import ast, exceptions, utils
+
+
+class Scope(object):
+    """Base class for rope scopes (module, class, function)"""
+
+    def __init__(self, pycore, pyobject, parent_scope):
+        self.pycore = pycore
+        self.pyobject = pyobject
+        self.parent = parent_scope
+
+    def get_names(self):
+        """Return the names defined or imported in this scope"""
+        return self.pyobject.get_attributes()
+
+    def get_defined_names(self):
+        """Return the names defined in this scope"""
+        return self.pyobject._get_structural_attributes()
+
+    def get_name(self, name):
+        """Return name `PyName` defined in this scope"""
+        if name not in self.get_names():
+            raise exceptions.NameNotFoundError('name %s not found' % name)
+        return self.get_names()[name]
+
+    def __getitem__(self, key):
+        """The same as ``get_name(key)``"""
+        return self.get_name(key)
+
+    def __contains__(self, key):
+        """The same as ``key in self.get_names()``"""
+        return key in self.get_names()
+
+    @utils.saveit
+    def get_scopes(self):
+        """Return the subscopes of this scope
+
+        The returned scopes should be sorted by the order they appear.
+        """
+        return self._create_scopes()
+
+    def lookup(self, name):
+        # Look in this scope first, then walk up through the parents
+        if name in self.get_names():
+            return self.get_names()[name]
+        if self.parent is not None:
+            return self.parent._propagated_lookup(name)
+        return None
+
+    def get_propagated_names(self):
+        """Return the visible names of this scope
+
+        Return the names defined in this scope that are visible from
+        scopes containing this scope. This method returns the same
+        dictionary returned by `get_names()` except for `ClassScope`
+        which returns an empty dict.
+        """
+        return self.get_names()
+
+    def _propagated_lookup(self, name):
+        if name in self.get_propagated_names():
+            return self.get_propagated_names()[name]
+        if self.parent is not None:
+            return self.parent._propagated_lookup(name)
+        return None
+
+    def _create_scopes(self):
+        return [pydefined.get_scope()
+                for pydefined in self.pyobject._get_defined_objects()]
+
+    def _get_global_scope(self):
+        # Walk up to the root (module) scope
+        current = self
+        while current.parent is not None:
+            current = current.parent
+        return current
+
+    def get_start(self):
+        return self.pyobject.get_ast().lineno
+
+    def get_body_start(self):
+        # First line of the body, falling back to the definition line
+        body = self.pyobject.get_ast().body
+        if body:
+            return body[0].lineno
+        return self.get_start()
+
+    def get_end(self):
+        pymodule = self._get_global_scope().pyobject
+        return pymodule.logical_lines.logical_line_in(self.logical_end)[1]
+
+    @utils.saveit
+    def get_logical_end(self):
+        global_scope = self._get_global_scope()
+        return global_scope._scope_finder.find_scope_end(self)
+
+    start = property(get_start)
+    end = property(get_end)
+    logical_end = property(get_logical_end)
+
+    def get_kind(self):
+        # Overridden by subclasses; base scopes have no kind
+        pass
+
+
+class GlobalScope(Scope):
+    """The module-level scope"""
+
+    def __init__(self, pycore, module):
+        super(GlobalScope, self).__init__(pycore, module, None)
+        # Cached names live in a module concluded-data cell
+        self.names = module._get_concluded_data()
+
+    def get_start(self):
+        return 1
+
+    def get_kind(self):
+        return 'Module'
+
+    def get_name(self, name):
+        # Module attributes first, then builtins
+        try:
+            return self.pyobject[name]
+        except exceptions.AttributeNotFoundError:
+            if name in self.builtin_names:
+                return self.builtin_names[name]
+            raise exceptions.NameNotFoundError('name %s not found' % name)
+
+    def get_names(self):
+        # Cached union of builtin names and module names
+        if self.names.get() is None:
+            result = dict(self.builtin_names)
+            result.update(super(GlobalScope, self).get_names())
+            self.names.set(result)
+        return self.names.get()
+
+    def get_inner_scope_for_line(self, lineno, indents=None):
+        return self._scope_finder.get_holding_scope(self, lineno, indents)
+
+    def get_inner_scope_for_offset(self, offset):
+        return self._scope_finder.get_holding_scope_for_offset(self, offset)
+
+    @property
+    @utils.saveit
+    def _scope_finder(self):
+        return _HoldingScopeFinder(self.pyobject)
+
+    @property
+    def builtin_names(self):
+        return rope.base.builtins.builtins.get_attributes()
+
+
+class FunctionScope(Scope):
+    """Scope of a function; names are computed lazily by a visitor"""
+
+    def __init__(self, pycore, pyobject, visitor):
+        super(FunctionScope, self).__init__(pycore, pyobject,
+                                            pyobject.parent.get_scope())
+        self.names = None
+        self.returned_asts = None
+        self.is_generator = None
+        self.defineds = None
+        self.visitor = visitor
+
+    def _get_names(self):
+        if self.names is None:
+            self._visit_function()
+        return self.names
+
+    def _visit_function(self):
+        # One pass fills names, returned ASTs, generator flag, defineds
+        if self.names is None:
+            new_visitor = self.visitor(self.pycore, self.pyobject)
+            for n in ast.get_child_nodes(self.pyobject.get_ast()):
+                ast.walk(n, new_visitor)
+            self.names = new_visitor.names
+            self.names.update(self.pyobject.get_parameters())
+            self.returned_asts = new_visitor.returned_asts
+            self.is_generator = new_visitor.generator
+            self.defineds = new_visitor.defineds
+
+    def _get_returned_asts(self):
+        if self.names is None:
+            self._visit_function()
+        return self.returned_asts
+
+    def _is_generator(self):
+        if self.is_generator is None:
+            self._get_returned_asts()
+        return self.is_generator
+
+    def get_names(self):
+        return self._get_names()
+
+    def _create_scopes(self):
+        if self.defineds is None:
+            self._visit_function()
+        return [pydefined.get_scope() for pydefined in self.defineds]
+
+    def get_kind(self):
+        return 'Function'
+
+    def invalidate_data(self):
+        # Drop cached inference results for assigned/evaluated names
+        for pyname in self.get_names().values():
+            if isinstance(pyname, (rope.base.pynames.AssignedName,
+                                   rope.base.pynames.EvaluatedName)):
+                pyname.invalidate()
+
+
+class ClassScope(Scope):
+    """Scope of a class body"""
+
+    def __init__(self, pycore, pyobject):
+        super(ClassScope, self).__init__(pycore, pyobject,
+                                         pyobject.parent.get_scope())
+
+    def get_kind(self):
+        return 'Class'
+
+    def get_propagated_names(self):
+        # Class-local names are not visible from nested scopes
+        return {}
+
+
+class _HoldingScopeFinder(object):
+    """Maps line numbers/offsets in a module to their innermost scope"""
+
+    def __init__(self, pymodule):
+        self.pymodule = pymodule
+
+    def get_indents(self, lineno):
+        return rope.base.codeanalyze.count_line_indents(
+            self.lines.get_line(lineno))
+
+    def _get_scope_indents(self, scope):
+        return self.get_indents(scope.get_start())
+
+    def get_holding_scope(self, module_scope, lineno, line_indents=None):
+        # Descend from the module scope into ever-narrower subscopes that
+        # contain `lineno`, stopping when indentation rules them out.
+        if line_indents is None:
+            line_indents = self.get_indents(lineno)
+        current_scope = module_scope
+        new_scope = current_scope
+        while new_scope is not None and \
+              (new_scope.get_kind() == 'Module' or
+               self._get_scope_indents(new_scope) <= line_indents):
+            current_scope = new_scope
+            if current_scope.get_start() == lineno and \
+               current_scope.get_kind() != 'Module':
+                return current_scope
+            new_scope = None
+            for scope in current_scope.get_scopes():
+                if scope.get_start() <= lineno:
+                    if lineno <= scope.get_end():
+                        new_scope = scope
+                        break
+                else:
+                    break
+        return current_scope
+
+    def _is_empty_line(self, lineno):
+        # Blank lines and comment-only lines count as empty
+        line = self.lines.get_line(lineno)
+        return line.strip() == '' or line.lstrip().startswith('#')
+
+    def _get_body_indents(self, scope):
+        return self.get_indents(scope.get_body_start())
+
+    def get_holding_scope_for_offset(self, scope, offset):
+        return self.get_holding_scope(
+            scope, self.lines.get_line_number(offset))
+
+    def find_scope_end(self, scope):
+        if not scope.parent:
+            # The global scope ends at the last line of the module
+            return self.lines.length()
+        end = scope.pyobject.get_ast().body[-1].lineno
+        scope_start = self.pymodule.logical_lines.logical_line_in(scope.start)
+        if scope_start[1] >= end:
+            # handling one-liners
+            body_indents = self._get_scope_indents(scope) + 4
+        else:
+            body_indents = self._get_body_indents(scope)
+        # Extend `end` past trailing lines still indented as deep as the
+        # body; stop at the first shallower non-empty line.
+        for l in self.logical_lines.generate_starts(
+                min(end + 1, self.lines.length()), self.lines.length() + 1):
+            if not self._is_empty_line(l):
+                if self.get_indents(l) < body_indents:
+                    return end
+                else:
+                    end = l
+        return end
+
+    @property
+    def lines(self):
+        return self.pymodule.lines
+
+    @property
+    def code(self):
+        return self.pymodule.source_code
+
+    @property
+    def logical_lines(self):
+        return self.pymodule.logical_lines
+
+class TemporaryScope(Scope):
+    """Currently used for list comprehensions and generator expressions
+
+    These scopes do not appear in the `get_scopes()` method of their
+    parent scopes.
+    """
+
+    def __init__(self, pycore, parent_scope, names):
+        super(TemporaryScope, self).__init__(
+            pycore, parent_scope.pyobject, parent_scope)
+        # Fixed name mapping supplied by the caller
+        self.names = names
+
+    def get_names(self):
+        return self.names
+
+    def get_defined_names(self):
+        return self.names
+
+    def _create_scopes(self):
+        # Temporary scopes never have subscopes
+        return []
+
+    def get_kind(self):
+        return 'Temporary'
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/resourceobserver.py b/.vim/bundle/python-mode/pylibs/rope/base/resourceobserver.py
@@ -0,0 +1,271 @@
+import os
+
+
+class ResourceObserver(object):
+    """Provides the interface for observing resources
+
+    `ResourceObserver`\s can be registered using `Project.
+    add_observer()`. But most of the time `FilteredResourceObserver`
+    should be used. `ResourceObserver`\s report all changes passed
+    to them and they don't report changes to all resources. For
+    example if a folder is removed, it only calls `removed()` for that
+    folder and not its contents. You can use
+    `FilteredResourceObserver` if you are interested in changes only
+    to a list of resources. And you want changes to be reported on
+    individual resources.
+
+    """
+
+    def __init__(self, changed=None, moved=None, created=None,
+                 removed=None, validate=None):
+        # Each callback may be None, in which case the corresponding
+        # event is simply ignored.
+        self.changed = changed
+        self.moved = moved
+        self.created = created
+        self.removed = removed
+        self._validate = validate
+
+    def resource_changed(self, resource):
+        """It is called when the resource changes"""
+        if self.changed is not None:
+            self.changed(resource)
+
+    def resource_moved(self, resource, new_resource):
+        """It is called when a resource is moved"""
+        if self.moved is not None:
+            self.moved(resource, new_resource)
+
+    def resource_created(self, resource):
+        """Is called when a new resource is created"""
+        if self.created is not None:
+            self.created(resource)
+
+    def resource_removed(self, resource):
+        """Is called when a resource is removed"""
+        if self.removed is not None:
+            self.removed(resource)
+
+    def validate(self, resource):
+        """Validate the existence of this resource and its children.
+
+        This function is called when rope needs to update its resource
+        cache about the files that might have been changed or removed
+        by other processes.
+
+        """
+        if self._validate is not None:
+            self._validate(resource)
+
+
+class FilteredResourceObserver(object):
+    """A useful decorator for `ResourceObserver`
+
+    Most resource observers have a list of resources and are
+    interested only in changes to those files.  This class satisfies
+    this need.  It dispatches resource changed and removed messages.
+    It performs these tasks:
+
+    * Changes to files and folders are analyzed to check whether any
+      of the interesting resources are changed or not.  If they are,
+      it reports these changes to `resource_observer` passed to the
+      constructor.
+    * When a resource is removed it checks whether any of the
+      interesting resources are contained in that folder and reports
+      them to `resource_observer`.
+    * When validating a folder it validates all of the interesting
+      files in that folder.
+
+    Since most resource observers are interested in a list of
+    resources that change over time, `add_resource` and
+    `remove_resource` might be useful.
+
+    """
+
+    def __init__(self, resource_observer, initial_resources=None,
+                 timekeeper=None):
+        self.observer = resource_observer
+        # Maps each watched resource to its last-seen change indicator
+        # (None when the resource did not exist at registration time).
+        self.resources = {}
+        if timekeeper is not None:
+            self.timekeeper = timekeeper
+        else:
+            self.timekeeper = ChangeIndicator()
+        if initial_resources is not None:
+            for resource in initial_resources:
+                self.add_resource(resource)
+
+    def add_resource(self, resource):
+        """Add a resource to the list of interesting resources"""
+        if resource.exists():
+            self.resources[resource] = self.timekeeper.get_indicator(resource)
+        else:
+            self.resources[resource] = None
+
+    def remove_resource(self, resource):
+        """Remove a resource from the list of interesting resources"""
+        if resource in self.resources:
+            del self.resources[resource]
+
+    def clear_resources(self):
+        """Removes all registered resources"""
+        self.resources.clear()
+
+    def resource_changed(self, resource):
+        changes = _Changes()
+        self._update_changes_caused_by_changed(changes, resource)
+        self._perform_changes(changes)
+
+    def _update_changes_caused_by_changed(self, changes, changed):
+        # A change to a watched resource, or to a child of a watched
+        # folder, is recorded.
+        if changed in self.resources:
+            changes.add_changed(changed)
+        if self._is_parent_changed(changed):
+            changes.add_changed(changed.parent)
+
+    def _update_changes_caused_by_moved(self, changes, resource,
+                                        new_resource=None):
+        # `new_resource is None` means the resource was removed; moving
+        # a folder cascades to all watched resources inside it.
+        if resource in self.resources:
+            changes.add_removed(resource, new_resource)
+        if new_resource in self.resources:
+            changes.add_created(new_resource)
+        if resource.is_folder():
+            for file in list(self.resources):
+                if resource.contains(file):
+                    new_file = self._calculate_new_resource(
+                        resource, new_resource, file)
+                    changes.add_removed(file, new_file)
+        if self._is_parent_changed(resource):
+            changes.add_changed(resource.parent)
+        if new_resource is not None:
+            if self._is_parent_changed(new_resource):
+                changes.add_changed(new_resource.parent)
+
+    def _is_parent_changed(self, child):
+        return child.parent in self.resources
+
+    def resource_moved(self, resource, new_resource):
+        changes = _Changes()
+        self._update_changes_caused_by_moved(changes, resource, new_resource)
+        self._perform_changes(changes)
+
+    def resource_created(self, resource):
+        changes = _Changes()
+        self._update_changes_caused_by_created(changes, resource)
+        self._perform_changes(changes)
+
+    def _update_changes_caused_by_created(self, changes, resource):
+        if resource in self.resources:
+            changes.add_created(resource)
+        if self._is_parent_changed(resource):
+            changes.add_changed(resource.parent)
+
+    def resource_removed(self, resource):
+        changes = _Changes()
+        self._update_changes_caused_by_moved(changes, resource)
+        self._perform_changes(changes)
+
+    def _perform_changes(self, changes):
+        # Dispatch collected events to the wrapped observer and refresh
+        # the stored change indicators.
+        for resource in changes.changes:
+            self.observer.resource_changed(resource)
+            self.resources[resource] = self.timekeeper.get_indicator(resource)
+        for resource, new_resource in changes.moves.items():
+            self.resources[resource] = None
+            if new_resource is not None:
+                self.observer.resource_moved(resource, new_resource)
+            else:
+                self.observer.resource_removed(resource)
+        for resource in changes.creations:
+            self.observer.resource_created(resource)
+            self.resources[resource] = self.timekeeper.get_indicator(resource)
+
+    def validate(self, resource):
+        # Re-check the watched resources under `resource` for moves,
+        # changes and creations done by other processes.
+        changes = _Changes()
+        for file in self._search_resource_moves(resource):
+            if file in self.resources:
+                self._update_changes_caused_by_moved(changes, file)
+        for file in self._search_resource_changes(resource):
+            if file in self.resources:
+                self._update_changes_caused_by_changed(changes, file)
+        for file in self._search_resource_creations(resource):
+            if file in self.resources:
+                changes.add_created(file)
+        self._perform_changes(changes)
+
+    def _search_resource_creations(self, resource):
+        # A watched resource that now exists but had no indicator was
+        # created since registration.
+        creations = set()
+        if resource in self.resources and resource.exists() and \
+                self.resources[resource] is None:
+            creations.add(resource)
+        if resource.is_folder():
+            for file in self.resources:
+                if file.exists() and resource.contains(file) and \
+                        self.resources[file] is None:
+                    creations.add(file)
+        return creations
+
+    def _search_resource_moves(self, resource):
+        all_moved = set()
+        if resource in self.resources and not resource.exists():
+            all_moved.add(resource)
+        if resource.is_folder():
+            for file in self.resources:
+                if resource.contains(file):
+                    if not file.exists():
+                        all_moved.add(file)
+        # Keep only the top-most moved folders; their contents are
+        # implied and dropped here.
+        moved = set(all_moved)
+        for folder in [file for file in all_moved if file.is_folder()]:
+            if folder in moved:
+                for file in list(moved):
+                    if folder.contains(file):
+                        moved.remove(file)
+        return moved
+
+    def _search_resource_changes(self, resource):
+        changed = set()
+        if resource in self.resources and self._is_changed(resource):
+            changed.add(resource)
+        if resource.is_folder():
+            for file in self.resources:
+                if file.exists() and resource.contains(file):
+                    if self._is_changed(file):
+                        changed.add(file)
+        return changed
+
+    def _is_changed(self, resource):
+        # Compare the stored indicator with a freshly computed one
+        if self.resources[resource] is None:
+            return False
+        return self.resources[resource] != self.timekeeper.get_indicator(resource)
+
+    def _calculate_new_resource(self, main, new_main, resource):
+        # Map a resource inside a moved folder to its new location
+        if new_main is None:
+            return None
+        diff = resource.path[len(main.path):]
+        return resource.project.get_resource(new_main.path + diff)
+
+
+class ChangeIndicator(object):
+    """Default timekeeper: change indicators based on mtime and size"""
+
+    def get_indicator(self, resource):
+        """Return the modification time and size of a `Resource`."""
+        path = resource.real_path
+        # on dos, mtime does not change for a folder when files are added
+        if os.name != 'posix' and os.path.isdir(path):
+            return (os.path.getmtime(path),
+                    len(os.listdir(path)),
+                    os.path.getsize(path))
+        return (os.path.getmtime(path),
+                os.path.getsize(path))
+
+
+class _Changes(object):
+    """Accumulator of changed/created resources and moves (removals)"""
+
+    def __init__(self):
+        self.changes = set()
+        self.creations = set()
+        # Maps a moved/removed resource to its new resource (or None)
+        self.moves = {}
+
+    def add_changed(self, resource):
+        self.changes.add(resource)
+
+    def add_removed(self, resource, new_resource=None):
+        self.moves[resource] = new_resource
+
+    def add_created(self, resource):
+        self.creations.add(resource)
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/resources.py b/.vim/bundle/python-mode/pylibs/rope/base/resources.py
@@ -0,0 +1,211 @@
+import os
+import re
+
+import rope.base.change
+import rope.base.fscommands
+from rope.base import exceptions
+
+
class Resource(object):
    """Represents files and folders in a project"""

    def __init__(self, project, path):
        self.project = project
        self._path = path

    def move(self, new_location):
        """Move resource to `new_location`"""
        change = rope.base.change.MoveResource(self, new_location)
        self._perform_change(
            change, 'Moving <%s> to <%s>' % (self.path, new_location))

    def remove(self):
        """Remove resource from the project"""
        self._perform_change(rope.base.change.RemoveResource(self),
                             'Removing <%s>' % self.path)

    def is_folder(self):
        """Return true if the resource is a folder"""

    def create(self):
        """Create this resource"""

    def exists(self):
        """Return whether this resource exists on the file system."""
        return os.path.exists(self.real_path)

    @property
    def parent(self):
        """Return the parent folder of this resource."""
        if '/' in self.path:
            parent_path = self.path.rsplit('/', 1)[0]
        else:
            # A top-level resource's parent is the project root folder.
            parent_path = ''
        return self.project.get_folder(parent_path)

    @property
    def path(self):
        """Return the path of this resource relative to the project root

        The path is the list of parent directories separated by '/' followed
        by the resource name.
        """
        return self._path

    @property
    def name(self):
        """Return the name of this resource"""
        return self.path.rsplit('/', 1)[-1]

    @property
    def real_path(self):
        """Return the file system path of this resource"""
        return self.project._get_resource_path(self.path)

    def __eq__(self, obj):
        # Resources are equal when they are of the same concrete kind
        # (File vs Folder) and share a project-relative path.
        return self.__class__ == obj.__class__ and self.path == obj.path

    def __ne__(self, obj):
        return not self.__eq__(obj)

    def __hash__(self):
        return hash(self.path)

    def _perform_change(self, change_, description):
        # Wrap the single change in a ChangeSet so the project records
        # it (e.g. for undo) under `description`.
        changeset = rope.base.change.ChangeSet(description)
        changeset.add_change(change_)
        self.project.do(changeset)
+
+
class File(Resource):
    """Represents a file"""

    def __init__(self, project, name):
        super(File, self).__init__(project, name)

    def read(self):
        """Return the file contents decoded to unicode.

        Raises `exceptions.ModuleDecodeError` when the data cannot be
        decoded.
        """
        data = self.read_bytes()
        try:
            return rope.base.fscommands.file_data_to_unicode(data)
        # `except E as e` (valid since Python 2.6) replaces the
        # Python-2-only `except E, e` form, which is a syntax error on
        # Python 3.
        except UnicodeDecodeError as e:
            raise exceptions.ModuleDecodeError(self.path, e.reason)

    def read_bytes(self):
        """Return the raw bytes of the file."""
        # Close the handle deterministically instead of leaking it
        # until garbage collection.
        file_ = open(self.real_path, 'rb')
        try:
            return file_.read()
        finally:
            file_.close()

    def write(self, contents):
        """Write `contents` to the file, skipping no-op writes."""
        try:
            # Avoid recording a change when nothing differs.
            if contents == self.read():
                return
        except IOError:
            # File missing or unreadable: fall through and write it.
            pass
        self._perform_change(rope.base.change.ChangeContents(self, contents),
                             'Writing file <%s>' % self.path)

    def is_folder(self):
        return False

    def create(self):
        self.parent.create_file(self.name)
+
+
class Folder(Resource):
    """Represents a folder"""

    def __init__(self, project, name):
        super(Folder, self).__init__(project, name)

    def is_folder(self):
        return True

    def get_children(self):
        """Return the children of this folder"""
        result = []
        for name in os.listdir(self.real_path):
            try:
                child = self.get_child(name)
            except exceptions.ResourceNotFoundError:
                continue
            if not self.project.is_ignored(child):
                # Reuse the already-resolved child instead of calling
                # get_child(name) a second time.
                result.append(child)
        return result

    def create_file(self, file_name):
        """Create and return a child file named `file_name`."""
        self._perform_change(
            rope.base.change.CreateFile(self, file_name),
            'Creating file <%s>' % self._get_child_path(file_name))
        return self.get_child(file_name)

    def create_folder(self, folder_name):
        """Create and return a child folder named `folder_name`."""
        self._perform_change(
            rope.base.change.CreateFolder(self, folder_name),
            'Creating folder <%s>' % self._get_child_path(folder_name))
        return self.get_child(folder_name)

    def _get_child_path(self, name):
        # The project root has path '' and must not produce '/name'.
        if self.path:
            return self.path + '/' + name
        else:
            return name

    def get_child(self, name):
        return self.project.get_resource(self._get_child_path(name))

    def has_child(self, name):
        """Return whether a child named `name` exists."""
        try:
            self.get_child(name)
            return True
        except exceptions.ResourceNotFoundError:
            return False

    def get_files(self):
        return [resource for resource in self.get_children()
                if not resource.is_folder()]

    def get_folders(self):
        return [resource for resource in self.get_children()
                if resource.is_folder()]

    def contains(self, resource):
        """Return whether `resource` lies (transitively) inside this folder."""
        if self == resource:
            return False
        return self.path == '' or resource.path.startswith(self.path + '/')

    def create(self):
        self.parent.create_folder(self.name)
+
+
+class _ResourceMatcher(object):
+
+ def __init__(self):
+ self.patterns = []
+ self._compiled_patterns = []
+
+ def set_patterns(self, patterns):
+ """Specify which resources to match
+
+ `patterns` is a `list` of `str`\s that can contain ``*`` and
+ ``?`` signs for matching resource names.
+
+ """
+ self._compiled_patterns = None
+ self.patterns = patterns
+
+ def _add_pattern(self, pattern):
+ re_pattern = pattern.replace('.', '\\.').\
+ replace('*', '[^/]*').replace('?', '[^/]').\
+ replace('//', '/(.*/)?')
+ re_pattern = '^(.*/)?' + re_pattern + '(/.*)?$'
+ self.compiled_patterns.append(re.compile(re_pattern))
+
+ def does_match(self, resource):
+ for pattern in self.compiled_patterns:
+ if pattern.match(resource.path):
+ return True
+ path = os.path.join(resource.project.address,
+ *resource.path.split('/'))
+ if os.path.islink(path):
+ return True
+ return False
+
+ @property
+ def compiled_patterns(self):
+ if self._compiled_patterns is None:
+ self._compiled_patterns = []
+ for pattern in self.patterns:
+ self._add_pattern(pattern)
+ return self._compiled_patterns
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/simplify.py b/.vim/bundle/python-mode/pylibs/rope/base/simplify.py
@@ -0,0 +1,55 @@
+"""A module to ease code analysis
+
+This module is here to help source code analysis.
+"""
+import re
+
+from rope.base import codeanalyze, utils
+
+
@utils.cached(7)
def real_code(source):
    """Simplify `source` for analysis

    It replaces:

    * comments with spaces
    * strs with a new str filled with spaces
    * implicit and explicit continuations with spaces
    * tabs and semicolons with spaces

    The resulting code is a lot easier to analyze if we are interested
    only in offsets.
    """
    # Pass 1: blank out comments and string bodies.  Replacements have
    # exactly the same length as the original region so that every
    # offset in the result maps to the same offset in `source`.
    collector = codeanalyze.ChangeCollector(source)
    for start, end in ignored_regions(source):
        if source[start] == '#':
            replacement = ' ' * (end - start)
        else:
            # Keep the surrounding quotes so the region stays a string
            # literal; only its contents are blanked.
            replacement = '"%s"' % (' ' * (end - start - 2))
        collector.add_change(start, end, replacement)
    source = collector.get_changed() or source
    # Pass 2: replace newlines that occur inside open brackets
    # (implicit line continuations) with spaces.
    collector = codeanalyze.ChangeCollector(source)
    parens = 0
    for match in _parens.finditer(source):
        i = match.start()
        c = match.group()
        if c in '({[':
            parens += 1
        if c in ')}]':
            parens -= 1
        if c == '\n' and parens > 0:
            collector.add_change(i, i + 1, ' ')
    source = collector.get_changed() or source
    # Finally: explicit continuations and tabs become spaces, and
    # semicolons become newlines (one statement per line).
    return source.replace('\\\n', ' ').replace('\t', ' ').replace(';', '\n')
+
+
@utils.cached(7)
def ignored_regions(source):
    """Return ignored regions like strings and comments in `source` """
    regions = []
    for match in _str.finditer(source):
        regions.append((match.start(), match.end()))
    return regions
+
+
# Matches every comment or string literal region in a module; these are
# the regions that `real_code` blanks out.
_str = re.compile('%s|%s' % (codeanalyze.get_comment_pattern(),
                             codeanalyze.get_string_pattern()))
# Matches bracket characters and newlines; used to find newlines that
# fall inside open brackets (implicit continuations).
_parens = re.compile(r'[\({\[\]}\)\n]')
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/stdmods.py b/.vim/bundle/python-mode/pylibs/rope/base/stdmods.py
@@ -0,0 +1,40 @@
+import os
+import sys
+
+from rope.base import utils
+
+
+def _stdlib_path():
+ import distutils.sysconfig
+ return distutils.sysconfig.get_python_lib(standard_lib=True)
+
@utils.cached(1)
def standard_modules():
    """Return the names of all standard-library modules."""
    result = set()
    result.update(python_modules())
    result.update(dynload_modules())
    return result
+
@utils.cached(1)
def python_modules():
    """Return the names of pure-python stdlib modules and packages."""
    result = set()
    lib_path = _stdlib_path()
    if not os.path.exists(lib_path):
        return result
    for name in os.listdir(lib_path):
        full = os.path.join(lib_path, name)
        if os.path.isdir(full):
            # Skip distribution directories such as 'config-x.y-...'.
            if '-' not in name:
                result.add(name)
        elif name.endswith('.py'):
            result.add(name[:-3])
    return result
+
@utils.cached(1)
def dynload_modules():
    """Return the names of built-in and compiled extension modules."""
    result = set(sys.builtin_module_names)
    dynload_path = os.path.join(_stdlib_path(), 'lib-dynload')
    if os.path.exists(dynload_path):
        for name in os.listdir(dynload_path):
            path = os.path.join(dynload_path, name)
            if os.path.isfile(path):
                # '.so' on unix, '.pyd' for Windows extension modules
                # (the importable Windows suffix; bare '.dll' has not
                # been importable since Python 2.5 but is kept for
                # backward compatibility).
                if name.endswith(('.so', '.pyd', '.dll')):
                    result.add(os.path.splitext(name)[0])
    return result
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/taskhandle.py b/.vim/bundle/python-mode/pylibs/rope/base/taskhandle.py
@@ -0,0 +1,133 @@
+import warnings
+
+from rope.base import exceptions
+
+
class TaskHandle(object):
    """Allows interrupting and observing a long-running task."""

    def __init__(self, name='Task', interrupts=True):
        """Construct a TaskHandle

        If `interrupts` is `False` the task won't be interrupted by
        calling `TaskHandle.stop()`.

        """
        self.name = name
        self.interrupts = interrupts
        self.stopped = False
        self.job_sets = []
        self.observers = []

    def stop(self):
        """Interrupts the refactoring"""
        if not self.interrupts:
            # Interruption is disabled for this handle.
            return
        self.stopped = True
        self._inform_observers()

    def current_jobset(self):
        """Return the current `JobSet`"""
        if not self.job_sets:
            return None
        return self.job_sets[-1]

    def add_observer(self, observer):
        """Register an observer for this task handle

        The observer is notified whenever the task is stopped or
        a job gets finished.

        """
        self.observers.append(observer)

    def is_stopped(self):
        return self.stopped

    def get_jobsets(self):
        return self.job_sets

    def create_jobset(self, name='JobSet', count=None):
        """Create, register and return a new `JobSet`."""
        jobset = JobSet(self, name=name, count=count)
        self.job_sets.append(jobset)
        self._inform_observers()
        return jobset

    def _inform_observers(self):
        # Iterate over a snapshot: observers may mutate the list while
        # being notified.
        for observer in list(self.observers):
            observer()
+
+
class JobSet(object):
    """Tracks the progress of one group of jobs for a `TaskHandle`."""

    def __init__(self, handle, name, count):
        self.handle = handle
        self.name = name
        # Total number of jobs, or None when unknown.
        self.count = count
        self.done = 0
        self.job_name = None

    def started_job(self, name):
        """Mark `name` as the running job and notify observers."""
        self.check_status()
        self.job_name = name
        self.handle._inform_observers()

    def finished_job(self):
        """Mark the running job as finished and notify observers."""
        self.check_status()
        self.done += 1
        self.handle._inform_observers()
        self.job_name = None

    def check_status(self):
        """Raise `InterruptedTaskError` when the task has been stopped."""
        if self.handle.is_stopped():
            raise exceptions.InterruptedTaskError()

    def get_active_job_name(self):
        return self.job_name

    def get_percent_done(self):
        """Return percent completed (capped at 100), or None if unknown."""
        if self.count is None or self.count <= 0:
            return None
        return min(self.done * 100 // self.count, 100)

    def get_name(self):
        return self.name
+
+
class NullTaskHandle(object):
    """A do-nothing `TaskHandle` substitute for callers that do not
    track progress."""

    def __init__(self):
        pass

    def is_stopped(self):
        # A null handle can never be interrupted.
        return False

    def stop(self):
        pass

    def create_jobset(self, *args, **kwds):
        # Arguments are accepted (and ignored) for interface
        # compatibility with TaskHandle.create_jobset.
        return NullJobSet()

    def get_jobsets(self):
        return []

    def add_observer(self, observer):
        pass
+
+
class NullJobSet(object):
    """A do-nothing `JobSet` substitute; every method is a no-op."""

    def started_job(self, name):
        pass

    def finished_job(self):
        pass

    def check_status(self):
        # Never raises: the null task cannot be stopped.
        pass

    def get_active_job_name(self):
        pass

    def get_percent_done(self):
        pass

    def get_name(self):
        pass
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/utils.py b/.vim/bundle/python-mode/pylibs/rope/base/utils.py
@@ -0,0 +1,78 @@
+import warnings
+
+
def saveit(func):
    """A decorator that caches the return value of a method

    The result is stored on the instance under ``_<funcname>``.  Later
    calls return the stored value and ignore the arguments entirely.
    """
    attr = '_' + func.__name__

    def _wrapper(self, *args, **kwds):
        try:
            return getattr(self, attr)
        except AttributeError:
            value = func(self, *args, **kwds)
            setattr(self, attr, value)
            return value
    return _wrapper

# Historical alias kept for callers that use the other name.
cacheit = saveit
+
def prevent_recursion(default):
    """A decorator that returns the return value of `default` in recursions"""
    def decorator(func):
        guard_attr = '_calling_%s_' % func.__name__

        def guarded(self, *args, **kwds):
            if getattr(self, guard_attr, False):
                # Re-entered while already running: short-circuit.
                return default()
            setattr(self, guard_attr, True)
            try:
                return func(self, *args, **kwds)
            finally:
                # Always clear the guard, even when `func` raises.
                setattr(self, guard_attr, False)
        return guarded
    return decorator
+
+
def ignore_exception(exception_class):
    """A decorator that ignores `exception_class` exceptions"""
    def _decorator(func):
        def swallow(*args, **kwds):
            try:
                return func(*args, **kwds)
            except exception_class:
                # Deliberately return None when the exception occurs.
                return None
        return swallow
    return _decorator
+
+
def deprecated(message=None):
    """A decorator for deprecated functions

    `message`, when given, is the warning text; otherwise a default of
    the form ``<name> is deprecated`` is used.
    """
    def _decorator(func, message=message):
        if message is None:
            message = '%s is deprecated' % func.__name__

        def warn_and_call(*args, **kwds):
            # stacklevel=2 attributes the warning to the caller.
            warnings.warn(message, DeprecationWarning, stacklevel=2)
            return func(*args, **kwds)
        return warn_and_call
    return _decorator
+
+
def cached(count):
    """A caching decorator based on parameter objects

    The decorated function keeps at most `count` cached
    (arguments, result) pairs; see `_Cached`.
    """
    def decorator(func):
        return _Cached(func, count)
    return decorator
+
+class _Cached(object):
+
+ def __init__(self, func, count):
+ self.func = func
+ self.cache = []
+ self.count = count
+
+ def __call__(self, *args, **kwds):
+ key = (args, kwds)
+ for cached_key, cached_result in self.cache:
+ if cached_key == key:
+ return cached_result
+ result = self.func(*args, **kwds)
+ self.cache.append((key, result))
+ if len(self.cache) > self.count:
+ del self.cache[0]
+ return result
diff --git a/.vim/bundle/python-mode/pylibs/rope/base/worder.py b/.vim/bundle/python-mode/pylibs/rope/base/worder.py
@@ -0,0 +1,524 @@
+import bisect
+import keyword
+
+import rope.base.simplify
+
+
def get_name_at(resource, offset):
    """Return the word at `offset` in `resource`'s source code."""
    finder = Worder(resource.read())
    return finder.get_word_at(offset)
+
+
class Worder(object):
    """A class for finding boundaries of words and expressions

    Note that in these methods, offset should be the index of the
    character not the index of the character after it.
    """

    def __init__(self, code, handle_ignores=False):
        # Analyze a simplified copy (comments/strings blanked) while
        # returning text from the original `code`.
        simplified = rope.base.simplify.real_code(code)
        self.code_finder = _RealFinder(simplified, code)
        self.handle_ignores = handle_ignores
        self.code = code

    def _init_ignores(self):
        # Lazily record the (start, end) spans of comments and strings,
        # plus a finder that works on the raw, unsimplified code.
        ignores = rope.base.simplify.ignored_regions(self.code)
        self.dumb_finder = _RealFinder(self.code, self.code)
        self.starts = [ignored[0] for ignored in ignores]
        self.ends = [ignored[1] for ignored in ignores]

    def _context_call(self, name, offset):
        # Dispatch to the raw-code finder when `offset` falls inside a
        # comment or string (and ignore handling is enabled); otherwise
        # use the finder that works on simplified code.
        if self.handle_ignores:
            if not hasattr(self, 'starts'):
                self._init_ignores()
            start = bisect.bisect(self.starts, offset)
            if start > 0 and offset < self.ends[start - 1]:
                return getattr(self.dumb_finder, name)(offset)
        return getattr(self.code_finder, name)(offset)

    # These five queries are context-sensitive: inside a string or
    # comment they operate on the raw text instead of simplified code.
    def get_primary_at(self, offset):
        return self._context_call('get_primary_at', offset)

    def get_word_at(self, offset):
        return self._context_call('get_word_at', offset)

    def get_primary_range(self, offset):
        return self._context_call('get_primary_range', offset)

    def get_splitted_primary_before(self, offset):
        return self._context_call('get_splitted_primary_before', offset)

    def get_word_range(self, offset):
        return self._context_call('get_word_range', offset)

    # The remaining methods delegate directly to the simplified-code
    # finder; see `_RealFinder` for their semantics.
    def is_function_keyword_parameter(self, offset):
        return self.code_finder.is_function_keyword_parameter(offset)

    def is_a_class_or_function_name_in_header(self, offset):
        return self.code_finder.is_a_class_or_function_name_in_header(offset)

    def is_from_statement_module(self, offset):
        return self.code_finder.is_from_statement_module(offset)

    def is_from_aliased(self, offset):
        return self.code_finder.is_from_aliased(offset)

    def find_parens_start_from_inside(self, offset):
        return self.code_finder.find_parens_start_from_inside(offset)

    def is_a_name_after_from_import(self, offset):
        return self.code_finder.is_a_name_after_from_import(offset)

    def is_from_statement(self, offset):
        return self.code_finder.is_from_statement(offset)

    def get_from_aliased(self, offset):
        return self.code_finder.get_from_aliased(offset)

    def is_import_statement(self, offset):
        return self.code_finder.is_import_statement(offset)

    def is_assigned_here(self, offset):
        return self.code_finder.is_assigned_here(offset)

    def is_a_function_being_called(self, offset):
        return self.code_finder.is_a_function_being_called(offset)

    def get_word_parens_range(self, offset):
        return self.code_finder.get_word_parens_range(offset)

    def is_name_assigned_in_class_body(self, offset):
        return self.code_finder.is_name_assigned_in_class_body(offset)

    def is_on_function_call_keyword(self, offset):
        return self.code_finder.is_on_function_call_keyword(offset)

    def _find_parens_start(self, offset):
        return self.code_finder._find_parens_start(offset)

    def get_parameters(self, first, last):
        return self.code_finder.get_parameters(first, last)

    def get_from_module(self, offset):
        return self.code_finder.get_from_module(offset)

    def is_assigned_in_a_tuple_assignment(self, offset):
        return self.code_finder.is_assigned_in_a_tuple_assignment(offset)

    def get_assignment_type(self, offset):
        return self.code_finder.get_assignment_type(offset)

    def get_function_and_args_in_header(self, offset):
        return self.code_finder.get_function_and_args_in_header(offset)

    def get_lambda_and_args(self, offset):
        return self.code_finder.get_lambda_and_args(offset)

    def find_function_offset(self, offset):
        return self.code_finder.find_function_offset(offset)
+
+
class _RealFinder(object):
    """Finds word/expression boundaries by scanning character offsets.

    `code` is the simplified source (comments and strings blanked by
    `rope.base.simplify.real_code`) used for all scanning; `raw` is the
    original text from which results are sliced.
    """

    def __init__(self, code, raw):
        self.code = code
        self.raw = raw

    def _find_word_start(self, offset):
        # Walk left while still inside an identifier.
        current_offset = offset
        while current_offset >= 0 and self._is_id_char(current_offset):
            current_offset -= 1
        return current_offset + 1

    def _find_word_end(self, offset):
        # Walk right while the next character continues the identifier.
        while offset + 1 < len(self.code) and self._is_id_char(offset + 1):
            offset += 1
        return offset

    def _find_last_non_space_char(self, offset):
        # Scan left over spaces, but stop at a newline (returns the
        # newline offset itself); returns -1 when nothing is found.
        while offset >= 0 and self.code[offset].isspace():
            if self.code[offset] == '\n':
                return offset
            offset -= 1
        return max(-1, offset)

    def get_word_at(self, offset):
        """Return the identifier at (or adjacent to) `offset`."""
        offset = self._get_fixed_offset(offset)
        return self.raw[self._find_word_start(offset):
                        self._find_word_end(offset) + 1]

    def _get_fixed_offset(self, offset):
        # Snap an offset that falls just outside an identifier onto the
        # neighboring identifier character, preferring the left side.
        if offset >= len(self.code):
            return offset - 1
        if not self._is_id_char(offset):
            if offset > 0 and self._is_id_char(offset - 1):
                return offset - 1
            if offset < len(self.code) - 1 and self._is_id_char(offset + 1):
                return offset + 1
        return offset

    def _is_id_char(self, offset):
        return self.code[offset].isalnum() or self.code[offset] == '_'

    def _find_string_start(self, offset):
        # `offset` is on a closing quote; find the matching opening one.
        kind = self.code[offset]
        try:
            return self.code.rindex(kind, 0, offset)
        except ValueError:
            return 0

    def _find_parens_start(self, offset):
        # `offset` is on a closing bracket; walk left over primaries
        # and separators until the matching opening bracket.
        offset = self._find_last_non_space_char(offset - 1)
        while offset >= 0 and self.code[offset] not in '[({':
            if self.code[offset] not in ':,':
                offset = self._find_primary_start(offset)
            offset = self._find_last_non_space_char(offset - 1)
        return offset

    def _find_atom_start(self, offset):
        # Find the start of the atom (string, parenthesized expression
        # or identifier) that ends at `offset`.
        old_offset = offset
        if self.code[offset] == '\n':
            return offset + 1
        if self.code[offset].isspace():
            offset = self._find_last_non_space_char(offset)
        if self.code[offset] in '\'"':
            return self._find_string_start(offset)
        if self.code[offset] in ')]}':
            return self._find_parens_start(offset)
        if self._is_id_char(offset):
            return self._find_word_start(offset)
        return old_offset

    def _find_primary_without_dot_start(self, offset):
        """It tries to find the undotted primary start

        It is different from `self._get_atom_start()` in that it
        follows function calls, too; such as in ``f(x)``.

        """
        last_atom = offset
        offset = self._find_last_non_space_char(last_atom)
        # Skip back over trailing call/subscript groups: f(x)[i] ...
        while offset > 0 and self.code[offset] in ')]':
            last_atom = self._find_parens_start(offset)
            offset = self._find_last_non_space_char(last_atom - 1)
        if offset >= 0 and (self.code[offset] in '"\'})]' or
                            self._is_id_char(offset)):
            atom_start = self._find_atom_start(offset)
            # Keywords (e.g. `return x`) do not belong to the primary.
            if not keyword.iskeyword(self.code[atom_start:offset + 1]):
                return atom_start
        return last_atom

    def _find_primary_start(self, offset):
        # Find the start of a (possibly dotted) primary expression such
        # as `a.b(c).d` ending at `offset`.
        if offset >= len(self.code):
            offset = len(self.code) - 1
        if self.code[offset] != '.':
            offset = self._find_primary_without_dot_start(offset)
        else:
            offset = offset + 1
        while offset > 0:
            prev = self._find_last_non_space_char(offset - 1)
            if offset <= 0 or self.code[prev] != '.':
                break
            # Extend left across each `.` to the previous component.
            offset = self._find_primary_without_dot_start(prev - 1)
            if not self._is_id_char(offset):
                break

        return offset

    def get_primary_at(self, offset):
        """Return the dotted primary expression at `offset`."""
        offset = self._get_fixed_offset(offset)
        start, end = self.get_primary_range(offset)
        return self.raw[start:end].strip()

    def get_splitted_primary_before(self, offset):
        """returns expression, starting, starting_offset

        This function is used in `rope.codeassist.assist` function.
        """
        if offset == 0:
            return ('', '', 0)
        end = offset - 1
        word_start = self._find_atom_start(end)
        real_start = self._find_primary_start(end)
        if self.code[word_start:offset].strip() == '':
            word_start = end
        if self.code[end].isspace():
            word_start = end
        if self.code[real_start:word_start].strip() == '':
            real_start = word_start
        if real_start == word_start == end and not self._is_id_char(end):
            return ('', '', offset)
        if real_start == word_start:
            # No dotted prefix: everything typed so far is the start.
            return ('', self.raw[word_start:offset], word_start)
        else:
            if self.code[end] == '.':
                # Cursor right after a dot: full expression, empty word.
                return (self.raw[real_start:end], '', offset)
            last_dot_position = word_start
            if self.code[word_start] != '.':
                last_dot_position = self._find_last_non_space_char(word_start - 1)
            last_char_position = self._find_last_non_space_char(last_dot_position - 1)
            if self.code[word_start].isspace():
                word_start = offset
            return (self.raw[real_start:last_char_position + 1],
                    self.raw[word_start:offset], word_start)

    def _get_line_start(self, offset):
        # Offset of the newline preceding `offset` (0 for first line).
        try:
            return self.code.rindex('\n', 0, offset + 1)
        except ValueError:
            return 0

    def _get_line_end(self, offset):
        # Offset of the newline at/after `offset` (len for last line).
        try:
            return self.code.index('\n', offset)
        except ValueError:
            return len(self.code)

    def is_name_assigned_in_class_body(self, offset):
        # True when the word at `offset` starts its line (no dots) and
        # is the target of a plain `=` assignment.
        word_start = self._find_word_start(offset - 1)
        word_end = self._find_word_end(offset) + 1
        if '.' in self.code[word_start:word_end]:
            return False
        line_start = self._get_line_start(word_start)
        line = self.code[line_start:word_start].strip()
        return not line and self.get_assignment_type(offset) == '='

    def is_a_class_or_function_name_in_header(self, offset):
        # True when the word at `offset` directly follows def/class.
        word_start = self._find_word_start(offset - 1)
        line_start = self._get_line_start(word_start)
        prev_word = self.code[line_start:word_start].strip()
        return prev_word in ['def', 'class']

    def _find_first_non_space_char(self, offset):
        # Scan right over spaces, stopping at a newline.
        if offset >= len(self.code):
            return len(self.code)
        while offset < len(self.code) and self.code[offset].isspace():
            if self.code[offset] == '\n':
                return offset
            offset += 1
        return offset

    def is_a_function_being_called(self, offset):
        # The word is followed by '(' and is not a def/class name.
        word_end = self._find_word_end(offset) + 1
        next_char = self._find_first_non_space_char(word_end)
        return next_char < len(self.code) and \
               self.code[next_char] == '(' and \
               not self.is_a_class_or_function_name_in_header(offset)

    def _find_import_end(self, start):
        return self._get_line_end(start)

    def is_import_statement(self, offset):
        # True when `offset` is within the names of an import statement.
        try:
            last_import = self.code.rindex('import ', 0, offset)
        except ValueError:
            return False
        # 7 == len('import ')
        return self._find_import_end(last_import + 7) >= offset

    def is_from_statement(self, offset):
        try:
            last_from = self.code.rindex('from ', 0, offset)
            from_import = self.code.index(' import ', last_from)
            # 8 == len(' import ')
            from_names = from_import + 8
        except ValueError:
            return False
        from_names = self._find_first_non_space_char(from_names)
        return self._find_import_end(from_names) >= offset

    def is_from_statement_module(self, offset):
        # True when `offset` is on the module part of `from X import`.
        if offset >= len(self.code) - 1:
            return False
        stmt_start = self._find_primary_start(offset)
        line_start = self._get_line_start(stmt_start)
        prev_word = self.code[line_start:stmt_start].strip()
        return prev_word == 'from'

    def is_a_name_after_from_import(self, offset):
        # True when `offset` is in the imported-names part of a
        # `from X import ...` on the current line.
        try:
            if len(self.code) > offset and self.code[offset] == '\n':
                line_start = self._get_line_start(offset - 1)
            else:
                line_start = self._get_line_start(offset)
            last_from = self.code.rindex('from ', line_start, offset)
            from_import = self.code.index(' import ', last_from)
            from_names = from_import + 8
        except ValueError:
            return False
        if from_names - 1 > offset:
            return False
        return self._find_import_end(from_names) >= offset

    def get_from_module(self, offset):
        """Return the module part of the enclosing `from` statement."""
        try:
            last_from = self.code.rindex('from ', 0, offset)
            import_offset = self.code.index(' import ', last_from)
            end = self._find_last_non_space_char(import_offset)
            return self.get_primary_at(end)
        except ValueError:
            # Not inside a from-import; implicitly returns None.
            pass

    def is_from_aliased(self, offset):
        # True when the imported name at `offset` is followed by `as`.
        if not self.is_a_name_after_from_import(offset):
            return False
        try:
            end = self._find_word_end(offset)
            as_end = min(self._find_word_end(end + 1), len(self.code))
            as_start = self._find_word_start(as_end)
            if self.code[as_start:as_end + 1] == 'as':
                return True
        except ValueError:
            return False

    def get_from_aliased(self, offset):
        """Return the alias after `as` for the name at `offset`."""
        try:
            end = self._find_word_end(offset)
            as_ = self._find_word_end(end + 1)
            alias = self._find_word_end(as_ + 1)
            start = self._find_word_start(alias)
            return self.raw[start:alias + 1]
        except ValueError:
            pass

    def is_function_keyword_parameter(self, offset):
        # True for `name=` inside a call's argument list.
        word_end = self._find_word_end(offset)
        if word_end + 1 == len(self.code):
            return False
        next_char = self._find_first_non_space_char(word_end + 1)
        equals = self.code[next_char:next_char + 2]
        # `==` is comparison, not a keyword argument.
        if equals == '==' or not equals.startswith('='):
            return False
        word_start = self._find_word_start(offset)
        prev_char = self._find_last_non_space_char(word_start - 1)
        return prev_char - 1 >= 0 and self.code[prev_char] in ',('

    def is_on_function_call_keyword(self, offset):
        # True when the cursor is where a keyword argument may start.
        stop = self._get_line_start(offset)
        if self._is_id_char(offset):
            offset = self._find_word_start(offset) - 1
        offset = self._find_last_non_space_char(offset)
        if offset <= stop or self.code[offset] not in '(,':
            return False
        parens_start = self.find_parens_start_from_inside(offset)
        return stop < parens_start

    def find_parens_start_from_inside(self, offset):
        # Walk left (within the line) to the opening '(' of the call
        # whose argument list contains `offset`.
        stop = self._get_line_start(offset)
        opens = 1
        while offset > stop:
            if self.code[offset] == '(':
                break
            if self.code[offset] != ',':
                offset = self._find_primary_start(offset)
            offset -= 1
        return max(stop, offset)

    def is_assigned_here(self, offset):
        return self.get_assignment_type(offset) is not None

    def get_assignment_type(self, offset):
        # XXX: does not handle tuple assignments
        word_end = self._find_word_end(offset)
        next_char = self._find_first_non_space_char(word_end + 1)
        # Look at 1-3 character operators to catch '=', '+=', '//=', ...
        single = self.code[next_char:next_char + 1]
        double = self.code[next_char:next_char + 2]
        triple = self.code[next_char:next_char + 3]
        # Comparison operators are excluded; returns None implicitly
        # when no assignment operator follows.
        if double not in ('==', '<=', '>=', '!='):
            for op in [single, double, triple]:
                if op.endswith('='):
                    return op

    def get_primary_range(self, offset):
        """Return the (start, end) span of the primary at `offset`."""
        start = self._find_primary_start(offset)
        end = self._find_word_end(offset) + 1
        return (start, end)

    def get_word_range(self, offset):
        """Return the (start, end) span of the word at `offset`."""
        offset = max(0, offset)
        start = self._find_word_start(offset)
        end = self._find_word_end(offset) + 1
        return (start, end)

    def get_word_parens_range(self, offset, opening='(', closing=')'):
        # Return the span of the balanced bracket group that follows
        # the word at `offset`.
        end = self._find_word_end(offset)
        start_parens = self.code.index(opening, end)
        index = start_parens
        open_count = 0
        while index < len(self.code):
            if self.code[index] == opening:
                open_count += 1
            if self.code[index] == closing:
                open_count -= 1
            if open_count == 0:
                return (start_parens, index + 1)
            index += 1
        return (start_parens, index)

    def get_parameters(self, first, last):
        """Split the argument text between offsets `first` and `last`.

        Returns ``(args, keywords)`` where `args` is a list of
        positional argument strings and `keywords` a list of
        ``(name, value)`` pairs, both in source order.
        """
        keywords = []
        args = []
        # Scan the argument list right-to-left, then reverse.
        current = self._find_last_non_space_char(last - 1)
        while current > first:
            primary_start = current
            current = self._find_primary_start(current)
            while current != first and self.code[current] not in '=,':
                current = self._find_last_non_space_char(current - 1)
            primary = self.raw[current + 1:primary_start + 1].strip()
            if self.code[current] == '=':
                # Keyword argument: keep scanning left for its name.
                primary_start = current - 1
                current -= 1
                while current != first and self.code[current] not in ',':
                    current = self._find_last_non_space_char(current - 1)
                param_name = self.raw[current + 1:primary_start + 1].strip()
                keywords.append((param_name, primary))
            else:
                args.append(primary)
            current = self._find_last_non_space_char(current - 1)
        args.reverse()
        keywords.reverse()
        return args, keywords

    def is_assigned_in_a_tuple_assignment(self, offset):
        # True when the name at `offset` appears on the left of an `=`
        # inside a parenthesized tuple target.
        start = self._get_line_start(offset)
        end = self._get_line_end(offset)
        primary_start = self._find_primary_start(offset)
        primary_end = self._find_word_end(offset)

        prev_char_offset = self._find_last_non_space_char(primary_start - 1)
        next_char_offset = self._find_first_non_space_char(primary_end + 1)
        next_char = prev_char = ''
        if prev_char_offset >= start:
            prev_char = self.code[prev_char_offset]
        if next_char_offset < end:
            next_char = self.code[next_char_offset]
        try:
            equals_offset = self.code.index('=', start, end)
        except ValueError:
            return False
        if prev_char not in '(,' and next_char not in ',)':
            return False
        parens_start = self.find_parens_start_from_inside(offset)
        # XXX: only handling (x, y) = value
        return offset < equals_offset and \
            self.code[start:parens_start].strip() == ''

    def get_function_and_args_in_header(self, offset):
        """Return the `name(params...)` text of the next def header."""
        offset = self.find_function_offset(offset)
        lparens, rparens = self.get_word_parens_range(offset)
        return self.raw[offset:rparens + 1]

    def find_function_offset(self, offset, definition='def '):
        # Find the next occurrence of `definition` that starts a word.
        while True:
            offset = self.code.index(definition, offset)
            if offset == 0 or not self._is_id_char(offset - 1):
                break
            offset += 1
        # NOTE(review): 4 == len('def '); when called with
        # definition='lambda ' this lands inside the keyword — verify
        # get_lambda_and_args against len(definition).
        def_ = offset + 4
        return self._find_first_non_space_char(def_)

    def get_lambda_and_args(self, offset):
        """Return the `lambda params:` text of the next lambda."""
        offset = self.find_function_offset(offset, definition = 'lambda ')
        lparens, rparens = self.get_word_parens_range(offset, opening=' ', closing=':')
        return self.raw[offset:rparens + 1]
+
diff --git a/.vim/bundle/python-mode/pylibs/rope/contrib/__init__.py b/.vim/bundle/python-mode/pylibs/rope/contrib/__init__.py
@@ -0,0 +1,7 @@
+"""rope IDE tools package
+
+This package contains modules that can be used in IDEs
+but do not depend on the UI. So these modules will be used
+by `rope.ui` modules.
+
+"""
diff --git a/.vim/bundle/python-mode/pylibs/rope/contrib/autoimport.py b/.vim/bundle/python-mode/pylibs/rope/contrib/autoimport.py
@@ -0,0 +1,217 @@
+import re
+
+from rope.base import (exceptions, pynames, resourceobserver,
+ taskhandle, pyobjects, builtins, resources)
+from rope.refactor import importutils
+
+
class AutoImport(object):
    """A class for finding the module that provides a name

    This class maintains a cache of global names in python modules.
    Note that this cache is not accurate and might be out of date.

    """

    def __init__(self, project, observe=True, underlined=False):
        """Construct an AutoImport object

        If `observe` is `True`, listen for project changes and update
        the cache.

        If `underlined` is `True`, underlined names are cached, too.
        """
        self.project = project
        self.underlined = underlined
        # `names` maps module name -> list of global names defined in
        # it; persisted through the project's data files.
        self.names = project.data_files.read_data('globalnames')
        if self.names is None:
            self.names = {}
        project.data_files.add_write_hook(self._write)
        # XXX: using a filtered observer
        observer = resourceobserver.ResourceObserver(
            changed=self._changed, moved=self._moved, removed=self._removed)
        if observe:
            project.add_observer(observer)

    def import_assist(self, starting):
        """Return a list of ``(name, module)`` tuples

        This function tries to find modules that have a global name
        that starts with `starting`.
        """
        # XXX: breaking if gave up! use generators
        result = []
        for module in self.names:
            for global_name in self.names[module]:
                if global_name.startswith(starting):
                    result.append((global_name, module))
        return result

    def get_modules(self, name):
        """Return the list of modules that have global `name`"""
        result = []
        for module in self.names:
            if name in self.names[module]:
                result.append(module)
        return result

    def get_all_names(self):
        """Return the set of all cached global names"""
        result = set()
        for module in self.names:
            result.update(set(self.names[module]))
        return result

    def get_name_locations(self, name):
        """Return a list of ``(resource, lineno)`` tuples for `name`"""
        result = []
        pycore = self.project.pycore
        for module in self.names:
            if name in self.names[module]:
                try:
                    pymodule = pycore.get_module(module)
                    if name in pymodule:
                        pyname = pymodule[name]
                        # Bind the defining module to its own name so the
                        # `module` loop variable is not clobbered.
                        def_module, lineno = pyname.get_definition_location()
                        if def_module is not None:
                            resource = def_module.get_module().get_resource()
                            if resource is not None and lineno is not None:
                                result.append((resource, lineno))
                except exceptions.ModuleNotFoundError:
                    pass
        return result

    def generate_cache(self, resources=None, underlined=None,
                       task_handle=taskhandle.NullTaskHandle()):
        """Generate global name cache for project files

        If `resources` is a list of `rope.base.resource.File`\s, only
        those files are searched; otherwise all python modules in the
        project are cached.

        """
        if resources is None:
            resources = self.project.pycore.get_python_files()
        # Fixed typo in the user-visible progress title ("Generatig").
        job_set = task_handle.create_jobset(
            'Generating autoimport cache', len(resources))
        for file in resources:
            job_set.started_job('Working on <%s>' % file.path)
            self.update_resource(file, underlined)
            job_set.finished_job()

    def generate_modules_cache(self, modules, underlined=None,
                               task_handle=taskhandle.NullTaskHandle()):
        """Generate global name cache for modules listed in `modules`"""
        # Fixed typo in the user-visible progress title ("Generatig").
        job_set = task_handle.create_jobset(
            'Generating autoimport cache for modules', len(modules))
        for modname in modules:
            job_set.started_job('Working on <%s>' % modname)
            if modname.endswith('.*'):
                # A trailing ``.*`` means the package and everything
                # below it.
                mod = self.project.pycore.find_module(modname[:-2])
                if mod:
                    for sub in submodules(mod):
                        self.update_resource(sub, underlined)
            else:
                self.update_module(modname, underlined)
            job_set.finished_job()

    def clear_cache(self):
        """Clear all entries in global-name cache

        It might be a good idea to use this function before
        regenerating global names.

        """
        self.names.clear()

    def find_insertion_line(self, code):
        """Guess at what line the new import should be inserted"""
        # NOTE(review): without re.MULTILINE this only matches a
        # def/class at the very start of `code` -- verify intent.
        match = re.search(r'^(def|class)\s+', code)
        if match is not None:
            code = code[:match.start()]
        try:
            pymodule = self.project.pycore.get_string_module(code)
        except exceptions.ModuleSyntaxError:
            return 1
        # Insert a sentinel import and see where rope places it.
        testmodname = '__rope_testmodule_rope'
        importinfo = importutils.NormalImport(((testmodname, None),))
        module_imports = importutils.get_module_imports(
            self.project.pycore, pymodule)
        module_imports.add_import(importinfo)
        code = module_imports.get_changed_source()
        offset = code.index(testmodname)
        lineno = code.count('\n', 0, offset) + 1
        return lineno

    def update_resource(self, resource, underlined=None):
        """Update the cache for global names in `resource`"""
        try:
            pymodule = self.project.pycore.resource_to_pyobject(resource)
            modname = self._module_name(resource)
            self._add_names(pymodule, modname, underlined)
        except exceptions.ModuleSyntaxError:
            pass

    def update_module(self, modname, underlined=None):
        """Update the cache for global names in `modname` module

        `modname` is the name of a module.
        """
        try:
            pymodule = self.project.pycore.get_module(modname)
            self._add_names(pymodule, modname, underlined)
        except exceptions.ModuleNotFoundError:
            pass

    def _module_name(self, resource):
        return self.project.pycore.modname(resource)

    def _add_names(self, pymodule, modname, underlined):
        if underlined is None:
            underlined = self.underlined
        # Renamed from `globals` to avoid shadowing the builtin.
        global_names = []
        if isinstance(pymodule, pyobjects.PyDefinedObject):
            attributes = pymodule._get_structural_attributes()
        else:
            attributes = pymodule.get_attributes()
        for name, pyname in attributes.items():
            if not underlined and name.startswith('_'):
                continue
            if isinstance(pyname, (pynames.AssignedName, pynames.DefinedName)):
                global_names.append(name)
            if isinstance(pymodule, builtins.BuiltinModule):
                global_names.append(name)
        self.names[modname] = global_names

    def _write(self):
        self.project.data_files.write_data('globalnames', self.names)

    def _changed(self, resource):
        if not resource.is_folder():
            self.update_resource(resource)

    def _moved(self, resource, newresource):
        if not resource.is_folder():
            modname = self._module_name(resource)
            if modname in self.names:
                del self.names[modname]
            self.update_resource(newresource)

    def _removed(self, resource):
        if not resource.is_folder():
            modname = self._module_name(resource)
            if modname in self.names:
                del self.names[modname]
+
+
def submodules(mod):
    """Return the set of python modules at and below `mod`.

    `mod` may be a python source file or a package folder; folders
    without an ``__init__.py`` are not descended into.
    """
    if isinstance(mod, resources.File):
        # A plain file counts only when it is a python source file other
        # than a package's __init__.py (the package itself covers that).
        if mod.name.endswith('.py') and mod.name != '__init__.py':
            return set([mod])
        return set()
    if not mod.has_child('__init__.py'):
        # Not a python package; nothing below it is importable.
        return set()
    collected = set([mod])
    for child in mod.get_children():
        collected |= submodules(child)
    return collected
diff --git a/.vim/bundle/python-mode/pylibs/rope/contrib/changestack.py b/.vim/bundle/python-mode/pylibs/rope/contrib/changestack.py
@@ -0,0 +1,52 @@
+"""For performing many refactorings as a single command
+
+`changestack` module can be used to perform many refactorings on top
+of each other as one bigger command. It can be used like::
+
+ stack = ChangeStack(project, 'my big command')
+
+ #..
+ stack.push(refactoring1.get_changes())
+ #..
+ stack.push(refactoring2.get_changes())
+ #..
+ stack.push(refactoringX.get_changes())
+
+ stack.pop_all()
+ changes = stack.merged()
+
+Now `changes` can be previewed or performed as before.
+"""
+
+from rope.base import change
+
+
class ChangeStack(object):
    """Accumulate several performed change sets and merge them into one."""

    def __init__(self, project, description='merged changes'):
        self.project = project
        self.description = description
        self.stack = []

    def push(self, changes):
        """Perform `changes` on the project and remember them."""
        self.stack.append(changes)
        self.project.do(changes)

    def pop_all(self):
        """Undo every pushed change, dropping each from project history."""
        for _ in self.stack:
            self.project.history.undo(drop=True)

    def merged(self):
        """Return one `ChangeSet` containing every pushed change."""
        result = change.ChangeSet(self.description)
        for changes in self.stack:
            for basic in self._basic_changes(changes):
                result.add_change(basic)
        return result

    def _basic_changes(self, changes):
        # Flatten nested ChangeSets down to their atomic changes.
        if isinstance(changes, change.ChangeSet):
            for child in changes.changes:
                for atom in self._basic_changes(child):
                    yield atom
        else:
            yield changes
diff --git a/.vim/bundle/python-mode/pylibs/rope/contrib/codeassist.py b/.vim/bundle/python-mode/pylibs/rope/contrib/codeassist.py
@@ -0,0 +1,647 @@
+import keyword
+import sys
+import warnings
+
+import rope.base.codeanalyze
+import rope.base.evaluate
+from rope.base import pyobjects, pyobjectsdef, pynames, builtins, exceptions, worder
+from rope.base.codeanalyze import SourceLinesAdapter
+from rope.contrib import fixsyntax
+from rope.refactor import functionutils
+
+
def code_assist(project, source_code, offset, resource=None,
                templates=None, maxfixes=1, later_locals=True):
    """Return python code completions as a list of `CodeAssistProposal` objects

    `resource` is a `rope.base.resources.Resource` object; when given,
    relative imports can be resolved.

    `maxfixes` bounds how many syntax errors may be worked around when
    the source does not currently parse.

    When `later_locals` is `False`, names defined in this scope after
    the completion line are left out.

    `templates` is ignored; passing anything but `None` only triggers a
    deprecation warning.
    """
    if templates is not None:
        warnings.warn('Codeassist no longer supports templates',
                      DeprecationWarning, stacklevel=2)
    assistant = _PythonCodeAssist(project, source_code, offset,
                                  resource=resource, maxfixes=maxfixes,
                                  later_locals=later_locals)
    return assistant()
+
+
def starting_offset(source_code, offset):
    """Return the offset at which the completion should be inserted

    Usually code assist proposals should be inserted like::

        completion = proposal.name
        result = (source_code[:starting_offset] +
                  completion + source_code[offset:])

    Where starting_offset is the offset returned by this function.

    """
    finder = worder.Worder(source_code, True)
    # get_splitted_primary_before -> (expression, starting, starting_offset)
    primary = finder.get_splitted_primary_before(offset)
    return primary[2]
+
+
def get_doc(project, source_code, offset, resource=None, maxfixes=1):
    """Return the pydoc string for the element at `offset` (None if unknown)."""
    fixer = fixsyntax.FixSyntax(project.pycore, source_code,
                                resource, maxfixes)
    # The pymodule itself is not used directly; building it primes the
    # fixer before resolving the name.
    pymodule = fixer.get_pymodule()
    pyname = fixer.pyname_at(offset)
    if pyname is None:
        return None
    return PyDocExtractor().get_doc(pyname.get_object())
+
+
def get_calltip(project, source_code, offset, resource=None,
                maxfixes=1, ignore_unknown=False, remove_self=False):
    """Return the calltip of the function at `offset`

    The result has the form
    ``module_name.holding_scope_names.function_name(arguments)``; for
    classes the ``__init__()`` is used and for callable objects their
    ``__call__()``.

    `offset` must point at the function name itself, *not* past its
    open parenthesis; a simple way to get there is::

        offset = source_code.rindex('(', 0, offset) - 1

    If `ignore_unknown` is `True`, `None` is returned for functions
    without source code (builtins and extensions).

    If `remove_self` is `True`, a leading ``self`` parameter is removed
    from method signatures.
    """
    fixer = fixsyntax.FixSyntax(project.pycore, source_code,
                                resource, maxfixes)
    # Prime the fixer's module before resolving the name at offset.
    pymodule = fixer.get_pymodule()
    pyname = fixer.pyname_at(offset)
    if pyname is None:
        return None
    return PyDocExtractor().get_calltip(pyname.get_object(),
                                        ignore_unknown, remove_self)
+
+
def get_definition_location(project, source_code, offset,
                            resource=None, maxfixes=1):
    """Return the definition location of the python name at `offset`

    Returns a (`rope.base.resources.Resource`, lineno) tuple.  The
    first element is `None` when no `resource` was given and the
    definition lives in the same module; ``(None, None)`` is returned
    when the location cannot be determined at all.
    """
    fixer = fixsyntax.FixSyntax(project.pycore, source_code,
                                resource, maxfixes)
    pymodule = fixer.get_pymodule()
    pyname = fixer.pyname_at(offset)
    if pyname is None:
        return (None, None)
    def_module, lineno = pyname.get_definition_location()
    if def_module is None:
        return (None, None)
    return def_module.get_module().get_resource(), lineno
+
+
def find_occurrences(*args, **kwds):
    """Deprecated: use `rope.contrib.findit.find_occurrences` instead."""
    # Imported lazily so the shim costs nothing until it is called.
    import rope.contrib.findit
    warnings.warn('Use `rope.contrib.findit.find_occurrences()` instead',
                  DeprecationWarning, stacklevel=2)
    return rope.contrib.findit.find_occurrences(*args, **kwds)
+
+
class CompletionProposal(object):
    """A completion proposal

    The `scope` instance variable shows where the proposed name came
    from and can be 'global', 'local', 'builtin', 'attribute',
    'keyword', 'imported', 'parameter_keyword'.

    The `type` instance variable shows the approximate type of the
    proposed object and can be 'instance', 'class', 'function',
    'module', and `None`.

    All possible relations between a proposal's `scope` and `type` are
    shown in the table below (different scopes in rows and types in
    columns):

                          | instance | class | function | module | None
        local             |    +     |   +   |    +     |   +    |
        global            |    +     |   +   |    +     |   +    |
        builtin           |    +     |   +   |    +     |        |
        attribute         |    +     |   +   |    +     |   +    |
        imported          |    +     |   +   |    +     |   +    |
        keyword           |          |       |          |        |  +
        parameter_keyword |          |       |          |        |  +

    """

    def __init__(self, name, scope, pyname=None):
        self.name = name
        self.pyname = pyname
        self.scope = self._get_scope(scope)

    def __str__(self):
        return '%s (%s, %s)' % (self.name, self.scope, self.type)

    def __repr__(self):
        return str(self)

    @property
    def parameters(self):
        """The names of the parameters the function takes.

        Returns None if this completion is not a function.
        """
        pyname = self.pyname
        if isinstance(pyname, pynames.ImportedName):
            pyname = pyname._get_imported_pyname()
        if isinstance(pyname, pynames.DefinedName):
            pyobject = pyname.get_object()
            if isinstance(pyobject, pyobjects.AbstractFunction):
                return pyobject.get_param_names()

    @property
    def type(self):
        pyname = self.pyname
        if isinstance(pyname, builtins.BuiltinName):
            pyobject = pyname.get_object()
            if isinstance(pyobject, builtins.BuiltinFunction):
                return 'function'
            elif isinstance(pyobject, builtins.BuiltinClass):
                # (removed the unused local `clsobj = pyobject.builtin`)
                return 'class'
            elif isinstance(pyobject, builtins.BuiltinObject) or \
                 isinstance(pyobject, builtins.BuiltinName):
                return 'instance'
        elif isinstance(pyname, pynames.ImportedModule):
            return 'module'
        elif isinstance(pyname, pynames.ImportedName) or \
             isinstance(pyname, pynames.DefinedName):
            pyobject = pyname.get_object()
            if isinstance(pyobject, pyobjects.AbstractFunction):
                return 'function'
            if isinstance(pyobject, pyobjects.AbstractClass):
                return 'class'
            return 'instance'

    def _get_scope(self, scope):
        # Builtin and imported names override the scope the caller
        # suggested.
        if isinstance(self.pyname, builtins.BuiltinName):
            return 'builtin'
        if isinstance(self.pyname, pynames.ImportedModule) or \
           isinstance(self.pyname, pynames.ImportedName):
            return 'imported'
        return scope

    def get_doc(self):
        """Get the proposed object's docstring.

        Return None if it cannot be retrieved.
        """
        if not self.pyname:
            return None
        pyobject = self.pyname.get_object()
        if not hasattr(pyobject, 'get_doc'):
            return None
        # Reuse the object fetched above instead of resolving it twice.
        return pyobject.get_doc()

    @property
    def kind(self):
        warnings.warn("the proposal's `kind` property is deprecated, " \
                      "use `scope` instead")
        return self.scope
+
+
# Old name kept as an alias for backward compatibility.
CodeAssistProposal = CompletionProposal
+
+
class NamedParamProposal(CompletionProposal):
    """A parameter keyword completion proposal

    Holds a reference to ``_function`` -- the function the parameter
    ``name`` belongs to -- so the parameter's default value can be
    looked up later.
    """

    def __init__(self, name, function):
        self.argname = name
        super(NamedParamProposal, self).__init__('%s=' % name,
                                                 'parameter_keyword')
        self._function = function

    def get_default(self):
        """Return the string form of this parameter's default value.

        Returns None if the parameter has no default.
        """
        definfo = functionutils.DefinitionInfo.read(self._function)
        for arg, default in definfo.args_with_defaults:
            if arg == self.argname:
                return default
        return None
+
+
def sorted_proposals(proposals, scopepref=None, typepref=None):
    """Sort a list of proposals

    Returns a sorted list of the given `CodeAssistProposal` objects.

    `scopepref` can be a list of proposal scopes. Defaults to
    ``['parameter_keyword', 'local', 'global', 'imported',
    'attribute', 'builtin', 'keyword']``.

    `typepref` can be a list of proposal types. Defaults to
    ``['class', 'function', 'instance', 'module', None]``.
    (`None` stands for completions with no type like keywords.)
    """
    return _ProposalSorter(proposals, scopepref,
                           typepref).get_sorted_proposal_list()
+
+
def starting_expression(source_code, offset):
    """Return the expression to complete"""
    finder = worder.Worder(source_code, True)
    expression, starting, _ = finder.get_splitted_primary_before(offset)
    if not expression:
        return starting
    return expression + '.' + starting
+
+
def default_templates():
    """Deprecated: templates are no longer supported; returns an empty dict."""
    warnings.warn('default_templates() is deprecated.',
                  DeprecationWarning, stacklevel=2)
    return dict()
+
+
+class _PythonCodeAssist(object):
+
+ def __init__(self, project, source_code, offset, resource=None,
+ maxfixes=1, later_locals=True):
+ self.project = project
+ self.pycore = self.project.pycore
+ self.code = source_code
+ self.resource = resource
+ self.maxfixes = maxfixes
+ self.later_locals = later_locals
+ self.word_finder = worder.Worder(source_code, True)
+ self.expression, self.starting, self.offset = \
+ self.word_finder.get_splitted_primary_before(offset)
+
+ keywords = keyword.kwlist
+
+ def _find_starting_offset(self, source_code, offset):
+ current_offset = offset - 1
+ while current_offset >= 0 and (source_code[current_offset].isalnum() or
+ source_code[current_offset] in '_'):
+ current_offset -= 1;
+ return current_offset + 1
+
+ def _matching_keywords(self, starting):
+ result = []
+ for kw in self.keywords:
+ if kw.startswith(starting):
+ result.append(CompletionProposal(kw, 'keyword'))
+ return result
+
+ def __call__(self):
+ if self.offset > len(self.code):
+ return []
+ completions = list(self._code_completions().values())
+ if self.expression.strip() == '' and self.starting.strip() != '':
+ completions.extend(self._matching_keywords(self.starting))
+ return completions
+
+ def _dotted_completions(self, module_scope, holding_scope):
+ result = {}
+ found_pyname = rope.base.evaluate.eval_str(holding_scope,
+ self.expression)
+ if found_pyname is not None:
+ element = found_pyname.get_object()
+ compl_scope = 'attribute'
+ if isinstance(element, (pyobjectsdef.PyModule,
+ pyobjectsdef.PyPackage)):
+ compl_scope = 'imported'
+ for name, pyname in element.get_attributes().items():
+ if name.startswith(self.starting):
+ result[name] = CompletionProposal(name, compl_scope, pyname)
+ return result
+
+ def _undotted_completions(self, scope, result, lineno=None):
+ if scope.parent != None:
+ self._undotted_completions(scope.parent, result)
+ if lineno is None:
+ names = scope.get_propagated_names()
+ else:
+ names = scope.get_names()
+ for name, pyname in names.items():
+ if name.startswith(self.starting):
+ compl_scope = 'local'
+ if scope.get_kind() == 'Module':
+ compl_scope = 'global'
+ if lineno is None or self.later_locals or \
+ not self._is_defined_after(scope, pyname, lineno):
+ result[name] = CompletionProposal(name, compl_scope,
+ pyname)
+
+ def _from_import_completions(self, pymodule):
+ module_name = self.word_finder.get_from_module(self.offset)
+ if module_name is None:
+ return {}
+ pymodule = self._find_module(pymodule, module_name)
+ result = {}
+ for name in pymodule:
+ if name.startswith(self.starting):
+ result[name] = CompletionProposal(name, scope='global',
+ pyname=pymodule[name])
+ return result
+
+ def _find_module(self, pymodule, module_name):
+ dots = 0
+ while module_name[dots] == '.':
+ dots += 1
+ pyname = pynames.ImportedModule(pymodule,
+ module_name[dots:], dots)
+ return pyname.get_object()
+
+ def _is_defined_after(self, scope, pyname, lineno):
+ location = pyname.get_definition_location()
+ if location is not None and location[1] is not None:
+ if location[0] == scope.pyobject.get_module() and \
+ lineno <= location[1] <= scope.get_end():
+ return True
+
+ def _code_completions(self):
+ lineno = self.code.count('\n', 0, self.offset) + 1
+ fixer = fixsyntax.FixSyntax(self.pycore, self.code,
+ self.resource, self.maxfixes)
+ pymodule = fixer.get_pymodule()
+ module_scope = pymodule.get_scope()
+ code = pymodule.source_code
+ lines = code.split('\n')
+ result = {}
+ start = fixsyntax._logical_start(lines, lineno)
+ indents = fixsyntax._get_line_indents(lines[start - 1])
+ inner_scope = module_scope.get_inner_scope_for_line(start, indents)
+ if self.word_finder.is_a_name_after_from_import(self.offset):
+ return self._from_import_completions(pymodule)
+ if self.expression.strip() != '':
+ result.update(self._dotted_completions(module_scope, inner_scope))
+ else:
+ result.update(self._keyword_parameters(module_scope.pyobject,
+ inner_scope))
+ self._undotted_completions(inner_scope, result, lineno=lineno)
+ return result
+
+ def _keyword_parameters(self, pymodule, scope):
+ offset = self.offset
+ if offset == 0:
+ return {}
+ word_finder = worder.Worder(self.code, True)
+ lines = SourceLinesAdapter(self.code)
+ lineno = lines.get_line_number(offset)
+ if word_finder.is_on_function_call_keyword(offset - 1):
+ name_finder = rope.base.evaluate.ScopeNameFinder(pymodule)
+ function_parens = word_finder.\
+ find_parens_start_from_inside(offset - 1)
+ primary = word_finder.get_primary_at(function_parens - 1)
+ try:
+ function_pyname = rope.base.evaluate.\
+ eval_str(scope, primary)
+ except exceptions.BadIdentifierError, e:
+ return {}
+ if function_pyname is not None:
+ pyobject = function_pyname.get_object()
+ if isinstance(pyobject, pyobjects.AbstractFunction):
+ pass
+ elif isinstance(pyobject, pyobjects.AbstractClass) and \
+ '__init__' in pyobject:
+ pyobject = pyobject['__init__'].get_object()
+ elif '__call__' in pyobject:
+ pyobject = pyobject['__call__'].get_object()
+ if isinstance(pyobject, pyobjects.AbstractFunction):
+ param_names = []
+ param_names.extend(
+ pyobject.get_param_names(special_args=False))
+ result = {}
+ for name in param_names:
+ if name.startswith(self.starting):
+ result[name + '='] = NamedParamProposal(
+ name, pyobject
+ )
+ return result
+ return {}
+
+
class _ProposalSorter(object):
    """Sort a list of code assist proposals"""

    def __init__(self, code_assist_proposals, scopepref=None, typepref=None):
        self.proposals = code_assist_proposals
        if scopepref is None:
            # Default scope ordering, most relevant first.
            scopepref = ['parameter_keyword', 'local', 'global', 'imported',
                         'attribute', 'builtin', 'keyword']
        self.scopepref = scopepref
        if typepref is None:
            typepref = ['class', 'function', 'instance', 'module', None]
        # Map each preferred type to its rank; types missing from this
        # map are filtered out entirely in get_sorted_proposal_list().
        self.typerank = dict((type, index)
                             for index, type in enumerate(typepref))

    def get_sorted_proposal_list(self):
        """Return a list of `CodeAssistProposal`"""
        # Bucket proposals by scope, then emit buckets in scopepref order.
        proposals = {}
        for proposal in self.proposals:
            proposals.setdefault(proposal.scope, []).append(proposal)
        result = []
        for scope in self.scopepref:
            scope_proposals = proposals.get(scope, [])
            scope_proposals = [proposal for proposal in scope_proposals
                               if proposal.type in self.typerank]
            # NOTE: python-2-style comparator sort (list.sort(cmp_func)).
            scope_proposals.sort(self._proposal_cmp)
            result.extend(scope_proposals)
        return result

    def _proposal_cmp(self, proposal1, proposal2):
        # Order by type rank first, then by name.
        if proposal1.type != proposal2.type:
            return cmp(self.typerank.get(proposal1.type, 100),
                       self.typerank.get(proposal2.type, 100))
        return self._compare_underlined_names(proposal1.name,
                                              proposal2.name)

    def _compare_underlined_names(self, name1, name2):
        # Names with more leading underscores sort after ones with
        # fewer; ties break alphabetically.
        def underline_count(name):
            result = 0
            while result < len(name) and name[result] == '_':
                result += 1
            return result
        underline_count1 = underline_count(name1)
        underline_count2 = underline_count(name2)
        if underline_count1 != underline_count2:
            return cmp(underline_count1, underline_count2)
        return cmp(name1, name2)
+
+
class PyDocExtractor(object):
    """Extract pydoc-style documentation and calltips from pyobjects."""

    def get_doc(self, pyobject):
        """Return a documentation string for `pyobject`, or None."""
        if isinstance(pyobject, pyobjects.AbstractFunction):
            return self._get_function_docstring(pyobject)
        elif isinstance(pyobject, pyobjects.AbstractClass):
            return self._get_class_docstring(pyobject)
        elif isinstance(pyobject, pyobjects.AbstractModule):
            return self._trim_docstring(pyobject.get_doc())
        return None

    def get_calltip(self, pyobject, ignore_unknown=False, remove_self=False):
        """Return the calltip string for `pyobject`, or None."""
        try:
            # Resolve classes to __init__ and callables to __call__.
            if isinstance(pyobject, pyobjects.AbstractClass):
                pyobject = pyobject['__init__'].get_object()
            if not isinstance(pyobject, pyobjects.AbstractFunction):
                pyobject = pyobject['__call__'].get_object()
        except exceptions.AttributeNotFoundError:
            return None
        if ignore_unknown and not isinstance(pyobject, pyobjects.PyFunction):
            return
        if isinstance(pyobject, pyobjects.AbstractFunction):
            result = self._get_function_signature(pyobject, add_module=True)
            if remove_self and self._is_method(pyobject):
                return result.replace('(self)', '()').replace('(self, ', '(')
            return result

    def _get_class_docstring(self, pyclass):
        contents = self._trim_docstring(pyclass.get_doc(), 2)
        # `superclass` instead of `super` to avoid shadowing the builtin.
        supers = [superclass.get_name()
                  for superclass in pyclass.get_superclasses()]
        doc = 'class %s(%s):\n\n' % (pyclass.get_name(),
                                     ', '.join(supers)) + contents

        if '__init__' in pyclass:
            init = pyclass['__init__'].get_object()
            if isinstance(init, pyobjects.AbstractFunction):
                doc += '\n\n' + self._get_single_function_docstring(init)
        return doc

    def _get_function_docstring(self, pyfunction):
        functions = [pyfunction]
        if self._is_method(pyfunction):
            # Include docs of overridden methods up the class hierarchy.
            functions.extend(self._get_super_methods(pyfunction.parent,
                                                     pyfunction.get_name()))
        return '\n\n'.join([self._get_single_function_docstring(function)
                            for function in functions])

    def _is_method(self, pyfunction):
        return isinstance(pyfunction, pyobjects.PyFunction) and \
            isinstance(pyfunction.parent, pyobjects.PyClass)

    def _get_single_function_docstring(self, pyfunction):
        signature = self._get_function_signature(pyfunction)
        docs = self._trim_docstring(pyfunction.get_doc(), indents=2)
        return signature + ':\n\n' + docs

    def _get_super_methods(self, pyclass, name):
        result = []
        for super_class in pyclass.get_superclasses():
            if name in super_class:
                function = super_class[name].get_object()
                if isinstance(function, pyobjects.AbstractFunction):
                    result.append(function)
            result.extend(self._get_super_methods(super_class, name))
        return result

    def _get_function_signature(self, pyfunction, add_module=False):
        location = self._location(pyfunction, add_module)
        if isinstance(pyfunction, pyobjects.PyFunction):
            info = functionutils.DefinitionInfo.read(pyfunction)
            return location + info.to_string()
        else:
            return '%s(%s)' % (location + pyfunction.get_name(),
                               ', '.join(pyfunction.get_param_names()))

    def _location(self, pyobject, add_module=False):
        location = []
        parent = pyobject.parent
        while parent and not isinstance(parent, pyobjects.AbstractModule):
            location.append(parent.get_name())
            location.append('.')
            parent = parent.parent
        if add_module:
            if isinstance(pyobject, pyobjects.PyFunction):
                # (removed an unused `module` local that duplicated
                # _get_module's own lookup)
                location.insert(0, self._get_module(pyobject))
            if isinstance(parent, builtins.BuiltinModule):
                location.insert(0, parent.get_name() + '.')
        return ''.join(location)

    def _get_module(self, pyfunction):
        module = pyfunction.get_module()
        if module is not None:
            resource = module.get_resource()
            if resource is not None:
                return pyfunction.pycore.modname(resource) + '.'
        return ''

    def _trim_docstring(self, docstring, indents=0):
        """The sample code from :PEP:`257`"""
        if not docstring:
            return ''
        # Convert tabs to spaces (following normal Python rules)
        # and split into a list of lines:
        lines = docstring.expandtabs().splitlines()
        # Determine minimum indentation (first line doesn't count).
        # sys.maxsize works on python 2.6+ and 3; sys.maxint was
        # python-2-only.
        indent = sys.maxsize
        for line in lines[1:]:
            stripped = line.lstrip()
            if stripped:
                indent = min(indent, len(line) - len(stripped))
        # Remove indentation (first line is special):
        trimmed = [lines[0].strip()]
        if indent < sys.maxsize:
            for line in lines[1:]:
                trimmed.append(line[indent:].rstrip())
        # Strip off trailing and leading blank lines:
        while trimmed and not trimmed[-1]:
            trimmed.pop()
        while trimmed and not trimmed[0]:
            trimmed.pop(0)
        # Return a single string:
        return '\n'.join((' ' * indents + line for line in trimmed))
+
+
+# Deprecated classes
+
class TemplateProposal(CodeAssistProposal):
    """Deprecated proposal type for the removed template feature."""

    def __init__(self, name, template):
        warnings.warn('TemplateProposal is deprecated.',
                      DeprecationWarning, stacklevel=2)
        super(TemplateProposal, self).__init__(name, 'template')
        self.template = template
+
+
class Template(object):
    """Deprecated template object kept only for backward compatibility."""

    def __init__(self, template):
        self.template = template
        warnings.warn('Template is deprecated.',
                      DeprecationWarning, stacklevel=2)

    def variables(self):
        # Templates no longer support variables; always empty.
        return []

    def substitute(self, mapping):
        # Substitution is a no-op: the raw template text is returned.
        return self.template

    def get_cursor_location(self, mapping):
        # Cursor goes to the end of the template text.
        return len(self.template)
diff --git a/.vim/bundle/python-mode/pylibs/rope/contrib/finderrors.py b/.vim/bundle/python-mode/pylibs/rope/contrib/finderrors.py
@@ -0,0 +1,91 @@
+"""Finding bad name and attribute accesses
+
+`find_errors` function can be used to find possible bad name and
+attribute accesses. As an example::
+
+ errors = find_errors(project, project.get_resource('mod.py'))
+ for error in errors:
+ print '%s: %s' % (error.lineno, error.error)
+
+prints possible errors for ``mod.py`` file.
+
+TODO:
+
+* use task handles
+* reporting names at most once
+* attributes of extension modules that don't appear in
+ extension_modules project config can be ignored
+* not calling `PyScope.get_inner_scope_for_line()` if it is a
+ bottleneck; needs profiling
+* not reporting occurrences where rope cannot infer the object
+* rope saves multiple objects for some of the names in its objectdb
+ use all of them not to give false positives
+* ... ;-)
+
+"""
+from rope.base import ast, evaluate, pyobjects
+
+
def find_errors(project, resource):
    """Find possible bad name and attribute accesses

    Returns a list of `Error` objects for `resource`.
    """
    pymod = project.pycore.resource_to_pyobject(resource)
    visitor = _BadAccessFinder(pymod)
    ast.walk(pymod.get_ast(), visitor)
    return visitor.errors
+
+
class _BadAccessFinder(object):
    # AST visitor: collects `Error` records for suspicious name and
    # attribute accesses in `pymodule`.

    def __init__(self, pymodule):
        self.pymodule = pymodule
        self.scope = pymodule.get_scope()
        self.errors = []

    def _Name(self, node):
        # Stores and parameters are definitions, not accesses.
        if isinstance(node.ctx, (ast.Store, ast.Param)):
            return
        scope = self.scope.get_inner_scope_for_line(node.lineno)
        pyname = scope.lookup(node.id)
        if pyname is None:
            self._add_error(node, 'Unresolved variable')
        elif self._is_defined_after(scope, pyname, node.lineno):
            self._add_error(node, 'Defined later')

    def _Attribute(self, node):
        if not isinstance(node.ctx, ast.Store):
            scope = self.scope.get_inner_scope_for_line(node.lineno)
            pyname = evaluate.eval_node(scope, node.value)
            if pyname is not None and \
                    pyname.get_object() != pyobjects.get_unknown():
                if node.attr not in pyname.get_object():
                    self._add_error(node, 'Unresolved attribute')
        # Recurse into the value expression (e.g. `a.b` inside `a.b.c`).
        ast.walk(node.value, self)

    def _add_error(self, node, msg):
        if isinstance(node, ast.Attribute):
            name = node.attr
        else:
            name = node.id
        # `None` lookups are never reported.
        if name != 'None':
            error = Error(node.lineno, msg + ' ' + name)
            self.errors.append(error)

    def _is_defined_after(self, scope, pyname, lineno):
        # True when the definition lives in this module, below `lineno`
        # but still inside `scope`.
        location = pyname.get_definition_location()
        if location is not None and location[1] is not None:
            if location[0] == self.pymodule and \
                    lineno <= location[1] <= scope.get_end():
                return True
+
+
class Error(object):
    """A possible bad access: a line number plus a message."""

    def __init__(self, lineno, error):
        self.lineno = lineno
        self.error = error

    def __str__(self):
        return '{0}: {1}'.format(self.lineno, self.error)
diff --git a/.vim/bundle/python-mode/pylibs/rope/contrib/findit.py b/.vim/bundle/python-mode/pylibs/rope/contrib/findit.py
@@ -0,0 +1,110 @@
+import rope.base.codeanalyze
+import rope.base.evaluate
+import rope.base.pyobjects
+from rope.base import taskhandle, exceptions, worder
+from rope.contrib import fixsyntax
+from rope.refactor import occurrences
+
+
def find_occurrences(project, resource, offset, unsure=False, resources=None,
                     in_hierarchy=False, task_handle=taskhandle.NullTaskHandle()):
    """Return a list of `Location`\s

    If `unsure` is `True`, possible matches are returned, too.  You
    can use `Location.unsure` to see which are unsure occurrences.
    `resources` can be a list of `rope.base.resource.File`\s that
    should be searched for occurrences; if `None` all python files
    in the project are searched.

    """
    name = worder.get_name_at(resource, offset)
    this_pymodule = project.pycore.resource_to_pyobject(resource)
    primary, pyname = rope.base.evaluate.eval_location2(
        this_pymodule, offset)
    # Closure capturing `unsure`: decides whether unsure matches count.
    def is_match(occurrence):
        return unsure
    finder = occurrences.create_finder(
        project.pycore, name, pyname, unsure=is_match,
        in_hierarchy=in_hierarchy, instance=primary)
    if resources is None:
        resources = project.pycore.get_python_files()
    job_set = task_handle.create_jobset('Finding Occurrences',
                                        count=len(resources))
    return _find_locations(finder, resources, job_set)
+
+
def find_implementations(project, resource, offset, resources=None,
                         task_handle=taskhandle.NullTaskHandle()):
    """Find the places a given method is overridden.

    Finds the places a method is implemented.  Returns a list of
    `Location`\s.  Raises `BadIdentifierError` when `offset` does not
    name a resolvable method.
    """
    name = worder.get_name_at(resource, offset)
    this_pymodule = project.pycore.resource_to_pyobject(resource)
    pyname = rope.base.evaluate.eval_location(this_pymodule, offset)
    if pyname is not None:
        pyobject = pyname.get_object()
        if not isinstance(pyobject, rope.base.pyobjects.PyFunction) or \
           pyobject.get_kind() != 'method':
            raise exceptions.BadIdentifierError('Not a method!')
    else:
        raise exceptions.BadIdentifierError('Cannot resolve the identifier!')
    # Occurrence filters: returning False rejects an occurrence, while
    # falling through (None) leaves the decision to the next filter.
    def is_defined(occurrence):
        if not occurrence.is_defined():
            return False
    def not_self(occurrence):
        # Exclude the definition the search started from.
        if occurrence.get_pyname().get_object() == pyname.get_object():
            return False
    filters = [is_defined, not_self,
               occurrences.InHierarchyFilter(pyname, True)]
    finder = occurrences.Finder(project.pycore, name, filters=filters)
    if resources is None:
        resources = project.pycore.get_python_files()
    job_set = task_handle.create_jobset('Finding Implementations',
                                        count=len(resources))
    return _find_locations(finder, resources, job_set)
+
+
def find_definition(project, code, offset, resource=None, maxfixes=1):
    """Return the definition location of the python name at `offset`

    A `Location` object is returned if the definition location can be
    determined, otherwise ``None`` is returned.
    """
    # FixSyntax tolerates up to `maxfixes` syntax errors in `code`.
    fixer = fixsyntax.FixSyntax(project.pycore, code, resource, maxfixes)
    main_module = fixer.get_pymodule()  # forces parsing/fixing of `code`
    pyname = fixer.pyname_at(offset)
    if pyname is not None:
        module, lineno = pyname.get_definition_location()
        name = rope.base.worder.Worder(code).get_word_at(offset)
        if lineno is not None:
            start = module.lines.get_line_start(lineno)
            # Occurrence filter: only accept matches at or after the
            # definition line's start offset (False rejects; None defers).
            def check_offset(occurrence):
                if occurrence.offset < start:
                    return False
            pyname_filter = occurrences.PyNameFilter(pyname)
            finder = occurrences.Finder(project.pycore, name,
                                        [check_offset, pyname_filter])
            # Return the first matching occurrence in the defining module.
            for occurrence in finder.find_occurrences(pymodule=module):
                return Location(occurrence)
+
+
class Location(object):
    # Lightweight record of where an occurrence was found.

    def __init__(self, occurrence):
        self.resource = occurrence.resource
        self.region = occurrence.get_word_range()  # (start, end) offsets
        self.offset = self.region[0]
        self.unsure = occurrence.is_unsure()
        self.lineno = occurrence.lineno
+
+
def _find_locations(finder, resources, job_set):
    """Run `finder` over `resources`, reporting progress on `job_set`."""
    locations = []
    for res in resources:
        job_set.started_job(res.path)
        locations.extend(Location(occ)
                         for occ in finder.find_occurrences(res))
        job_set.finished_job()
    return locations
diff --git a/.vim/bundle/python-mode/pylibs/rope/contrib/fixmodnames.py b/.vim/bundle/python-mode/pylibs/rope/contrib/fixmodnames.py
@@ -0,0 +1,69 @@
+"""Fix the name of modules
+
+This module is useful when you want to rename many of the modules in
+your project. That can happen especially when you want to change their
+naming style.
+
+For instance::
+
+ fixer = FixModuleNames(project)
+ changes = fixer.get_changes(fixer=str.lower)
+ project.do(changes)
+
+Here it renames all modules and packages to use lower-cased chars.
+You can tell it to use any other style by using the ``fixer``
+argument.
+
+"""
+from rope.base import change, taskhandle
+from rope.contrib import changestack
+from rope.refactor import rename
+
+
class FixModuleNames(object):
    """Rename the project's modules/packages so their names satisfy `fixer`."""

    def __init__(self, project):
        self.project = project

    def get_changes(self, fixer=str.lower,
                    task_handle=taskhandle.NullTaskHandle()):
        """Fix module names

        `fixer` is a function that takes and returns a `str`.  Given
        the name of a module, it should return the fixed name.

        """
        stack = changestack.ChangeStack(self.project, 'Fixing module names')
        jobset = task_handle.create_jobset('Fixing module names',
                                           self._count_fixes(fixer) + 1)
        try:
            while True:
                # Each rename invalidates the resource enumeration, so
                # restart it after every performed rename (the inner
                # `break`); the for-else `break` exits when nothing is left.
                for resource in self._tobe_fixed(fixer):
                    jobset.started_job(resource.path)
                    renamer = rename.Rename(self.project, resource)
                    changes = renamer.get_changes(fixer(self._name(resource)))
                    stack.push(changes)
                    jobset.finished_job()
                    break
                else:
                    break
        finally:
            # Renames were applied incrementally to keep later renames
            # consistent; revert and return the merged change set instead.
            jobset.started_job('Reverting to original state')
            stack.pop_all()
            jobset.finished_job()
        return stack.merged()

    def _count_fixes(self, fixer):
        # Number of modules whose names `fixer` would change.
        return len(list(self._tobe_fixed(fixer)))

    def _tobe_fixed(self, fixer):
        # Yield resources whose module name differs from its fixed form.
        for resource in self.project.pycore.get_python_files():
            modname = self._name(resource)
            if modname != fixer(modname):
                yield resource

    def _name(self, resource):
        # Module name of `resource`; a package is named by its folder.
        modname = resource.name.rsplit('.', 1)[0]
        if modname == '__init__':
            modname = resource.parent.name
        return modname
diff --git a/.vim/bundle/python-mode/pylibs/rope/contrib/fixsyntax.py b/.vim/bundle/python-mode/pylibs/rope/contrib/fixsyntax.py
@@ -0,0 +1,178 @@
+import rope.base.codeanalyze
+import rope.base.evaluate
+from rope.base import worder, exceptions, utils
+from rope.base.codeanalyze import ArrayLinesAdapter, LogicalLineFinder
+
+
class FixSyntax(object):
    """Parse `code`, commenting out up to `maxfixes` syntax errors.

    Used by completion/definition lookup so that rope can still analyze a
    module the user is in the middle of editing.
    """

    def __init__(self, pycore, code, resource, maxfixes=1):
        self.pycore = pycore
        self.code = code
        self.resource = resource
        self.maxfixes = maxfixes

    @utils.saveit
    def get_pymodule(self):
        """Get a `PyModule`"""
        errors = []
        code = self.code
        tries = 0
        while True:
            try:
                # Prefer the on-disk resource when the buffer is unmodified.
                if tries == 0 and self.resource is not None and \
                   self.resource.read() == code:
                    return self.pycore.resource_to_pyobject(self.resource,
                                                            force_errors=True)
                return self.pycore.get_string_module(
                    code, resource=self.resource, force_errors=True)
            # FIX: use `except ... as e` (Python 2.6+/3.x) instead of the
            # Python-2-only `except ..., e` form.
            except exceptions.ModuleSyntaxError as e:
                if tries < self.maxfixes:
                    # Comment out the offending logical line and retry.
                    tries += 1
                    self.commenter.comment(e.lineno)
                    code = '\n'.join(self.commenter.lines)
                    errors.append(' * line %s: %s ... fixed' % (e.lineno,
                                                                e.message_))
                else:
                    # Out of fix attempts: re-raise with the fix history.
                    errors.append(' * line %s: %s ... raised!' % (e.lineno,
                                                                  e.message_))
                    new_message = ('\nSyntax errors in file %s:\n' % e.filename) \
                        + '\n'.join(errors)
                    raise exceptions.ModuleSyntaxError(e.filename, e.lineno,
                                                       new_message)

    @property
    @utils.saveit
    def commenter(self):
        # Cached `_Commenter` tracking line edits and offset shifts.
        return _Commenter(self.code)

    def pyname_at(self, offset):
        """Resolve the PyName at `offset`, tolerating commented-out fixes."""
        pymodule = self.get_pymodule()
        def old_pyname():
            # Evaluate the textual primary at `offset` in the original code.
            word_finder = worder.Worder(self.code, True)
            expression = word_finder.get_primary_at(offset)
            expression = expression.replace('\\\n', ' ').replace('\n', ' ')
            lineno = self.code.count('\n', 0, offset)
            scope = pymodule.get_scope().get_inner_scope_for_line(lineno)
            return rope.base.evaluate.eval_str(scope, expression)
        new_code = pymodule.source_code
        def new_pyname():
            # Evaluate at the offset translated into the fixed module.
            newoffset = self.commenter.transfered_offset(offset)
            return rope.base.evaluate.eval_location(pymodule, newoffset)
        if new_code.startswith(self.code[:offset + 1]):
            return new_pyname()
        result = old_pyname()
        if result is None:
            return new_pyname()
        return result
+
+
class _Commenter(object):
    """Replace broken logical lines with `pass` and track offset shifts."""

    def __init__(self, code):
        self.code = code
        self.lines = self.code.split('\n')
        self.lines.append('\n')
        # FIX: wrap in list() -- `_insert` mutates this with `.insert()`,
        # which fails on Python 3's immutable `range` object.
        self.origs = list(range(len(self.lines) + 1))
        # Per-original-line character deltas, used by `transfered_offset`.
        self.diffs = [0] * (len(self.lines) + 1)

    def comment(self, lineno):
        """Replace the logical line at `lineno` with an equivalent `pass`."""
        start = _logical_start(self.lines, lineno, check_prev=True) - 1
        # using self._get_stmt_end() instead of self._get_block_end()
        # to lower commented lines
        end = self._get_stmt_end(start)
        indents = _get_line_indents(self.lines[start])
        if 0 < start:
            last_lineno = self._last_non_blank(start - 1)
            last_line = self.lines[last_lineno]
            if last_line.rstrip().endswith(':'):
                # The broken line opens a new block; indent the `pass` inside.
                indents = _get_line_indents(last_line) + 4
        self._set(start, ' ' * indents + 'pass')
        # Blank out the statement's continuation lines, too.
        for line in range(start + 1, end + 1):
            self._set(line, self.lines[start])
        self._fix_incomplete_try_blocks(lineno, indents)

    def transfered_offset(self, offset):
        """Map an offset in the original code into the edited code."""
        lineno = self.code.count('\n', 0, offset)
        diff = sum(self.diffs[:lineno])
        return offset + diff

    def _last_non_blank(self, start):
        # Index of the nearest non-blank line at or before `start`.
        while start > 0 and self.lines[start].strip() == '':
            start -= 1
        return start

    def _get_block_end(self, lineno):
        # Last line whose indentation keeps it inside the block at `lineno`.
        end_line = lineno
        base_indents = _get_line_indents(self.lines[lineno])
        for i in range(lineno + 1, len(self.lines)):
            if _get_line_indents(self.lines[i]) >= base_indents:
                end_line = i
            else:
                break
        return end_line

    def _get_stmt_end(self, lineno):
        # Last line of the statement starting at `lineno` (continuations
        # are more indented; the first line at or below base indent ends it).
        end_line = lineno
        base_indents = _get_line_indents(self.lines[lineno])
        for i in range(lineno + 1, len(self.lines)):
            if _get_line_indents(self.lines[i]) <= base_indents:
                return i - 1
        return lineno

    def _fix_incomplete_try_blocks(self, lineno, indents):
        # A commented-out `except`/`finally` can orphan a `try:`; close any
        # enclosing try block that lost its handler with `finally: pass`.
        block_start = lineno
        last_indents = current_indents = indents
        while block_start > 0:
            block_start = rope.base.codeanalyze.get_block_start(
                ArrayLinesAdapter(self.lines), block_start) - 1
            if self.lines[block_start].strip().startswith('try:'):
                indents = _get_line_indents(self.lines[block_start])
                if indents > last_indents:
                    continue
                last_indents = indents
                block_end = self._find_matching_deindent(block_start)
                line = self.lines[block_end].strip()
                if not (line.startswith('finally:') or
                        line.startswith('except ') or
                        line.startswith('except:')):
                    self._insert(block_end, ' ' * indents + 'finally:')
                    self._insert(block_end + 1, ' ' * indents + '    pass')

    def _find_matching_deindent(self, line_number):
        indents = _get_line_indents(self.lines[line_number])
        current_line = line_number + 1
        while current_line < len(self.lines):
            line = self.lines[current_line]
            if not line.strip().startswith('#') and not line.strip() == '':
                # HACK: We should have used logical lines here
                if _get_line_indents(self.lines[current_line]) <= indents:
                    return current_line
            current_line += 1
        return len(self.lines) - 1

    def _set(self, lineno, line):
        # Record the length delta against the line's original index.
        self.diffs[self.origs[lineno]] += len(line) - len(self.lines[lineno])
        self.lines[lineno] = line

    def _insert(self, lineno, line):
        # Inserted lines charge their full length (plus newline) to the
        # original line they displace.
        self.diffs[self.origs[lineno]] += len(line) + 1
        self.origs.insert(lineno, self.origs[lineno])
        self.lines.insert(lineno, line)
+
def _logical_start(lines, lineno, check_prev=False):
    """Return the first physical line of the logical line containing `lineno`.

    With `check_prev`, preceding lines are inspected in case `lineno`
    is a continuation of an earlier logical line.
    """
    logical_finder = LogicalLineFinder(ArrayLinesAdapter(lines))
    if check_prev:
        prev = lineno - 1
        while prev > 0:
            start, end = logical_finder.logical_line_in(prev)
            if end is None or start <= lineno < end:
                return start
            if start <= prev:
                break
            prev -= 1
    return logical_finder.logical_line_in(lineno)[0]
+
+
def _get_line_indents(line):
    # Thin wrapper: number of leading indentation columns of `line`.
    return rope.base.codeanalyze.count_line_indents(line)
diff --git a/.vim/bundle/python-mode/pylibs/rope/contrib/generate.py b/.vim/bundle/python-mode/pylibs/rope/contrib/generate.py
@@ -0,0 +1,355 @@
+import rope.base.evaluate
+from rope.base import change, pyobjects, exceptions, pynames, worder, codeanalyze
+from rope.refactor import sourceutils, importutils, functionutils, suites
+
+
def create_generate(kind, project, resource, offset):
    """A factory for creating `Generate` objects

    `kind` can be 'variable', 'function', 'class', 'module' or
    'package'.

    Raises `RefactoringError` for an unknown `kind`.
    """
    # Look the class up by name instead of `eval`-ing an arbitrary string.
    try:
        generate = globals()['Generate' + kind.title()]
    except KeyError:
        raise exceptions.RefactoringError(
            'Unknown generate refactoring kind <%s>.' % kind)
    return generate(project, resource, offset)
+
+
def create_module(project, name, sourcefolder=None):
    """Creates a module and returns a `rope.base.resources.File`"""
    folder = project.root if sourcefolder is None else sourcefolder
    parts = name.split('.')
    # Walk down the dotted path; the last part becomes the new file.
    for part in parts[:-1]:
        folder = folder.get_child(part)
    return folder.create_file(parts[-1] + '.py')
+
def create_package(project, name, sourcefolder=None):
    """Creates a package and returns a `rope.base.resources.Folder`"""
    folder = sourcefolder if sourcefolder is not None else project.root
    parts = name.split('.')
    # Walk down the dotted path; the last part becomes the new folder.
    for part in parts[:-1]:
        folder = folder.get_child(part)
    package = folder.create_folder(parts[-1])
    package.create_file('__init__.py')
    return package
+
+
class _Generate(object):
    """Base class of the Generate* refactorings.

    Subclasses supply `_get_element_kind()`/`_get_element()` (and may
    override `get_changes`/`get_location`) to describe what is generated.
    """

    def __init__(self, project, resource, offset):
        self.project = project
        self.resource = resource
        self.info = self._generate_info(project, resource, offset)
        self.name = self.info.get_name()
        self._check_exceptional_conditions()

    def _generate_info(self, project, resource, offset):
        # Overridden by `GenerateFunction` to use function-specific info.
        return _GenerationInfo(project.pycore, resource, offset)

    def _check_exceptional_conditions(self):
        # Refuse to generate if the name exists or no target scope is found.
        if self.info.element_already_exists():
            raise exceptions.RefactoringError(
                'Element <%s> already exists.' % self.name)
        if not self.info.primary_is_found():
            raise exceptions.RefactoringError(
                'Cannot determine the scope <%s> should be defined in.' % self.name)

    def get_changes(self):
        """Return a `ChangeSet` that inserts the generated element."""
        changes = change.ChangeSet('Generate %s <%s>' %
                                   (self._get_element_kind(), self.name))
        indents = self.info.get_scope_indents()
        blanks = self.info.get_blank_lines()
        base_definition = sourceutils.fix_indentation(self._get_element(), indents)
        definition = '\n' * blanks[0] + base_definition + '\n' * blanks[1]

        resource = self.info.get_insertion_resource()
        start, end = self.info.get_insertion_offsets()

        collector = codeanalyze.ChangeCollector(resource.read())
        collector.add_change(start, end, definition)
        changes.add_change(change.ChangeContents(
            resource, collector.get_changed()))
        return changes

    def get_location(self):
        """Return a `(resource, lineno)` pair for the insertion point."""
        return (self.info.get_insertion_resource(),
                self.info.get_insertion_lineno())

    def _get_element_kind(self):
        raise NotImplementedError()

    def _get_element(self):
        raise NotImplementedError()
+
+
class GenerateFunction(_Generate):
    """Generate a function, method, constructor or `__call__`."""

    def _generate_info(self, project, resource, offset):
        return _FunctionGenerationInfo(project.pycore, resource, offset)

    def _get_element(self):
        # Build the source text of the new definition.
        decorator = ''
        args = []
        if self.info.is_static_method():
            decorator = '@staticmethod\n'
        if self.info.is_method() or self.info.is_constructor() or \
           self.info.is_instance():
            args.append('self')
        args.extend(self.info.get_passed_args())
        definition = '%sdef %s(%s):\n    pass\n' % (decorator, self.name,
                                                    ', '.join(args))
        return definition

    def _get_element_kind(self):
        return 'Function'
+
+
class GenerateVariable(_Generate):
    """Generate a variable initialized to `None`."""

    def _get_element(self):
        return '%s = None\n' % self.name

    def _get_element_kind(self):
        return 'Variable'
+
+
class GenerateClass(_Generate):
    """Generate an empty new-style class."""

    def _get_element(self):
        return 'class %s(object):\n    pass\n' % self.name

    def _get_element_kind(self):
        return 'Class'
+
+
class GenerateModule(_Generate):
    """Generate a new module inside the referenced package."""

    def get_changes(self):
        package = self.info.get_package()
        changes = change.ChangeSet('Generate Module <%s>' % self.name)
        new_resource = self.project.get_file('%s/%s.py' % (package.path, self.name))
        if new_resource.exists():
            raise exceptions.RefactoringError(
                'Module <%s> already exists' % new_resource.path)
        changes.add_change(change.CreateResource(new_resource))
        # Also import the new module from the module being edited.
        changes.add_change(_add_import_to_module(
            self.project.pycore, self.resource, new_resource))
        return changes

    def get_location(self):
        package = self.info.get_package()
        return (package.get_child('%s.py' % self.name) , 1)
+
+
class GeneratePackage(_Generate):
    """Generate a new package (folder plus `__init__.py`)."""

    def get_changes(self):
        package = self.info.get_package()
        changes = change.ChangeSet('Generate Package <%s>' % self.name)
        new_resource = self.project.get_folder('%s/%s' % (package.path, self.name))
        if new_resource.exists():
            raise exceptions.RefactoringError(
                'Package <%s> already exists' % new_resource.path)
        changes.add_change(change.CreateResource(new_resource))
        # Also import the new package from the module being edited.
        changes.add_change(_add_import_to_module(
            self.project.pycore, self.resource, new_resource))
        child = self.project.get_folder(package.path + '/' + self.name)
        changes.add_change(change.CreateFile(child, '__init__.py'))
        return changes

    def get_location(self):
        package = self.info.get_package()
        child = package.get_child(self.name)
        return (child.get_child('__init__.py') , 1)
+
+
def _add_import_to_module(pycore, resource, imported):
    # Return a `ChangeContents` that adds `import <imported>` to `resource`.
    pymodule = pycore.resource_to_pyobject(resource)
    import_tools = importutils.ImportTools(pycore)
    module_imports = import_tools.module_imports(pymodule)
    module_name = pycore.modname(imported)
    new_import = importutils.NormalImport(((module_name, None), ))
    module_imports.add_import(new_import)
    return change.ChangeContents(resource, module_imports.get_changed_source())
+
+
class _GenerationInfo(object):
    """Figure out where and how a generated element should be inserted.

    `source_*` refers to the module being edited; `goal_*` refers to the
    scope/module the new element will be defined in (they differ when the
    name is accessed through another object, e.g. `mod.new_func()`).
    """

    def __init__(self, pycore, resource, offset):
        self.pycore = pycore
        self.resource = resource
        self.offset = offset
        self.source_pymodule = self.pycore.resource_to_pyobject(resource)
        finder = rope.base.evaluate.ScopeNameFinder(self.source_pymodule)
        self.primary, self.pyname = finder.get_primary_and_pyname_at(offset)
        self._init_fields()

    def _init_fields(self):
        self.source_scope = self._get_source_scope()
        self.goal_scope = self._get_goal_scope()
        self.goal_pymodule = self._get_goal_module(self.goal_scope)

    def _get_goal_scope(self):
        # No primary: define in the scope the name was used in.
        if self.primary is None:
            return self._get_source_scope()
        pyobject = self.primary.get_object()
        if isinstance(pyobject, pyobjects.PyDefinedObject):
            return pyobject.get_scope()
        elif isinstance(pyobject.get_type(), pyobjects.PyClass):
            # Instance primary: define inside its class.
            return pyobject.get_type().get_scope()

    def _get_goal_module(self, scope):
        # Walk up to the module scope owning `scope`.
        if scope is None:
            return
        while scope.parent is not None:
            scope = scope.parent
        return scope.pyobject

    def _get_source_scope(self):
        module_scope = self.source_pymodule.get_scope()
        lineno = self.source_pymodule.lines.get_line_number(self.offset)
        return module_scope.get_inner_scope_for_line(lineno)

    def get_insertion_lineno(self):
        lines = self.goal_pymodule.lines
        if self.goal_scope == self.source_scope:
            # Insert before the statement that uses the name, climbing out
            # of suites until the indentation matches the goal scope.
            line_finder = self.goal_pymodule.logical_lines
            lineno = lines.get_line_number(self.offset)
            lineno = line_finder.logical_line_in(lineno)[0]
            root = suites.ast_suite_tree(self.goal_scope.pyobject.get_ast())
            suite = root.find_suite(lineno)
            indents = sourceutils.get_indents(lines, lineno)
            while self.get_scope_indents() < indents:
                lineno = suite.get_start()
                indents = sourceutils.get_indents(lines, lineno)
                suite = suite.parent
            return lineno
        else:
            # Append at the end of the goal scope.
            return min(self.goal_scope.get_end() + 1, lines.length())

    def get_insertion_resource(self):
        return self.goal_pymodule.get_resource()

    def get_insertion_offsets(self):
        # For a class whose body is just `pass`, replace the `pass`.
        if self.goal_scope.get_kind() == 'Class':
            start, end = sourceutils.get_body_region(self.goal_scope.pyobject)
            if self.goal_pymodule.source_code[start:end].strip() == 'pass':
                return start, end
        lines = self.goal_pymodule.lines
        start = lines.get_line_start(self.get_insertion_lineno())
        return (start, start)

    def get_scope_indents(self):
        if self.goal_scope.get_kind() == 'Module':
            return 0
        # One level deeper than the scope's own definition line.
        return sourceutils.get_indents(self.goal_pymodule.lines,
                                       self.goal_scope.get_start()) + 4

    def get_blank_lines(self):
        # Number of blank lines to put (before, after) the new element.
        if self.goal_scope.get_kind() == 'Module':
            base_blanks = 2
            if self.goal_pymodule.source_code.strip() == '':
                base_blanks = 0
        if self.goal_scope.get_kind() == 'Class':
            base_blanks = 1
        if self.goal_scope.get_kind() == 'Function':
            base_blanks = 0
        if self.goal_scope == self.source_scope:
            return (0, base_blanks)
        return (base_blanks, 0)

    def get_package(self):
        # Package the new module/package should live in.
        primary = self.primary
        if self.primary is None:
            return self.pycore.get_source_folders()[0]
        if isinstance(primary.get_object(), pyobjects.PyPackage):
            return primary.get_object().get_resource()
        raise exceptions.RefactoringError(
            'A module/package can be only created in a package.')

    def primary_is_found(self):
        return self.goal_scope is not None

    def element_already_exists(self):
        if self.pyname is None or isinstance(self.pyname, pynames.UnboundName):
            return False
        return self.get_name() in self.goal_scope.get_defined_names()

    def get_name(self):
        return worder.get_name_at(self.resource, self.offset)
+
+
class _FunctionGenerationInfo(_GenerationInfo):
    """`_GenerationInfo` specialized for generating callables.

    Distinguishes plain functions, methods, constructors (class called
    directly) and `__call__` (instance called directly).
    """

    def _get_goal_scope(self):
        if self.is_constructor():
            return self.pyname.get_object().get_scope()
        if self.is_instance():
            return self.pyname.get_object().get_type().get_scope()
        if self.primary is None:
            return self._get_source_scope()
        pyobject = self.primary.get_object()
        if isinstance(pyobject, pyobjects.PyDefinedObject):
            return pyobject.get_scope()
        elif isinstance(pyobject.get_type(), pyobjects.PyClass):
            return pyobject.get_type().get_scope()

    def element_already_exists(self):
        if self.pyname is None or isinstance(self.pyname, pynames.UnboundName):
            return False
        return self.get_name() in self.goal_scope.get_defined_names()

    def is_static_method(self):
        # Called through the class itself, e.g. `AClass.new_func()`.
        return self.primary is not None and \
            isinstance(self.primary.get_object(), pyobjects.PyClass)

    def is_method(self):
        # Called through an instance, e.g. `obj.new_func()`.
        return self.primary is not None and \
            isinstance(self.primary.get_object().get_type(), pyobjects.PyClass)

    def is_constructor(self):
        # The name itself resolves to a class being called.
        return self.pyname is not None and \
            isinstance(self.pyname.get_object(), pyobjects.PyClass)

    def is_instance(self):
        # The name resolves to an instance being called.
        if self.pyname is None:
            return False
        pyobject = self.pyname.get_object()
        return isinstance(pyobject.get_type(), pyobjects.PyClass)

    def get_name(self):
        if self.is_constructor():
            return '__init__'
        if self.is_instance():
            return '__call__'
        return worder.get_name_at(self.resource, self.offset)

    def get_passed_args(self):
        # Derive parameter names from the call site's arguments; literal
        # expressions become `arg0`, `arg1`, ...
        result = []
        source = self.source_pymodule.source_code
        finder = worder.Worder(source)
        if finder.is_a_function_being_called(self.offset):
            start, end = finder.get_primary_range(self.offset)
            parens_start, parens_end = finder.get_word_parens_range(end - 1)
            call = source[start:parens_end]
            parser = functionutils._FunctionParser(call, False)
            args, keywords = parser.get_parameters()
            for arg in args:
                if self._is_id(arg):
                    result.append(arg)
                else:
                    result.append('arg%d' % len(result))
            for name, value in keywords:
                result.append(name)
        return result

    def _is_id(self, arg):
        # True when `arg` is a plain identifier (usable as a param name).
        def id_or_underline(c):
            return c.isalpha() or c == '_'
        for c in arg:
            if not id_or_underline(c) and not c.isdigit():
                return False
        return id_or_underline(arg[0])
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/__init__.py b/.vim/bundle/python-mode/pylibs/rope/refactor/__init__.py
@@ -0,0 +1,55 @@
+"""rope refactor package
+
+This package contains modules that perform python refactorings.
+Refactoring classes perform refactorings in 4 steps:
+
+1. Collect some data for performing the refactoring and use them
+ to construct a refactoring class. Like::
+
+ renamer = Rename(project, resource, offset)
+
+2. Some refactorings give you useful information about the
+ refactoring after their construction. Like::
+
+ print(renamer.get_old_name())
+
+3. Give the refactoring class more information about how to
+ perform the refactoring and get the changes this refactoring is
+ going to make. This is done by calling `get_changes` method of the
+ refactoring class. Like::
+
+ changes = renamer.get_changes(new_name)
+
+4. You can commit the changes. Like::
+
+ project.do(changes)
+
+These steps are like the steps IDEs usually do for performing a
+refactoring. These are the things an IDE does in each step:
+
+1. Construct a refactoring object by giving it information like
+ resource, offset and ... . Some of the refactoring problems (like
+ performing rename refactoring on language keywords) can be reported
+ here.
+2. Print some information about the refactoring and ask the user
+ about the information that are necessary for completing the
+ refactoring (like new name).
+3. Call the `get_changes` by passing it information asked from
+ the user (if necessary) and get and preview the changes returned by
+ it.
+4. Perform the refactoring.
+
+From ``0.5m5`` release the `get_changes()` method of some time-
+consuming refactorings take an optional `rope.base.taskhandle.
+TaskHandle` parameter. You can use this object for stopping or
+monitoring the progress of refactorings.
+
+"""
+from rope.refactor.importutils import ImportOrganizer
+from rope.refactor.topackage import ModuleToPackage
+
+
+__all__ = ['rename', 'move', 'inline', 'extract', 'restructure', 'topackage',
+ 'importutils', 'usefunction', 'change_signature',
+ 'encapsulate_field', 'introduce_factory', 'introduce_parameter',
+ 'localtofield', 'method_object', 'multiproject']
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/change_signature.py b/.vim/bundle/python-mode/pylibs/rope/refactor/change_signature.py
@@ -0,0 +1,342 @@
+import copy
+
+import rope.base.exceptions
+from rope.base import pyobjects, taskhandle, evaluate, worder, codeanalyze, utils
+from rope.base.change import ChangeContents, ChangeSet
+from rope.refactor import occurrences, functionutils
+
+
class ChangeSignature(object):
    """Change the signature of the function/method at `offset`."""

    def __init__(self, project, resource, offset):
        self.pycore = project.pycore
        self.resource = resource
        self.offset = offset
        self._set_name_and_pyname()
        if self.pyname is None or self.pyname.get_object() is None or \
           not isinstance(self.pyname.get_object(), pyobjects.PyFunction):
            raise rope.base.exceptions.RefactoringError(
                'Change method signature should be performed on functions')

    def _set_name_and_pyname(self):
        self.name = worder.get_name_at(self.resource, self.offset)
        this_pymodule = self.pycore.resource_to_pyobject(self.resource)
        self.primary, self.pyname = evaluate.eval_location2(
            this_pymodule, self.offset)
        if self.pyname is None:
            return
        pyobject = self.pyname.get_object()
        # Selecting a class means changing its constructor's signature.
        if isinstance(pyobject, pyobjects.PyClass) and \
           '__init__' in pyobject:
            self.pyname = pyobject['__init__']
            self.name = '__init__'
        pyobject = self.pyname.get_object()
        # For constructors, also remember the class name so direct
        # `AClass(...)` calls can be rewritten.
        self.others = None
        if self.name == '__init__' and \
           isinstance(pyobject, pyobjects.PyFunction) and \
           isinstance(pyobject.parent, pyobjects.PyClass):
            pyclass = pyobject.parent
            self.others = (pyclass.get_name(),
                           pyclass.parent[pyclass.get_name()])

    def _change_calls(self, call_changer, in_hierarchy=None, resources=None,
                      handle=taskhandle.NullTaskHandle()):
        # Apply `call_changer` to the definition and every call site.
        if resources is None:
            resources = self.pycore.get_python_files()
        changes = ChangeSet('Changing signature of <%s>' % self.name)
        job_set = handle.create_jobset('Collecting Changes', len(resources))
        finder = occurrences.create_finder(
            self.pycore, self.name, self.pyname, instance=self.primary,
            in_hierarchy=in_hierarchy and self.is_method())
        if self.others:
            # Also match `AClass(...)` constructor calls.
            name, pyname = self.others
            constructor_finder = occurrences.create_finder(
                self.pycore, name, pyname, only_calls=True)
            finder = _MultipleFinders([finder, constructor_finder])
        for file in resources:
            job_set.started_job(file.path)
            change_calls = _ChangeCallsInModule(
                self.pycore, finder, file, call_changer)
            changed_file = change_calls.get_changed_module()
            if changed_file is not None:
                changes.add_change(ChangeContents(file, changed_file))
            job_set.finished_job()
        return changes

    def get_args(self):
        """Get function arguments.

        Return a list of ``(name, default)`` tuples for all but star
        and double star arguments.  For arguments that don't have a
        default, `None` will be used.
        """
        return self._definfo().args_with_defaults

    def is_method(self):
        # True when the changed function is defined inside a class.
        pyfunction = self.pyname.get_object()
        return isinstance(pyfunction.parent, pyobjects.PyClass)

    @utils.deprecated('Use `ChangeSignature.get_args()` instead')
    def get_definition_info(self):
        return self._definfo()

    def _definfo(self):
        return functionutils.DefinitionInfo.read(self.pyname.get_object())

    @utils.deprecated()
    def normalize(self):
        changer = _FunctionChangers(
            self.pyname.get_object(), self.get_definition_info(),
            [ArgumentNormalizer()])
        return self._change_calls(changer)

    @utils.deprecated()
    def remove(self, index):
        changer = _FunctionChangers(
            self.pyname.get_object(), self.get_definition_info(),
            [ArgumentRemover(index)])
        return self._change_calls(changer)

    @utils.deprecated()
    def add(self, index, name, default=None, value=None):
        changer = _FunctionChangers(
            self.pyname.get_object(), self.get_definition_info(),
            [ArgumentAdder(index, name, default, value)])
        return self._change_calls(changer)

    @utils.deprecated()
    def inline_default(self, index):
        changer = _FunctionChangers(
            self.pyname.get_object(), self.get_definition_info(),
            [ArgumentDefaultInliner(index)])
        return self._change_calls(changer)

    @utils.deprecated()
    def reorder(self, new_ordering):
        changer = _FunctionChangers(
            self.pyname.get_object(), self.get_definition_info(),
            [ArgumentReorderer(new_ordering)])
        return self._change_calls(changer)

    def get_changes(self, changers, in_hierarchy=False, resources=None,
                    task_handle=taskhandle.NullTaskHandle()):
        """Get changes caused by this refactoring

        `changers` is a list of `_ArgumentChanger`\s.  If `in_hierarchy`
        is `True` the changers are applied to all matching methods in
        the class hierarchy.
        `resources` can be a list of `rope.base.resource.File`\s that
        should be searched for occurrences; if `None` all python files
        in the project are searched.

        """
        function_changer = _FunctionChangers(self.pyname.get_object(),
                                             self._definfo(), changers)
        return self._change_calls(function_changer, in_hierarchy,
                                  resources, task_handle)
+
+
class _FunctionChangers(object):
    """Apply a chain of `_ArgumentChanger`\s to a definition and its calls."""

    def __init__(self, pyfunction, definition_info, changers=None):
        self.pyfunction = pyfunction
        self.definition_info = definition_info
        self.changers = changers
        # Snapshots of the definition after each changer; index 0 is the
        # original, index -1 the fully changed definition.
        self.changed_definition_infos = self._get_changed_definition_infos()

    def _get_changed_definition_infos(self):
        result = []
        definition_info = self.definition_info
        result.append(definition_info)
        for changer in self.changers:
            # Deep-copy so each snapshot stays independent.
            definition_info = copy.deepcopy(definition_info)
            changer.change_definition_info(definition_info)
            result.append(definition_info)
        return result

    def change_definition(self, call):
        # New source text of the `def` line.
        return self.changed_definition_infos[-1].to_string()

    def change_call(self, primary, pyname, call):
        # Map the call's arguments against the original definition, let
        # each changer adjust the mapping, then re-render the call against
        # the final definition.
        call_info = functionutils.CallInfo.read(
            primary, pyname, self.definition_info, call)
        mapping = functionutils.ArgumentMapping(self.definition_info, call_info)

        for definition_info, changer in zip(self.changed_definition_infos, self.changers):
            changer.change_argument_mapping(definition_info, mapping)

        return mapping.to_call_info(self.changed_definition_infos[-1]).to_string()
+
+
class _ArgumentChanger(object):
    # Base class: changers may rewrite the definition, the call-site
    # argument mapping, or both.  Defaults are no-ops.

    def change_definition_info(self, definition_info):
        pass

    def change_argument_mapping(self, definition_info, argument_mapping):
        pass
+
+
class ArgumentNormalizer(_ArgumentChanger):
    # Changes nothing; re-rendering the calls normalizes keyword/positional
    # argument usage against the definition.
    pass
+
+
class ArgumentRemover(_ArgumentChanger):
    """Remove the parameter at `index` from definitions and call sites.

    `index` past the normal parameters removes `*args`, and past that
    removes `**kwargs`.
    """

    def __init__(self, index):
        self.index = index

    def change_definition_info(self, definition_info):
        if self.index < len(definition_info.args_with_defaults):
            del definition_info.args_with_defaults[self.index]
        elif self.index == len(definition_info.args_with_defaults) and \
             definition_info.args_arg is not None:
            definition_info.args_arg = None
        elif (self.index == len(definition_info.args_with_defaults) and
              definition_info.args_arg is None and
              definition_info.keywords_arg is not None) or \
             (self.index == len(definition_info.args_with_defaults) + 1 and
              definition_info.args_arg is not None and
              definition_info.keywords_arg is not None):
            definition_info.keywords_arg = None

    def change_argument_mapping(self, definition_info, mapping):
        if self.index < len(definition_info.args_with_defaults):
            # FIX: was `args_with_defaults[0]`, which yields the first
            # parameter's (name, default) tuple; a tuple can never be a
            # `param_dict` key, so the removed parameter's argument was
            # silently left in every call site.
            name = definition_info.args_with_defaults[self.index][0]
            if name in mapping.param_dict:
                del mapping.param_dict[name]
+
+
class ArgumentAdder(_ArgumentChanger):
    """Insert a new parameter into a function's signature.

    `default` is the default written into the definition (`None` for no
    default); `value` is the argument written into existing call sites
    (`None` leaves calls untouched).
    """

    def __init__(self, index, name, default=None, value=None):
        self.index = index
        self.name = name
        self.default = default
        self.value = value

    def change_definition_info(self, definition_info):
        # Refuse to shadow an existing parameter of the same name.
        if any(existing == self.name
               for existing, _ in definition_info.args_with_defaults):
            raise rope.base.exceptions.RefactoringError(
                'Adding duplicate parameter: <%s>.' % self.name)
        definition_info.args_with_defaults.insert(
            self.index, (self.name, self.default))

    def change_argument_mapping(self, definition_info, mapping):
        # Only touch call sites when an explicit argument value was given.
        if self.value is not None:
            mapping.param_dict[self.name] = self.value
+
+
class ArgumentDefaultInliner(_ArgumentChanger):
    """Write a parameter's default value into calls that omit it.

    When the `remove` flag is set to True the default is also stripped
    from the definition; it starts out False, keeping the definition.
    """

    def __init__(self, index):
        self.index = index
        # Callers may flip this to True to drop the default afterwards.
        self.remove = False

    def change_definition_info(self, definition_info):
        if self.remove:
            name = definition_info.args_with_defaults[self.index][0]
            definition_info.args_with_defaults[self.index] = (name, None)

    def change_argument_mapping(self, definition_info, mapping):
        name, default = definition_info.args_with_defaults[self.index]
        # Fill in the default only where the caller did not pass a value.
        if default is not None and name not in mapping.param_dict:
            mapping.param_dict[name] = default
+
+
class ArgumentReorderer(_ArgumentChanger):

    def __init__(self, new_order, autodef=None):
        """Construct an `ArgumentReorderer`

        Note that the `new_order` is a list containing the new
        position of parameters; not the position each parameter
        is going to be moved to. (changed in ``0.5m4``)

        For example changing ``f(a, b, c)`` to ``f(c, a, b)``
        requires passing ``[2, 0, 1]`` and *not* ``[1, 2, 0]``.

        The `autodef` (automatic default) argument, forces rope to use
        it as a default if a default is needed after the change. That
        happens when an argument without default is moved after
        another that has a default value. Note that `autodef` should
        be a string or `None`; the latter disables adding automatic
        default.

        """
        self.new_order = new_order
        self.autodef = autodef

    def change_definition_info(self, definition_info):
        # Place each parameter at its new slot; parameters beyond
        # len(new_order) keep their original positions.
        new_args = list(definition_info.args_with_defaults)
        for new_index, index in enumerate(self.new_order):
            new_args[new_index] = definition_info.args_with_defaults[index]
        # A parameter without a default may not follow one that has a
        # default; when `autodef` is given, fill such holes with it.
        seen_default = False
        for index, (arg, default) in enumerate(list(new_args)):
            if default is not None:
                seen_default = True
            if seen_default and default is None and self.autodef is not None:
                new_args[index] = (arg, self.autodef)
        definition_info.args_with_defaults = new_args
+
+
class _ChangeCallsInModule(object):
    """Rewrite the definition and every call of a function in one module.

    Uses `occurrence_finder` to locate occurrences and delegates the
    actual text of each rewrite to `call_changer` (a `_FunctionChangers`
    instance).
    """

    def __init__(self, pycore, occurrence_finder, resource, call_changer):
        self.pycore = pycore
        self.occurrence_finder = occurrence_finder
        self.resource = resource
        self.call_changer = call_changer

    def get_changed_module(self):
        """Return the module source with all calls/definitions rewritten."""
        word_finder = worder.Worder(self.source)
        change_collector = codeanalyze.ChangeCollector(self.source)
        for occurrence in self.occurrence_finder.find_occurrences(self.resource):
            # Only call sites and the definition itself are rewritten.
            if not occurrence.is_called() and not occurrence.is_defined():
                continue
            start, end = occurrence.get_primary_range()
            # Extend the occurrence to cover its parenthesized argument list.
            begin_parens, end_parens = word_finder.get_word_parens_range(end - 1)
            if occurrence.is_called():
                primary, pyname = occurrence.get_primary_and_pyname()
                changed_call = self.call_changer.change_call(
                    primary, pyname, self.source[start:end_parens])
            else:
                changed_call = self.call_changer.change_definition(
                    self.source[start:end_parens])
            if changed_call is not None:
                change_collector.add_change(start, end_parens, changed_call)
        return change_collector.get_changed()

    @property
    @utils.saveit
    def pymodule(self):
        # Cached module object for `resource`.
        return self.pycore.resource_to_pyobject(self.resource)

    @property
    @utils.saveit
    def source(self):
        # Cached module source; read from disk when a resource is given.
        if self.resource is not None:
            return self.resource.read()
        else:
            return self.pymodule.source_code

    @property
    @utils.saveit
    def lines(self):
        # Cached line-offset table of the module.
        return self.pymodule.lines
+
+
+class _MultipleFinders(object):
+
+ def __init__(self, finders):
+ self.finders = finders
+
+ def find_occurrences(self, resource=None, pymodule=None):
+ all_occurrences = []
+ for finder in self.finders:
+ all_occurrences.extend(finder.find_occurrences(resource, pymodule))
+ all_occurrences.sort(self._cmp_occurrences)
+ return all_occurrences
+
+ def _cmp_occurrences(self, o1, o2):
+ return cmp(o1.get_primary_range(), o2.get_primary_range())
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/encapsulate_field.py b/.vim/bundle/python-mode/pylibs/rope/refactor/encapsulate_field.py
@@ -0,0 +1,202 @@
+from rope.base import pynames, taskhandle, evaluate, exceptions, worder, utils
+from rope.base.change import ChangeSet, ChangeContents
+from rope.refactor import sourceutils, occurrences
+
+
class EncapsulateField(object):
    """Replace direct accesses to a class attribute with accessor calls.

    The field itself is kept; reads become ``get_<name>()`` calls and
    writes become ``set_<name>(value)`` calls everywhere except inside
    the body of the scope that defines the field.
    """

    def __init__(self, project, resource, offset):
        self.pycore = project.pycore
        self.name = worder.get_name_at(resource, offset)
        this_pymodule = self.pycore.resource_to_pyobject(resource)
        self.pyname = evaluate.eval_location(this_pymodule, offset)
        if not self._is_an_attribute(self.pyname):
            raise exceptions.RefactoringError(
                'Encapsulate field should be performed on class attributes.')
        # The module that defines the field; the accessors are added here.
        self.resource = self.pyname.get_definition_location()[0].get_resource()

    def get_changes(self, getter=None, setter=None, resources=None,
                    task_handle=taskhandle.NullTaskHandle()):
        """Get the changes this refactoring makes

        If `getter` is not `None`, that will be the name of the
        getter, otherwise ``get_${field_name}`` will be used. The
        same is true for `setter` and if it is None set_${field_name} is
        used.

        `resources` can be a list of `rope.base.resource.File`\s that
        the refactoring should be applied on; if `None` all python
        files in the project are searched.

        """
        if resources is None:
            resources = self.pycore.get_python_files()
        changes = ChangeSet('Encapsulate field <%s>' % self.name)
        job_set = task_handle.create_jobset('Collecting Changes',
                                            len(resources))
        if getter is None:
            getter = 'get_' + self.name
        if setter is None:
            setter = 'set_' + self.name
        renamer = GetterSetterRenameInModule(
            self.pycore, self.name, self.pyname, getter, setter)
        for file in resources:
            job_set.started_job(file.path)
            if file == self.resource:
                # The defining module also gets the new accessor methods.
                result = self._change_holding_module(changes, renamer,
                                                     getter, setter)
                changes.add_change(ChangeContents(self.resource, result))
            else:
                result = renamer.get_changed_module(file)
                if result is not None:
                    changes.add_change(ChangeContents(file, result))
            job_set.finished_job()
        return changes

    def get_field_name(self):
        """Get the name of the field to be encapsulated"""
        return self.name

    def _is_an_attribute(self, pyname):
        # True when `pyname` is an assigned name defined directly in a
        # class body, or in a function (method) directly inside one.
        if pyname is not None and isinstance(pyname, pynames.AssignedName):
            pymodule, lineno = self.pyname.get_definition_location()
            scope = pymodule.get_scope().\
                    get_inner_scope_for_line(lineno)
            if scope.get_kind() == 'Class':
                return pyname in scope.get_names().values()
            parent = scope.parent
            if parent is not None and parent.get_kind() == 'Class':
                return pyname in parent.get_names().values()
        return False

    def _get_defining_class_scope(self):
        # Step out of a defining method to the class that holds it.
        defining_scope = self._get_defining_scope()
        if defining_scope.get_kind() == 'Function':
            defining_scope = defining_scope.parent
        return defining_scope

    def _get_defining_scope(self):
        pymodule, line = self.pyname.get_definition_location()
        return pymodule.get_scope().get_inner_scope_for_line(line)

    def _change_holding_module(self, changes, renamer, getter, setter):
        """Rewrite accesses in the defining module and append the accessors.

        The body of the defining scope is skipped so the field's own
        definition (and direct uses there) are left untouched.
        """
        pymodule = self.pycore.resource_to_pyobject(self.resource)
        class_scope = self._get_defining_class_scope()
        defining_object = self._get_defining_scope().pyobject
        start, end = sourceutils.get_body_region(defining_object)

        new_source = renamer.get_changed_module(pymodule=pymodule,
                                                skip_start=start, skip_end=end)
        if new_source is not None:
            # Re-analyze the rewritten source to find the class again.
            pymodule = self.pycore.get_string_module(new_source, self.resource)
            class_scope = pymodule.get_scope().\
                          get_inner_scope_for_line(class_scope.get_start())
        indents = sourceutils.get_indent(self.pycore) * ' '
        getter = 'def %s(self):\n%sreturn self.%s' % \
                 (getter, indents, self.name)
        setter = 'def %s(self, value):\n%sself.%s = value' % \
                 (setter, indents, self.name)
        new_source = sourceutils.add_methods(pymodule, class_scope,
                                             [getter, setter])
        return new_source
+
+
class GetterSetterRenameInModule(object):
    """Rewrite field reads/writes as getter/setter calls within a module."""

    def __init__(self, pycore, name, pyname, getter, setter):
        self.name = name
        self.getter = getter
        self.setter = setter
        self.pycore = pycore
        # Locates every occurrence of the field within a module.
        self.finder = occurrences.create_finder(pycore, name, pyname)

    def get_changed_module(self, resource=None, pymodule=None,
                           skip_start=0, skip_end=0):
        # Delegate the text manipulation to a per-module worker;
        # occurrences starting inside [skip_start, skip_end) are skipped.
        worker = _FindChangesForModule(self, resource, pymodule,
                                       skip_start, skip_end)
        return worker.get_changed_module()
+
+
class _FindChangesForModule(object):
    """Performs the getter/setter rewrite for a single module.

    Streams through the module source, copying untouched text into
    `result` and replacing each occurrence; a pending write is tracked
    via `last_set`/`set_index` so its right-hand side can be wrapped in
    a setter call once the logical line ends.
    """

    def __init__(self, finder, resource, pymodule, skip_start, skip_end):
        self.pycore = finder.pycore
        self.finder = finder.finder
        self.getter = finder.getter
        self.setter = finder.setter
        self.resource = resource
        self.pymodule = pymodule
        # Offset up to which source has already been emitted.
        self.last_modified = 0
        # End offset of the logical line of a pending write, or None.
        self.last_set = None
        # Index in `result` where the pending setter argument starts.
        self.set_index = None
        self.skip_start = skip_start
        self.skip_end = skip_end

    def get_changed_module(self):
        """Return the rewritten source, or `None` when nothing changed."""
        result = []
        for occurrence in self.finder.find_occurrences(self.resource,
                                                       self.pymodule):
            start, end = occurrence.get_word_range()
            if self.skip_start <= start < self.skip_end:
                continue
            self._manage_writes(start, result)
            result.append(self.source[self.last_modified:start])
            if self._is_assigned_in_a_tuple_assignment(occurrence):
                raise exceptions.RefactoringError(
                    'Cannot handle tuple assignments in encapsulate field.')
            if occurrence.is_written():
                assignment_type = self.worder.get_assignment_type(start)
                if assignment_type == '=':
                    result.append(self.setter + '(')
                else:
                    # Augmented assignment: expand `f += x` into
                    # `set(get() + x)` by reading the old value first.
                    var_name = self.source[occurrence.get_primary_range()[0]:
                                           start] + self.getter + '()'
                    result.append(self.setter + '(' + var_name
                                  + ' %s ' % assignment_type[:-1])
                current_line = self.lines.get_line_number(start)
                start_line, end_line = self.pymodule.logical_lines.\
                                       logical_line_in(current_line)
                self.last_set = self.lines.get_line_end(end_line)
                # Resume copying right after the '=' sign.
                end = self.source.index('=', end) + 1
                self.set_index = len(result)
            else:
                result.append(self.getter + '()')
            self.last_modified = end
        if self.last_modified != 0:
            self._manage_writes(len(self.source), result)
            result.append(self.source[self.last_modified:])
            return ''.join(result)
        return None

    def _manage_writes(self, offset, result):
        # Close the pending setter call once we have moved past the end
        # of the logical line that contained the write.
        if self.last_set is not None and self.last_set <= offset:
            result.append(self.source[self.last_modified:self.last_set])
            set_value = ''.join(result[self.set_index:]).strip()
            del result[self.set_index:]
            result.append(set_value + ')')
            self.last_modified = self.last_set
            self.last_set = None

    def _is_assigned_in_a_tuple_assignment(self, occurance):
        offset = occurance.get_word_range()[0]
        return self.worder.is_assigned_in_a_tuple_assignment(offset)

    @property
    @utils.saveit
    def source(self):
        # Cached module source; read from disk when a resource is given.
        if self.resource is not None:
            return self.resource.read()
        else:
            return self.pymodule.source_code

    @property
    @utils.saveit
    def lines(self):
        # Cached line table; builds the pymodule lazily when needed.
        if self.pymodule is None:
            self.pymodule = self.pycore.resource_to_pyobject(self.resource)
        return self.pymodule.lines

    @property
    @utils.saveit
    def worder(self):
        # Cached word-level helper over the module source.
        return worder.Worder(self.source)
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/extract.py b/.vim/bundle/python-mode/pylibs/rope/refactor/extract.py
@@ -0,0 +1,789 @@
+import re
+
+from rope.base import ast, codeanalyze
+from rope.base.change import ChangeSet, ChangeContents
+from rope.base.exceptions import RefactoringError
+from rope.refactor import (sourceutils, similarfinder,
+ patchedast, suites, usefunction)
+
+
+# Extract refactoring has lots of special cases. I tried to split it
+# to smaller parts to make it more manageable:
+#
+# _ExtractInfo: holds information about the refactoring; it is passed
+# to the parts that need to have information about the refactoring
+#
+# _ExtractCollector: merely saves all of the information necessary for
+# performing the refactoring.
+#
+# _DefinitionLocationFinder: finds where to insert the definition.
+#
+# _ExceptionalConditionChecker: checks for exceptional conditions in
+# which the refactoring cannot be applied.
+#
+# _ExtractMethodParts: generates the pieces of code (like definition)
+# needed for performing extract method.
+#
+# _ExtractVariableParts: like _ExtractMethodParts for variables.
+#
+# _ExtractPerformer: Uses above classes to collect refactoring
+# changes.
+#
+# There are a few more helper functions and classes used by above
+# classes.
+class _ExtractRefactoring(object):
+
+ def __init__(self, project, resource, start_offset, end_offset,
+ variable=False):
+ self.project = project
+ self.pycore = project.pycore
+ self.resource = resource
+ self.start_offset = self._fix_start(resource.read(), start_offset)
+ self.end_offset = self._fix_end(resource.read(), end_offset)
+
+ def _fix_start(self, source, offset):
+ while offset < len(source) and source[offset].isspace():
+ offset += 1
+ return offset
+
+ def _fix_end(self, source, offset):
+ while offset > 0 and source[offset - 1].isspace():
+ offset -= 1
+ return offset
+
+ def get_changes(self, extracted_name, similar=False, global_=False):
+ """Get the changes this refactoring makes
+
+ :parameters:
+ - `similar`: if `True`, similar expressions/statements are also
+ replaced.
+ - `global_`: if `True`, the extracted method/variable will
+ be global.
+
+ """
+ info = _ExtractInfo(
+ self.project, self.resource, self.start_offset, self.end_offset,
+ extracted_name, variable=self.kind == 'variable',
+ similar=similar, make_global=global_)
+ new_contents = _ExtractPerformer(info).extract()
+ changes = ChangeSet('Extract %s <%s>' % (self.kind,
+ extracted_name))
+ changes.add_change(ChangeContents(self.resource, new_contents))
+ return changes
+
+
class ExtractMethod(_ExtractRefactoring):
    """Extract a region of code into a new method or function."""

    kind = 'method'

    def __init__(self, *args, **kwds):
        # Nothing extra to set up; defer entirely to the base class.
        super(ExtractMethod, self).__init__(*args, **kwds)
+
+
class ExtractVariable(_ExtractRefactoring):
    """Extract an expression into a new variable assignment."""

    kind = 'variable'

    def __init__(self, *args, **kwds):
        # Force the `variable` flag without mutating the caller's dict.
        super(ExtractVariable, self).__init__(
            *args, **dict(kwds, variable=True))
+
+
class _ExtractInfo(object):
    """Holds information about the extract to be performed

    Computes and caches the selected region (in offsets and lines),
    the enclosing scope, and various properties of the selection that
    the rest of the extract machinery queries.
    """

    def __init__(self, project, resource, start, end, new_name,
                 variable, similar, make_global):
        self.pycore = project.pycore
        self.resource = resource
        self.pymodule = self.pycore.resource_to_pyobject(resource)
        self.global_scope = self.pymodule.get_scope()
        self.source = self.pymodule.source_code
        self.lines = self.pymodule.lines
        self.new_name = new_name
        self.variable = variable
        self.similar = similar
        self._init_parts(start, end)
        self._init_scope()
        self.make_global = make_global

    def _init_parts(self, start, end):
        # `region`: the selected offsets, snapped to line boundaries
        # when the selection touches only surrounding whitespace.
        self.region = (self._choose_closest_line_end(start),
                       self._choose_closest_line_end(end, end=True))

        # `region_lines`: the logical-line span covering the region.
        start = self.logical_lines.logical_line_in(
            self.lines.get_line_number(self.region[0]))[0]
        end = self.logical_lines.logical_line_in(
            self.lines.get_line_number(self.region[1]))[1]
        self.region_lines = (start, end)

        # `lines_region`: offsets of the full physical lines spanned.
        self.lines_region = (self.lines.get_line_start(self.region_lines[0]),
                             self.lines.get_line_end(self.region_lines[1]))

    @property
    def logical_lines(self):
        return self.pymodule.logical_lines

    def _init_scope(self):
        start_line = self.region_lines[0]
        scope = self.global_scope.get_inner_scope_for_line(start_line)
        # Selecting a definition's own header line means extracting from
        # the *enclosing* scope, not from the definition itself.
        if scope.get_kind() != 'Module' and scope.get_start() == start_line:
            scope = scope.parent
        self.scope = scope
        self.scope_region = self._get_scope_region(self.scope)

    def _get_scope_region(self, scope):
        return (self.lines.get_line_start(scope.get_start()),
                self.lines.get_line_end(scope.get_end()) + 1)

    def _choose_closest_line_end(self, offset, end=False):
        # Snap an offset that sits in leading/trailing whitespace to the
        # corresponding line boundary; otherwise keep it unchanged.
        lineno = self.lines.get_line_number(offset)
        line_start = self.lines.get_line_start(lineno)
        line_end = self.lines.get_line_end(lineno)
        if self.source[line_start:offset].strip() == '':
            if end:
                return line_start - 1
            else:
                return line_start
        elif self.source[offset:line_end].strip() == '':
            return min(line_end, len(self.source))
        return offset

    @property
    def one_line(self):
        # True when the selection is a sub-line expression: it does not
        # cover whole lines yet stays within one logical line.
        return self.region != self.lines_region and \
               (self.logical_lines.logical_line_in(self.region_lines[0]) ==
                self.logical_lines.logical_line_in(self.region_lines[1]))

    @property
    def global_(self):
        # True when extracting directly from module scope.
        return self.scope.parent is None

    @property
    def method(self):
        # True when extracting from a scope nested in a class.
        return self.scope.parent is not None and \
               self.scope.parent.get_kind() == 'Class'

    @property
    def indents(self):
        return sourceutils.get_indents(self.pymodule.lines,
                                       self.region_lines[0])

    @property
    def scope_indents(self):
        if self.global_:
            return 0
        return sourceutils.get_indents(self.pymodule.lines,
                                       self.scope.get_start())

    @property
    def extracted(self):
        # The selected source text itself.
        return self.source[self.region[0]:self.region[1]]

    # Cached result of the `returned` property (None = not computed yet).
    _returned = None
    @property
    def returned(self):
        """Does the extracted piece contain return statement"""
        if self._returned is None:
            node = _parse_text(self.extracted)
            self._returned = usefunction._returns_last(node)
        return self._returned
+
+
+class _ExtractCollector(object):
+ """Collects information needed for performing the extract"""
+
+ def __init__(self, info):
+ self.definition = None
+ self.body_pattern = None
+ self.checks = {}
+ self.replacement_pattern = None
+ self.matches = None
+ self.replacements = None
+ self.definition_location = None
+
+
class _ExtractPerformer(object):
    """Uses the other helper classes to collect and apply extract changes."""

    def __init__(self, info):
        self.info = info
        # Reject selections the refactoring cannot handle, up front.
        _ExceptionalConditionChecker()(self.info)

    def extract(self):
        """Return the module source with the extraction performed."""
        extract_info = self._collect_info()
        content = codeanalyze.ChangeCollector(self.info.source)
        definition = extract_info.definition
        lineno, indents = extract_info.definition_location
        offset = self.info.lines.get_line_start(lineno)
        # Insert the new definition, then rewrite every matched region.
        indented = sourceutils.fix_indentation(definition, indents)
        content.add_change(offset, offset, indented)
        self._replace_occurrences(content, extract_info)
        return content.get_changed()

    def _replace_occurrences(self, content, extract_info):
        for match in extract_info.matches:
            replacement = similarfinder.CodeTemplate(
                extract_info.replacement_pattern)
            # Fill template names with the matched source text; names
            # without a matched AST node are kept literally.
            mapping = {}
            for name in replacement.get_names():
                node = match.get_ast(name)
                if node:
                    start, end = patchedast.node_region(match.get_ast(name))
                    mapping[name] = self.info.source[start:end]
                else:
                    mapping[name] = name
            region = match.get_region()
            content.add_change(region[0], region[1],
                               replacement.substitute(mapping))

    def _collect_info(self):
        extract_collector = _ExtractCollector(self.info)
        self._find_definition(extract_collector)
        self._find_matches(extract_collector)
        self._find_definition_location(extract_collector)
        return extract_collector

    def _find_matches(self, collector):
        regions = self._where_to_search()
        finder = similarfinder.SimilarFinder(self.info.pymodule)
        matches = []
        for start, end in regions:
            matches.extend((finder.get_matches(collector.body_pattern,
                                               collector.checks, start, end)))
        collector.matches = matches

    def _where_to_search(self):
        # Decide which source regions to scan for similar code; without
        # `similar`, only the selection itself is replaced.
        if self.info.similar:
            if self.info.make_global or self.info.global_:
                return [(0, len(self.info.pymodule.source_code))]
            if self.info.method and not self.info.variable:
                # Search sibling methods of the same kind in the class.
                class_scope = self.info.scope.parent
                regions = []
                method_kind = _get_function_kind(self.info.scope)
                for scope in class_scope.get_scopes():
                    if method_kind == 'method' and \
                       _get_function_kind(scope) != 'method':
                        continue
                    start = self.info.lines.get_line_start(scope.get_start())
                    end = self.info.lines.get_line_end(scope.get_end())
                    regions.append((start, end))
                return regions
            else:
                if self.info.variable:
                    return [self.info.scope_region]
                else:
                    return [self.info._get_scope_region(self.info.scope.parent)]
        else:
            return [self.info.region]

    def _find_definition_location(self, collector):
        # The definition must be visible from every matched region.
        matched_lines = []
        for match in collector.matches:
            start = self.info.lines.get_line_number(match.get_region()[0])
            start_line = self.info.logical_lines.logical_line_in(start)[0]
            matched_lines.append(start_line)
        location_finder = _DefinitionLocationFinder(self.info, matched_lines)
        collector.definition_location = (location_finder.find_lineno(),
                                         location_finder.find_indents())

    def _find_definition(self, collector):
        if self.info.variable:
            parts = _ExtractVariableParts(self.info)
        else:
            parts = _ExtractMethodParts(self.info)
        collector.definition = parts.get_definition()
        collector.body_pattern = parts.get_body_pattern()
        collector.replacement_pattern = parts.get_replacement_pattern()
        collector.checks = parts.get_checks()
+ collector.checks = parts.get_checks()
+
+
+class _DefinitionLocationFinder(object):
+
+ def __init__(self, info, matched_lines):
+ self.info = info
+ self.matched_lines = matched_lines
+ # This only happens when subexpressions cannot be matched
+ if not matched_lines:
+ self.matched_lines.append(self.info.region_lines[0])
+
+ def find_lineno(self):
+ if self.info.variable and not self.info.make_global:
+ return self._get_before_line()
+ if self.info.make_global or self.info.global_:
+ toplevel = self._find_toplevel(self.info.scope)
+ ast = self.info.pymodule.get_ast()
+ newlines = sorted(self.matched_lines + [toplevel.get_end() + 1])
+ return suites.find_visible(ast, newlines)
+ return self._get_after_scope()
+
+ def _find_toplevel(self, scope):
+ toplevel = scope
+ if toplevel.parent is not None:
+ while toplevel.parent.parent is not None:
+ toplevel = toplevel.parent
+ return toplevel
+
+ def find_indents(self):
+ if self.info.variable and not self.info.make_global:
+ return sourceutils.get_indents(self.info.lines,
+ self._get_before_line())
+ else:
+ if self.info.global_ or self.info.make_global:
+ return 0
+ return self.info.scope_indents
+
+ def _get_before_line(self):
+ ast = self.info.scope.pyobject.get_ast()
+ return suites.find_visible(ast, self.matched_lines)
+
+ def _get_after_scope(self):
+ return self.info.scope.get_end() + 1
+
+
class _ExceptionalConditionChecker(object):
    """Raises `RefactoringError` for selections that cannot be extracted."""

    def __call__(self, info):
        self.base_conditions(info)
        if info.one_line:
            self.one_line_conditions(info)
        else:
            self.multi_line_conditions(info)

    def base_conditions(self, info):
        # The selection must stay inside one scope.
        if info.region[1] > info.scope_region[1]:
            raise RefactoringError('Bad region selected for extract method')
        end_line = info.region_lines[1]
        end_scope = info.global_scope.get_inner_scope_for_line(end_line)
        if end_scope != info.scope and end_scope.get_end() != end_line:
            raise RefactoringError('Bad region selected for extract method')
        try:
            extracted = info.source[info.region[0]:info.region[1]]
            if info.one_line:
                # Parenthesize so a sub-line expression parses on its own.
                extracted = '(%s)' % extracted
            if _UnmatchedBreakOrContinueFinder.has_errors(extracted):
                raise RefactoringError('A break/continue without having a '
                                       'matching for/while loop.')
        except SyntaxError:
            raise RefactoringError('Extracted piece should '
                                   'contain complete statements.')

    def one_line_conditions(self, info):
        if self._is_region_on_a_word(info):
            raise RefactoringError('Should extract complete statements.')
        # NOTE(review): this branch is unreachable -- `one_line_conditions`
        # is only called when `info.one_line` is true (see `__call__`), so
        # `not info.one_line` can never hold here.  Verify the intended
        # placement upstream before moving it, since full-line variable
        # extracts legitimately take the multi-line path.
        if info.variable and not info.one_line:
            raise RefactoringError('Extract variable should not '
                                   'span multiple lines.')

    def multi_line_conditions(self, info):
        node = _parse_text(info.source[info.region[0]:info.region[1]])
        count = usefunction._return_count(node)
        if count > 1:
            raise RefactoringError('Extracted piece can have only one '
                                   'return statement.')
        if usefunction._yield_count(node):
            raise RefactoringError('Extracted piece cannot '
                                   'have yield statements.')
        if count == 1 and not usefunction._returns_last(node):
            raise RefactoringError('Return should be the last statement.')
        if info.region != info.lines_region:
            raise RefactoringError('Extracted piece should '
                                   'contain complete statements.')

    def _is_region_on_a_word(self, info):
        # True when either selection boundary splits an identifier.
        if info.region[0] > 0 and self._is_on_a_word(info, info.region[0] - 1) or \
           self._is_on_a_word(info, info.region[1] - 1):
            return True

    def _is_on_a_word(self, info, offset):
        # True when an identifier character continues past `offset`.
        prev = info.source[offset]
        if not (prev.isalnum() or prev == '_') or \
           offset + 1 == len(info.source):
            return False
        next = info.source[offset + 1]
        return next.isalnum() or next == '_'
+
+
class _ExtractMethodParts(object):
    """Generates the pieces of code needed for extract method.

    Produces the new function's definition, the pattern matching the
    extracted body, and the call that replaces it, based on variable
    read/write analysis of the enclosing scope.
    """

    def __init__(self, info):
        self.info = info
        self.info_collector = self._create_info_collector()

    def get_definition(self):
        if self.info.global_:
            return '\n%s\n' % self._get_function_definition()
        else:
            return '\n%s' % self._get_function_definition()

    def get_replacement_pattern(self):
        # The call that replaces each match; arguments and returned
        # names stay wildcards so they match per occurrence.
        variables = []
        variables.extend(self._find_function_arguments())
        variables.extend(self._find_function_returns())
        return similarfinder.make_pattern(self._get_call(), variables)

    def get_body_pattern(self):
        variables = []
        variables.extend(self._find_function_arguments())
        variables.extend(self._find_function_returns())
        variables.extend(self._find_temps())
        return similarfinder.make_pattern(self._get_body(), variables)

    def _get_body(self):
        result = sourceutils.fix_indentation(self.info.extracted, 0)
        if self.info.one_line:
            # Parenthesize so a sub-line expression parses on its own.
            result = '(%s)' % result
        return result

    def _find_temps(self):
        return usefunction.find_temps(self.info.pycore.project,
                                      self._get_body())

    def get_checks(self):
        # When extracting a method, matches must be on the same class.
        if self.info.method and not self.info.make_global:
            if _get_function_kind(self.info.scope) == 'method':
                class_name = similarfinder._pydefined_to_str(
                    self.info.scope.parent.pyobject)
                return {self._get_self_name(): 'type=' + class_name}
        return {}

    def _create_info_collector(self):
        # Line numbers handed to the collector are relative to the
        # enclosing scope's start line.
        zero = self.info.scope.get_start() - 1
        start_line = self.info.region_lines[0] - zero
        end_line = self.info.region_lines[1] - zero
        info_collector = _FunctionInformationCollector(start_line, end_line,
                                                       self.info.global_)
        body = self.info.source[self.info.scope_region[0]:
                                self.info.scope_region[1]]
        node = _parse_text(body)
        ast.walk(node, info_collector)
        return info_collector

    def _get_function_definition(self):
        args = self._find_function_arguments()
        returns = self._find_function_returns()
        result = []
        # A non-method function extracted inside a class becomes static.
        if self.info.method and not self.info.make_global and \
           _get_function_kind(self.info.scope) != 'method':
            result.append('@staticmethod\n')
        result.append('def %s:\n' % self._get_function_signature(args))
        unindented_body = self._get_unindented_function_body(returns)
        indents = sourceutils.get_indent(self.info.pycore)
        function_body = sourceutils.indent_lines(unindented_body, indents)
        result.append(function_body)
        definition = ''.join(result)

        return definition + '\n'

    def _get_function_signature(self, args):
        args = list(args)
        prefix = ''
        if self._extracting_method():
            # Ensure `self` is present and first in the parameter list.
            self_name = self._get_self_name()
            if self_name is None:
                raise RefactoringError('Extracting a method from a function '
                                       'with no self argument.')
            if self_name in args:
                args.remove(self_name)
            args.insert(0, self_name)
        return prefix + self.info.new_name + \
               '(%s)' % self._get_comma_form(args)

    def _extracting_method(self):
        return self.info.method and not self.info.make_global and \
               _get_function_kind(self.info.scope) == 'method'

    def _get_self_name(self):
        # First parameter of the enclosing method, conventionally `self`.
        param_names = self.info.scope.pyobject.get_param_names()
        if param_names:
            return param_names[0]

    def _get_function_call(self, args):
        prefix = ''
        if self.info.method and not self.info.make_global:
            if _get_function_kind(self.info.scope) == 'method':
                # Instance method: call through `self`, not as argument.
                self_name = self._get_self_name()
                if self_name in args:
                    args.remove(self_name)
                prefix = self_name + '.'
            else:
                # Static method: call through the class name.
                prefix = self.info.scope.parent.pyobject.get_name() + '.'
        return prefix + '%s(%s)' % (self.info.new_name,
                                    self._get_comma_form(args))

    def _get_comma_form(self, names):
        # Render names as a comma-separated list.
        result = ''
        if names:
            result += names[0]
            for name in names[1:]:
                result += ', ' + name
        return result

    def _get_call(self):
        if self.info.one_line:
            args = self._find_function_arguments()
            return self._get_function_call(args)
        args = self._find_function_arguments()
        returns = self._find_function_returns()
        call_prefix = ''
        if returns:
            # Assign returned names back at the call site.
            call_prefix = self._get_comma_form(returns) + ' = '
        if self.info.returned:
            # The extracted piece itself returns; propagate it.
            call_prefix = 'return '
        return call_prefix + self._get_function_call(args)

    def _find_function_arguments(self):
        # if not make_global, do not pass any global names; they are
        # all visible.
        if self.info.global_ and not self.info.make_global:
            return ()
        if not self.info.one_line:
            # Names written before the region and read inside it, plus
            # names that are only conditionally written inside yet still
            # read afterwards (so the pre-existing value may survive).
            result = (self.info_collector.prewritten &
                      self.info_collector.read)
            result |= (self.info_collector.prewritten &
                       self.info_collector.postread &
                       (self.info_collector.maybe_written -
                        self.info_collector.written))
            return list(result)
        start = self.info.region[0]
        if start == self.info.lines_region[0]:
            start = start + re.search('\S', self.info.extracted).start()
        function_definition = self.info.source[start:self.info.region[1]]
        read = _VariableReadsAndWritesFinder.find_reads_for_one_liners(
            function_definition)
        return list(self.info_collector.prewritten.intersection(read))

    def _find_function_returns(self):
        # Names written inside the region and read after it must be
        # returned to the caller.
        if self.info.one_line or self.info.returned:
            return []
        written = self.info_collector.written | \
                  self.info_collector.maybe_written
        return list(written & self.info_collector.postread)

    def _get_unindented_function_body(self, returns):
        if self.info.one_line:
            return 'return ' + _join_lines(self.info.extracted)
        extracted_body = self.info.extracted
        unindented_body = sourceutils.fix_indentation(extracted_body, 0)
        if returns:
            unindented_body += '\nreturn %s' % self._get_comma_form(returns)
        return unindented_body
+
+
+class _ExtractVariableParts(object):
+
+ def __init__(self, info):
+ self.info = info
+
+ def get_definition(self):
+ result = self.info.new_name + ' = ' + \
+ _join_lines(self.info.extracted) + '\n'
+ return result
+
+ def get_body_pattern(self):
+ return '(%s)' % self.info.extracted.strip()
+
+ def get_replacement_pattern(self):
+ return self.info.new_name
+
+ def get_checks(self):
+ return {}
+
+
class _FunctionInformationCollector(object):
    """AST visitor classifying variable reads/writes around a region.

    Lines `start`..`end` (inclusive, relative to the walked tree) are
    the extraction region; names are bucketed by whether they are
    written before (`prewritten`), read/written inside (`read`,
    `written`, `maybe_written` for conditional writes), or read/written
    after the region (`postread`, `postwritten`).
    """

    def __init__(self, start, end, is_global):
        self.start = start
        self.end = end
        self.is_global = is_global
        self.prewritten = set()
        self.maybe_written = set()
        self.written = set()
        self.read = set()
        self.postread = set()
        self.postwritten = set()
        # True until the outermost (host) function has been entered.
        self.host_function = True
        # True while walking the body of a conditional statement.
        self.conditional = False

    def _read_variable(self, name, lineno):
        if self.start <= lineno <= self.end:
            # Reads of names already written inside the region are
            # satisfied locally and need not be passed in.
            if name not in self.written:
                self.read.add(name)
        if self.end < lineno:
            if name not in self.postwritten:
                self.postread.add(name)

    def _written_variable(self, name, lineno):
        if self.start <= lineno <= self.end:
            if self.conditional:
                self.maybe_written.add(name)
            else:
                self.written.add(name)
        if self.start > lineno:
            self.prewritten.add(name)
        if self.end < lineno:
            self.postwritten.add(name)

    def _FunctionDef(self, node):
        if not self.is_global and self.host_function:
            # The outermost function is the scope being analyzed: its
            # parameters count as writes and its body is walked inline.
            self.host_function = False
            for name in _get_argnames(node.args):
                self._written_variable(name, node.lineno)
            for child in node.body:
                ast.walk(child, self)
        else:
            # A nested definition writes its own name; free names inside
            # it count as reads at the definition line.
            self._written_variable(node.name, node.lineno)
            visitor = _VariableReadsAndWritesFinder()
            for child in node.body:
                ast.walk(child, visitor)
            for name in visitor.read - visitor.written:
                self._read_variable(name, node.lineno)

    def _Name(self, node):
        if isinstance(node.ctx, (ast.Store, ast.AugStore)):
            self._written_variable(node.id, node.lineno)
        if not isinstance(node.ctx, ast.Store):
            self._read_variable(node.id, node.lineno)

    def _Assign(self, node):
        # Walk the value before the targets so reads precede writes.
        ast.walk(node.value, self)
        for child in node.targets:
            ast.walk(child, self)

    def _ClassDef(self, node):
        self._written_variable(node.name, node.lineno)

    def _handle_conditional_node(self, node):
        # Writes under a conditional may or may not happen; record them
        # in `maybe_written` instead of `written`.
        self.conditional = True
        try:
            for child in ast.get_child_nodes(node):
                ast.walk(child, self)
        finally:
            self.conditional = False

    def _If(self, node):
        self._handle_conditional_node(node)

    def _While(self, node):
        self._handle_conditional_node(node)

    def _For(self, node):
        self._handle_conditional_node(node)
+
+
+
+def _get_argnames(arguments):
+ result = [node.id for node in arguments.args
+ if isinstance(node, ast.Name)]
+ if arguments.vararg:
+ result.append(arguments.vararg)
+ if arguments.kwarg:
+ result.append(arguments.kwarg)
+ return result
+
+
class _VariableReadsAndWritesFinder(object):
    """AST visitor (driven by rope's two-argument ``ast.walk``) that
    records which names a piece of code reads and which it writes."""

    def __init__(self):
        self.written = set()
        self.read = set()

    def _Name(self, node):
        # Store/AugStore contexts are writes; anything that is not a
        # plain Store (Load, Del, AugStore, ...) also counts as a read.
        if isinstance(node.ctx, (ast.Store, ast.AugStore)):
            self.written.add(node.id)
        # BUG FIX: the original tested ``isinstance(node, ast.Store)``;
        # ``node`` is always an ``ast.Name``, so every name -- including
        # pure assignment targets -- was reported as a read.  Test the
        # expression context instead, exactly as
        # _FunctionInformationCollector._Name does.
        if not isinstance(node.ctx, ast.Store):
            self.read.add(node.id)

    def _FunctionDef(self, node):
        # A nested ``def`` writes its own name; names free in its body
        # (read but not written locally) are reads of the outer scope.
        self.written.add(node.name)
        visitor = _VariableReadsAndWritesFinder()
        for child in ast.get_child_nodes(node):
            ast.walk(child, visitor)
        self.read.update(visitor.read - visitor.written)

    def _Class(self, node):
        # A class statement writes the class name.
        self.written.add(node.name)

    @staticmethod
    def find_reads_and_writes(code):
        """Return the ``(read, written)`` name sets for `code`."""
        if code.strip() == '':
            return set(), set()
        if isinstance(code, unicode):
            # Python 2: normalize unicode source before parsing.
            code = code.encode('utf-8')
        node = _parse_text(code)
        visitor = _VariableReadsAndWritesFinder()
        ast.walk(node, visitor)
        return visitor.read, visitor.written

    @staticmethod
    def find_reads_for_one_liners(code):
        """Return only the set of names `code` reads."""
        if code.strip() == '':
            # BUG FIX: this used to return ``set(), set()`` -- a 2-tuple
            # -- while the normal path returns a single set, giving the
            # caller an inconsistent type for empty input.
            return set()
        node = _parse_text(code)
        visitor = _VariableReadsAndWritesFinder()
        ast.walk(node, visitor)
        return visitor.read
+
+
class _UnmatchedBreakOrContinueFinder(object):
    """Detect ``break``/``continue`` statements not enclosed by a loop
    within the analyzed code (they would be orphaned if that code were
    extracted).  Nested functions and classes are not descended into,
    since their loops and breaks are independent."""

    def __init__(self):
        self.error = False
        self.loop_count = 0

    def _For(self, node):
        self.loop_encountered(node)

    def _While(self, node):
        self.loop_encountered(node)

    def loop_encountered(self, node):
        """Walk the loop body with this loop counted as enclosing, then
        walk the ``else`` clause (where break/continue are unmatched)."""
        self.loop_count += 1
        for child in node.body:
            ast.walk(child, self)
        self.loop_count -= 1
        orelse = node.orelse
        if orelse:
            ast.walk(orelse, self)

    def _Break(self, node):
        self.check_loop()

    def _Continue(self, node):
        self.check_loop()

    def check_loop(self):
        # Outside every loop the count is zero: unmatched statement.
        if not self.loop_count:
            self.error = True

    def _FunctionDef(self, node):
        pass  # do not descend: an inner function has its own loops

    def _ClassDef(self, node):
        pass  # likewise for class bodies

    @staticmethod
    def has_errors(code):
        """True when `code` contains an unmatched break/continue."""
        if not code.strip():
            return False
        visitor = _UnmatchedBreakOrContinueFinder()
        ast.walk(_parse_text(code), visitor)
        return visitor.error
+
def _get_function_kind(scope):
    """Return the kind string of the function owning `scope` (e.g.
    'method' or 'classmethod' -- whatever ``get_kind()`` reports)."""
    pyfunction = scope.pyobject
    return pyfunction.get_kind()
+
+
def _parse_text(body):
    """Parse `body` into an AST after shifting its indentation to column
    zero (the text may come from an indented region of a module)."""
    unindented = sourceutils.fix_indentation(body, 0)
    return ast.parse(unindented)
+
+def _join_lines(code):
+ lines = []
+ for line in code.splitlines():
+ if line.endswith('\\'):
+ lines.append(line[:-1].strip())
+ else:
+ lines.append(line.strip())
+ return ' '.join(lines)
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/functionutils.py b/.vim/bundle/python-mode/pylibs/rope/refactor/functionutils.py
@@ -0,0 +1,222 @@
+import rope.base.exceptions
+import rope.base.pyobjects
+from rope.base.builtins import Lambda
+from rope.base import worder
+
+
class DefinitionInfo(object):
    """Parsed form of a function/method definition header."""

    def __init__(self, function_name, is_method, args_with_defaults,
                 args_arg, keywords_arg):
        self.function_name = function_name
        self.is_method = is_method
        # List of ``(name, default)`` pairs; default is None when absent.
        self.args_with_defaults = args_with_defaults
        # Name of the ``*args`` parameter, or None.
        self.args_arg = args_arg
        # Name of the ``**kwargs`` parameter, or None.
        self.keywords_arg = keywords_arg

    def to_string(self):
        """Render the definition as ``name(parameters)``."""
        return '%s(%s)' % (self.function_name, self.arguments_to_string())

    def arguments_to_string(self, from_index=0):
        """Render the parameter list, skipping the first `from_index`
        parameters (used to drop ``self``/``cls``)."""
        params = []
        for arg, default in self.args_with_defaults:
            if default is not None:
                params.append('%s=%s' % (arg, default))
            else:
                params.append(arg)
        if self.args_arg is not None:
            params.append('*' + self.args_arg)
        if self.keywords_arg:
            params.append('**' + self.keywords_arg)
        return ', '.join(params[from_index:])

    @staticmethod
    def _read(pyfunction, code):
        """Parse the header text `code` of `pyfunction`.

        BUG FIX: the original also computed ``scope``, ``parent`` and
        ``parameter_names`` locals that were never used; the dead code
        has been removed.
        """
        kind = pyfunction.get_kind()
        is_method = kind == 'method'
        is_lambda = kind == 'lambda'
        info = _FunctionParser(code, is_method, is_lambda)
        args, keywords = info.get_parameters()
        args_arg = None
        keywords_arg = None
        # ``**kwargs`` then ``*args`` appear at the end of the plain
        # parameter list; peel them off in that order.
        if args and args[-1].startswith('**'):
            keywords_arg = args[-1][2:]
            del args[-1]
        if args and args[-1].startswith('*'):
            args_arg = args[-1][1:]
            del args[-1]
        args_with_defaults = [(name, None) for name in args]
        args_with_defaults.extend(keywords)
        return DefinitionInfo(info.get_function_name(), is_method,
                              args_with_defaults, args_arg, keywords_arg)

    @staticmethod
    def read(pyfunction):
        """Build a DefinitionInfo by locating `pyfunction`'s header (or
        lambda expression) in its module's source."""
        pymodule = pyfunction.get_module()
        word_finder = worder.Worder(pymodule.source_code)
        lineno = pyfunction.get_ast().lineno
        start = pymodule.lines.get_line_start(lineno)
        if isinstance(pyfunction, Lambda):
            call = word_finder.get_lambda_and_args(start)
        else:
            call = word_finder.get_function_and_args_in_header(start)
        return DefinitionInfo._read(pyfunction, call)
+
+
class CallInfo(object):
    """Parsed form of a function/method call site."""

    def __init__(self, function_name, args, keywords, args_arg,
                 keywords_arg, implicit_arg, constructor):
        self.function_name = function_name
        self.args = args
        self.keywords = keywords
        self.args_arg = args_arg
        self.keywords_arg = keywords_arg
        # True when the first positional arg is the implicit receiver
        # (method or classmethod call).
        self.implicit_arg = implicit_arg
        self.constructor = constructor

    def to_string(self):
        """Render the call; for method calls the receiver is printed as
        ``receiver.name(...)``."""
        if self.implicit_arg:
            function = '%s.%s' % (self.args[0], self.function_name)
        else:
            function = self.function_name
        # The receiver/class argument is implicit, so skip it.
        start = 1 if self.implicit_arg or self.constructor else 0
        params = list(self.args[start:])
        params.extend('%s=%s' % pair for pair in self.keywords)
        if self.args_arg is not None:
            params.append('*' + self.args_arg)
        if self.keywords_arg:
            params.append('**' + self.keywords_arg)
        return '%s(%s)' % (function, ', '.join(params))

    @staticmethod
    def read(primary, pyname, definition_info, code):
        """Parse the call text `code` (resolving to `pyname`) into a
        CallInfo."""
        is_method_call = CallInfo._is_method_call(primary, pyname)
        is_constructor = CallInfo._is_class(pyname)
        is_classmethod = CallInfo._is_classmethod(pyname)
        implicit = is_method_call or is_classmethod
        info = _FunctionParser(code, implicit)
        args, keywords = info.get_parameters()
        args_arg = None
        keywords_arg = None
        # ``**kwargs`` then ``*args`` appear at the end; peel them off.
        if args and args[-1].startswith('**'):
            keywords_arg = args[-1][2:]
            del args[-1]
        if args and args[-1].startswith('*'):
            args_arg = args[-1][1:]
            del args[-1]
        if is_constructor:
            # Pass the class's first parameter name as the implicit arg.
            args.insert(0, definition_info.args_with_defaults[0][0])
        return CallInfo(info.get_function_name(), args, keywords, args_arg,
                        keywords_arg, implicit, is_constructor)

    @staticmethod
    def _is_method_call(primary, pyname):
        # A call on an instance of a class, resolving to a method.
        if primary is None:
            return False
        return isinstance(primary.get_object().get_type(),
                          rope.base.pyobjects.PyClass) and \
            CallInfo._is_method(pyname)

    @staticmethod
    def _is_class(pyname):
        if pyname is None:
            return False
        return isinstance(pyname.get_object(),
                          rope.base.pyobjects.PyClass)

    @staticmethod
    def _is_method(pyname):
        return CallInfo._has_kind(pyname, 'method')

    @staticmethod
    def _is_classmethod(pyname):
        return CallInfo._has_kind(pyname, 'classmethod')

    @staticmethod
    def _has_kind(pyname, kind):
        # True when `pyname` resolves to a function of the given kind.
        if pyname is not None and \
           isinstance(pyname.get_object(), rope.base.pyobjects.PyFunction):
            return pyname.get_object().get_kind() == kind
        return False
+
+
class ArgumentMapping(object):
    """Map the arguments of a concrete call onto a function definition,
    so the call can be rewritten after a signature change."""

    def __init__(self, definition_info, call_info):
        self.call_info = call_info
        # Parameter name -> argument expression, for arguments passed
        # positionally or by a keyword matching a declared parameter.
        self.param_dict = {}
        # Keyword arguments matching no declared parameter (``**kwargs``).
        self.keyword_args = []
        # Positional arguments beyond the declared parameters (``*args``).
        self.args_arg = []
        declared = definition_info.args_with_defaults
        for index, value in enumerate(call_info.args):
            if index < len(declared):
                self.param_dict[declared[index][0]] = value
            else:
                self.args_arg.append(value)
        for name, value in call_info.keywords:
            # BUG FIX (cleanup): the original assigned ``index = -1``
            # here and never read it; the dead assignment is removed.
            # The for/else records keywords that match no parameter.
            for pair in declared:
                if pair[0] == name:
                    self.param_dict[name] = value
                    break
            else:
                self.keyword_args.append((name, value))

    def to_call_info(self, definition_info):
        """Rebuild a CallInfo against a (possibly changed)
        `definition_info`, preserving this mapping's argument values."""
        args = []
        keywords = []
        declared = definition_info.args_with_defaults
        for index in range(len(declared)):
            name = declared[index][0]
            if name in self.param_dict:
                args.append(self.param_dict[name])
            else:
                # A hole in the positionals: every remaining mapped
                # parameter must be passed as a keyword instead.
                for i in range(index, len(declared)):
                    name = declared[i][0]
                    if name in self.param_dict:
                        keywords.append((name, self.param_dict[name]))
                break
        args.extend(self.args_arg)
        keywords.extend(self.keyword_args)
        return CallInfo(self.call_info.function_name, args, keywords,
                        self.call_info.args_arg, self.call_info.keywords_arg,
                        self.call_info.implicit_arg, self.call_info.constructor)
+
+
class _FunctionParser(object):
    # Split a definition header or call text into function name and
    # argument list using rope's ``worder`` text scanner.  ``call`` is
    # the raw text; ``implicit_arg`` marks method/classmethod calls
    # whose receiver must be prepended to the positional arguments.

    def __init__(self, call, implicit_arg, is_lambda=False):
        self.call = call
        self.implicit_arg = implicit_arg
        self.word_finder = worder.Worder(self.call)
        if is_lambda:
            # A lambda's "argument list" ends at the colon.
            self.last_parens = self.call.rindex(':')
        else:
            self.last_parens = self.call.rindex(')')
        self.first_parens = self.word_finder._find_parens_start(self.last_parens)

    def get_parameters(self):
        # Returns (args, keywords); for method calls the receiver text
        # (everything before the last dot) becomes the first argument.
        args, keywords = self.word_finder.get_parameters(self.first_parens,
                                                         self.last_parens)
        if self.is_called_as_a_method():
            instance = self.call[:self.call.rindex('.', 0, self.first_parens)]
            args.insert(0, instance.strip())
        return args, keywords

    def get_instance(self):
        # Primary expression of the receiver, or None for plain calls.
        if self.is_called_as_a_method():
            return self.word_finder.get_primary_at(
                self.call.rindex('.', 0, self.first_parens) - 1)

    def get_function_name(self):
        # For method calls only the attribute name; otherwise the whole
        # (possibly dotted) primary before the parenthesis.
        if self.is_called_as_a_method():
            return self.word_finder.get_word_at(self.first_parens - 1)
        else:
            return self.word_finder.get_primary_at(self.first_parens - 1)

    def is_called_as_a_method(self):
        return self.implicit_arg and '.' in self.call[:self.first_parens]
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/importutils/__init__.py b/.vim/bundle/python-mode/pylibs/rope/refactor/importutils/__init__.py
@@ -0,0 +1,299 @@
+"""A package for handling imports
+
+This package provides tools for modifying module imports after
+refactorings or as a separate task.
+
+"""
+import rope.base.evaluate
+from rope.base.change import ChangeSet, ChangeContents
+from rope.refactor import occurrences, rename
+from rope.refactor.importutils import module_imports, actions
+from rope.refactor.importutils.importinfo import NormalImport, FromImport
+import rope.base.codeanalyze
+
+
class ImportOrganizer(object):
    """Perform some import-related commands

    Each method returns a `rope.base.change.Change` object.

    """

    def __init__(self, project):
        self.project = project
        self.pycore = project.pycore
        self.import_tools = ImportTools(self.pycore)

    def organize_imports(self, resource, offset=None):
        """Remove unused/duplicate imports and sort the rest."""
        return self._perform_command_on_import_tools(
            self.import_tools.organize_imports, resource, offset)

    def expand_star_imports(self, resource, offset=None):
        """Replace ``from x import *`` with explicit names."""
        return self._perform_command_on_import_tools(
            self.import_tools.expand_stars, resource, offset)

    def froms_to_imports(self, resource, offset=None):
        """Transform ``from`` imports into plain imports."""
        return self._perform_command_on_import_tools(
            self.import_tools.froms_to_imports, resource, offset)

    def relatives_to_absolutes(self, resource, offset=None):
        """Transform relative imports into absolute ones."""
        return self._perform_command_on_import_tools(
            self.import_tools.relatives_to_absolutes, resource, offset)

    def handle_long_imports(self, resource, offset=None):
        """Transform long dotted imports into ``from`` imports."""
        return self._perform_command_on_import_tools(
            self.import_tools.handle_long_imports, resource, offset)

    def _perform_command_on_import_tools(self, method, resource, offset):
        # Run one ImportTools method over `resource` and wrap the result
        # in a ChangeSet; None means nothing changed.
        pymodule = self.pycore.resource_to_pyobject(resource)
        original_source = pymodule.source_code
        import_filter = None
        if offset is not None:
            # Restrict the operation to the import under `offset`.
            lineno = pymodule.lines.get_line_number(offset)
            import_filter = self._line_filter(lineno)
        result = method(pymodule, import_filter=import_filter)
        if result is None or result == original_source:
            return None
        description = method.__name__.replace('_', ' ') + \
            ' in <%s>' % resource.path
        changes = ChangeSet(description)
        changes.add_change(ChangeContents(resource, result))
        return changes

    def _line_filter(self, lineno):
        # Predicate selecting the import statement spanning `lineno`.
        def import_filter(import_stmt):
            return import_stmt.start_line <= lineno < import_stmt.end_line
        return import_filter
+
+
class ImportTools(object):
    # Collection of import transformations over a pymodule.  Each public
    # method returns the changed source (or None / the original source
    # when nothing changed).  NOTE: the local name ``module_imports``
    # used throughout shadows the ``module_imports`` module imported at
    # file level; the module is only needed inside the
    # ``module_imports`` method, where no such local exists.

    def __init__(self, pycore):
        self.pycore = pycore

    def get_import(self, resource):
        """The import statement for `resource`"""
        module_name = self.pycore.modname(resource)
        return NormalImport(((module_name, None), ))

    def get_from_import(self, resource, name):
        """The from import statement for `name` in `resource`"""
        module_name = self.pycore.modname(resource)
        names = []
        # `name` may be a single name or a list of names to import.
        if isinstance(name, list):
            names = [(imported, None) for imported in name]
        else:
            names = [(name, None),]
        return FromImport(module_name, 0, tuple(names))

    def module_imports(self, module, imports_filter=None):
        # Build the ModuleImports helper; ``module_imports`` here
        # resolves to the file-level module import.
        return module_imports.ModuleImports(self.pycore, module,
                                            imports_filter)

    def froms_to_imports(self, pymodule, import_filter=None):
        # Rewrite ``from mod import name`` usages as ``mod.name`` in the
        # body, then replace the from-imports with plain imports.
        pymodule = self._clean_up_imports(pymodule, import_filter)
        module_imports = self.module_imports(pymodule, import_filter)
        for import_stmt in module_imports.imports:
            if import_stmt.readonly or \
               not self._is_transformable_to_normal(import_stmt.import_info):
                continue
            pymodule = self._from_to_normal(pymodule, import_stmt)

        # Adding normal imports in place of froms
        module_imports = self.module_imports(pymodule, import_filter)
        for import_stmt in module_imports.imports:
            if not import_stmt.readonly and \
               self._is_transformable_to_normal(import_stmt.import_info):
                import_stmt.import_info = \
                    NormalImport(((import_stmt.import_info.module_name, None),))
        module_imports.remove_duplicates()
        return module_imports.get_changed_source()

    def expand_stars(self, pymodule, import_filter=None):
        # Replace star imports with the explicit names they provide.
        module_imports = self.module_imports(pymodule, import_filter)
        module_imports.expand_stars()
        return module_imports.get_changed_source()

    def _from_to_normal(self, pymodule, import_stmt):
        # Qualify every use of each name imported by this from-import;
        # the module is reparsed after each rename.
        resource = pymodule.get_resource()
        from_import = import_stmt.import_info
        module_name = from_import.module_name
        for name, alias in from_import.names_and_aliases:
            imported = name
            if alias is not None:
                imported = alias
            occurrence_finder = occurrences.create_finder(
                self.pycore, imported, pymodule[imported], imports=False)
            source = rename.rename_in_module(
                occurrence_finder, module_name + '.' + name,
                pymodule=pymodule, replace_primary=True)
            if source is not None:
                pymodule = self.pycore.get_string_module(source, resource)
        return pymodule

    def _clean_up_imports(self, pymodule, import_filter):
        # Normalize imports before a transformation: expand stars, make
        # relatives absolute, drop duplicates and unused imports.  The
        # pymodule is reparsed after each step that changed the source.
        resource = pymodule.get_resource()
        module_with_imports = self.module_imports(pymodule, import_filter)
        module_with_imports.expand_stars()
        source = module_with_imports.get_changed_source()
        if source is not None:
            pymodule = self.pycore.get_string_module(source, resource)
        source = self.relatives_to_absolutes(pymodule)
        if source is not None:
            pymodule = self.pycore.get_string_module(source, resource)

        module_with_imports = self.module_imports(pymodule, import_filter)
        module_with_imports.remove_duplicates()
        module_with_imports.remove_unused_imports()
        source = module_with_imports.get_changed_source()
        if source is not None:
            pymodule = self.pycore.get_string_module(source, resource)
        return pymodule

    def relatives_to_absolutes(self, pymodule, import_filter=None):
        # Rename relatively-imported names to their absolute form in the
        # body, then rewrite the import statements themselves.
        module_imports = self.module_imports(pymodule, import_filter)
        to_be_absolute_list = module_imports.get_relative_to_absolute_list()
        for name, absolute_name in to_be_absolute_list:
            pymodule = self._rename_in_module(pymodule, name, absolute_name)
        module_imports = self.module_imports(pymodule, import_filter)
        module_imports.get_relative_to_absolute_list()
        source = module_imports.get_changed_source()
        if source is None:
            source = pymodule.source_code
        return source

    def _is_transformable_to_normal(self, import_info):
        # Only plain from-imports can become normal imports.
        if not isinstance(import_info, FromImport):
            return False
        return True

    def organize_imports(self, pymodule,
                         unused=True, duplicates=True,
                         selfs=True, sort=True, import_filter=None):
        """Remove unused/duplicate/self imports and optionally sort.

        Returns the changed source (may be None when nothing changed
        and sorting is enabled).
        """
        if unused or duplicates:
            module_imports = self.module_imports(pymodule, import_filter)
            if unused:
                module_imports.remove_unused_imports()
            if duplicates:
                module_imports.remove_duplicates()
            source = module_imports.get_changed_source()
            if source is not None:
                pymodule = self.pycore.get_string_module(
                    source, pymodule.get_resource())
        if selfs:
            pymodule = self._remove_self_imports(pymodule, import_filter)
        if sort:
            return self.sort_imports(pymodule, import_filter)
        else:
            return pymodule.source_code

    def _remove_self_imports(self, pymodule, import_filter=None):
        # Drop imports of the module into itself; names imported via an
        # alias are renamed back to their original form.
        module_imports = self.module_imports(pymodule, import_filter)
        to_be_fixed, to_be_renamed = module_imports.get_self_import_fix_and_rename_list()
        for name in to_be_fixed:
            try:
                pymodule = self._rename_in_module(pymodule, name, '', till_dot=True)
            except ValueError:
                # There is a self import with direct access to it
                return pymodule
        for name, new_name in to_be_renamed:
            pymodule = self._rename_in_module(pymodule, name, new_name)
        module_imports = self.module_imports(pymodule, import_filter)
        module_imports.get_self_import_fix_and_rename_list()
        source = module_imports.get_changed_source()
        if source is not None:
            pymodule = self.pycore.get_string_module(source, pymodule.get_resource())
        return pymodule

    def _rename_in_module(self, pymodule, name, new_name, till_dot=False):
        # Replace occurrences of `name` with `new_name` in the module
        # body; with `till_dot` the text up to and including the next
        # dot is replaced (used to strip self-module prefixes).
        old_name = name.split('.')[-1]
        old_pyname = rope.base.evaluate.eval_str(pymodule.get_scope(), name)
        occurrence_finder = occurrences.create_finder(
            self.pycore, old_name, old_pyname, imports=False)
        changes = rope.base.codeanalyze.ChangeCollector(pymodule.source_code)
        for occurrence in occurrence_finder.find_occurrences(pymodule=pymodule):
            start, end = occurrence.get_primary_range()
            if till_dot:
                new_end = pymodule.source_code.index('.', end) + 1
                space = pymodule.source_code[end:new_end - 1].strip()
                if not space == '':
                    # Anything but whitespace/continuations between the
                    # name and the dot means a direct access; bail out.
                    for c in space:
                        if not c.isspace() and c not in '\\':
                            raise ValueError()
                end = new_end
            changes.add_change(start, end, new_name)
        source = changes.get_changed()
        if source is not None:
            pymodule = self.pycore.get_string_module(source, pymodule.get_resource())
        return pymodule

    def sort_imports(self, pymodule, import_filter=None):
        # Reorder import statements (future/stdlib/third-party/project).
        module_imports = self.module_imports(pymodule, import_filter)
        module_imports.sort_imports()
        return module_imports.get_changed_source()

    def handle_long_imports(self, pymodule, maxdots=2, maxlength=27,
                            import_filter=None):
        # IDEA: `maxdots` and `maxlength` can be specified in project config
        # adding new from imports
        module_imports = self.module_imports(pymodule, import_filter)
        to_be_fixed = module_imports.handle_long_imports(maxdots, maxlength)
        # performing the renaming
        pymodule = self.pycore.get_string_module(
            module_imports.get_changed_source(),
            resource=pymodule.get_resource())
        for name in to_be_fixed:
            pymodule = self._rename_in_module(pymodule, name,
                                              name.split('.')[-1])
        # organizing imports
        return self.organize_imports(pymodule, selfs=False, sort=False,
                                     import_filter=import_filter)
+
+
def get_imports(pycore, pydefined):
    """Shortcut returning the ``ImportInfo`` objects used in `pydefined`.

    For a module, all of its imports are returned; for a nested scope,
    only the imports it actually uses.
    """
    pymodule = pydefined.get_module()
    imports = module_imports.ModuleImports(pycore, pymodule)
    if pymodule == pydefined:
        return [stmt.import_info for stmt in imports.imports]
    return imports.get_used_imports(pydefined)
+
+
def get_module_imports(pycore, pymodule):
    """Shortcut building a ``module_imports.ModuleImports`` for `pymodule`."""
    imports = module_imports.ModuleImports(pycore, pymodule)
    return imports
+
+
def add_import(pycore, pymodule, module_name, name=None):
    """Add an import for `module_name` (or `name` inside it) to `pymodule`.

    Candidate import forms are tried in order of preference (``from mod
    import name``, ``from pkg import mod``, ``import mod``); the first
    candidate that can be merged into an existing import statement wins,
    otherwise a plain ``import`` is appended.

    Returns ``(changed_source, imported_name)`` where `imported_name` is
    how the requested object should be referenced afterwards.
    """
    imports = get_module_imports(pycore, pymodule)
    candidates = []
    names = []
    if name is not None:
        # from mod import name
        names.append(name)
        candidates.append(FromImport(module_name, 0, [(name, None)]))
    if '.' in module_name:
        # from pkg import mod
        pkg, mod = module_name.rsplit('.', 1)
        candidates.append(FromImport(pkg, 0, [(mod, None)]))
        names.append(mod + '.' + name if name else mod)
    # import mod
    normal_import = NormalImport([(module_name, None)])
    names.append(module_name + '.' + name if name else module_name)
    candidates.append(normal_import)

    visitor = actions.AddingVisitor(pycore, candidates)
    selected_import = normal_import
    for import_statement in imports.imports:
        if import_statement.accept(visitor):
            # The visitor merged one of the candidates into this statement.
            selected_import = visitor.import_info
            break
    imports.add_import(selected_import)
    imported_name = names[candidates.index(selected_import)]
    return imports.get_changed_source(), imported_name
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/importutils/actions.py b/.vim/bundle/python-mode/pylibs/rope/refactor/importutils/actions.py
@@ -0,0 +1,359 @@
+import os
+import sys
+
+from rope.base import pyobjects, exceptions, stdmods
+from rope.refactor import occurrences
+from rope.refactor.importutils import importinfo
+
+
class ImportInfoVisitor(object):
    """Base visitor over import statements.

    ``dispatch`` routes each statement to ``visit<ImportInfoClass>``;
    statements whose imported module cannot be found are skipped.
    """

    def dispatch(self, import_):
        info = import_.import_info
        try:
            handler = getattr(self, 'visit' + info.__class__.__name__)
            return handler(import_, info)
        except exceptions.ModuleNotFoundError:
            pass

    def visitEmptyImport(self, import_stmt, import_info):
        pass

    def visitNormalImport(self, import_stmt, import_info):
        pass

    def visitFromImport(self, import_stmt, import_info):
        pass
+
+
class RelativeToAbsoluteVisitor(ImportInfoVisitor):
    """Rewrite relative imports as absolute ones, collecting the
    ``(old_name, absolute_name)`` pairs that also need renaming in the
    module body."""

    def __init__(self, pycore, current_folder):
        self.to_be_absolute = []
        self.pycore = pycore
        self.folder = current_folder
        self.context = importinfo.ImportContext(pycore, current_folder)

    def visitNormalImport(self, import_stmt, import_info):
        self.to_be_absolute.extend(
            self._get_relative_to_absolute_list(import_info))
        new_pairs = []
        for name, alias in import_info.names_and_aliases:
            resource = self.pycore.find_module(name, folder=self.folder)
            if resource is None:
                # Not resolvable; keep the pair untouched.
                new_pairs.append((name, alias))
            else:
                new_pairs.append((self.pycore.modname(resource), alias))
        if not import_info._are_name_and_alias_lists_equal(
                new_pairs, import_info.names_and_aliases):
            import_stmt.import_info = importinfo.NormalImport(new_pairs)

    def _get_relative_to_absolute_list(self, import_info):
        # Unaliased names whose absolute module name differs need the
        # body of the module rewritten as well.
        pairs = []
        for name, alias in import_info.names_and_aliases:
            if alias is not None:
                continue
            resource = self.pycore.find_module(name, folder=self.folder)
            if resource is None:
                continue
            absolute = self.pycore.modname(resource)
            if absolute != name:
                pairs.append((name, absolute))
        return pairs

    def visitFromImport(self, import_stmt, import_info):
        resource = import_info.get_imported_resource(self.context)
        if resource is None:
            return None
        absolute = self.pycore.modname(resource)
        if import_info.module_name != absolute:
            import_stmt.import_info = importinfo.FromImport(
                absolute, 0, import_info.names_and_aliases)
+
+
class FilteringVisitor(ImportInfoVisitor):
    """Build new import infos keeping only the imported names accepted
    by the `can_select` predicate."""

    def __init__(self, pycore, folder, can_select):
        self.to_be_absolute = []
        self.pycore = pycore
        self.can_select = self._transform_can_select(can_select)
        self.context = importinfo.ImportContext(pycore, folder)

    def _transform_can_select(self, can_select):
        # Adapt a one-argument predicate over the visible name to the
        # (name, alias) pairs stored in import infos.
        def can_select_name_and_alias(name, alias):
            visible = name if alias is None else alias
            return can_select(visible)
        return can_select_name_and_alias

    def visitNormalImport(self, import_stmt, import_info):
        kept = [(name, alias)
                for name, alias in import_info.names_and_aliases
                if self.can_select(name, alias)]
        return importinfo.NormalImport(kept)

    def visitFromImport(self, import_stmt, import_info):
        # ``__future__`` imports must never be filtered away.
        if _is_future(import_info):
            return import_info
        new_pairs = []
        if import_info.is_star_import():
            # Keep the star import iff any name it provides is selected.
            for name in import_info.get_imported_names(self.context):
                if self.can_select(name, None):
                    new_pairs.append(import_info.names_and_aliases[0])
                    break
        else:
            new_pairs = [(name, alias)
                         for name, alias in import_info.names_and_aliases
                         if self.can_select(name, alias)]
        return importinfo.FromImport(
            import_info.module_name, import_info.level, new_pairs)
+
+
class RemovingVisitor(ImportInfoVisitor):
    """Apply a FilteringVisitor and write the filtered info back onto
    each import statement (dropped names become empty imports)."""

    def __init__(self, pycore, folder, can_select):
        self.to_be_absolute = []
        self.pycore = pycore
        self.filtering = FilteringVisitor(pycore, folder, can_select)

    def dispatch(self, import_):
        filtered = self.filtering.dispatch(import_)
        if filtered is not None:
            import_.import_info = filtered
+
+
class AddingVisitor(ImportInfoVisitor):
    """A class for adding imports

    Given a list of `ImportInfo`\s, it tries to merge each candidate
    into the visited import statement; ``dispatch`` returns `True` (and
    records the merged candidate in ``self.import_info``) as soon as one
    of them can be combined with the existing statement.

    """

    def __init__(self, pycore, import_list):
        self.pycore = pycore
        self.import_list = import_list
        # The candidate currently being tried / finally merged.
        self.import_info = None

    def dispatch(self, import_):
        # Try each candidate against this statement; implicitly returns
        # None when none of them can be merged.
        for import_info in self.import_list:
            self.import_info = import_info
            if ImportInfoVisitor.dispatch(self, import_):
                return True

    # TODO: Handle adding relative and absolute imports
    def visitNormalImport(self, import_stmt, import_info):
        if not isinstance(self.import_info, import_info.__class__):
            return False
        # Adding ``import x`` and ``import x.y`` that results ``import x.y``
        if len(import_info.names_and_aliases) == \
           len(self.import_info.names_and_aliases) == 1:
            imported1 = import_info.names_and_aliases[0]
            imported2 = self.import_info.names_and_aliases[0]
            # Only merge unaliased imports where one dotted name is a
            # prefix of the other; keep the more specific one.
            if imported1[1] == imported2[1] is None:
                if imported1[0].startswith(imported2[0] + '.'):
                    return True
                if imported2[0].startswith(imported1[0] + '.'):
                    import_stmt.import_info = self.import_info
                    return True
        # Multiple imports using a single import statement is discouraged
        # so we won't bother adding them.
        if self.import_info._are_name_and_alias_lists_equal(
            import_info.names_and_aliases, self.import_info.names_and_aliases):
            return True

    def visitFromImport(self, import_stmt, import_info):
        # Merge only with a from-import of the same module and level; a
        # star import on either side absorbs the other.
        if isinstance(self.import_info, import_info.__class__) and \
           import_info.module_name == self.import_info.module_name and \
           import_info.level == self.import_info.level:
            if import_info.is_star_import():
                return True
            if self.import_info.is_star_import():
                import_stmt.import_info = self.import_info
                return True
            # Append the candidate's names that are not present yet.
            new_pairs = list(import_info.names_and_aliases)
            for pair in self.import_info.names_and_aliases:
                if pair not in new_pairs:
                    new_pairs.append(pair)
            import_stmt.import_info = importinfo.FromImport(
                import_info.module_name, import_info.level, new_pairs)
            return True
+
+
class ExpandStarsVisitor(ImportInfoVisitor):
    """Replace ``from x import *`` with explicit names (then filtered by
    `can_select`); other imports are just run through the filter."""

    def __init__(self, pycore, folder, can_select):
        self.pycore = pycore
        self.filtering = FilteringVisitor(pycore, folder, can_select)
        self.context = importinfo.ImportContext(pycore, folder)

    def visitNormalImport(self, import_stmt, import_info):
        self.filtering.dispatch(import_stmt)

    def visitFromImport(self, import_stmt, import_info):
        if not import_info.is_star_import():
            self.filtering.dispatch(import_stmt)
            return
        # Expand ``*`` to every name the module provides, then filter.
        pairs = [(name, None)
                 for name in import_info.get_imported_names(self.context)]
        expanded = importinfo.FromImport(
            import_info.module_name, import_info.level, pairs)
        import_stmt.import_info = \
            self.filtering.visitFromImport(None, expanded)
+
+
class SelfImportVisitor(ImportInfoVisitor):
    # Detect and remove imports of `resource` (the module being edited)
    # into itself.  ``to_be_fixed`` collects names whose dotted prefix
    # must be stripped from the body; ``to_be_renamed`` collects
    # (alias, original) pairs to rename back.

    def __init__(self, pycore, current_folder, resource):
        self.pycore = pycore
        self.folder = current_folder
        self.resource = resource
        self.to_be_fixed = set()
        self.to_be_renamed = set()
        self.context = importinfo.ImportContext(pycore, current_folder)

    def visitNormalImport(self, import_stmt, import_info):
        # Drop pairs that import this very module; remember the visible
        # name so body references can be fixed up.
        new_pairs = []
        for name, alias in import_info.names_and_aliases:
            resource = self.pycore.find_module(name, folder=self.folder)
            if resource is not None and resource == self.resource:
                imported = name
                if alias is not None:
                    imported = alias
                self.to_be_fixed.add(imported)
            else:
                new_pairs.append((name, alias))
        if not import_info._are_name_and_alias_lists_equal(
            new_pairs, import_info.names_and_aliases):
            import_stmt.import_info = importinfo.NormalImport(new_pairs)

    def visitFromImport(self, import_stmt, import_info):
        resource = import_info.get_imported_resource(self.context)
        if resource is None:
            return
        if resource == self.resource:
            # ``from thismodule import ...``: empty the statement.
            self._importing_names_from_self(import_info, import_stmt)
            return
        pymodule = self.pycore.resource_to_pyobject(resource)
        new_pairs = []
        for name, alias in import_info.names_and_aliases:
            try:
                result = pymodule[name].get_object()
                # Importing this module itself through another package.
                if isinstance(result, pyobjects.PyModule) and \
                   result.get_resource() == self.resource:
                    imported = name
                    if alias is not None:
                        imported = alias
                    self.to_be_fixed.add(imported)
                else:
                    new_pairs.append((name, alias))
            except exceptions.AttributeNotFoundError:
                # Unresolvable name: keep it untouched.
                new_pairs.append((name, alias))
        if not import_info._are_name_and_alias_lists_equal(
            new_pairs, import_info.names_and_aliases):
            import_stmt.import_info = importinfo.FromImport(
                import_info.module_name, import_info.level, new_pairs)

    def _importing_names_from_self(self, import_info, import_stmt):
        # Names imported from the module into itself: aliased ones must
        # be renamed back to their original names in the body.
        if not import_info.is_star_import():
            for name, alias in import_info.names_and_aliases:
                if alias is not None:
                    self.to_be_renamed.add((alias, name))
        import_stmt.empty_import()
+
+
class SortingVisitor(ImportInfoVisitor):
    """Partition import statements into future / standard-library /
    third-party / in-project buckets, used for sorting."""

    def __init__(self, pycore, current_folder):
        self.pycore = pycore
        self.folder = current_folder
        self.standard = set()
        self.third_party = set()
        self.in_project = set()
        self.future = set()
        self.context = importinfo.ImportContext(pycore, current_folder)

    def visitNormalImport(self, import_stmt, import_info):
        if not import_info.names_and_aliases:
            return
        # Classify by the first imported module only.
        name, alias = import_info.names_and_aliases[0]
        resource = self.pycore.find_module(
            name, folder=self.folder)
        self._check_imported_resource(import_stmt, resource, name)

    def visitFromImport(self, import_stmt, import_info):
        resource = import_info.get_imported_resource(self.context)
        self._check_imported_resource(import_stmt, resource,
                                      import_info.module_name)

    def _check_imported_resource(self, import_stmt, resource, imported_name):
        info = import_stmt.import_info
        if resource is not None and resource.project == self.pycore.project:
            bucket = self.in_project
        elif _is_future(info):
            bucket = self.future
        elif imported_name.split('.')[0] in stdmods.standard_modules():
            bucket = self.standard
        else:
            bucket = self.third_party
        bucket.add(import_stmt)
+
+
class LongImportVisitor(ImportInfoVisitor):
    """Find dotted imports that are "too long" and prepare equivalent
    ``from`` imports for them."""

    def __init__(self, current_folder, pycore, maxdots, maxlength):
        self.maxdots = maxdots
        self.maxlength = maxlength
        self.to_be_renamed = set()
        self.current_folder = current_folder
        self.pycore = pycore
        self.new_imports = []

    def visitNormalImport(self, import_stmt, import_info):
        new_pairs = []
        for name, alias in import_info.names_and_aliases:
            if alias is not None or not self._is_long(name):
                continue
            # Replace ``import a.b.c`` with ``from a.b import c``.
            self.to_be_renamed.add(name)
            package, leaf = name.rsplit('.', 1)
            self.new_imports.append(
                importinfo.FromImport(package, 0, ((leaf, None), )))

    def _is_long(self, name):
        # "Long" means too many dots, or dotted and over the length cap.
        if name.count('.') > self.maxdots:
            return True
        return '.' in name and len(name) > self.maxlength
+
+
class RemovePyNameVisitor(ImportInfoVisitor):
    """Drop the names bound to a given pyname from ``from`` imports."""

    def __init__(self, pycore, pymodule, pyname, folder):
        self.pymodule = pymodule
        self.pyname = pyname
        self.context = importinfo.ImportContext(pycore, folder)

    def visitFromImport(self, import_stmt, import_info):
        kept = []
        if not import_info.is_star_import():
            for name, alias in import_info.names_and_aliases:
                try:
                    bound = self.pymodule[alias or name]
                    if occurrences.same_pyname(self.pyname, bound):
                        continue
                except exceptions.AttributeNotFoundError:
                    pass
                kept.append((name, alias))
        return importinfo.FromImport(
            import_info.module_name, import_info.level, kept)

    def dispatch(self, import_):
        new_info = ImportInfoVisitor.dispatch(self, import_)
        if new_info is not None:
            import_.import_info = new_info
+
+
def _is_future(info):
    """True for ``from __future__ import ...`` statements."""
    if not isinstance(info, importinfo.FromImport):
        return False
    return info.module_name == '__future__'
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/importutils/importinfo.py b/.vim/bundle/python-mode/pylibs/rope/refactor/importutils/importinfo.py
@@ -0,0 +1,201 @@
class ImportStatement(object):
    """Represent an import in a module

    `readonly` attribute controls whether this import can be changed
    by import actions or not.

    """

    def __init__(self, import_info, start_line, end_line,
                 main_statement=None, blank_lines=0):
        self.start_line = start_line
        self.end_line = end_line
        self.readonly = False
        self.main_statement = main_statement
        # Managed through the `import_info` property below so that
        # assigning a new info marks this statement as changed.
        self._import_info = None
        self.import_info = import_info
        self._is_changed = False
        self.new_start = None
        self.blank_lines = blank_lines

    def _get_import_info(self):
        return self._import_info

    def _set_import_info(self, new_import):
        # Readonly statements and no-op assignments are ignored.
        if not self.readonly and \
           new_import is not None and not new_import == self._import_info:
            self._is_changed = True
            self._import_info = new_import

    import_info = property(_get_import_info, _set_import_info)

    def get_import_statement(self):
        # Prefer the original text when nothing changed, preserving the
        # author's formatting.
        if self._is_changed or self.main_statement is None:
            return self.import_info.get_import_statement()
        else:
            return self.main_statement

    def empty_import(self):
        self.import_info = ImportInfo.get_empty_import()

    def move(self, lineno, blank_lines=0):
        """Schedule this statement to be rewritten at `lineno`."""
        self.new_start = lineno
        self.blank_lines = blank_lines

    def get_old_location(self):
        return self.start_line, self.end_line

    def get_new_start(self):
        return self.new_start

    def is_changed(self):
        # BUG FIX: this previously used `or` between the two new_start
        # tests; since `None != self.start_line` is always true when
        # `new_start` is None, the method unconditionally returned True.
        # The statement has moved only when a new start exists and it
        # differs from the old start line.
        return self._is_changed or (self.new_start is not None and
                                    self.new_start != self.start_line)

    def accept(self, visitor):
        return visitor.dispatch(self)
+
+
class ImportInfo(object):
    """Abstract base for the different flavours of import information."""

    def get_imported_primaries(self, context):
        pass

    def get_imported_names(self, context):
        # Only the first dotted segment of each primary is bound.
        names = []
        for primary in self.get_imported_primaries(context):
            names.append(primary.split('.')[0])
        return names

    def get_import_statement(self):
        pass

    def is_empty(self):
        pass

    def __hash__(self):
        return hash(self.get_import_statement())

    def _are_name_and_alias_lists_equal(self, list1, list2):
        if len(list1) != len(list2):
            return False
        return all(pair1 == pair2 for pair1, pair2 in zip(list1, list2))

    def __eq__(self, obj):
        if not isinstance(obj, self.__class__):
            return False
        return self.get_import_statement() == obj.get_import_statement()

    def __ne__(self, obj):
        return not self.__eq__(obj)

    @staticmethod
    def get_empty_import():
        return EmptyImport()
+
+
class NormalImport(ImportInfo):
    """An ``import mod1, mod2 as alias`` style import."""

    def __init__(self, names_and_aliases):
        self.names_and_aliases = names_and_aliases

    def get_imported_primaries(self, context):
        # The bound primary is the alias when present, the name itself
        # otherwise.
        return [alias or name for name, alias in self.names_and_aliases]

    def get_import_statement(self):
        statement = 'import '
        for name, alias in self.names_and_aliases:
            statement += name if not alias else name + ' as ' + alias
            statement += ', '
        return statement[:-2]

    def is_empty(self):
        return not self.names_and_aliases
+
+
class FromImport(ImportInfo):
    """A ``from module import ...`` style import (possibly relative)."""

    def __init__(self, module_name, level, names_and_aliases):
        self.module_name = module_name
        # Number of leading dots for relative imports; 0 means absolute.
        self.level = level
        self.names_and_aliases = names_and_aliases

    def get_imported_primaries(self, context):
        if self.names_and_aliases[0][0] == '*':
            # A star import binds every public name of the module.
            module = self.get_imported_module(context)
            return [name for name in module
                    if not name.startswith('_')]
        return [alias or name for name, alias in self.names_and_aliases]

    def get_imported_resource(self, context):
        """Get the imported resource

        Returns `None` if module was not found.
        """
        if self.level == 0:
            return context.pycore.find_module(
                self.module_name, folder=context.folder)
        return context.pycore.find_relative_module(
            self.module_name, context.folder, self.level)

    def get_imported_module(self, context):
        """Get the imported `PyModule`

        Raises `rope.base.exceptions.ModuleNotFoundError` if module
        could not be found.
        """
        if self.level == 0:
            return context.pycore.get_module(
                self.module_name, context.folder)
        return context.pycore.get_relative_module(
            self.module_name, context.folder, self.level)

    def get_import_statement(self):
        statement = 'from ' + '.' * self.level + self.module_name + ' import '
        for name, alias in self.names_and_aliases:
            statement += name if not alias else name + ' as ' + alias
            statement += ', '
        return statement[:-2]

    def is_empty(self):
        return not self.names_and_aliases

    def is_star_import(self):
        if not self.names_and_aliases:
            return False
        return self.names_and_aliases[0][0] == '*'
+
+
class EmptyImport(ImportInfo):
    # Placeholder used when an import statement has been emptied; it
    # imports nothing and is always reported as empty.

    # No names are bound by an empty import.
    names_and_aliases = []

    def is_empty(self):
        return True

    def get_imported_primaries(self, context):
        return []
+
+
class ImportContext(object):
    # Small value object bundling the pycore and the folder relative
    # imports are resolved from.

    def __init__(self, pycore, folder):
        self.pycore = pycore
        self.folder = folder
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/importutils/module_imports.py b/.vim/bundle/python-mode/pylibs/rope/refactor/importutils/module_imports.py
@@ -0,0 +1,455 @@
+import rope.base.pynames
+from rope.base import ast, utils
+from rope.refactor.importutils import importinfo
+from rope.refactor.importutils import actions
+
+
class ModuleImports(object):
    """Manage the import statements of a single module.

    Provides queries and transformations (removing unused imports,
    expanding star imports, sorting, de-duplicating, ...) over the
    imports of `pymodule`.  When `import_filter` is given, statements
    it rejects are marked readonly and left untouched.
    """

    def __init__(self, pycore, pymodule, import_filter=None):
        self.pycore = pycore
        self.pymodule = pymodule
        self.separating_lines = 0
        self.filter = import_filter

    @property
    @utils.saveit
    def imports(self):
        # Computed once (utils.saveit caches); also records the blank
        # line count separating the imports from the module body.
        finder = _GlobalImportFinder(self.pymodule, self.pycore)
        result = finder.find_import_statements()
        self.separating_lines = finder.get_separating_line_count()
        if self.filter is not None:
            for import_stmt in result:
                if not self.filter(import_stmt):
                    import_stmt.readonly = True
        return result

    def _get_unbound_names(self, defined_pyobject):
        visitor = _GlobalUnboundNameFinder(self.pymodule, defined_pyobject)
        ast.walk(self.pymodule.get_ast(), visitor)
        return visitor.unbound

    def remove_unused_imports(self):
        """Empty every import that binds no unbound name of the module."""
        can_select = _OneTimeSelector(self._get_unbound_names(self.pymodule))
        visitor = actions.RemovingVisitor(
            self.pycore, self._current_folder(), can_select)
        for import_statement in self.imports:
            import_statement.accept(visitor)

    def get_used_imports(self, defined_pyobject):
        """Return the non-empty import infos used by `defined_pyobject`."""
        result = []
        can_select = _OneTimeSelector(self._get_unbound_names(defined_pyobject))
        visitor = actions.FilteringVisitor(
            self.pycore, self._current_folder(), can_select)
        for import_statement in self.imports:
            new_import = import_statement.accept(visitor)
            if new_import is not None and not new_import.is_empty():
                result.append(new_import)
        return result

    def get_changed_source(self):
        """Return the module source with the edited imports written back."""
        imports = self.imports
        after_removing = self._remove_imports(imports)
        imports = [stmt for stmt in imports
                   if not stmt.import_info.is_empty()]

        first_non_blank = self._first_non_blank_line(after_removing, 0)
        first_import = self._first_import_line() - 1
        result = []
        # Writing module docs
        result.extend(after_removing[first_non_blank:first_import])
        # Writing imports
        # FIX: dropped an unused `start = self._get_import_location(stmt)`
        # local that was computed here but never read.
        sorted_imports = sorted(imports, self._compare_import_locations)
        for stmt in sorted_imports:
            if stmt != sorted_imports[0]:
                result.append('\n' * stmt.blank_lines)
            result.append(stmt.get_import_statement() + '\n')
        if sorted_imports and first_non_blank < len(after_removing):
            result.append('\n' * self.separating_lines)

        # Writing the body
        first_after_imports = self._first_non_blank_line(after_removing,
                                                         first_import)
        result.extend(after_removing[first_after_imports:])
        return ''.join(result)

    def _get_import_location(self, stmt):
        # New location wins over the statement's original location.
        start = stmt.get_new_start()
        if start is None:
            start = stmt.get_old_location()[0]
        return start

    def _compare_import_locations(self, stmt1, stmt2):
        # Python 2 cmp-style comparator (used with the two-argument
        # `sorted` call above).
        def get_location(stmt):
            if stmt.get_new_start() is not None:
                return stmt.get_new_start()
            else:
                return stmt.get_old_location()[0]
        return cmp(get_location(stmt1), get_location(stmt2))

    def _remove_imports(self, imports):
        # Blank out the import lines (keeping line numbering stable) and
        # keep everything else.
        lines = self.pymodule.source_code.splitlines(True)
        after_removing = []
        last_index = 0
        for stmt in imports:
            start, end = stmt.get_old_location()
            after_removing.extend(lines[last_index:start - 1])
            last_index = end - 1
            for i in range(start, end):
                after_removing.append('')
        after_removing.extend(lines[last_index:])
        return after_removing

    def _first_non_blank_line(self, lines, lineno):
        # Index (0-based) of the first non-blank line at or after `lineno`.
        result = lineno
        for line in lines[lineno:]:
            if line.strip() == '':
                result += 1
            else:
                break
        return result

    def add_import(self, import_info):
        """Merge `import_info` into an existing statement or append it."""
        visitor = actions.AddingVisitor(self.pycore, [import_info])
        for import_statement in self.imports:
            if import_statement.accept(visitor):
                break
        else:
            lineno = self._get_new_import_lineno()
            blanks = self._get_new_import_blanks()
            self.imports.append(importinfo.ImportStatement(
                import_info, lineno, lineno,
                blank_lines=blanks))

    def _get_new_import_blanks(self):
        return 0

    def _get_new_import_lineno(self):
        if self.imports:
            return self.imports[-1].end_line
        return 1

    def filter_names(self, can_select):
        visitor = actions.RemovingVisitor(
            self.pycore, self._current_folder(), can_select)
        for import_statement in self.imports:
            import_statement.accept(visitor)

    def expand_stars(self):
        """Replace star imports with explicit names that are used."""
        can_select = _OneTimeSelector(self._get_unbound_names(self.pymodule))
        visitor = actions.ExpandStarsVisitor(
            self.pycore, self._current_folder(), can_select)
        for import_statement in self.imports:
            import_statement.accept(visitor)

    def remove_duplicates(self):
        """Empty import statements that repeat an earlier one."""
        added_imports = []
        for import_stmt in self.imports:
            visitor = actions.AddingVisitor(self.pycore,
                                            [import_stmt.import_info])
            for added_import in added_imports:
                if added_import.accept(visitor):
                    import_stmt.empty_import()
                    # BUG FIX: this `break` was missing.  Without it the
                    # for-else's else-branch always ran (a for-else only
                    # skips `else` on break), so every statement --
                    # duplicate or not -- was appended to `added_imports`.
                    break
            else:
                added_imports.append(import_stmt)

    def get_relative_to_absolute_list(self):
        # Consistency: use the module-level `actions` alias like the
        # rest of this class instead of the fully dotted path.
        visitor = actions.RelativeToAbsoluteVisitor(
            self.pycore, self._current_folder())
        for import_stmt in self.imports:
            if not import_stmt.readonly:
                import_stmt.accept(visitor)
        return visitor.to_be_absolute

    def get_self_import_fix_and_rename_list(self):
        visitor = actions.SelfImportVisitor(
            self.pycore, self._current_folder(), self.pymodule.get_resource())
        for import_stmt in self.imports:
            if not import_stmt.readonly:
                import_stmt.accept(visitor)
        return visitor.to_be_fixed, visitor.to_be_renamed

    def _current_folder(self):
        return self.pymodule.get_resource().parent

    def sort_imports(self):
        # IDEA: Sort from import list
        # FIX: dropped the unused `blank_lines = 0` local.
        visitor = actions.SortingVisitor(self.pycore, self._current_folder())
        for import_statement in self.imports:
            import_statement.accept(visitor)
        in_projects = sorted(visitor.in_project, self._compare_imports)
        third_party = sorted(visitor.third_party, self._compare_imports)
        standards = sorted(visitor.standard, self._compare_imports)
        future = sorted(visitor.future, self._compare_imports)
        last_index = self._first_import_line()
        last_index = self._move_imports(future, last_index, 0)
        last_index = self._move_imports(standards, last_index, 1)
        last_index = self._move_imports(third_party, last_index, 1)
        last_index = self._move_imports(in_projects, last_index, 1)
        self.separating_lines = 2

    def _first_import_line(self):
        # Line where imports should start: after the module docstring,
        # skipping trailing blank lines upwards.
        nodes = self.pymodule.get_ast().body
        lineno = 0
        if self.pymodule.get_doc() is not None:
            lineno = 1
        if len(nodes) > lineno:
            lineno = self.pymodule.logical_lines.logical_line_in(
                nodes[lineno].lineno)[0]
        else:
            lineno = self.pymodule.lines.length()
        while lineno > 1:
            line = self.pymodule.lines.get_line(lineno - 1)
            if line.strip() == '':
                lineno -= 1
            else:
                break
        return lineno

    def _compare_imports(self, stmt1, stmt2):
        # cmp-style: plain imports sort before `from` imports,
        # alphabetically within each group.
        str1 = stmt1.get_import_statement()
        str2 = stmt2.get_import_statement()
        if str1.startswith('from ') and not str2.startswith('from '):
            return 1
        if not str1.startswith('from ') and str2.startswith('from '):
            return -1
        return cmp(str1, str2)

    def _move_imports(self, imports, index, blank_lines):
        # First statement of a group gets the group's blank-line prefix.
        if imports:
            imports[0].move(index, blank_lines)
            index += 1
            if len(imports) > 1:
                for stmt in imports[1:]:
                    stmt.move(index)
                    index += 1
        return index

    def handle_long_imports(self, maxdots, maxlength):
        """Convert over-long dotted imports into `from` imports."""
        visitor = actions.LongImportVisitor(
            self._current_folder(), self.pycore, maxdots, maxlength)
        for import_statement in self.imports:
            if not import_statement.readonly:
                import_statement.accept(visitor)
        for import_info in visitor.new_imports:
            self.add_import(import_info)
        return visitor.to_be_renamed

    def remove_pyname(self, pyname):
        """Removes pyname when imported in ``from mod import x``"""
        visitor = actions.RemovePyNameVisitor(self.pycore, self.pymodule,
                                              pyname, self._current_folder())
        for import_stmt in self.imports:
            import_stmt.accept(visitor)
+
+
+class _OneTimeSelector(object):
+
+ def __init__(self, names):
+ self.names = names
+ self.selected_names = set()
+
+ def __call__(self, imported_primary):
+ if self._can_name_be_added(imported_primary):
+ for name in self._get_dotted_tokens(imported_primary):
+ self.selected_names.add(name)
+ return True
+ return False
+
+ def _get_dotted_tokens(self, imported_primary):
+ tokens = imported_primary.split('.')
+ for i in range(len(tokens)):
+ yield '.'.join(tokens[:i + 1])
+
+ def _can_name_be_added(self, imported_primary):
+ for name in self._get_dotted_tokens(imported_primary):
+ if name in self.names and name not in self.selected_names:
+ return True
+ return False
+
+
class _UnboundNameFinder(object):
    """AST visitor collecting names not bound in the visited scopes."""

    def __init__(self, pyobject):
        self.pyobject = pyobject

    def _visit_child_scope(self, node):
        # Descend into the nested scope with a child finder that chains
        # back to this one for name resolution.
        inner_pyobject = self.pyobject.get_module().get_scope().\
            get_inner_scope_for_line(node.lineno).pyobject
        child_finder = _LocalUnboundNameFinder(inner_pyobject, self)
        for child in ast.get_child_nodes(node):
            ast.walk(child, child_finder)

    def _FunctionDef(self, node):
        self._visit_child_scope(node)

    def _ClassDef(self, node):
        self._visit_child_scope(node)

    def _Name(self, node):
        interesting = self._get_root()._is_node_interesting(node)
        if interesting and not self.is_bound(node.id):
            self.add_unbound(node.id)

    def _Attribute(self, node):
        # Unroll the attribute chain; for ``a.b.c`` we end with
        # node == Name('a') and attrs == ['c', 'b'].
        attrs = []
        while isinstance(node, ast.Attribute):
            attrs.append(node.attr)
            node = node.value
        if not isinstance(node, ast.Name):
            # Chain hangs off an arbitrary expression; keep walking it.
            ast.walk(node, self)
            return
        attrs.append(node.id)
        primary = '.'.join(reversed(attrs))
        if self._get_root()._is_node_interesting(node) and \
           not self.is_bound(primary):
            self.add_unbound(primary)

    def _get_root(self):
        pass

    def is_bound(self, name, propagated=False):
        pass

    def add_unbound(self, name):
        pass
+
+
class _GlobalUnboundNameFinder(_UnboundNameFinder):
    """Collect unbound names inside the line range of one pyobject."""

    def __init__(self, pymodule, wanted_pyobject):
        super(_GlobalUnboundNameFinder, self).__init__(pymodule)
        self.unbound = set()
        self.names = set()
        # Names brought in by imports do not count as module bindings.
        imported_kinds = (rope.base.pynames.ImportedName,
                          rope.base.pynames.ImportedModule)
        for name, pyname in pymodule._get_structural_attributes().items():
            if not isinstance(pyname, imported_kinds):
                self.names.add(name)
        wanted_scope = wanted_pyobject.get_scope()
        self.start = wanted_scope.get_start()
        self.end = wanted_scope.get_end() + 1

    def _get_root(self):
        return self

    def is_bound(self, primary, propagated=False):
        return primary.split('.')[0] in self.names

    def add_unbound(self, name):
        # Record the name along with all of its dotted prefixes.
        parts = name.split('.')
        for count in range(1, len(parts) + 1):
            self.unbound.add('.'.join(parts[:count]))

    def _is_node_interesting(self, node):
        return self.start <= node.lineno < self.end
+
+
class _LocalUnboundNameFinder(_UnboundNameFinder):
    """Name finder for a nested scope; defers to its parent scope."""

    def __init__(self, pyobject, parent):
        super(_LocalUnboundNameFinder, self).__init__(pyobject)
        self.parent = parent

    def _get_root(self):
        return self.parent._get_root()

    def is_bound(self, primary, propagated=False):
        scope = self.pyobject.get_scope()
        if propagated:
            names = scope.get_propagated_names()
        else:
            names = scope.get_names()
        first = primary.split('.')[0]
        # Bound locally, or bound somewhere up the scope chain.
        return first in names or self.parent.is_bound(first, propagated=True)

    def add_unbound(self, name):
        self.parent.add_unbound(name)
+
+
+class _GlobalImportFinder(object):
+
+ def __init__(self, pymodule, pycore):
+ self.current_folder = None
+ if pymodule.get_resource():
+ self.current_folder = pymodule.get_resource().parent
+ self.pymodule = pymodule
+ self.pycore = pycore
+ self.imports = []
+ self.pymodule = pymodule
+ self.lines = self.pymodule.lines
+
+ def visit_import(self, node, end_line):
+ start_line = node.lineno
+ import_statement = importinfo.ImportStatement(
+ importinfo.NormalImport(self._get_names(node.names)),
+ start_line, end_line, self._get_text(start_line, end_line),
+ blank_lines=self._count_empty_lines_before(start_line))
+ self.imports.append(import_statement)
+
+ def _count_empty_lines_before(self, lineno):
+ result = 0
+ for current in range(lineno - 1, 0, -1):
+ line = self.lines.get_line(current)
+ if line.strip() == '':
+ result += 1
+ else:
+ break
+ return result
+
+ def _count_empty_lines_after(self, lineno):
+ result = 0
+ for current in range(lineno + 1, self.lines.length()):
+ line = self.lines.get_line(current)
+ if line.strip() == '':
+ result += 1
+ else:
+ break
+ return result
+
+ def get_separating_line_count(self):
+ if not self.imports:
+ return 0
+ return self._count_empty_lines_after(self.imports[-1].end_line - 1)
+
+ def _get_text(self, start_line, end_line):
+ result = []
+ for index in range(start_line, end_line):
+ result.append(self.lines.get_line(index))
+ return '\n'.join(result)
+
+ def visit_from(self, node, end_line):
+ level = 0
+ if node.level:
+ level = node.level
+ import_info = importinfo.FromImport(
+ node.module or '', # see comment at rope.base.ast.walk
+ level, self._get_names(node.names))
+ start_line = node.lineno
+ self.imports.append(importinfo.ImportStatement(
+ import_info, node.lineno, end_line,
+ self._get_text(start_line, end_line),
+ blank_lines=self._count_empty_lines_before(start_line)))
+
+ def _get_names(self, alias_names):
+ result = []
+ for alias in alias_names:
+ result.append((alias.name, alias.asname))
+ return result
+
+ def find_import_statements(self):
+ nodes = self.pymodule.get_ast().body
+ for index, node in enumerate(nodes):
+ if isinstance(node, (ast.Import, ast.ImportFrom)):
+ lines = self.pymodule.logical_lines
+ end_line = lines.logical_line_in(node.lineno)[1] + 1
+ if isinstance(node, ast.Import):
+ self.visit_import(node, end_line)
+ if isinstance(node, ast.ImportFrom):
+ self.visit_from(node, end_line)
+ return self.imports
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/inline.py b/.vim/bundle/python-mode/pylibs/rope/refactor/inline.py
@@ -0,0 +1,615 @@
+# Known Bugs when inlining a function/method
+# The values passed to function are inlined using _inlined_variable.
+# This may cause two problems, illustrated in the examples below
+#
+# def foo(var1):
+# var1 = var1*10
+# return var1
+#
+# If a call to foo(20) is inlined, the result of inlined function is 20,
+# but it should be 200.
+#
+# def foo(var1):
+# var2 = var1*10
+# return var2
+#
+# 2- If a call to foo(10+10) is inlined the result of inlined function is 110
+# but it should be 200.
+
+import re
+
+import rope.base.exceptions
+import rope.refactor.functionutils
+from rope.base import (pynames, pyobjects, codeanalyze,
+ taskhandle, evaluate, worder, utils)
+from rope.base.change import ChangeSet, ChangeContents
+from rope.refactor import (occurrences, rename, sourceutils,
+ importutils, move, change_signature)
+
def unique_prefix():
    """Yield an endless sequence of prefixes: "__0__", "__1__", ..."""
    counter = 0
    while True:
        yield "__%d__" % counter
        counter += 1
+
def create_inline(project, resource, offset):
    """Create a refactoring object for inlining

    Based on `resource` and `offset` it returns an instance of
    `InlineMethod`, `InlineVariable` or `InlineParameter`.

    """
    pycore = project.pycore
    pyname = _get_pyname(pycore, resource, offset)
    message = 'Inline refactoring should be performed on ' \
              'a method, local variable or parameter.'
    if pyname is None:
        raise rope.base.exceptions.RefactoringError(message)
    if isinstance(pyname, pynames.ImportedName):
        # Follow the import to the real definition before dispatching.
        pyname = pyname._get_imported_pyname()
    if isinstance(pyname, pynames.AssignedName):
        return InlineVariable(project, resource, offset)
    elif isinstance(pyname, pynames.ParameterName):
        return InlineParameter(project, resource, offset)
    elif isinstance(pyname.get_object(), pyobjects.PyFunction):
        return InlineMethod(project, resource, offset)
    raise rope.base.exceptions.RefactoringError(message)
+
+
class _Inliner(object):
    """Base class for the concrete inline refactorings."""

    def __init__(self, project, resource, offset):
        self.project = project
        self.pycore = project.pycore
        self.pyname = _get_pyname(self.pycore, resource, offset)
        finder = worder.Worder(resource.read())
        self.region = finder.get_primary_range(offset)
        self.name = finder.get_word_at(offset)
        self.offset = offset
        self.original = resource

    def get_changes(self, *args, **kwds):
        pass

    def get_kind(self):
        """Return either 'variable', 'method' or 'parameter'"""
+
+
class InlineMethod(_Inliner):
    """Inline a function or method at its call sites."""

    def __init__(self, *args, **kwds):
        super(InlineMethod, self).__init__(*args, **kwds)
        self.pyfunction = self.pyname.get_object()
        self.pymodule = self.pyfunction.get_module()
        self.resource = self.pyfunction.get_module().get_resource()
        self.occurrence_finder = occurrences.create_finder(
            self.pycore, self.name, self.pyname)
        self.normal_generator = _DefinitionGenerator(self.project,
                                                     self.pyfunction)
        self._init_imports()

    def _init_imports(self):
        # Prepare a body variant whose imports can be moved into other
        # modules when the call site lives elsewhere.
        body = sourceutils.get_body(self.pyfunction)
        body, imports = move.moving_code_with_imports(
            self.pycore, self.resource, body)
        self.imports = imports
        self.others_generator = _DefinitionGenerator(
            self.project, self.pyfunction, body=body)

    def _get_scope_range(self):
        # Offsets of the whole definition, including decorators.
        # FIX: dropped the unused `logicals` local.
        scope = self.pyfunction.get_scope()
        lines = self.pymodule.lines
        start_line = scope.get_start()
        if self.pyfunction.decorators:
            decorators = self.pyfunction.decorators
            if hasattr(decorators[0], 'lineno'):
                start_line = decorators[0].lineno
        start_offset = lines.get_line_start(start_line)
        end_offset = min(lines.get_line_end(scope.end) + 1,
                         len(self.pymodule.source_code))
        return (start_offset, end_offset)

    def get_changes(self, remove=True, only_current=False, resources=None,
                    task_handle=taskhandle.NullTaskHandle()):
        """Get the changes this refactoring makes

        If `remove` is `False` the definition will not be removed.  If
        `only_current` is `True`, only the current occurrence will be
        inlined.
        """
        changes = ChangeSet('Inline method <%s>' % self.name)
        if resources is None:
            resources = self.pycore.get_python_files()
        if only_current:
            resources = [self.original]
            # Consistency with `InlineVariable.get_changes`: only append
            # the defining file when it is not already the current one,
            # so it is not processed twice.
            if remove and self.resource != self.original:
                resources.append(self.resource)
        job_set = task_handle.create_jobset('Collecting Changes',
                                            len(resources))
        for file in resources:
            job_set.started_job(file.path)
            if file == self.resource:
                changes.add_change(self._defining_file_changes(
                    changes, remove=remove, only_current=only_current))
            else:
                aim = None
                if only_current and self.original == file:
                    aim = self.offset
                handle = _InlineFunctionCallsForModuleHandle(
                    self.pycore, file, self.others_generator, aim)
                result = move.ModuleSkipRenamer(
                    self.occurrence_finder, file, handle).get_changed_module()
                if result is not None:
                    result = _add_imports(self.pycore, result,
                                          file, self.imports)
                    if remove:
                        result = _remove_from(self.pycore, self.pyname,
                                              result, file)
                    changes.add_change(ChangeContents(file, result))
            job_set.finished_job()
        return changes

    def _get_removed_range(self):
        # Definition range extended over the blank lines that follow it.
        # FIX: dropped the unused `logical` and `start_line` locals.
        scope = self.pyfunction.get_scope()
        lines = self.pymodule.lines
        start, end = self._get_scope_range()
        end_line = scope.get_end()
        for i in range(end_line + 1, lines.length()):
            if lines.get_line(i).strip() == '':
                end_line = i
            else:
                break
        end = min(lines.get_line_end(end_line) + 1,
                  len(self.pymodule.source_code))
        return (start, end)

    def _defining_file_changes(self, changes, remove, only_current):
        start_offset, end_offset = self._get_removed_range()
        aim = None
        if only_current:
            if self.resource == self.original:
                aim = self.offset
            else:
                # we don't want to change any of them
                aim = len(self.resource.read()) + 100
        handle = _InlineFunctionCallsForModuleHandle(
            self.pycore, self.resource,
            self.normal_generator, aim_offset=aim)
        replacement = None
        if remove:
            replacement = self._get_method_replacement()
        result = move.ModuleSkipRenamer(
            self.occurrence_finder, self.resource, handle, start_offset,
            end_offset, replacement).get_changed_module()
        return ChangeContents(self.resource, result)

    def _get_method_replacement(self):
        # Keep the class body syntactically valid when removing its only
        # method.
        if self._is_the_last_method_of_a_class():
            indents = sourceutils.get_indents(
                self.pymodule.lines, self.pyfunction.get_scope().get_start())
            return ' ' * indents + 'pass\n'
        return ''

    def _is_the_last_method_of_a_class(self):
        pyclass = self.pyfunction.parent
        if not isinstance(pyclass, pyobjects.PyClass):
            return False
        class_start, class_end = sourceutils.get_body_region(pyclass)
        source = self.pymodule.source_code
        func_start, func_end = self._get_scope_range()
        if source[class_start:func_start].strip() == '' and \
           source[func_end:class_end].strip() == '':
            return True
        return False

    def get_kind(self):
        return 'method'
+
+
class InlineVariable(_Inliner):
    # Inline a variable's (single) assigned value at its use sites.

    def __init__(self, *args, **kwds):
        super(InlineVariable, self).__init__(*args, **kwds)
        self.pymodule = self.pyname.get_definition_location()[0]
        self.resource = self.pymodule.get_resource()
        self._check_exceptional_conditions()
        self._init_imports()

    def _check_exceptional_conditions(self):
        # Multiple assignments would make the inlined value ambiguous.
        if len(self.pyname.assignments) != 1:
            raise rope.base.exceptions.RefactoringError(
                'Local variable should be assigned once for inlining.')

    def get_changes(self, remove=True, only_current=False, resources=None,
                    task_handle=taskhandle.NullTaskHandle()):
        # Build the list of files to touch: only the defining file for a
        # local name, otherwise every python file in the project; with
        # `only_current`, just the file the refactoring started in (plus
        # the defining file when the definition must be removed).
        if resources is None:
            if rename._is_local(self.pyname):
                resources = [self.resource]
            else:
                resources = self.pycore.get_python_files()
        if only_current:
            resources = [self.original]
            if remove and self.original != self.resource:
                resources.append(self.resource)
        changes = ChangeSet('Inline variable <%s>' % self.name)
        jobset = task_handle.create_jobset('Calculating changes',
                                           len(resources))

        for resource in resources:
            jobset.started_job(resource.path)
            if resource == self.resource:
                source = self._change_main_module(remove, only_current)
                changes.add_change(ChangeContents(self.resource, source))
            else:
                result = self._change_module(resource, remove, only_current)
                if result is not None:
                    result = _add_imports(self.pycore, result,
                                          resource, self.imports)
                    changes.add_change(ChangeContents(resource, result))
            jobset.finished_job()
        return changes

    def _change_main_module(self, remove, only_current):
        # In the defining module, restrict the change to the selected
        # occurrence when `only_current` is set.
        region = None
        if only_current and self.original == self.resource:
            region = self.region
        return _inline_variable(self.pycore, self.pymodule, self.pyname,
                                self.name, remove=remove, region=region)

    def _init_imports(self):
        # Rewrite the assigned value so it can be moved into other
        # modules, collecting the imports it needs along the way.
        vardef = _getvardef(self.pymodule, self.pyname)
        self.imported, self.imports = move.moving_code_with_imports(
            self.pycore, self.resource, vardef)

    def _change_module(self, resource, remove, only_current):
        filters = [occurrences.NoImportsFilter(),
                   occurrences.PyNameFilter(self.pyname)]
        if only_current and resource == self.original:
            # Accept only the occurrence whose range contains the offset
            # the refactoring was started at.
            def check_aim(occurrence):
                start, end = occurrence.get_primary_range()
                if self.offset < start or end < self.offset:
                    return False
            filters.insert(0, check_aim)
        finder = occurrences.Finder(self.pycore, self.name, filters=filters)
        changed = rename.rename_in_module(
            finder, self.imported, resource=resource, replace_primary=True)
        if changed and remove:
            changed = _remove_from(self.pycore, self.pyname, changed, resource)
        return changed

    def get_kind(self):
        return 'variable'
+
+
class InlineParameter(_Inliner):
    """Inline a parameter's default value at its call sites."""

    def __init__(self, *args, **kwds):
        super(InlineParameter, self).__init__(*args, **kwds)
        resource, offset = self._function_location()
        index = self.pyname.index
        self.changers = [change_signature.ArgumentDefaultInliner(index)]
        self.signature = change_signature.ChangeSignature(self.project,
                                                          resource, offset)

    def _function_location(self):
        # Locate the enclosing function definition so the change
        # signature refactoring can be anchored on it.
        pymodule, lineno = self.pyname.get_definition_location()
        definition_start = pymodule.lines.get_line_start(lineno)
        finder = worder.Worder(pymodule.source_code)
        offset = finder.find_function_offset(definition_start)
        return pymodule.get_resource(), offset

    def get_changes(self, **kwds):
        """Get the changes needed by this refactoring

        See `rope.refactor.change_signature.ChangeSignature.get_changes()`
        for arguments.
        """
        return self.signature.get_changes(self.changers, **kwds)

    def get_kind(self):
        return 'parameter'
+
+
+def _join_lines(lines):
+ definition_lines = []
+ for unchanged_line in lines:
+ line = unchanged_line.strip()
+ if line.endswith('\\'):
+ line = line[:-1].strip()
+ definition_lines.append(line)
+ joined = ' '.join(definition_lines)
+ return joined
+
+
+class _DefinitionGenerator(object):
+ unique_prefix = unique_prefix()
    def __init__(self, project, pyfunction, body=None):
        # Pre-compute everything needed to inline `pyfunction`.  `body`
        # may override the function's own body (used when the body has
        # been rewritten with its imports moved).
        self.pycore = project.pycore
        self.pyfunction = pyfunction
        self.pymodule = pyfunction.get_module()
        self.resource = self.pymodule.get_resource()
        self.definition_info = self._get_definition_info()
        self.definition_params = self._get_definition_params()
        # Cache slot for computed definitions; populated elsewhere.
        self._calculated_definitions = {}
        if body is not None:
            self.body = body
        else:
            self.body = sourceutils.get_body(self.pyfunction)
+
    def _get_definition_info(self):
        # Parsed signature (parameters and defaults) of the function.
        return rope.refactor.functionutils.DefinitionInfo.read(self.pyfunction)
+
+ def _get_definition_params(self):
+ definition_info = self.definition_info
+ paramdict = dict([pair for pair in definition_info.args_with_defaults])
+ if definition_info.args_arg is not None or \
+ definition_info.keywords_arg is not None:
+ raise rope.base.exceptions.RefactoringError(
+ 'Cannot inline functions with list and keyword arguements.')
+ if self.pyfunction.get_kind() == 'classmethod':
+ paramdict[definition_info.args_with_defaults[0][0]] = \
+ self.pyfunction.parent.get_name()
+ return paramdict
+
    def get_function_name(self):
        # Name of the function being inlined.
        return self.pyfunction.get_name()
+
+ def get_definition(self, primary, pyname, call, host_vars=[],returns=False):
+ # caching already calculated definitions
+ return self._calculate_definition(primary, pyname, call,
+ host_vars, returns)
+
+ def _calculate_header(self, primary, pyname, call):
+ # A header is created which initializes parameters
+ # to the values passed to the function.
+ call_info = rope.refactor.functionutils.CallInfo.read(
+ primary, pyname, self.definition_info, call)
+ paramdict = self.definition_params
+ mapping = rope.refactor.functionutils.ArgumentMapping(
+ self.definition_info, call_info)
+ for param_name, value in mapping.param_dict.items():
+ paramdict[param_name] = value
+ header = ''
+ to_be_inlined = []
+ mod = self.pycore.get_string_module(self.body)
+ all_names = mod.get_scope().get_names()
+ assigned_names = [name for name in all_names if
+ isinstance(all_names[name], rope.base.pynamesdef.AssignedName)]
+ for name, value in paramdict.items():
+ if name != value and value is not None:
+ header += name + ' = ' + value.replace('\n', ' ') + '\n'
+ to_be_inlined.append(name)
+ return header, to_be_inlined
+
    def _calculate_definition(self, primary, pyname, call, host_vars, returns):
        # Produce the text that replaces a call: parameter-binding header
        # plus the function body, with conflicting names renamed and
        # bound parameters inlined away again.

        header, to_be_inlined = self._calculate_header(primary, pyname, call)

        source = header + self.body
        mod = self.pycore.get_string_module(source)
        name_dict = mod.get_scope().get_names()
        all_names = [x for x in name_dict if
                     not isinstance(name_dict[x], rope.base.builtins.BuiltinName)]

        # If there is a name conflict, all variable names
        # inside the inlined function are renamed
        if len(set(all_names).intersection(set(host_vars))) > 0:

            # `.next()` is Python 2 generator protocol (this file predates
            # the py3 `next()` builtin usage elsewhere in rope).
            prefix = _DefinitionGenerator.unique_prefix.next()
            guest = self.pycore.get_string_module(source, self.resource)

            to_be_inlined = [prefix+item for item in to_be_inlined]
            for item in all_names:
                pyname = guest[item]
                occurrence_finder = occurrences.create_finder(
                    self.pycore, item, pyname)
                source = rename.rename_in_module(occurrence_finder,
                                                 prefix+item, pymodule=guest)
                # Re-parse after each rename so later lookups see it.
                guest = self.pycore.get_string_module(source, self.resource)

        #parameters not reassigned inside the functions are now inlined.
        for name in to_be_inlined:
            pymodule = self.pycore.get_string_module(source, self.resource)
            pyname = pymodule[name]
            source = _inline_variable(self.pycore, pymodule, pyname, name)

        return self._replace_returns_with(source, returns)
+
    def _replace_returns_with(self, source, returns):
        """Rewrite `return` statements in the inlined body.

        Returns (new_source, returned_expression).  When `returns` is
        true the expression after the last `return` is captured for the
        caller to assign at the call site; otherwise each bare `return`
        at end of line becomes `pass` to keep the block valid.
        """
        result = []
        returned = None
        last_changed = 0
        # The pattern also matches comments and strings so that the
        # word 'return' inside either is not treated as a statement.
        for match in _DefinitionGenerator._get_return_pattern().finditer(source):
            for key, value in match.groupdict().items():
                if value and key == 'return':
                    result.append(source[last_changed:match.start('return')])
                    if returns:
                        self._check_nothing_after_return(source,
                                                         match.end('return'))
                        returned = _join_lines(
                            source[match.end('return'): len(source)].splitlines())
                        last_changed = len(source)
                    else:
                        # Skip whitespace after 'return'.
                        current = match.end('return')
                        while current < len(source) and source[current] in ' \t':
                            current += 1
                        last_changed = current
                        if current == len(source) or source[current] == '\n':
                            result.append('pass')
        result.append(source[last_changed:])
        return ''.join(result), returned
+
    def _check_nothing_after_return(self, source, offset):
        """Raise if any code follows the logical line containing the return.

        Inlining a return whose value is used is only safe when the
        return is the last statement of the body.
        """
        lines = codeanalyze.SourceLinesAdapter(source)
        lineno = lines.get_line_number(offset)
        logical_lines = codeanalyze.LogicalLineFinder(lines)
        # [1] is the end line of the logical line holding the return.
        lineno = logical_lines.logical_line_in(lineno)[1]
        if source[lines.get_line_end(lineno):len(source)].strip() != '':
            raise rope.base.exceptions.RefactoringError(
                'Cannot inline functions with statements after return statement.')
+
    @classmethod
    def _get_return_pattern(cls):
        # Lazily build (and cache on the class) a regex that matches
        # 'return' as a word, while also consuming comments and string
        # literals so matches inside them can be ignored.
        if not hasattr(cls, '_return_pattern'):
            def named_pattern(name, list_):
                # Wrap the alternatives in a named group.
                return "(?P<%s>" % name + "|".join(list_) + ")"
            comment_pattern = named_pattern('comment', [r'#[^\n]*'])
            string_pattern = named_pattern('string',
                                           [codeanalyze.get_string_pattern()])
            return_pattern = r'\b(?P<return>return)\b'
            cls._return_pattern = re.compile(comment_pattern + "|" +
                                            string_pattern + "|" +
                                            return_pattern)
        return cls._return_pattern
+
+
class _InlineFunctionCallsForModuleHandle(object):
    """Occurrence handler that replaces calls with the inlined body."""

    def __init__(self, pycore, resource,
                 definition_generator, aim_offset=None):
        """Inlines occurrences

        If `aim` is not `None` only the occurrences that intersect
        `aim` offset will be inlined.

        """
        self.pycore = pycore
        self.generator = definition_generator
        self.resource = resource
        self.aim = aim_offset

    def occurred_inside_skip(self, change_collector, occurrence):
        # A reference inside the skipped region is the function
        # referring to itself; recursion cannot be inlined.
        if not occurrence.is_defined():
            raise rope.base.exceptions.RefactoringError(
                'Cannot inline functions that reference themselves')

    def occurred_outside_skip(self, change_collector, occurrence):
        start, end = occurrence.get_primary_range()
        # we remove out of date imports later
        if occurrence.is_in_import_statement():
            return
        # the function is referenced outside an import statement
        if not occurrence.is_called():
            raise rope.base.exceptions.RefactoringError(
                'Reference to inlining function other than function call'
                ' in <file: %s, offset: %d>' % (self.resource.path, start))
        # Honor the "only this occurrence" restriction, if any.
        if self.aim is not None and (self.aim < start or self.aim > end):
            return
        end_parens = self._find_end_parens(self.source, end - 1)
        lineno = self.lines.get_line_number(start)
        start_line, end_line = self.pymodule.logical_lines.\
            logical_line_in(lineno)
        line_start = self.lines.get_line_start(start_line)
        line_end = self.lines.get_line_end(end_line)

        # The call's value is used when anything else appears on the
        # logical line before or after the call expression.
        returns = self.source[line_start:start].strip() != '' or \
            self.source[end_parens:line_end].strip() != ''
        indents = sourceutils.get_indents(self.lines, start_line)
        primary, pyname = occurrence.get_primary_and_pyname()

        host = self.pycore.resource_to_pyobject(self.resource)
        scope = host.scope.get_inner_scope_for_line(lineno)
        definition, returned = self.generator.get_definition(
            primary, pyname, self.source[start:end_parens],
            scope.get_names(), returns=returns)

        end = min(line_end + 1, len(self.source))
        # Replace the whole logical line with the inlined body.
        change_collector.add_change(line_start, end,
                                    sourceutils.fix_indentation(definition, indents))
        if returns:
            name = returned
            if name is None:
                name = 'None'
            # Rebuild the original line with the call replaced by the
            # returned expression.
            # NOTE(review): this change spans (line_end, end), i.e. the
            # trailing newline region -- presumably relying on
            # ChangeCollector ordering; confirm before altering.
            change_collector.add_change(
                line_end, end, self.source[line_start:start] + name +
                self.source[end_parens:end])

    def _find_end_parens(self, source, offset):
        # Offset just past the call's closing parenthesis.
        finder = worder.Worder(source)
        return finder.get_word_parens_range(offset)[1]

    @property
    @utils.saveit
    def pymodule(self):
        # Cached parsed module for the target resource.
        return self.pycore.resource_to_pyobject(self.resource)

    @property
    @utils.saveit
    def source(self):
        # Cached module text; falls back to the parsed module's code.
        if self.resource is not None:
            return self.resource.read()
        else:
            return self.pymodule.source_code

    @property
    @utils.saveit
    def lines(self):
        # Cached line-offset table for the module.
        return self.pymodule.lines
+
+
def _inline_variable(pycore, pymodule, pyname, name,
                     remove=True, region=None):
    """Replace reads of `name` with its assigned expression.

    When `remove` is true the defining assignment's lines are deleted
    as well.  `region` optionally restricts replacement to an offset
    range.  Returns the changed module source.
    """
    definition = _getvardef(pymodule, pyname)
    start, end = _assigned_lineno(pymodule, pyname)

    occurrence_finder = occurrences.create_finder(pycore, name, pyname)
    # writes=False: only read occurrences are substituted.
    changed_source = rename.rename_in_module(
        occurrence_finder, definition, pymodule=pymodule,
        replace_primary=True, writes=False, region=region)
    if changed_source is None:
        # No occurrences found; keep the module text unchanged.
        changed_source = pymodule.source_code
    if remove:
        lines = codeanalyze.SourceLinesAdapter(changed_source)
        source = changed_source[:lines.get_line_start(start)] + \
            changed_source[lines.get_line_end(end) + 1:]
    else:
        source = changed_source
    return source
+
def _getvardef(pymodule, pyname):
    """Return the right-hand-side text of `pyname`'s first assignment."""
    assignment = pyname.assignments[0]
    lines = pymodule.lines
    start, end = _assigned_lineno(pymodule, pyname)
    definition_with_assignment = _join_lines(
        [lines.get_line(n) for n in range(start, end + 1)])
    if assignment.levels:
        # Tuple unpacking: there is no single textual RHS per name.
        raise rope.base.exceptions.RefactoringError(
            'Cannot inline tuple assignments.')
    # Everything after the first '=' is the definition expression.
    definition = definition_with_assignment[definition_with_assignment.
                                            index('=') + 1:].strip()
    return definition
+
def _assigned_lineno(pymodule, pyname):
    """Return the (start, end) logical-line range of `pyname`'s first
    assignment."""
    first_assignment = pyname.assignments[0]
    physical_lineno = first_assignment.ast_node.lineno
    return pymodule.logical_lines.logical_line_in(physical_lineno)
+
def _add_imports(pycore, source, resource, imports):
    """Add each import in `imports` to `source` and return the new text."""
    if not imports:
        return source
    pymodule = pycore.get_string_module(source, resource)
    module_import = importutils.get_module_imports(pycore, pymodule)
    for import_info in imports:
        module_import.add_import(import_info)
    source = module_import.get_changed_source()
    # Re-parse so organize_imports sees the newly added imports.
    pymodule = pycore.get_string_module(source, resource)
    import_tools = importutils.ImportTools(pycore)
    # NOTE(review): unused=False, sort=False presumably keep existing
    # imports and their order intact -- confirm against importutils.
    return import_tools.organize_imports(pymodule, unused=False, sort=False)
+
def _get_pyname(pycore, resource, offset):
    """Evaluate the name at `offset` in `resource`, following imports
    back to the imported definition."""
    pymodule = pycore.resource_to_pyobject(resource)
    result = evaluate.eval_location(pymodule, offset)
    if isinstance(result, pynames.ImportedName):
        # Resolve to the pyname behind the import.
        result = result._get_imported_pyname()
    return result
+
def _remove_from(pycore, pyname, source, resource):
    """Drop the import bringing `pyname` into `source`; return the new
    module text."""
    pymodule = pycore.get_string_module(source, resource)
    imports = importutils.get_module_imports(pycore, pymodule)
    imports.remove_pyname(pyname)
    return imports.get_changed_source()
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/introduce_factory.py b/.vim/bundle/python-mode/pylibs/rope/refactor/introduce_factory.py
@@ -0,0 +1,133 @@
+import rope.base.exceptions
+import rope.base.pyobjects
+from rope.base import taskhandle, evaluate
+from rope.base.change import (ChangeSet, ChangeContents)
+from rope.refactor import rename, occurrences, sourceutils, importutils
+
+
class IntroduceFactory(object):
    """Introduce a factory function/method for constructing a class.

    Calls to the class are redirected to a new factory that forwards
    ``*args, **kwds`` to the class -- either a staticmethod on the
    class itself or a global function.
    """

    def __init__(self, project, resource, offset):
        self.pycore = project.pycore
        self.offset = offset

        this_pymodule = self.pycore.resource_to_pyobject(resource)
        self.old_pyname = evaluate.eval_location(this_pymodule, offset)
        if self.old_pyname is None or not isinstance(self.old_pyname.get_object(),
                                                     rope.base.pyobjects.PyClass):
            raise rope.base.exceptions.RefactoringError(
                'Introduce factory should be performed on a class.')
        self.old_name = self.old_pyname.get_object().get_name()
        # Use the module that defines the class, which may differ from
        # the resource the offset was given in.
        self.pymodule = self.old_pyname.get_object().get_module()
        self.resource = self.pymodule.get_resource()

    def get_changes(self, factory_name, global_factory=False, resources=None,
                    task_handle=taskhandle.NullTaskHandle()):
        """Get the changes this refactoring makes

        `factory_name` indicates the name of the factory function to
        be added.  If `global_factory` is `True` the factory will be
        global otherwise a static method is added to the class.

        `resources` can be a list of `rope.base.resource.File`\s that
        this refactoring should be applied on; if `None` all python
        files in the project are searched.

        """
        if resources is None:
            resources = self.pycore.get_python_files()
        changes = ChangeSet('Introduce factory method <%s>' % factory_name)
        job_set = task_handle.create_jobset('Collecting Changes',
                                            len(resources))
        self._change_module(resources, changes, factory_name,
                            global_factory, job_set)
        return changes

    def get_name(self):
        """Return the name of the class"""
        return self.old_name

    def _change_module(self, resources, changes,
                       factory_name, global_, job_set):
        # Rewrite constructor calls in every resource; the defining
        # module itself is handled by _change_resource.
        if global_:
            # Temporary placeholder; replaced below with the properly
            # imported name per module.
            replacement = '__rope_factory_%s_' % factory_name
        else:
            replacement = self._new_function_name(factory_name, global_)

        for file_ in resources:
            job_set.started_job(file_.path)
            if file_ == self.resource:
                self._change_resource(changes, factory_name, global_)
                job_set.finished_job()
                continue
            changed_code = self._rename_occurrences(file_, replacement,
                                                    global_)
            if changed_code is not None:
                if global_:
                    new_pymodule = self.pycore.get_string_module(changed_code,
                                                                 self.resource)
                    modname = self.pycore.modname(self.resource)
                    # Import the factory and swap the placeholder for
                    # the imported name.
                    changed_code, imported = importutils.add_import(
                        self.pycore, new_pymodule, modname, factory_name)
                    changed_code = changed_code.replace(replacement, imported)
                changes.add_change(ChangeContents(file_, changed_code))
            job_set.finished_job()

    def _change_resource(self, changes, factory_name, global_):
        # Rewrite calls in the defining module and insert the factory
        # definition after the class body.
        class_scope = self.old_pyname.get_object().get_scope()
        source_code = self._rename_occurrences(
            self.resource, self._new_function_name(factory_name,
                                                   global_), global_)
        if source_code is None:
            source_code = self.pymodule.source_code
        else:
            # Re-parse so the insertion offset is computed against the
            # already-rewritten source.
            self.pymodule = self.pycore.get_string_module(
                source_code, resource=self.resource)
        lines = self.pymodule.lines
        start = self._get_insertion_offset(class_scope, lines)
        result = source_code[:start]
        result += self._get_factory_method(lines, class_scope,
                                           factory_name, global_)
        result += source_code[start:]
        changes.add_change(ChangeContents(self.resource, result))

    def _get_insertion_offset(self, class_scope, lines):
        # Offset just past the class body (or its last inner scope).
        start_line = class_scope.get_end()
        if class_scope.get_scopes():
            start_line = class_scope.get_scopes()[-1].get_end()
        start = lines.get_line_end(start_line) + 1
        return start

    def _get_factory_method(self, lines, class_scope,
                            factory_name, global_):
        # Build the factory source: a global def, or an indented
        # staticmethod inside the class.
        unit_indents = ' ' * sourceutils.get_indent(self.pycore)
        if global_:
            if self._get_scope_indents(lines, class_scope) > 0:
                raise rope.base.exceptions.RefactoringError(
                    'Cannot make global factory method for nested classes.')
            return ('\ndef %s(*args, **kwds):\n%sreturn %s(*args, **kwds)\n' %
                    (factory_name, unit_indents, self.old_name))
        unindented_factory = \
            ('@staticmethod\ndef %s(*args, **kwds):\n' % factory_name +
             '%sreturn %s(*args, **kwds)\n' % (unit_indents, self.old_name))
        indents = self._get_scope_indents(lines, class_scope) + \
            sourceutils.get_indent(self.pycore)
        return '\n' + sourceutils.indent_lines(unindented_factory, indents)

    def _get_scope_indents(self, lines, scope):
        # Indentation (in columns) of the scope's first line.
        return sourceutils.get_indents(lines, scope.get_start())

    def _new_function_name(self, factory_name, global_):
        # Qualified name callers will use for the factory.
        if global_:
            return factory_name
        else:
            return self.old_name + '.' + factory_name

    def _rename_occurrences(self, file_, changed_name, global_factory):
        # Rewrite only call sites of the class (only_calls=True);
        # returns None when the file contains no occurrences.
        finder = occurrences.create_finder(self.pycore, self.old_name,
                                           self.old_pyname, only_calls=True)
        result = rename.rename_in_module(finder, changed_name, resource=file_,
                                         replace_primary=global_factory)
        return result

# Backward-compatible alias for the old public class name.
IntroduceFactoryRefactoring = IntroduceFactory
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/introduce_parameter.py b/.vim/bundle/python-mode/pylibs/rope/refactor/introduce_parameter.py
@@ -0,0 +1,95 @@
+import rope.base.change
+from rope.base import exceptions, evaluate, worder, codeanalyze
+from rope.refactor import functionutils, sourceutils, occurrences
+
+
class IntroduceParameter(object):
    """Introduce parameter refactoring

    This refactoring adds a new parameter to a function and replaces
    references to an expression in it with the new parameter.

    The parameter finding part is different from finding similar
    pieces in extract refactorings.  In this refactoring parameters
    are found based on the object they reference to.  For instance
    in::

        class A(object):
            var = None

        class B(object):
            a = A()

        b = B()
        a = b.a

        def f(a):
            x = b.a.var + a.var

    using this refactoring on ``a.var`` with ``p`` as the new
    parameter name, will result in::

        def f(p=a.var):
            x = p + p

    """

    def __init__(self, project, resource, offset):
        self.pycore = project.pycore
        self.resource = resource
        self.offset = offset
        self.pymodule = self.pycore.resource_to_pyobject(self.resource)
        scope = self.pymodule.get_scope().get_inner_scope_for_offset(offset)
        if scope.get_kind() != 'Function':
            raise exceptions.RefactoringError(
                'Introduce parameter should be performed inside functions')
        self.pyfunction = scope.pyobject
        self.name, self.pyname = self._get_name_and_pyname()
        if self.pyname is None:
            raise exceptions.RefactoringError(
                'Cannot find the definition of <%s>' % self.name)

    def _get_primary(self):
        # Full primary expression at the offset (e.g. 'b.a.var'),
        # used as the new parameter's default value.
        word_finder = worder.Worder(self.resource.read())
        return word_finder.get_primary_at(self.offset)

    def _get_name_and_pyname(self):
        # The textual name at the offset and the pyname it resolves to.
        return (worder.get_name_at(self.resource, self.offset),
                evaluate.eval_location(self.pymodule, self.offset))

    def get_changes(self, new_parameter):
        """Return a ChangeSet adding `new_parameter` to the function and
        replacing references to the chosen expression with it."""
        definition_info = functionutils.DefinitionInfo.read(self.pyfunction)
        # The new parameter defaults to the original expression.
        definition_info.args_with_defaults.append((new_parameter,
                                                   self._get_primary()))
        collector = codeanalyze.ChangeCollector(self.resource.read())
        header_start, header_end = self._get_header_offsets()
        body_start, body_end = sourceutils.get_body_region(self.pyfunction)
        collector.add_change(header_start, header_end,
                             definition_info.to_string())
        self._change_function_occurances(collector, body_start,
                                         body_end, new_parameter)
        changes = rope.base.change.ChangeSet('Introduce parameter <%s>' %
                                             new_parameter)
        change = rope.base.change.ChangeContents(self.resource,
                                                 collector.get_changed())
        changes.add_change(change)
        return changes

    def _get_header_offsets(self):
        # Offsets of the parameter list: after 'def ' up to the last
        # ':' of the (possibly multi-line) definition header.
        lines = self.pymodule.lines
        start_line = self.pyfunction.get_scope().get_start()
        end_line = self.pymodule.logical_lines.\
            logical_line_in(start_line)[1]
        start = lines.get_line_start(start_line)
        end = lines.get_line_end(end_line)
        # +4 skips past 'def '.
        start = self.pymodule.source_code.find('def', start) + 4
        end = self.pymodule.source_code.rfind(':', start, end)
        return start, end

    def _change_function_occurances(self, collector, function_start,
                                    function_end, new_name):
        # Replace references to the expression inside the function body.
        finder = occurrences.create_finder(self.pycore, self.name, self.pyname)
        for occurrence in finder.find_occurrences(resource=self.resource):
            start, end = occurrence.get_primary_range()
            if function_start <= start < function_end:
                collector.add_change(start, end, new_name)
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/localtofield.py b/.vim/bundle/python-mode/pylibs/rope/refactor/localtofield.py
@@ -0,0 +1,50 @@
+from rope.base import pynames, evaluate, exceptions, worder
+from rope.refactor.rename import Rename
+
+
class LocalToField(object):
    """Convert a method-local variable to an instance field.

    The local `name` is renamed to `self.name` (using the method's
    first parameter as the receiver) throughout its resource.
    """

    def __init__(self, project, resource, offset):
        self.project = project
        self.pycore = project.pycore
        self.resource = resource
        self.offset = offset

    def get_changes(self):
        """Return a ChangeSet renaming the local to a field access.

        Raises RefactoringError when the offset is not on a local
        variable of a method.
        """
        name = worder.get_name_at(self.resource, self.offset)
        this_pymodule = self.pycore.resource_to_pyobject(self.resource)
        pyname = evaluate.eval_location(this_pymodule, self.offset)
        if not self._is_a_method_local(pyname):
            raise exceptions.RefactoringError(
                'Convert local variable to field should be performed on \n'
                'a local variable of a method.')

        pymodule, lineno = pyname.get_definition_location()
        function_scope = pymodule.get_scope().get_inner_scope_for_line(lineno)
        # Not checking redefinition
        #self._check_redefinition(name, function_scope)

        new_name = self._get_field_name(function_scope.pyobject, name)
        # Delegate the actual text rewriting to the Rename refactoring,
        # limited to this resource.
        changes = Rename(self.project, self.resource, self.offset).\
            get_changes(new_name, resources=[self.resource])
        return changes

    def _check_redefinition(self, name, function_scope):
        # Currently unused (see get_changes); would refuse the
        # conversion when the class already has a field of this name.
        class_scope = function_scope.parent
        if name in class_scope.pyobject:
            raise exceptions.RefactoringError(
                'The field %s already exists' % name)

    def _get_field_name(self, pyfunction, name):
        # Prefix with the method's first parameter (usually 'self').
        self_name = pyfunction.get_param_names()[0]
        new_name = self_name + '.' + name
        return new_name

    def _is_a_method_local(self, pyname):
        # True when pyname is an assigned name defined directly in a
        # function whose enclosing scope is a class.
        pymodule, lineno = pyname.get_definition_location()
        holding_scope = pymodule.get_scope().get_inner_scope_for_line(lineno)
        parent = holding_scope.parent
        return isinstance(pyname, pynames.AssignedName) and \
            pyname in holding_scope.get_names().values() and \
            holding_scope.get_kind() == 'Function' and \
            parent is not None and parent.get_kind() == 'Class'
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/method_object.py b/.vim/bundle/python-mode/pylibs/rope/refactor/method_object.py
@@ -0,0 +1,87 @@
+import warnings
+
+from rope.base import pyobjects, exceptions, change, evaluate, codeanalyze
+from rope.refactor import sourceutils, occurrences, rename
+
+
class MethodObject(object):
    """Replace-method-with-method-object refactoring.

    Turns a function into a class with an ``__init__`` capturing the
    original parameters and a ``__call__`` holding the body; the
    original function becomes ``return NewClass(args)()``.
    """

    def __init__(self, project, resource, offset):
        self.pycore = project.pycore
        this_pymodule = self.pycore.resource_to_pyobject(resource)
        pyname = evaluate.eval_location(this_pymodule, offset)
        if pyname is None or not isinstance(pyname.get_object(),
                                            pyobjects.PyFunction):
            raise exceptions.RefactoringError(
                'Replace method with method object refactoring should be '
                'performed on a function.')
        self.pyfunction = pyname.get_object()
        self.pymodule = self.pyfunction.get_module()
        self.resource = self.pymodule.get_resource()

    def get_new_class(self, name):
        """Return the source of the generated method-object class."""
        body = sourceutils.fix_indentation(
            self._get_body(), sourceutils.get_indent(self.pycore) * 2)
        return 'class %s(object):\n\n%s%sdef __call__(self):\n%s' % \
               (name, self._get_init(),
                ' ' * sourceutils.get_indent(self.pycore), body)

    def get_changes(self, classname=None, new_class_name=None):
        """Return a ChangeSet performing the refactoring.

        `new_class_name` is a deprecated alias for `classname`.
        """
        if new_class_name is not None:
            warnings.warn(
                'new_class_name parameter is deprecated; use classname',
                DeprecationWarning, stacklevel=2)
            classname = new_class_name
        collector = codeanalyze.ChangeCollector(self.pymodule.source_code)
        start, end = sourceutils.get_body_region(self.pyfunction)
        indents = sourceutils.get_indents(
            self.pymodule.lines, self.pyfunction.get_scope().get_start()) + \
            sourceutils.get_indent(self.pycore)
        # The original body is replaced by a one-line delegation.
        new_contents = ' ' * indents + 'return %s(%s)()\n' % \
                       (classname, ', '.join(self._get_parameter_names()))
        collector.add_change(start, end, new_contents)
        insertion = self._get_class_insertion_point()
        collector.add_change(insertion, insertion,
                             '\n\n' + self.get_new_class(classname))
        changes = change.ChangeSet('Replace method with method object refactoring')
        changes.add_change(change.ChangeContents(self.resource,
                                                 collector.get_changed()))
        return changes

    def _get_class_insertion_point(self):
        # Insert after the top-level definition containing the function.
        current = self.pyfunction
        while current.parent != self.pymodule:
            current = current.parent
        end = self.pymodule.lines.get_line_end(current.get_scope().get_end())
        return min(end + 1, len(self.pymodule.source_code))

    def _get_body(self):
        # Rewrite the body so each parameter is read as an attribute:
        # prepend 'param = None', rename occurrences to 'self.param',
        # then strip the helper line again.
        body = sourceutils.get_body(self.pyfunction)
        for param in self._get_parameter_names():
            body = param + ' = None\n' + body
            pymod = self.pycore.get_string_module(body, self.resource)
            pyname = pymod[param]
            finder = occurrences.create_finder(self.pycore, param, pyname)
            result = rename.rename_in_module(finder, 'self.' + param,
                                             pymodule=pymod)
            body = result[result.index('\n') + 1:]
        return body

    def _get_init(self):
        # Build __init__ storing every original parameter; the original
        # 'self' is renamed to 'host' to avoid clashing with the new
        # method object's own self.
        params = self._get_parameter_names()
        indents = ' ' * sourceutils.get_indent(self.pycore)
        if not params:
            return ''
        header = indents + 'def __init__(self'
        body = ''
        for arg in params:
            new_name = arg
            if arg == 'self':
                new_name = 'host'
            header += ', %s' % new_name
            body += indents * 2 + 'self.%s = %s\n' % (arg, new_name)
        header += '):'
        return '%s\n%s\n' % (header, body)

    def _get_parameter_names(self):
        # Parameter names of the function being converted.
        return self.pyfunction.get_param_names()
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/move.py b/.vim/bundle/python-mode/pylibs/rope/refactor/move.py
@@ -0,0 +1,629 @@
+"""A module containing classes for move refactoring
+
+`create_move()` is a factory for creating move refactoring objects
+based on inputs.
+
+"""
+from rope.base import pyobjects, codeanalyze, exceptions, pynames, taskhandle, evaluate, worder
+from rope.base.change import ChangeSet, ChangeContents, MoveResource
+from rope.refactor import importutils, rename, occurrences, sourceutils, functionutils
+
+
def create_move(project, resource, offset=None):
    """A factory for creating Move objects

    Based on `resource` and `offset`, return one of `MoveModule`,
    `MoveGlobal` or `MoveMethod` for performing move refactoring.

    """
    # No offset: the resource itself (a module/package) is being moved.
    if offset is None:
        return MoveModule(project, resource)
    this_pymodule = project.pycore.resource_to_pyobject(resource)
    pyname = evaluate.eval_location(this_pymodule, offset)
    if pyname is None:
        raise exceptions.RefactoringError(
            'Move only works on classes, functions, modules and methods.')
    pyobject = pyname.get_object()
    # Dispatch on what the offset resolves to.
    if isinstance(pyobject, pyobjects.PyModule) or \
       isinstance(pyobject, pyobjects.PyPackage):
        return MoveModule(project, pyobject.get_resource())
    if isinstance(pyobject, pyobjects.PyFunction) and \
       isinstance(pyobject.parent, pyobjects.PyClass):
        return MoveMethod(project, resource, offset)
    if isinstance(pyobject, pyobjects.PyDefinedObject) and \
       isinstance(pyobject.parent, pyobjects.PyModule):
        return MoveGlobal(project, resource, offset)
    raise exceptions.RefactoringError(
        'Move only works on global classes/functions, modules and methods.')
+
+
class MoveMethod(object):
    """For moving methods

    It makes a new method in the destination class and changes
    the body of the old method to call the new method.  You can
    inline the old method to change all of its occurrences.

    """

    def __init__(self, project, resource, offset):
        self.project = project
        self.pycore = project.pycore
        this_pymodule = self.pycore.resource_to_pyobject(resource)
        pyname = evaluate.eval_location(this_pymodule, offset)
        self.method_name = worder.get_name_at(resource, offset)
        self.pyfunction = pyname.get_object()
        # Only plain instance methods qualify (no static/classmethods).
        if self.pyfunction.get_kind() != 'method':
            raise exceptions.RefactoringError('Only normal methods'
                                              ' can be moved.')

    def get_changes(self, dest_attr, new_name=None, resources=None,
                    task_handle=taskhandle.NullTaskHandle()):
        """Return the changes needed for this refactoring

        Parameters:

        - `dest_attr`: the name of the destination attribute
        - `new_name`: the name of the new method; if `None` uses
          the old name
        - `resources` can be a list of `rope.base.resources.File`\s to
          apply this refactoring on.  If `None`, the restructuring
          will be applied to all python files.

        """
        changes = ChangeSet('Moving method <%s>' % self.method_name)
        if resources is None:
            resources = self.pycore.get_python_files()
        if new_name is None:
            new_name = self.get_method_name()
        # Edit 1: the old method's body becomes a delegating call.
        resource1, start1, end1, new_content1 = \
            self._get_changes_made_by_old_class(dest_attr, new_name)
        collector1 = codeanalyze.ChangeCollector(resource1.read())
        collector1.add_change(start1, end1, new_content1)

        # Edit 2: the method body is added to the destination class.
        resource2, start2, end2, new_content2 = \
            self._get_changes_made_by_new_class(dest_attr, new_name)
        if resource1 == resource2:
            # Same file: both edits go through one collector.
            collector1.add_change(start2, end2, new_content2)
        else:
            collector2 = codeanalyze.ChangeCollector(resource2.read())
            collector2.add_change(start2, end2, new_content2)
            result = collector2.get_changed()
            # Carry over any imports the moved body relies on.
            import_tools = importutils.ImportTools(self.pycore)
            new_imports = self._get_used_imports(import_tools)
            if new_imports:
                goal_pymodule = self.pycore.get_string_module(result,
                                                              resource2)
                result = _add_imports_to_module(
                    import_tools, goal_pymodule, new_imports)
            if resource2 in resources:
                changes.add_change(ChangeContents(resource2, result))

        if resource1 in resources:
            changes.add_change(ChangeContents(resource1,
                                              collector1.get_changed()))
        return changes

    def get_method_name(self):
        # Default name for the moved method (same as the old one).
        return self.method_name

    def _get_used_imports(self, import_tools):
        # Imports referenced by the method body.
        # NOTE(review): the import_tools parameter is unused here.
        return importutils.get_imports(self.pycore, self.pyfunction)

    def _get_changes_made_by_old_class(self, dest_attr, new_name):
        # Replace the old body with a call forwarded through dest_attr.
        pymodule = self.pyfunction.get_module()
        indents = self._get_scope_indents(self.pyfunction)
        body = 'return self.%s.%s(%s)\n' % (dest_attr, new_name,
                                            self._get_passed_arguments_string())
        region = sourceutils.get_body_region(self.pyfunction)
        return (pymodule.get_resource(), region[0], region[1],
                sourceutils.fix_indentation(body, indents))

    def _get_scope_indents(self, pyobject):
        # Indentation of the object's body (its own indent plus one unit).
        pymodule = pyobject.get_module()
        return sourceutils.get_indents(
            pymodule.lines, pyobject.get_scope().get_start()) + \
            sourceutils.get_indent(self.pycore)

    def _get_changes_made_by_new_class(self, dest_attr, new_name):
        # Locate the destination class via the type of dest_attr and
        # compute the insertion of the moved method at its body end.
        old_pyclass = self.pyfunction.parent
        if dest_attr not in old_pyclass:
            raise exceptions.RefactoringError(
                'Destination attribute <%s> not found' % dest_attr)
        pyclass = old_pyclass[dest_attr].get_object().get_type()
        if not isinstance(pyclass, pyobjects.PyClass):
            raise exceptions.RefactoringError(
                'Unknown class type for attribute <%s>' % dest_attr)
        pymodule = pyclass.get_module()
        resource = pyclass.get_module().get_resource()
        start, end = sourceutils.get_body_region(pyclass)
        pre_blanks = '\n'
        # A body that is just 'pass' is replaced; otherwise append
        # after the existing body with a separating blank line.
        if pymodule.source_code[start:end].strip() != 'pass':
            pre_blanks = '\n\n'
            start = end
        indents = self._get_scope_indents(pyclass)
        body = pre_blanks + sourceutils.fix_indentation(
            self.get_new_method(new_name), indents)
        return resource, start, end, body

    def get_new_method(self, name):
        """Return the source of the method as defined in the new class."""
        return '%s\n%s' % (
            self._get_new_header(name),
            sourceutils.fix_indentation(self._get_body(),
                                        sourceutils.get_indent(self.pycore)))

    def _get_unchanged_body(self):
        # Body text exactly as written in the old class.
        return sourceutils.get_body(self.pyfunction)

    def _get_body(self, host='host'):
        # Rename the old 'self' to `host` inside the body so the moved
        # method can still reach the original object.
        self_name = self._get_self_name()
        body = self_name + ' = None\n' + self._get_unchanged_body()
        pymodule = self.pycore.get_string_module(body)
        finder = occurrences.create_finder(
            self.pycore, self_name, pymodule[self_name])
        result = rename.rename_in_module(finder, host, pymodule=pymodule)
        if result is None:
            result = body
        # Drop the 'self_name = None' helper line again.
        return result[result.index('\n') + 1:]

    def _get_self_name(self):
        # The old method's first parameter (usually 'self').
        return self.pyfunction.get_param_names()[0]

    def _get_new_header(self, name):
        # New def line; a 'host' parameter is added only when the body
        # actually uses the original object.
        header = 'def %s(self' % name
        if self._is_host_used():
            header += ', host'
        definition_info = functionutils.DefinitionInfo.read(self.pyfunction)
        others = definition_info.arguments_to_string(1)
        if others:
            header += ', ' + others
        return header + '):'

    def _get_passed_arguments_string(self):
        # Arguments the delegating old method forwards to the new one.
        result = ''
        if self._is_host_used():
            result = 'self'
        definition_info = functionutils.DefinitionInfo.read(self.pyfunction)
        others = definition_info.arguments_to_string(1)
        if others:
            if result:
                result += ', '
            result += others
        return result

    def _is_host_used(self):
        # If renaming self changes nothing, the body never uses it.
        return self._get_body('__old_self') != self._get_unchanged_body()
+
+
+class MoveGlobal(object):
+ """For moving global function and classes"""
+
+ def __init__(self, project, resource, offset):
+ self.pycore = project.pycore
+ this_pymodule = self.pycore.resource_to_pyobject(resource)
+ self.old_pyname = evaluate.eval_location(this_pymodule, offset)
+ self.old_name = self.old_pyname.get_object().get_name()
+ pymodule = self.old_pyname.get_object().get_module()
+ self.source = pymodule.get_resource()
+ self.tools = _MoveTools(self.pycore, self.source,
+ self.old_pyname, self.old_name)
+ self.import_tools = self.tools.import_tools
+ self._check_exceptional_conditions()
+
+ def _check_exceptional_conditions(self):
+ if self.old_pyname is None or \
+ not isinstance(self.old_pyname.get_object(), pyobjects.PyDefinedObject):
+ raise exceptions.RefactoringError(
+ 'Move refactoring should be performed on a class/function.')
+ moving_pyobject = self.old_pyname.get_object()
+ if not self._is_global(moving_pyobject):
+ raise exceptions.RefactoringError(
+ 'Move refactoring should be performed on a global class/function.')
+
+ def _is_global(self, pyobject):
+ return pyobject.get_scope().parent == pyobject.get_module().get_scope()
+
+ def get_changes(self, dest, resources=None,
+ task_handle=taskhandle.NullTaskHandle()):
+ if resources is None:
+ resources = self.pycore.get_python_files()
+ if dest is None or not dest.exists():
+ raise exceptions.RefactoringError(
+ 'Move destination does not exist.')
+ if dest.is_folder() and dest.has_child('__init__.py'):
+ dest = dest.get_child('__init__.py')
+ if dest.is_folder():
+ raise exceptions.RefactoringError(
+ 'Move destination for non-modules should not be folders.')
+ if self.source == dest:
+ raise exceptions.RefactoringError(
+ 'Moving global elements to the same module.')
+ return self._calculate_changes(dest, resources, task_handle)
+
    def _calculate_changes(self, dest, resources, task_handle):
        # Build a ChangeSet covering the source module, the destination
        # module, and every other module that references the moved name.
        changes = ChangeSet('Moving global <%s>' % self.old_name)
        job_set = task_handle.create_jobset('Collecting Changes',
                                            len(resources))
        for file_ in resources:
            job_set.started_job(file_.path)
            if file_ == self.source:
                changes.add_change(self._source_module_changes(dest))
            elif file_ == dest:
                changes.add_change(self._dest_module_changes(dest))
            elif self.tools.occurs_in_module(resource=file_):
                pymodule = self.pycore.resource_to_pyobject(file_)
                # Changing occurrences: rename to a unique placeholder
                # first; it is replaced by the properly imported name
                # once the new import has been added below.
                placeholder = '__rope_renaming_%s_' % self.old_name
                source = self.tools.rename_in_module(placeholder,
                                                     resource=file_)
                # rename_in_module returns None when nothing changed.
                should_import = source is not None
                # Removing out of date imports
                pymodule = self.tools.new_pymodule(pymodule, source)
                source = self.tools.remove_old_imports(pymodule)
                # Adding new import
                if should_import:
                    pymodule = self.tools.new_pymodule(pymodule, source)
                    source, imported = importutils.add_import(
                        self.pycore, pymodule, self._new_modname(dest), self.old_name)
                    source = source.replace(placeholder, imported)
                source = self.tools.new_source(pymodule, source)
                if source != file_.read():
                    changes.add_change(ChangeContents(file_, source))
            job_set.finished_job()
        return changes
+
    def _source_module_changes(self, dest):
        # Remove the moved region from the source module.  Remaining
        # occurrences of the name are rewritten to a placeholder, which
        # is then replaced by the name imported back from `dest`.
        placeholder = '__rope_moving_%s_' % self.old_name
        handle = _ChangeMoveOccurrencesHandle(placeholder)
        occurrence_finder = occurrences.create_finder(
            self.pycore, self.old_name, self.old_pyname)
        start, end = self._get_moving_region()
        renamer = ModuleSkipRenamer(occurrence_finder, self.source,
                                    handle, start, end)
        source = renamer.get_changed_module()
        if handle.occurred:
            pymodule = self.pycore.get_string_module(source, self.source)
            # Adding new import
            source, imported = importutils.add_import(
                self.pycore, pymodule, self._new_modname(dest), self.old_name)
            source = source.replace(placeholder, imported)
        return ChangeContents(self.source, source)
+
+ def _new_modname(self, dest):
+ return self.pycore.modname(dest)
+
    def _dest_module_changes(self, dest):
        # Changing occurrences
        pymodule = self.pycore.resource_to_pyobject(dest)
        source = self.tools.rename_in_module(self.old_name, pymodule)
        pymodule = self.tools.new_pymodule(pymodule, source)

        # Fetch the moved code plus the imports it needs, drop imports
        # of the old location, and add the needed ones to `dest`.
        moving, imports = self._get_moving_element_with_imports()
        source = self.tools.remove_old_imports(pymodule)
        pymodule = self.tools.new_pymodule(pymodule, source)
        pymodule, has_changed = self._add_imports2(pymodule, imports)

        module_with_imports = self.import_tools.module_imports(pymodule)
        source = pymodule.source_code
        lineno = 0
        # Insert the moved code right after the import block, or after
        # any leading comment lines when there are no imports.
        if module_with_imports.imports:
            lineno = module_with_imports.imports[-1].end_line - 1
        else:
            while lineno < pymodule.lines.length() and \
                  pymodule.lines.get_line(lineno + 1).lstrip().startswith('#'):
                lineno += 1
        if lineno > 0:
            cut = pymodule.lines.get_line_end(lineno) + 1
            result = source[:cut] + '\n\n' + moving + source[cut:]
        else:
            result = moving + source

        # Organizing imports
        source = result
        pymodule = self.pycore.get_string_module(source, dest)
        source = self.import_tools.organize_imports(pymodule, sort=False,
                                                    unused=False)
        return ChangeContents(dest, source)
+
+ def _get_moving_element_with_imports(self):
+ return moving_code_with_imports(
+ self.pycore, self.source, self._get_moving_element())
+
+ def _get_module_with_imports(self, source_code, resource):
+ pymodule = self.pycore.get_string_module(source_code, resource)
+ return self.import_tools.module_imports(pymodule)
+
+ def _get_moving_element(self):
+ start, end = self._get_moving_region()
+ moving = self.source.read()[start:end]
+ return moving.rstrip() + '\n'
+
    def _get_moving_region(self):
        # Compute the [start, end) character span of the moved
        # definition, extended over any trailing blank lines.
        pymodule = self.pycore.resource_to_pyobject(self.source)
        lines = pymodule.lines
        scope = self.old_pyname.get_object().get_scope()
        start = lines.get_line_start(scope.get_start())
        end_line = scope.get_end()
        while end_line < lines.length() and \
              lines.get_line(end_line + 1).strip() == '':
            end_line += 1
        # `min` guards against running past the end of a file that does
        # not terminate with a newline.
        end = min(lines.get_line_end(end_line) + 1, len(pymodule.source_code))
        return start, end
+
+ def _add_imports2(self, pymodule, new_imports):
+ source = self.tools.add_imports(pymodule, new_imports)
+ if source is None:
+ return pymodule, False
+ else:
+ resource = pymodule.get_resource()
+ pymodule = self.pycore.get_string_module(source, resource)
+ return pymodule, True
+
+
class MoveModule(object):
    """For moving modules and packages"""

    def __init__(self, project, resource):
        # `resource` is the module file or package folder to move; an
        # `__init__.py` resource is normalized to its package folder.
        self.project = project
        self.pycore = project.pycore
        if not resource.is_folder() and resource.name == '__init__.py':
            resource = resource.parent
        if resource.is_folder() and not resource.has_child('__init__.py'):
            raise exceptions.RefactoringError(
                'Cannot move non-package folder.')
        # Resolve the module via an import from an empty dummy module so
        # `old_pyname` behaves like any other imported-module pyname.
        dummy_pymodule = self.pycore.get_string_module('')
        self.old_pyname = pynames.ImportedModule(dummy_pymodule,
                                                 resource=resource)
        self.source = self.old_pyname.get_object().get_resource()
        if self.source.is_folder():
            self.old_name = self.source.name
        else:
            # Strip the '.py' extension of plain module files.
            self.old_name = self.source.name[:-3]
        self.tools = _MoveTools(self.pycore, self.source,
                                self.old_pyname, self.old_name)
        self.import_tools = self.tools.import_tools

    def get_changes(self, dest, resources=None,
                    task_handle=taskhandle.NullTaskHandle()):
        """Compute the changes for moving the module into package `dest`.

        `resources` limits the files that are updated (defaults to all
        python files in the project).
        """
        # NOTE(review): `moving_pyobject` is unused, but `get_object()`
        # may have a validating side effect — confirm before removing.
        moving_pyobject = self.old_pyname.get_object()
        if resources is None:
            resources = self.pycore.get_python_files()
        if dest is None or not dest.is_folder():
            raise exceptions.RefactoringError(
                'Move destination for modules should be packages.')
        return self._calculate_changes(dest, resources, task_handle)

    def _calculate_changes(self, dest, resources, task_handle):
        changes = ChangeSet('Moving module <%s>' % self.old_name)
        job_set = task_handle.create_jobset('Collecting changes',
                                            len(resources))
        for module in resources:
            job_set.started_job(module.path)
            if module == self.source:
                self._change_moving_module(changes, dest)
            else:
                source = self._change_occurrences_in_module(dest,
                                                            resource=module)
                if source is not None:
                    changes.add_change(ChangeContents(module, source))
            job_set.finished_job()
        if self.project == self.source.project:
            # Only move the file/folder itself for in-project moves.
            changes.add_change(MoveResource(self.source, dest.path))
        return changes

    def _new_modname(self, dest):
        # Dotted name of the module after the move; `destname` is empty
        # when `dest` is a source root.
        destname = self.pycore.modname(dest)
        if destname:
            return destname + '.' + self.old_name
        return self.old_name

    def _new_import(self, dest):
        return importutils.NormalImport([(self._new_modname(dest), None)])

    def _change_moving_module(self, changes, dest):
        # Package folders need no content change; module files get their
        # relative imports absolutized and self-references updated.
        if not self.source.is_folder():
            pymodule = self.pycore.resource_to_pyobject(self.source)
            source = self.import_tools.relatives_to_absolutes(pymodule)
            pymodule = self.tools.new_pymodule(pymodule, source)
            source = self._change_occurrences_in_module(dest, pymodule)
            source = self.tools.new_source(pymodule, source)
            if source != self.source.read():
                changes.add_change(ChangeContents(self.source, source))

    def _change_occurrences_in_module(self, dest, pymodule=None,
                                      resource=None):
        # Returns the new source for the module, or None when unchanged.
        if not self.tools.occurs_in_module(pymodule=pymodule,
                                           resource=resource):
            return
        if pymodule is None:
            pymodule = self.pycore.resource_to_pyobject(resource)
        new_name = self._new_modname(dest)
        new_import = self._new_import(dest)
        source = self.tools.rename_in_module(
            new_name, imports=True, pymodule=pymodule, resource=resource)
        # Only add an import when the name occurs outside import
        # statements.
        should_import = self.tools.occurs_in_module(
            pymodule=pymodule, resource=resource, imports=False)
        pymodule = self.tools.new_pymodule(pymodule, source)
        source = self.tools.remove_old_imports(pymodule)
        if should_import:
            pymodule = self.tools.new_pymodule(pymodule, source)
            source = self.tools.add_imports(pymodule, [new_import])
        source = self.tools.new_source(pymodule, source)
        if source != pymodule.resource.read():
            return source
+
+
+class _ChangeMoveOccurrencesHandle(object):
+
+ def __init__(self, new_name):
+ self.new_name = new_name
+ self.occurred = False
+
+ def occurred_inside_skip(self, change_collector, occurrence):
+ pass
+
+ def occurred_outside_skip(self, change_collector, occurrence):
+ start, end = occurrence.get_primary_range()
+ change_collector.add_change(start, end, self.new_name)
+ self.occurred = True
+
+
class _MoveTools(object):
    """Helpers shared by the move refactorings for rewriting a module's
    occurrences and imports of the moved name."""

    def __init__(self, pycore, source, pyname, old_name):
        self.pycore = pycore
        self.source = source
        self.old_pyname = pyname
        self.old_name = old_name
        self.import_tools = importutils.ImportTools(self.pycore)

    def remove_old_imports(self, pymodule):
        """Drop imports of the moved name; returns new source or None."""
        old_source = pymodule.source_code
        module_with_imports = self.import_tools.module_imports(pymodule)
        # Predicate object handed to filter_names; closes over
        # `pymodule` and keeps every imported name except the old one.
        class CanSelect(object):
            changed = False
            old_name = self.old_name
            old_pyname = self.old_pyname
            def __call__(self, name):
                try:
                    if name == self.old_name and \
                       pymodule[name].get_object() == \
                       self.old_pyname.get_object():
                        self.changed = True
                        return False
                except exceptions.AttributeNotFoundError:
                    pass
                return True
        can_select = CanSelect()
        module_with_imports.filter_names(can_select)
        new_source = module_with_imports.get_changed_source()
        if old_source != new_source:
            return new_source

    def rename_in_module(self, new_name, pymodule=None,
                         imports=False, resource=None):
        """Rename occurrences of the old name; returns new source or None.

        `imports` controls whether occurrences inside import statements
        are matched too.
        """
        occurrence_finder = self._create_finder(imports)
        source = rename.rename_in_module(
            occurrence_finder, new_name, replace_primary=True,
            pymodule=pymodule, resource=resource)
        return source

    def occurs_in_module(self, pymodule=None, resource=None, imports=True):
        # True as soon as the finder yields a single occurrence.
        finder = self._create_finder(imports)
        for occurrence in finder.find_occurrences(pymodule=pymodule,
                                                  resource=resource):
            return True
        return False

    def _create_finder(self, imports):
        return occurrences.create_finder(self.pycore, self.old_name,
                                         self.old_pyname, imports=imports)

    def new_pymodule(self, pymodule, source):
        # Re-analyze only when a change produced new source.
        if source is not None:
            return self.pycore.get_string_module(
                source, pymodule.get_resource())
        return pymodule

    def new_source(self, pymodule, source):
        # Normalize a possibly-None change result to concrete source.
        if source is None:
            return pymodule.source_code
        return source

    def add_imports(self, pymodule, new_imports):
        return _add_imports_to_module(self.import_tools, pymodule, new_imports)
+
+
+def _add_imports_to_module(import_tools, pymodule, new_imports):
+ module_with_imports = import_tools.module_imports(pymodule)
+ for new_import in new_imports:
+ module_with_imports.add_import(new_import)
+ return module_with_imports.get_changed_source()
+
+
def moving_code_with_imports(pycore, resource, source):
    """Return (moving_code, imports) for moving `source` out of `resource`.

    The returned code has its import statements stripped (returned
    separately as import-info objects), relative imports absolutized,
    and a from-import prepared for names the moved code still needs
    from the original module.
    """
    import_tools = importutils.ImportTools(pycore)
    pymodule = pycore.get_string_module(source, resource)
    origin = pycore.resource_to_pyobject(resource)

    # Start with every import of the origin module.
    imports = []
    for stmt in import_tools.module_imports(origin).imports:
        imports.append(stmt.import_info)

    # Names defined in the origin module but absent from the moved code
    # must be imported back from the origin.
    back_names = []
    for name in origin:
        if name not in pymodule:
            back_names.append(name)
    imports.append(import_tools.get_from_import(resource, back_names))

    source = _add_imports_to_module(import_tools, pymodule, imports)
    pymodule = pycore.get_string_module(source, resource)

    source = import_tools.relatives_to_absolutes(pymodule)
    pymodule = pycore.get_string_module(source, resource)
    # Drop unused and self imports introduced by the blanket add above.
    source = import_tools.organize_imports(pymodule, selfs=False)
    pymodule = pycore.get_string_module(source, resource)

    # extracting imports after changes
    module_imports = import_tools.module_imports(pymodule)
    imports = [import_stmt.import_info
               for import_stmt in module_imports.imports]
    start = 1
    if module_imports.imports:
        start = module_imports.imports[-1].end_line
    lines = codeanalyze.SourceLinesAdapter(source)
    while start < lines.length() and not lines.get_line(start).strip():
        start += 1
    # Everything after the import block is the moving code.
    moving = source[lines.get_line_start(start):]
    return moving, imports
+
+
class ModuleSkipRenamerHandle(object):
    """Default no-op handle for `ModuleSkipRenamer`.

    Subclasses override these hooks to rewrite occurrences found
    outside or inside the skipped region.
    """

    def occurred_outside_skip(self, change_collector, occurrence):
        pass

    def occurred_inside_skip(self, change_collector, occurrence):
        pass
+
+
class ModuleSkipRenamer(object):
    """Rename occurrences in a module

    This class can be used when you want to treat a region in a file
    separately from other parts when renaming.

    """

    def __init__(self, occurrence_finder, resource, handle=None,
                 skip_start=0, skip_end=0, replacement=''):
        """Constructor

        if replacement is `None` the region is not changed.  Otherwise
        it is replaced with `replacement`.

        """
        self.occurrence_finder = occurrence_finder
        self.resource = resource
        self.skip_start = skip_start
        self.skip_end = skip_end
        self.replacement = replacement
        self.handle = handle
        if self.handle is None:
            # Fix: the default handle class is `ModuleSkipRenamerHandle`;
            # the previous `ModuleSkipHandle` name does not exist and
            # raised a NameError whenever no handle was supplied.
            self.handle = ModuleSkipRenamerHandle()

    def get_changed_module(self):
        """Return the rewritten module source, or None when unchanged.

        Occurrences whose start falls inside [skip_start, skip_end) are
        routed to `handle.occurred_inside_skip`, all others to
        `handle.occurred_outside_skip`.
        """
        source = self.resource.read()
        change_collector = codeanalyze.ChangeCollector(source)
        if self.replacement is not None:
            change_collector.add_change(self.skip_start, self.skip_end,
                                        self.replacement)
        for occurrence in self.occurrence_finder.find_occurrences(self.resource):
            start, end = occurrence.get_primary_range()
            if self.skip_start <= start < self.skip_end:
                self.handle.occurred_inside_skip(change_collector, occurrence)
            else:
                self.handle.occurred_outside_skip(change_collector, occurrence)
        result = change_collector.get_changed()
        if result is not None and result != source:
            return result
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/multiproject.py b/.vim/bundle/python-mode/pylibs/rope/refactor/multiproject.py
@@ -0,0 +1,78 @@
+"""This module can be used for performing cross-project refactorings
+
+See the "cross-project refactorings" section of ``docs/library.txt``
+file.
+
+"""
+
+from rope.base import resources, project, libutils
+
+
class MultiProjectRefactoring(object):
    # Factory that wraps a single-project refactoring class so it can be
    # applied across several projects at once.

    def __init__(self, refactoring, projects, addpath=True):
        """Create a multiproject proxy for the main refactoring

        `refactoring` is the refactoring class to wrap; `projects` is a
        list of the other projects to include besides the main one.
        """
        self.refactoring = refactoring
        self.projects = projects
        self.addpath = addpath

    def __call__(self, project, *args, **kwds):
        """Create the refactoring; `project` becomes the main project."""
        return _MultiRefactoring(self.refactoring, self.projects,
                                 self.addpath, project, *args, **kwds)
+
+
class _MultiRefactoring(object):
    # Internal proxy: instantiates the wrapped refactoring once per
    # project, translating resource arguments into each project.

    def __init__(self, refactoring, other_projects, addpath,
                 project, *args, **kwds):
        self.refactoring = refactoring
        self.projects = [project] + other_projects
        for other_project in other_projects:
            # Let the other projects resolve names from the main project
            # by extending their python_path with its source folders.
            for folder in self.project.pycore.get_source_folders():
                other_project.get_prefs().add('python_path', folder.real_path)
        self.refactorings = []
        for other in self.projects:
            # NOTE(review): `args`/`kwds` are rebound each iteration, so
            # each translation starts from the previous project's copy;
            # presumably the translation is path-based and idempotent —
            # confirm.
            args, kwds = self._resources_for_args(other, args, kwds)
            self.refactorings.append(
                self.refactoring(other, *args, **kwds))

    def get_all_changes(self, *args, **kwds):
        """Return a list of (project, changes) pairs for all projects."""
        result = []
        for project, refactoring in zip(self.projects, self.refactorings):
            args, kwds = self._resources_for_args(project, args, kwds)
            result.append((project, refactoring.get_changes(*args, **kwds)))
        return result

    def __getattr__(self, name):
        # Everything else is delegated to the main project's refactoring.
        return getattr(self.main_refactoring, name)

    def _resources_for_args(self, project, args, kwds):
        # Map every Resource argument into `project`.
        newargs = [self._change_project_resource(project, arg) for arg in args]
        newkwds = dict((name, self._change_project_resource(project, value))
                       for name, value in kwds.items())
        return newargs, newkwds

    def _change_project_resource(self, project, obj):
        if isinstance(obj, resources.Resource) and \
           obj.project != project:
            return libutils.path_to_resource(project, obj.real_path)
        return obj

    @property
    def project(self):
        # The main project is always first.
        return self.projects[0]

    @property
    def main_refactoring(self):
        return self.refactorings[0]
+
+
def perform(project_changes):
    """Apply every (project, changes) pair in `project_changes`."""
    for proj, change_set in project_changes:
        proj.do(change_set)
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/occurrences.py b/.vim/bundle/python-mode/pylibs/rope/refactor/occurrences.py
@@ -0,0 +1,334 @@
+import re
+
+import rope.base.pynames
+from rope.base import pynames, pyobjects, codeanalyze, evaluate, exceptions, utils, worder
+
+
class Finder(object):
    """For finding occurrences of a name

    The constructor takes a `filters` argument.  It should be a list
    of functions that take a single argument.  For each possible
    occurrence, these functions are called in order with the an
    instance of `Occurrence`:

    * If it returns `None` other filters are tried.
    * If it returns `True`, the occurrence will be a match.
    * If it returns `False`, the occurrence will be skipped.
    * If all of the filters return `None`, it is skipped also.

    """

    def __init__(self, pycore, name, filters=[lambda o: True], docs=False):
        self.pycore = pycore
        self.name = name
        self.docs = docs
        self.filters = filters
        self._textual_finder = _TextualFinder(name, docs=docs)

    def find_occurrences(self, resource=None, pymodule=None):
        """Generate `Occurrence` instances"""
        tools = _OccurrenceToolsCreator(self.pycore, resource=resource,
                                        pymodule=pymodule, docs=self.docs)
        offsets = self._textual_finder.find_offsets(tools.source_code)
        for offset in offsets:
            occurrence = Occurrence(tools, offset)
            if self._accepts(occurrence):
                yield occurrence

    def _accepts(self, occurrence):
        # The first filter returning a non-None verdict decides; when
        # every filter abstains the occurrence is skipped.
        for filter_ in self.filters:
            verdict = filter_(occurrence)
            if verdict is not None:
                return verdict
        return False
+
+
def create_finder(pycore, name, pyname, only_calls=False, imports=True,
                  unsure=None, docs=False, instance=None, in_hierarchy=False):
    """A factory for `Finder`

    Based on the arguments it creates a list of filters.  `instance`
    argument is needed only when you want implicit interfaces to be
    considered.

    """
    # NOTE: this local deliberately shadows the `pynames` module, which
    # is not used below.
    pynames = set([pyname])
    filters = []
    if only_calls:
        filters.append(CallsFilter())
    if not imports:
        filters.append(NoImportsFilter())
    if isinstance(instance, rope.base.pynames.ParameterName):
        # Implicit interfaces: also match the attribute on every object
        # the parameter may hold.
        for pyobject in instance.get_objects():
            try:
                pynames.add(pyobject[name])
            except exceptions.AttributeNotFoundError:
                pass
    for pyname in pynames:
        filters.append(PyNameFilter(pyname))
        if in_hierarchy:
            filters.append(InHierarchyFilter(pyname))
    if unsure:
        filters.append(UnsureFilter(unsure))
    return Finder(pycore, name, filters=filters, docs=docs)
+
+
class Occurrence(object):
    """A candidate occurrence of the searched name at `offset`.

    Expensive lookups are cached per instance via `utils.saveit`.
    """

    def __init__(self, tools, offset):
        self.tools = tools
        self.offset = offset
        self.resource = tools.resource

    @utils.saveit
    def get_word_range(self):
        # (start, end) offsets of the word itself.
        return self.tools.word_finder.get_word_range(self.offset)

    @utils.saveit
    def get_primary_range(self):
        # (start, end) offsets of the whole primary expression.
        return self.tools.word_finder.get_primary_range(self.offset)

    @utils.saveit
    def get_pyname(self):
        # Resolved pyname, or None when resolution fails.
        try:
            return self.tools.name_finder.get_pyname_at(self.offset)
        except exceptions.BadIdentifierError:
            pass

    @utils.saveit
    def get_primary_and_pyname(self):
        try:
            return self.tools.name_finder.get_primary_and_pyname_at(self.offset)
        except exceptions.BadIdentifierError:
            pass

    @utils.saveit
    def is_in_import_statement(self):
        return (self.tools.word_finder.is_from_statement(self.offset) or
                self.tools.word_finder.is_import_statement(self.offset))

    def is_called(self):
        return self.tools.word_finder.is_a_function_being_called(self.offset)

    def is_defined(self):
        return self.tools.word_finder.is_a_class_or_function_name_in_header(self.offset)

    def is_a_fixed_primary(self):
        # Occurrences whose primary cannot be rewritten to a dotted name.
        return self.tools.word_finder.is_a_class_or_function_name_in_header(self.offset) or \
               self.tools.word_finder.is_a_name_after_from_import(self.offset)

    def is_written(self):
        return self.tools.word_finder.is_assigned_here(self.offset)

    def is_unsure(self):
        return unsure_pyname(self.get_pyname())

    @property
    @utils.saveit
    def lineno(self):
        offset = self.get_word_range()[0]
        return self.tools.pymodule.lines.get_line_number(offset)
+
+
def same_pyname(expected, pyname):
    """Check whether `expected` and `pyname` are the same"""
    if expected is None or pyname is None:
        return False
    if expected == pyname:
        return True
    # Imported names may wrap the same definition in distinct pyname
    # objects, so fall back to comparing definition location and object.
    imported_types = (pynames.ImportedModule, pynames.ImportedName)
    if type(expected) not in imported_types and \
       type(pyname) not in imported_types:
        return False
    return expected.get_definition_location() == \
        pyname.get_definition_location() and \
        expected.get_object() == pyname.get_object()
+
def unsure_pyname(pyname, unbound=True):
    """Return `True` when we cannot tell what `pyname` references.

    Falls through (yielding `None`) for bound names with a known
    object; callers rely only on truthiness.
    """
    if pyname is None:
        return True
    if unbound and not isinstance(pyname, pynames.UnboundName):
        return False
    unknown = pyobjects.get_unknown()
    if pyname.get_object() == unknown:
        return True
+
+
class PyNameFilter(object):
    """Accept occurrences whose pyname matches the target pyname."""

    def __init__(self, pyname):
        self.pyname = pyname

    def __call__(self, occurrence):
        # Abstain (None) unless the pynames are judged identical.
        if same_pyname(self.pyname, occurrence.get_pyname()):
            return True
+
+
class InHierarchyFilter(object):
    """Accept occurrences of the name anywhere in the class hierarchy.

    An occurrence matches when its defining class shares a root class
    (a topmost class defining the attribute) with the target's class.
    """

    def __init__(self, pyname, implementations_only=False):
        self.pyname = pyname
        self.impl_only = implementations_only
        self.pyclass = self._get_containing_class(pyname)
        if self.pyclass is not None:
            self.name = pyname.get_object().get_name()
            self.roots = self._get_root_classes(self.pyclass, self.name)
        else:
            # Target is not defined in a class: the filter abstains.
            self.roots = None

    def __call__(self, occurrence):
        if self.roots is None:
            return
        pyclass = self._get_containing_class(occurrence.get_pyname())
        if pyclass is not None:
            roots = self._get_root_classes(pyclass, self.name)
            if self.roots.intersection(roots):
                return True

    def _get_containing_class(self, pyname):
        # Return the pyobject of the class whose scope defines `pyname`,
        # or None for non-class definitions.
        if isinstance(pyname, pynames.DefinedName):
            scope = pyname.get_object().get_scope()
            parent = scope.parent
            if parent is not None and parent.get_kind() == 'Class':
                return parent.pyobject

    def _get_root_classes(self, pyclass, name):
        # Recursively walk superclasses up to the topmost ones that
        # still define `name`.
        if self.impl_only and pyclass == self.pyclass:
            return set([pyclass])
        result = set()
        for superclass in pyclass.get_superclasses():
            if name in superclass:
                result.update(self._get_root_classes(superclass, name))
        if not result:
            return set([pyclass])
        return result
+
+
class UnsureFilter(object):
    """Accept unsure occurrences when the user-supplied predicate agrees."""

    def __init__(self, unsure):
        self.unsure = unsure

    def __call__(self, occurrence):
        # Abstain (None) unless both the occurrence is unsure and the
        # predicate accepts it.
        if not occurrence.is_unsure():
            return None
        if self.unsure(occurrence):
            return True
+
+
class NoImportsFilter(object):
    """Reject occurrences inside import statements; abstain otherwise."""

    def __call__(self, occurrence):
        return False if occurrence.is_in_import_statement() else None
+
+
class CallsFilter(object):
    """Reject occurrences that are not function calls; abstain otherwise."""

    def __call__(self, occurrence):
        return None if occurrence.is_called() else False
+
+
class _TextualFinder(object):
    """Fast textual scan producing candidate offsets of a name."""

    def __init__(self, name, docs=False):
        self.name = name
        # When `docs` is true, matches inside strings and comments are
        # reported too.
        self.docs = docs
        self.comment_pattern = _TextualFinder.any('comment', [r'#[^\n]*'])
        self.string_pattern = _TextualFinder.any(
            'string', [codeanalyze.get_string_pattern()])
        self.pattern = self._get_occurrence_pattern(self.name)

    def find_offsets(self, source):
        # Cheap containment test first; most files have no occurrence.
        if not self._fast_file_query(source):
            return
        if self.docs:
            searcher = self._normal_search
        else:
            searcher = self._re_search
        for matched in searcher(source):
            yield matched

    def _re_search(self, source):
        # Comments/strings match their own named groups and are thereby
        # excluded; only 'occurrence' group matches are yielded.
        for match in self.pattern.finditer(source):
            for key, value in match.groupdict().items():
                if value and key == 'occurrence':
                    yield match.start(key)

    def _normal_search(self, source):
        # Plain substring scan with identifier-boundary checks; used
        # when matches inside strings/comments should be reported too.
        current = 0
        while True:
            try:
                found = source.index(self.name, current)
                current = found + len(self.name)
                if (found == 0 or not self._is_id_char(source[found - 1])) and \
                   (current == len(source) or not self._is_id_char(source[current])):
                    yield found
            except ValueError:
                break

    def _is_id_char(self, c):
        return c.isalnum() or c == '_'

    def _fast_file_query(self, source):
        try:
            source.index(self.name)
            return True
        except ValueError:
            return False

    def _get_source(self, resource, pymodule):
        if resource is not None:
            return resource.read()
        else:
            return pymodule.source_code

    def _get_occurrence_pattern(self, name):
        occurrence_pattern = _TextualFinder.any('occurrence',
                                                ['\\b' + name + '\\b'])
        pattern = re.compile(occurrence_pattern + '|' + self.comment_pattern +
                             '|' + self.string_pattern)
        return pattern

    @staticmethod
    def any(name, list_):
        # Build a named alternation group: (?P<name>alt1|alt2|...).
        return '(?P<%s>' % name + '|'.join(list_) + ')'
+
+
class _OccurrenceToolsCreator(object):
    """Lazily builds (and caches) the helpers an `Occurrence` needs."""

    def __init__(self, pycore, resource=None, pymodule=None, docs=False):
        # At least one of `resource`/`pymodule` is expected to be given.
        self.pycore = pycore
        self.__resource = resource
        self.__pymodule = pymodule
        self.docs = docs

    @property
    @utils.saveit
    def name_finder(self):
        return evaluate.ScopeNameFinder(self.pymodule)

    @property
    @utils.saveit
    def source_code(self):
        # Prefer on-disk contents when a resource was supplied.
        if self.__resource is not None:
            return self.resource.read()
        else:
            return self.pymodule.source_code

    @property
    @utils.saveit
    def word_finder(self):
        return worder.Worder(self.source_code, self.docs)

    @property
    @utils.saveit
    def resource(self):
        if self.__resource is not None:
            return self.__resource
        if self.__pymodule is not None:
            return self.__pymodule.resource

    @property
    @utils.saveit
    def pymodule(self):
        if self.__pymodule is not None:
            return self.__pymodule
        return self.pycore.resource_to_pyobject(self.resource)
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/patchedast.py b/.vim/bundle/python-mode/pylibs/rope/refactor/patchedast.py
@@ -0,0 +1,734 @@
+import collections
+import re
+import warnings
+
+from rope.base import ast, codeanalyze, exceptions
+
+
def get_patched_ast(source, sorted_children=False):
    """Parse `source` and return its AST with ``region`` fields added.

    A ``sorted_children`` field is added to each node as well when
    `sorted_children` is true.
    """
    node = ast.parse(source)
    return patch_ast(node, source, sorted_children)
+
+
def patch_ast(node, source, sorted_children=False):
    """Patches the given node

    After calling, each node in `node` will have a new field named
    `region` that is a tuple containing the start and end offsets
    of the code that generated it.

    If `sorted_children` is true, a `sorted_children` field will
    be created for each node, too.  It is a list containing child
    nodes as well as whitespaces and comments that occur between
    them.

    """
    # Already patched: patching is idempotent.
    if hasattr(node, 'region'):
        return node
    walker = _PatchingASTWalker(source, children=sorted_children)
    ast.call_for_nodes(node, walker)
    return node
+
+
def node_region(patched_ast_node):
    """Return the (start, end) offset pair recorded on a patched node."""
    region = patched_ast_node.region
    return region
+
+
def write_ast(patched_ast_node):
    """Reconstruct source text from a node patched with `sorted_children`

    If the node was patched with sorted_children turned off, use the
    `node_region` function together with the module source instead.
    """
    return ''.join(
        write_ast(child) if isinstance(child, ast.AST) else child
        for child in patched_ast_node.sorted_children)
+
+
class MismatchedTokenError(exceptions.RopeError):
    # Raised when the source text does not contain the token the
    # patching walker expected to consume next.
    pass
+
+
+class _PatchingASTWalker(object):
+
    def __init__(self, source, children=False):
        # Wrap the raw text so tokens can be consumed sequentially while
        # walking the tree.
        self.source = _Source(source)
        self.children = children
        self.lines = codeanalyze.SourceLinesAdapter(source)
        # Stack of pending child deques for the nodes being processed.
        self.children_stack = []

    # Sentinels placed in child lists to stand for literal tokens whose
    # exact text must be scanned from the source rather than matched.
    Number = object()
    String = object()
+
    def __call__(self, node):
        # Dispatch to the `_<NodeClassName>` handler when one exists.
        method = getattr(self, '_' + node.__class__.__name__, None)
        if method is not None:
            return method(node)
        # ???: Unknown node; what should we do here?
        warnings.warn('Unknown node type <%s>; please report!'
                      % node.__class__.__name__, RuntimeWarning)
        # Give the node an empty region at the current offset so the
        # walk can continue past unrecognized node types.
        node.region = (self.source.offset, self.source.offset)
        if self.children:
            node.sorted_children = ast.get_children(node)
+
    def _handle(self, node, base_children, eat_parens=False, eat_spaces=False):
        """Consume `base_children` (tokens, sentinels and sub-nodes) from
        the source and record `node.region` (and `sorted_children`)."""
        if hasattr(node, 'region'):
            # ???: The same node was seen twice; what should we do?
            warnings.warn(
                'Node <%s> has been already patched; please report!' %
                node.__class__.__name__, RuntimeWarning)
            return
        base_children = collections.deque(base_children)
        self.children_stack.append(base_children)
        children = collections.deque()
        formats = []
        suspected_start = self.source.offset
        start = suspected_start
        first_token = True
        while base_children:
            child = base_children.popleft()
            if child is None:
                continue
            offset = self.source.offset
            if isinstance(child, ast.AST):
                # Recurse; the child records its own region.
                ast.call_for_nodes(child, self)
                token_start = child.region[0]
            else:
                if child is self.String:
                    region = self.source.consume_string(
                        end=self._find_next_statement_start())
                elif child is self.Number:
                    region = self.source.consume_number()
                elif child == '!=':
                    # INFO: This has been added to handle deprecated ``<>``
                    region = self.source.consume_not_equal()
                else:
                    region = self.source.consume(child)
                child = self.source[region[0]:region[1]]
                token_start = region[0]
            if not first_token:
                # Text between tokens (whitespace/comments/parens).
                formats.append(self.source[offset:token_start])
                if self.children:
                    children.append(self.source[offset:token_start])
            else:
                first_token = False
                start = token_start
            if self.children:
                children.append(child)
        start = self._handle_parens(children, start, formats)
        if eat_parens:
            start = self._eat_surrounding_parens(
                children, suspected_start, start)
        if eat_spaces:
            # Used for the whole module: absorb everything to the ends.
            if self.children:
                children.appendleft(self.source[0:start])
            end_spaces = self.source[self.source.offset:]
            self.source.consume(end_spaces)
            if self.children:
                children.append(end_spaces)
            start = 0
        if self.children:
            node.sorted_children = children
        node.region = (start, self.source.offset)
        self.children_stack.pop()
+
    def _handle_parens(self, children, start, formats):
        """Changes `children` and returns new start"""
        # Consume grouping parentheses that the child tokens did not
        # account for, extending the region over them.
        opens, closes = self._count_needed_parens(formats)
        old_end = self.source.offset
        new_end = None
        for i in range(closes):
            new_end = self.source.consume(')')[1]
        if new_end is not None:
            if self.children:
                children.append(self.source[old_end:new_end])
        new_start = start
        for i in range(opens):
            new_start = self.source.rfind_token('(', 0, new_start)
        if new_start != start:
            if self.children:
                children.appendleft(self.source[new_start:start])
            start = new_start
        return start
+
    def _eat_surrounding_parens(self, children, suspected_start, start):
        # Extend the region over one pair of parentheses wrapping the
        # node, if an opening paren exists between `suspected_start` and
        # `start`.
        index = self.source.rfind_token('(', suspected_start, start)
        if index is not None:
            old_start = start
            old_offset = self.source.offset
            start = index
            if self.children:
                children.appendleft(self.source[start + 1:old_start])
                children.appendleft('(')
            token_start, token_end = self.source.consume(')')
            if self.children:
                children.append(self.source[old_offset:token_start])
                children.append(')')
        return start
+
    def _count_needed_parens(self, children):
        # Count unbalanced ')' (parens to reopen before the node) and
        # '(' (parens to close after it) in the formatting strings
        # between tokens, skipping string literals and '#' comments.
        start = 0
        opens = 0
        for child in children:
            # `basestring`: this file targets Python 2.
            if not isinstance(child, basestring):
                continue
            if child == '' or child[0] in '\'"':
                continue
            index = 0
            while index < len(child):
                if child[index] == ')':
                    if opens > 0:
                        opens -= 1
                    else:
                        start += 1
                if child[index] == '(':
                    opens += 1
                if child[index] == '#':
                    # Skip the rest of the comment line.
                    try:
                        index = child.index('\n', index)
                    except ValueError:
                        break
                index += 1
        return start, opens
+
    def _find_next_statement_start(self):
        # Offset of the next pending statement; bounds string scanning
        # so it never runs into the following statement.
        for children in reversed(self.children_stack):
            for child in children:
                if isinstance(child, ast.stmt):
                    return child.col_offset \
                        + self.lines.get_line_start(child.lineno)
        return len(self.source.source)
+
    # Maps AST operator class names to their concrete token text.
    _operators = {'And': 'and', 'Or': 'or', 'Add': '+', 'Sub': '-', 'Mult': '*',
                  'Div': '/', 'Mod': '%', 'Pow': '**', 'LShift': '<<',
                  'RShift': '>>', 'BitOr': '|', 'BitAnd': '&', 'BitXor': '^',
                  'FloorDiv': '//', 'Invert': '~', 'Not': 'not', 'UAdd': '+',
                  'USub': '-', 'Eq': '==', 'NotEq': '!=', 'Lt': '<',
                  'LtE': '<=', 'Gt': '>', 'GtE': '>=', 'Is': 'is',
                  'IsNot': 'is not', 'In': 'in', 'NotIn': 'not in'}

    def _get_op(self, node):
        # Multi-word operators ('is not', 'not in') become token lists.
        return self._operators[node.__class__.__name__].split(' ')
+
    def _Attribute(self, node):
        # value '.' attr
        self._handle(node, [node.value, '.', node.attr])
+
    def _Assert(self, node):
        # 'assert' test [',' msg]
        children = ['assert', node.test]
        if node.msg:
            children.append(',')
            children.append(node.msg)
        self._handle(node, children)
+
    def _Assign(self, node):
        # target1 '=' target2 '=' ... '=' value
        children = self._child_nodes(node.targets, '=')
        children.append('=')
        children.append(node.value)
        self._handle(node, children)
+
    def _AugAssign(self, node):
        # target op '=' value (e.g. `x += 1` -> 'x', '+', '=', '1')
        children = [node.target]
        children.extend(self._get_op(node.op))
        children.extend(['=', node.value])
        self._handle(node, children)
+
    def _Repr(self, node):
        # Python 2 backquote repr: '`' value '`'
        self._handle(node, ['`', node.value, '`'])
+
    def _BinOp(self, node):
        # left op right
        children = [node.left] + self._get_op(node.op) + [node.right]
        self._handle(node, children)
+
    def _BoolOp(self, node):
        # values joined by 'and'/'or'
        self._handle(node, self._child_nodes(node.values,
                                             self._get_op(node.op)[0]))
+
    def _Break(self, node):
        # 'break'
        self._handle(node, ['break'])
+
    def _Call(self, node):
        # func '(' args/keywords [',' '*' starargs] [',' '**' kwargs] ')'
        children = [node.func, '(']
        args = list(node.args) + node.keywords
        children.extend(self._child_nodes(args, ','))
        if node.starargs is not None:
            if args:
                children.append(',')
            children.extend(['*', node.starargs])
        if node.kwargs is not None:
            if args or node.starargs is not None:
                children.append(',')
            children.extend(['**', node.kwargs])
        children.append(')')
        self._handle(node, children)
+
+    def _ClassDef(self, node):
+        children = []
+        # 'decorator_list' only exists on ASTs that support class
+        # decorators; guard with getattr for older interpreters.
+        if getattr(node, 'decorator_list', None):
+            for decorator in node.decorator_list:
+                children.append('@')
+                children.append(decorator)
+        children.extend(['class', node.name])
+        if node.bases:
+            children.append('(')
+            children.extend(self._child_nodes(node.bases, ','))
+            children.append(')')
+        children.append(':')
+        children.extend(node.body)
+        self._handle(node, children)
+
+    def _Compare(self, node):
+        # chained comparisons: left op1 expr1 op2 expr2 ...
+        children = []
+        children.append(node.left)
+        for op, expr in zip(node.ops, node.comparators):
+            children.extend(self._get_op(op))
+            children.append(expr)
+        self._handle(node, children)
+
+    def _Delete(self, node):
+        self._handle(node, ['del'] + self._child_nodes(node.targets, ','))
+
+    def _Num(self, node):
+        # self.Number is a sentinel defined on this class (outside this
+        # view); number literals are matched by pattern, not exact text.
+        self._handle(node, [self.Number])
+
+    def _Str(self, node):
+        # self.String is the string-literal sentinel counterpart of Number.
+        self._handle(node, [self.String])
+
+    def _Continue(self, node):
+        self._handle(node, ['continue'])
+
+    def _Dict(self, node):
+        # {key1: value1, key2: value2, ...}
+        children = []
+        children.append('{')
+        if node.keys:
+            for index, (key, value) in enumerate(zip(node.keys, node.values)):
+                children.extend([key, ':', value])
+                if index < len(node.keys) - 1:
+                    children.append(',')
+        children.append('}')
+        self._handle(node, children)
+
+    def _Ellipsis(self, node):
+        self._handle(node, ['...'])
+
+    def _Expr(self, node):
+        self._handle(node, [node.value])
+
+    def _Exec(self, node):
+        # exec body [in globals[, locals]] -- Python 2 statement form
+        children = []
+        children.extend(['exec', node.body])
+        if node.globals:
+            children.extend(['in', node.globals])
+        if node.locals:
+            children.extend([',', node.locals])
+        self._handle(node, children)
+
+    def _ExtSlice(self, node):
+        # comma-separated slice dimensions, e.g. a[1:2, 3]
+        children = []
+        for index, dim in enumerate(node.dims):
+            if index > 0:
+                children.append(',')
+            children.append(dim)
+        self._handle(node, children)
+
+    def _For(self, node):
+        # for target in iter: body [else: orelse]
+        children = ['for', node.target, 'in', node.iter, ':']
+        children.extend(node.body)
+        if node.orelse:
+            children.extend(['else', ':'])
+            children.extend(node.orelse)
+        self._handle(node, children)
+
+    def _ImportFrom(self, node):
+        children = ['from']
+        if node.level:
+            # relative import: one leading dot per level
+            children.append('.' * node.level)
+        children.extend([node.module or '',  # see comment at rope.base.ast.walk
+                         'import'])
+        children.extend(self._child_nodes(node.names, ','))
+        self._handle(node, children)
+
+    def _alias(self, node):
+        # name [as asname]
+        children = [node.name]
+        if node.asname:
+            children.extend(['as', node.asname])
+        self._handle(node, children)
+
+    def _FunctionDef(self, node):
+        children = []
+        # newer ASTs use 'decorator_list'; older ones used 'decorators'
+        try:
+            decorators = getattr(node, 'decorator_list')
+        except AttributeError:
+            decorators = getattr(node, 'decorators', None)
+        if decorators:
+            for decorator in decorators:
+                children.append('@')
+                children.append(decorator)
+        children.extend(['def', node.name, '(', node.args])
+        children.extend([')', ':'])
+        children.extend(node.body)
+        self._handle(node, children)
+
+    def _arguments(self, node):
+        children = []
+        args = list(node.args)
+        # left-pad defaults with None so zip pairs each arg with its
+        # default (defaults apply to the trailing parameters only)
+        defaults = [None] * (len(args) - len(node.defaults)) + list(node.defaults)
+        for index, (arg, default) in enumerate(zip(args, defaults)):
+            if index > 0:
+                children.append(',')
+            self._add_args_to_children(children, arg, default)
+        if node.vararg is not None:
+            if args:
+                children.append(',')
+            children.extend(['*', node.vararg])
+        if node.kwarg is not None:
+            if args or node.vararg is not None:
+                children.append(',')
+            children.extend(['**', node.kwarg])
+        self._handle(node, children)
+
+    def _add_args_to_children(self, children, arg, default):
+        # arg may be a (possibly nested) Python 2 tuple parameter
+        if isinstance(arg, (list, tuple)):
+            self._add_tuple_parameter(children, arg)
+        else:
+            children.append(arg)
+        if default is not None:
+            children.append('=')
+            children.append(default)
+
+    def _add_tuple_parameter(self, children, arg):
+        # Python 2 tuple parameters, e.g. def f(a, (b, (c, d))); recurses
+        # into nested tuples.
+        children.append('(')
+        for index, token in enumerate(arg):
+            if index > 0:
+                children.append(',')
+            if isinstance(token, (list, tuple)):
+                self._add_tuple_parameter(children, token)
+            else:
+                children.append(token)
+        children.append(')')
+
+    def _GeneratorExp(self, node):
+        # elt for ... -- surrounding parens belong to the outer expression
+        children = [node.elt]
+        children.extend(node.generators)
+        self._handle(node, children, eat_parens=True)
+
+    def _comprehension(self, node):
+        # for target in iter [if cond]*
+        children = ['for', node.target, 'in', node.iter]
+        if node.ifs:
+            for if_ in node.ifs:
+                children.append('if')
+                children.append(if_)
+        self._handle(node, children)
+
+    def _Global(self, node):
+        children = self._child_nodes(node.names, ',')
+        children.insert(0, 'global')
+        self._handle(node, children)
+
+    def _If(self, node):
+        # an If node that is really an 'elif' branch starts with 'elif'
+        if self._is_elif(node):
+            children = ['elif']
+        else:
+            children = ['if']
+        children.extend([node.test, ':'])
+        children.extend(node.body)
+        if node.orelse:
+            if len(node.orelse) == 1 and self._is_elif(node.orelse[0]):
+                # the orelse is itself an elif and supplies its own keyword
+                pass
+            else:
+                children.extend(['else', ':'])
+            children.extend(node.orelse)
+        self._handle(node, children)
+
+    def _is_elif(self, node):
+        if not isinstance(node, ast.If):
+            return False
+        offset = self.lines.get_line_start(node.lineno) + node.col_offset
+        word = self.source[offset:offset + 4]
+        # XXX: This is a bug; the offset does not point to the first
+        # character of the keyword on all interpreters, so also check the
+        # text just before the reported position for 'elif'.
+        alt_word = self.source[offset - 5:offset - 1]
+        return 'elif' in (word, alt_word)
+
+    def _IfExp(self, node):
+        # body if test else orelse
+        return self._handle(node, [node.body, 'if', node.test,
+                                   'else', node.orelse])
+
+    def _Import(self, node):
+        children = ['import']
+        children.extend(self._child_nodes(node.names, ','))
+        self._handle(node, children)
+
+    def _keyword(self, node):
+        # arg=value inside a call
+        self._handle(node, [node.arg, '=', node.value])
+
+    def _Lambda(self, node):
+        self._handle(node, ['lambda', node.args, ':', node.body])
+
+    def _List(self, node):
+        self._handle(node, ['['] + self._child_nodes(node.elts, ',') + [']'])
+
+    def _ListComp(self, node):
+        # [elt for ...]
+        children = ['[', node.elt]
+        children.extend(node.generators)
+        children.append(']')
+        self._handle(node, children)
+
+    def _Module(self, node):
+        self._handle(node, list(node.body), eat_spaces=True)
+
+    def _Name(self, node):
+        self._handle(node, [node.id])
+
+    def _Pass(self, node):
+        self._handle(node, ['pass'])
+
+    def _Print(self, node):
+        # print [>>dest,] values[,] -- Python 2 statement form
+        children = ['print']
+        if node.dest:
+            children.extend(['>>', node.dest])
+            if node.values:
+                # comma separating the >>dest target from the values
+                children.append(',')
+        children.extend(self._child_nodes(node.values, ','))
+        if not node.nl:
+            # trailing comma suppresses the newline
+            children.append(',')
+        self._handle(node, children)
+
+    def _Raise(self, node):
+        # raise [type[, inst[, tback]]] -- Python 2 three-part form
+        children = ['raise']
+        if node.type:
+            children.append(node.type)
+        if node.inst:
+            children.append(',')
+            children.append(node.inst)
+        if node.tback:
+            children.append(',')
+            children.append(node.tback)
+        self._handle(node, children)
+
+    def _Return(self, node):
+        children = ['return']
+        if node.value:
+            children.append(node.value)
+        self._handle(node, children)
+
+    def _Sliceobj(self, node):
+        # colon-separated slice parts; empty parts contribute no node
+        children = []
+        for index, slice in enumerate(node.nodes):
+            if index > 0:
+                children.append(':')
+            if slice:
+                children.append(slice)
+        self._handle(node, children)
+
+    def _Index(self, node):
+        self._handle(node, [node.value])
+
+    def _Subscript(self, node):
+        self._handle(node, [node.value, '[', node.slice, ']'])
+
+    def _Slice(self, node):
+        # [lower]:[upper][:step]
+        children = []
+        if node.lower:
+            children.append(node.lower)
+        children.append(':')
+        if node.upper:
+            children.append(node.upper)
+        if node.step:
+            children.append(':')
+            children.append(node.step)
+        self._handle(node, children)
+
+    def _TryFinally(self, node):
+        # try/except/finally written as one statement is parsed as a
+        # TryExcept as the sole child of a TryFinally; in that case the
+        # nested node owns the single 'try' keyword.
+        children = []
+        if len(node.body) != 1 or not isinstance(node.body[0], ast.TryExcept):
+            children.extend(['try', ':'])
+        children.extend(node.body)
+        children.extend(['finally', ':'])
+        children.extend(node.finalbody)
+        self._handle(node, children)
+
+    def _TryExcept(self, node):
+        children = ['try', ':']
+        children.extend(node.body)
+        children.extend(node.handlers)
+        if node.orelse:
+            children.extend(['else', ':'])
+            children.extend(node.orelse)
+        self._handle(node, children)
+
+    def _ExceptHandler(self, node):
+        # newer AST class name; delegates to the old-style handler
+        self._excepthandler(node)
+
+    def _excepthandler(self, node):
+        # except [type[, name]]: body -- Python 2 comma syntax
+        children = ['except']
+        if node.type:
+            children.append(node.type)
+        if node.name:
+            children.extend([',', node.name])
+        children.append(':')
+        children.extend(node.body)
+        self._handle(node, children)
+
+    def _Tuple(self, node):
+        if node.elts:
+            # parens, when present, belong to the surrounding expression
+            self._handle(node, self._child_nodes(node.elts, ','),
+                         eat_parens=True)
+        else:
+            self._handle(node, ['(', ')'])
+
+    def _UnaryOp(self, node):
+        children = self._get_op(node.op)
+        children.append(node.operand)
+        self._handle(node, children)
+
+    def _Yield(self, node):
+        children = ['yield']
+        if node.value:
+            children.append(node.value)
+        self._handle(node, children)
+
+    def _While(self, node):
+        children = ['while', node.test, ':']
+        children.extend(node.body)
+        if node.orelse:
+            children.extend(['else', ':'])
+            children.extend(node.orelse)
+        self._handle(node, children)
+
+    def _With(self, node):
+        # with context_expr [as optional_vars]: body
+        children = ['with', node.context_expr]
+        if node.optional_vars:
+            children.extend(['as', node.optional_vars])
+        children.append(':')
+        children.extend(node.body)
+        self._handle(node, children)
+
+    def _child_nodes(self, nodes, separator):
+        """Interleave `separator` between consecutive items of `nodes`."""
+        children = []
+        for index, child in enumerate(nodes):
+            children.append(child)
+            if index < len(nodes) - 1:
+                children.append(separator)
+        return children
+
+
+class _Source(object):
+    """A cursor over program source used to locate expected tokens.
+
+    `offset` always points just past the last consumed token; the
+    `consume*` methods advance it, skip over comments, and return the
+    (start, end) region of the match.
+    """
+
+    def __init__(self, source):
+        self.source = source
+        self.offset = 0
+
+    def consume(self, token):
+        """Find `token` at or after `offset`; return its (start, end).
+
+        Raises `MismatchedTokenError` (defined elsewhere in this module)
+        when the token cannot be found: ValueError from str.index, or
+        TypeError for an unusable token value.
+        """
+        try:
+            while True:
+                new_offset = self.source.index(token, self.offset)
+                if self._good_token(token, new_offset):
+                    break
+                else:
+                    self._skip_comment()
+        except (ValueError, TypeError):
+            raise MismatchedTokenError(
+                'Token <%s> at %s cannot be matched' %
+                (token, self._get_location()))
+        self.offset = new_offset + len(token)
+        return (new_offset, self.offset)
+
+    def consume_string(self, end=None):
+        # Lazily compile (and cache on the class) a pattern matching a
+        # string literal, including implicitly concatenated literals
+        # separated by whitespace, line continuations or comments.
+        if _Source._string_pattern is None:
+            original = codeanalyze.get_string_pattern()
+            pattern = r'(%s)((\s|\\\n|#[^\n]*\n)*(%s))*' % \
+                      (original, original)
+            _Source._string_pattern = re.compile(pattern)
+        repattern = _Source._string_pattern
+        return self._consume_pattern(repattern, end)
+
+    def consume_number(self):
+        if _Source._number_pattern is None:
+            _Source._number_pattern = re.compile(
+                self._get_number_pattern())
+        repattern = _Source._number_pattern
+        return self._consume_pattern(repattern)
+
+    def consume_not_equal(self):
+        # '<>' is the deprecated Python 2 spelling of '!='
+        if _Source._not_equals_pattern is None:
+            _Source._not_equals_pattern = re.compile(r'<>|!=')
+        repattern = _Source._not_equals_pattern
+        return self._consume_pattern(repattern)
+
+    def _good_token(self, token, offset, start=None):
+        """Checks whether consumed token is in comments"""
+        if start is None:
+            start = self.offset
+        try:
+            comment_index = self.source.rindex('#', start, offset)
+        except ValueError:
+            # no '#' before the token: it cannot be inside a comment
+            return True
+        try:
+            new_line_index = self.source.rindex('\n', start, offset)
+        except ValueError:
+            return False
+        # good only if a newline ended the comment before the token
+        return comment_index < new_line_index
+
+    def _skip_comment(self):
+        # NOTE(review): raises ValueError at EOF without a trailing
+        # newline; consume() relies on that to report a mismatch.
+        self.offset = self.source.index('\n', self.offset + 1)
+
+    def _get_location(self):
+        # (line, column) of the current offset, for error messages
+        lines = self.source[:self.offset].split('\n')
+        return (len(lines), len(lines[-1]))
+
+    def _consume_pattern(self, repattern, end=None):
+        # NOTE(review): repattern.search may return None when nothing
+        # matches, producing AttributeError instead of a
+        # MismatchedTokenError -- confirm callers guarantee a match.
+        while True:
+            if end is None:
+                end = len(self.source)
+            match = repattern.search(self.source, self.offset, end)
+            if self._good_token(match.group(), match.start()):
+                break
+            else:
+                self._skip_comment()
+        self.offset = match.end()
+        return match.start(), match.end()
+
+    def till_token(self, token):
+        """Return the text between the current offset and `token`."""
+        new_offset = self.source.index(token, self.offset)
+        return self[self.offset:new_offset]
+
+    def rfind_token(self, token, start, end):
+        """Rightmost comment-free `token` in [start, end), or None."""
+        index = start  # NOTE(review): dead store; rindex reassigns below
+        while True:
+            try:
+                index = self.source.rindex(token, start, end)
+                if self._good_token(token, index, start=start):
+                    return index
+                else:
+                    # match was inside a comment; search before it
+                    end = index
+            except ValueError:
+                return None
+
+    def from_offset(self, offset):
+        # text from `offset` up to the current cursor position
+        return self[offset:self.offset]
+
+    def find_backwards(self, pattern, offset):
+        return self.source.rindex(pattern, 0, offset)
+
+    def __getitem__(self, index):
+        return self.source[index]
+
+    def __getslice__(self, i, j):
+        # Python 2 slice protocol support
+        return self.source[i:j]
+
+    def _get_number_pattern(self):
+        # HACK: It is merely an approximation and does the job
+        integer = r'(0|0x)?[\da-fA-F]+[lL]?'
+        return r'(%s(\.\d*)?|(\.\d+))([eE][-+]?\d*)?[jJ]?' % integer
+
+    # class-level caches for the lazily compiled regexes above
+    _string_pattern = None
+    _number_pattern = None
+    _not_equals_pattern = None
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/rename.py b/.vim/bundle/python-mode/pylibs/rope/refactor/rename.py
@@ -0,0 +1,216 @@
+import warnings
+
+from rope.base import exceptions, pyobjects, pynames, taskhandle, evaluate, worder, codeanalyze
+from rope.base.change import ChangeSet, ChangeContents, MoveResource
+from rope.refactor import occurrences, sourceutils
+
+
+class Rename(object):
+    """A class for performing rename refactoring
+
+    It can rename everything: classes, functions, modules, packages,
+    methods, variables and keyword arguments.
+
+    """
+
+    def __init__(self, project, resource, offset=None):
+        """If `offset` is None, the `resource` itself will be renamed"""
+        self.project = project
+        self.pycore = project.pycore
+        self.resource = resource
+        if offset is not None:
+            # renaming the identifier at `offset`; it must resolve
+            self.old_name = worder.get_name_at(self.resource, offset)
+            this_pymodule = self.pycore.resource_to_pyobject(self.resource)
+            self.old_instance, self.old_pyname = \
+                evaluate.eval_location2(this_pymodule, offset)
+            if self.old_pyname is None:
+                raise exceptions.RefactoringError(
+                    'Rename refactoring should be performed'
+                    ' on resolvable python identifiers.')
+        else:
+            # renaming the module/package itself; renaming a package's
+            # __init__.py means renaming the containing folder
+            if not resource.is_folder() and resource.name == '__init__.py':
+                resource = resource.parent
+            dummy_pymodule = self.pycore.get_string_module('')
+            self.old_instance = None
+            self.old_pyname = pynames.ImportedModule(dummy_pymodule,
+                                                     resource=resource)
+            if resource.is_folder():
+                self.old_name = resource.name
+            else:
+                # strip the '.py' extension
+                self.old_name = resource.name[:-3]
+
+    def get_old_name(self):
+        return self.old_name
+
+    def get_changes(self, new_name, in_file=None, in_hierarchy=False,
+                    unsure=None, docs=False, resources=None,
+                    task_handle=taskhandle.NullTaskHandle()):
+        """Get the changes needed for this refactoring
+
+        Parameters:
+
+        - `in_hierarchy`: when renaming a method this keyword forces
+          to rename all matching methods in the hierarchy
+        - `docs`: when `True` rename refactoring will rename
+          occurrences in comments and strings where the name is
+          visible.  Setting it will make renames faster, too.
+        - `unsure`: decides what to do about unsure occurrences.
+          If `None`, they are ignored.  Otherwise `unsure` is
+          called with an instance of `occurrence.Occurrence` as
+          parameter.  If it returns `True`, the occurrence is
+          considered to be a match.
+        - `resources` can be a list of `rope.base.resources.File`\s to
+          apply this refactoring on.  If `None`, the restructuring
+          will be applied to all python files.
+        - `in_file`: this argument has been deprecated; use
+          `resources` instead.
+
+        """
+        if unsure in (True, False):
+            # backward compatibility: wrap a plain bool in a callable
+            warnings.warn(
+                'unsure parameter should be a function that returns '
+                'True or False', DeprecationWarning, stacklevel=2)
+            def unsure_func(value=unsure):
+                return value
+            unsure = unsure_func
+        if in_file is not None:
+            warnings.warn(
+                '`in_file` argument has been deprecated; use `resources` '
+                'instead. ', DeprecationWarning, stacklevel=2)
+            if in_file:
+                resources = [self.resource]
+        if _is_local(self.old_pyname):
+            # a local name can only occur in its defining file
+            resources = [self.resource]
+        if resources is None:
+            resources = self.pycore.get_python_files()
+        changes = ChangeSet('Renaming <%s> to <%s>' %
+                            (self.old_name, new_name))
+        finder = occurrences.create_finder(
+            self.pycore, self.old_name, self.old_pyname, unsure=unsure,
+            docs=docs, instance=self.old_instance,
+            in_hierarchy=in_hierarchy and self.is_method())
+        job_set = task_handle.create_jobset('Collecting Changes', len(resources))
+        for file_ in resources:
+            job_set.started_job(file_.path)
+            new_content = rename_in_module(finder, new_name, resource=file_)
+            if new_content is not None:
+                changes.add_change(ChangeContents(file_, new_content))
+            job_set.finished_job()
+        if self._is_renaming_a_module():
+            resource = self.old_pyname.get_object().get_resource()
+            if self._is_allowed_to_move(resources, resource):
+                self._rename_module(resource, new_name, changes)
+        return changes
+
+    def _is_allowed_to_move(self, resources, resource):
+        # a package folder may move only when its __init__.py is included
+        # in the refactored resources
+        if resource.is_folder():
+            try:
+                return resource.get_child('__init__.py') in resources
+            except exceptions.ResourceNotFoundError:
+                return False
+        else:
+            return resource in resources
+
+    def _is_renaming_a_module(self):
+        if isinstance(self.old_pyname.get_object(), pyobjects.AbstractModule):
+            return True
+        return False
+
+    def is_method(self):
+        # True when the renamed name is a function defined inside a class
+        pyname = self.old_pyname
+        return isinstance(pyname, pynames.DefinedName) and \
+            isinstance(pyname.get_object(), pyobjects.PyFunction) and \
+            isinstance(pyname.get_object().parent, pyobjects.PyClass)
+
+    def _rename_module(self, resource, new_name, changes):
+        # queue a MoveResource change renaming the file/folder on disk
+        if not resource.is_folder():
+            new_name = new_name + '.py'
+        parent_path = resource.parent.path
+        if parent_path == '':
+            new_location = new_name
+        else:
+            new_location = parent_path + '/' + new_name
+        changes.add_change(MoveResource(resource, new_location))
+
+
+class ChangeOccurrences(object):
+    """A class for changing the occurrences of a name in a scope
+
+    This class replaces the occurrences of a name.  Note that it only
+    changes the scope containing the offset passed to the constructor.
+    What's more it does not have any side-effects.  That is for
+    example changing occurrences of a module does not rename the
+    module; it merely replaces the occurrences of that module in a
+    scope with the given expression.  This class is useful for
+    performing many custom refactorings.
+
+    """
+
+    def __init__(self, project, resource, offset):
+        self.pycore = project.pycore
+        self.resource = resource
+        self.offset = offset
+        self.old_name = worder.get_name_at(resource, offset)
+        self.pymodule = self.pycore.resource_to_pyobject(self.resource)
+        self.old_pyname = evaluate.eval_location(self.pymodule, offset)
+
+    def get_old_name(self):
+        # the whole primary at the offset (e.g. 'a.b.c'), not just a word
+        word_finder = worder.Worder(self.resource.read())
+        return word_finder.get_primary_at(self.offset)
+
+    def _get_scope_offset(self):
+        # (start, end) character offsets of the innermost scope that
+        # contains self.offset
+        lines = self.pymodule.lines
+        scope = self.pymodule.get_scope().\
+            get_inner_scope_for_line(lines.get_line_number(self.offset))
+        start = lines.get_line_start(scope.get_start())
+        end = lines.get_line_end(scope.get_end())
+        return start, end
+
+    def get_changes(self, new_name, only_calls=False, reads=True, writes=True):
+        changes = ChangeSet('Changing <%s> occurrences to <%s>' %
+                            (self.old_name, new_name))
+        scope_start, scope_end = self._get_scope_offset()
+        finder = occurrences.create_finder(
+            self.pycore, self.old_name, self.old_pyname,
+            imports=False, only_calls=only_calls)
+        new_contents = rename_in_module(
+            finder, new_name, pymodule=self.pymodule, replace_primary=True,
+            region=(scope_start, scope_end), reads=reads, writes=writes)
+        if new_contents is not None:
+            changes.add_change(ChangeContents(self.resource, new_contents))
+        return changes
+
+
+def rename_in_module(occurrences_finder, new_name, resource=None, pymodule=None,
+ replace_primary=False, region=None, reads=True, writes=True):
+ """Returns the changed source or `None` if there is no changes"""
+ if resource is not None:
+ source_code = resource.read()
+ else:
+ source_code = pymodule.source_code
+ change_collector = codeanalyze.ChangeCollector(source_code)
+ for occurrence in occurrences_finder.find_occurrences(resource, pymodule):
+ if replace_primary and occurrence.is_a_fixed_primary():
+ continue
+ if replace_primary:
+ start, end = occurrence.get_primary_range()
+ else:
+ start, end = occurrence.get_word_range()
+ if (not reads and not occurrence.is_written()) or \
+ (not writes and occurrence.is_written()):
+ continue
+ if region is None or region[0] <= start < region[1]:
+ change_collector.add_change(start, end, new_name)
+ return change_collector.get_changed()
+
+def _is_local(pyname):
+ module, lineno = pyname.get_definition_location()
+ if lineno is None:
+ return False
+ scope = module.get_scope().get_inner_scope_for_line(lineno)
+ if isinstance(pyname, pynames.DefinedName) and \
+ scope.get_kind() in ('Function', 'Class'):
+ scope = scope.parent
+ return scope.get_kind() == 'Function' and \
+ pyname in scope.get_names().values() and \
+ isinstance(pyname, pynames.AssignedName)
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/restructure.py b/.vim/bundle/python-mode/pylibs/rope/refactor/restructure.py
@@ -0,0 +1,307 @@
+import warnings
+
+from rope.base import change, taskhandle, builtins, ast, codeanalyze
+from rope.refactor import patchedast, similarfinder, sourceutils
+from rope.refactor.importutils import module_imports
+
+
+class Restructure(object):
+    """A class to perform python restructurings
+
+    A restructuring transforms pieces of code matching `pattern` to
+    `goal`.  In the `pattern` wildcards can appear.  Wildcards match
+    some piece of code based on their kind and arguments that are
+    passed to them through `args`.
+
+    `args` is a dictionary of wildcard names to wildcard arguments.
+    If the argument is a tuple, the first item of the tuple is
+    considered to be the name of the wildcard to use; otherwise the
+    "default" wildcard is used.  For getting the list arguments a
+    wildcard supports, see the pydoc of the wildcard.  (see
+    `rope.refactor.wildcard.DefaultWildcard` for the default
+    wildcard.)
+
+    `wildcards` is the list of wildcard types that can appear in
+    `pattern`.  See `rope.refactor.wildcards`.  If a wildcard does not
+    specify its kind (by using a tuple in args), the wildcard named
+    "default" is used.  So there should be a wildcard with "default"
+    name in `wildcards`.
+
+    `imports` is the list of imports that changed modules should
+    import.  Note that rope handles duplicate imports and does not add
+    the import if it already appears.
+
+    Example #1::
+
+        pattern ${pyobject}.get_attribute(${name})
+        goal ${pyobject}[${name}]
+        args pyobject: instance=rope.base.pyobjects.PyObject
+
+    Example #2::
+
+        pattern ${name} in ${pyobject}.get_attributes()
+        goal ${name} in {pyobject}
+        args pyobject: instance=rope.base.pyobjects.PyObject
+
+    Example #3::
+
+        pattern ${pycore}.create_module(${project}.root, ${name})
+        goal generate.create_module(${project}, ${name})
+
+        imports
+            from rope.contrib import generate
+
+        args
+            pycore: type=rope.base.pycore.PyCore
+            project: type=rope.base.project.Project
+
+    Example #4::
+
+        pattern ${pow}(${param1}, ${param2})
+        goal ${param1} ** ${param2}
+        args pow: name=mod.pow, exact
+
+    Example #5::
+
+        pattern ${inst}.longtask(${p1}, ${p2})
+        goal
+            ${inst}.subtask1(${p1})
+            ${inst}.subtask2(${p2})
+        args
+            inst: type=mod.A,unsure
+
+    """
+
+    def __init__(self, project, pattern, goal, args=None,
+                 imports=None, wildcards=None):
+        """Construct a restructuring
+
+        See class pydoc for more info about the arguments.
+
+        """
+        self.pycore = project.pycore
+        self.pattern = pattern
+        self.goal = goal
+        self.args = args
+        if self.args is None:
+            self.args = {}
+        self.imports = imports
+        if self.imports is None:
+            self.imports = []
+        self.wildcards = wildcards
+        # template used to build the replacement text for each match
+        self.template = similarfinder.CodeTemplate(self.goal)
+
+    def get_changes(self, checks=None, imports=None, resources=None,
+                    task_handle=taskhandle.NullTaskHandle()):
+        """Get the changes needed by this restructuring
+
+        `resources` can be a list of `rope.base.resources.File`\s to
+        apply the restructuring on.  If `None`, the restructuring will
+        be applied to all python files.
+
+        `checks` argument has been deprecated.  Use the `args` argument
+        of the constructor.  The usage of::
+
+            strchecks = {'obj1.type': 'mod.A', 'obj2': 'mod.B',
+                         'obj3.object': 'mod.C'}
+            checks = restructuring.make_checks(strchecks)
+
+        can be replaced with::
+
+            args = {'obj1': 'type=mod.A', 'obj2': 'name=mod.B',
+                    'obj3': 'object=mod.C'}
+
+        where obj1, obj2 and obj3 are wildcard names that appear
+        in restructuring pattern.
+
+        """
+        if checks is not None:
+            # deprecated `checks` values are folded into self.args
+            warnings.warn(
+                'The use of checks parameter is deprecated; '
+                'use the args parameter of the constructor instead.',
+                DeprecationWarning, stacklevel=2)
+            for name, value in checks.items():
+                self.args[name] = similarfinder._pydefined_to_str(value)
+        if imports is not None:
+            warnings.warn(
+                'The use of imports parameter is deprecated; '
+                'use imports parameter of the constructor, instead.',
+                DeprecationWarning, stacklevel=2)
+            self.imports = imports
+        changes = change.ChangeSet('Restructuring <%s> to <%s>' %
+                                   (self.pattern, self.goal))
+        if resources is not None:
+            files = [resource for resource in resources
+                     if self.pycore.is_python_file(resource)]
+        else:
+            files = self.pycore.get_python_files()
+        job_set = task_handle.create_jobset('Collecting Changes', len(files))
+        for resource in files:
+            job_set.started_job(resource.path)
+            pymodule = self.pycore.resource_to_pyobject(resource)
+            finder = similarfinder.SimilarFinder(pymodule,
+                                                 wildcards=self.wildcards)
+            matches = list(finder.get_matches(self.pattern, self.args))
+            computer = self._compute_changes(matches, pymodule)
+            result = computer.get_changed()
+            if result is not None:
+                # add the requested imports before recording the change
+                imported_source = self._add_imports(resource, result,
+                                                    self.imports)
+                changes.add_change(change.ChangeContents(resource,
+                                                         imported_source))
+            job_set.finished_job()
+        return changes
+
+    def _compute_changes(self, matches, pymodule):
+        return _ChangeComputer(
+            pymodule.source_code, pymodule.get_ast(),
+            pymodule.lines, self.template, matches)
+
+    def _add_imports(self, resource, source, imports):
+        # merge the restructuring's import list into the changed source
+        if not imports:
+            return source
+        import_infos = self._get_import_infos(resource, imports)
+        pymodule = self.pycore.get_string_module(source, resource)
+        imports = module_imports.ModuleImports(self.pycore, pymodule)
+        for import_info in import_infos:
+            imports.add_import(import_info)
+        return imports.get_changed_source()
+
+    def _get_import_infos(self, resource, imports):
+        # parse the import statement strings into ImportInfo objects
+        pymodule = self.pycore.get_string_module('\n'.join(imports),
+                                                 resource)
+        imports = module_imports.ModuleImports(self.pycore, pymodule)
+        return [imports.import_info
+                for imports in imports.imports]
+
+    def make_checks(self, string_checks):
+        """Convert str to str dicts to str to PyObject dicts
+
+        This function is here to ease writing a UI.
+
+        """
+        checks = {}
+        for key, value in string_checks.items():
+            # keys ending in '.object'/'.type' resolve to pyobjects,
+            # everything else to pynames
+            is_pyname = not key.endswith('.object') and \
+                not key.endswith('.type')
+            evaluated = self._evaluate(value, is_pyname=is_pyname)
+            if evaluated is not None:
+                checks[key] = evaluated
+        return checks
+
+    def _evaluate(self, code, is_pyname=True):
+        # resolve a dotted name like 'mod.Class.attr' to a pyname or
+        # pyobject; builtins get a small stub so attribute lookup works
+        attributes = code.split('.')
+        pyname = None
+        if attributes[0] in ('__builtin__', '__builtins__'):
+            class _BuiltinsStub(object):
+                def get_attribute(self, name):
+                    return builtins.builtins[name]
+            pyobject = _BuiltinsStub()
+        else:
+            pyobject = self.pycore.get_module(attributes[0])
+        for attribute in attributes[1:]:
+            pyname = pyobject[attribute]
+            if pyname is None:
+                return None
+            pyobject = pyname.get_object()
+        return pyname if is_pyname else pyobject
+
+
+def replace(code, pattern, goal):
+ """used by other refactorings"""
+ finder = similarfinder.RawSimilarFinder(code)
+ matches = list(finder.get_matches(pattern))
+ ast = patchedast.get_patched_ast(code)
+ lines = codeanalyze.SourceLinesAdapter(code)
+ template = similarfinder.CodeTemplate(goal)
+ computer = _ChangeComputer(code, ast, lines, template, matches)
+ result = computer.get_changed()
+ if result is None:
+ return code
+ return result
+
+
+class _ChangeComputer(object):
+    # Builds the changed source for a set of similar-code matches by
+    # substituting each match with the goal template, handling matches
+    # nested inside other matches recursively.
+
+    def __init__(self, code, ast, lines, goal, matches):
+        self.source = code
+        self.goal = goal
+        self.matches = matches
+        self.ast = ast
+        self.lines = lines
+        # maps a matched AST node to its match (expression matches only)
+        self.matched_asts = {}
+        # memo cache for _get_nearest_roots
+        self._nearest_roots = {}
+        if self._is_expression():
+            for match in self.matches:
+                self.matched_asts[match.ast] = match
+
+    def get_changed(self):
+        """Return the rewritten source, or None when nothing changed."""
+        if self._is_expression():
+            result = self._get_node_text(self.ast)
+            if result == self.source:
+                return None
+            return result
+        else:
+            collector = codeanalyze.ChangeCollector(self.source)
+            last_end = -1
+            for match in self.matches:
+                start, end = match.get_region()
+                if start < last_end:
+                    # skip statement matches overlapping an earlier one
+                    if not self._is_expression():
+                        continue
+                last_end = end
+                replacement = self._get_matched_text(match)
+                collector.add_change(start, end, replacement)
+            return collector.get_changed()
+
+    def _is_expression(self):
+        return self.matches and isinstance(self.matches[0],
+                                           similarfinder.ExpressionMatch)
+
+    def _get_matched_text(self, match):
+        # substitute each template name with its matched node's text
+        mapping = {}
+        for name in self.goal.get_names():
+            node = match.get_ast(name)
+            if node is None:
+                raise similarfinder.BadNameInCheckError(
+                    'Unknown name <%s>' % name)
+            # force plain text when the node is the match's own AST, so
+            # the matched_asts lookup does not recurse into this match
+            force = self._is_expression() and match.ast == node
+            mapping[name] = self._get_node_text(node, force)
+        unindented = self.goal.substitute(mapping)
+        return self._auto_indent(match.get_region()[0], unindented)
+
+    def _get_node_text(self, node, force=False):
+        if not force and node in self.matched_asts:
+            return self._get_matched_text(self.matched_asts[node])
+        start, end = patchedast.node_region(node)
+        main_text = self.source[start:end]
+        collector = codeanalyze.ChangeCollector(main_text)
+        # rewrite any matched descendants embedded in this node's text
+        for node in self._get_nearest_roots(node):
+            sub_start, sub_end = patchedast.node_region(node)
+            collector.add_change(sub_start - start, sub_end - start,
+                                 self._get_node_text(node))
+        result = collector.get_changed()
+        if result is None:
+            return main_text
+        return result
+
+    def _auto_indent(self, offset, text):
+        # indent continuation lines of `text` to the column of `offset`
+        lineno = self.lines.get_line_number(offset)
+        indents = sourceutils.get_indents(self.lines, lineno)
+        result = []
+        for index, line in enumerate(text.splitlines(True)):
+            if index != 0 and line.strip():
+                result.append(' ' * indents)
+            result.append(line)
+        return ''.join(result)
+
+    def _get_nearest_roots(self, node):
+        # topmost matched descendants of `node`, memoized
+        if node not in self._nearest_roots:
+            result = []
+            for child in ast.get_child_nodes(node):
+                if child in self.matched_asts:
+                    result.append(child)
+                else:
+                    result.extend(self._get_nearest_roots(child))
+            self._nearest_roots[node] = result
+        return self._nearest_roots[node]
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/similarfinder.py b/.vim/bundle/python-mode/pylibs/rope/refactor/similarfinder.py
@@ -0,0 +1,362 @@
+"""This module can be used for finding similar code"""
+import re
+
+import rope.refactor.wildcards
+from rope.base import codeanalyze, evaluate, exceptions, ast, builtins
+from rope.refactor import (patchedast, sourceutils, occurrences,
+ wildcards, importutils)
+
+
class BadNameInCheckError(exceptions.RefactoringError):
    """Raised when a goal/check refers to an unknown wildcard name."""
    pass
+
+
class SimilarFinder(object):
    """`SimilarFinder` can be used to find similar pieces of code

    See the notes in the `rope.refactor.restructure` module for more
    info.

    """

    def __init__(self, pymodule, wildcards=None):
        """Construct a SimilarFinder

        `wildcards` optionally maps wildcard names to wildcard objects;
        by default a single `DefaultWildcard` is registered.
        """
        self.source = pymodule.source_code
        self.raw_finder = RawSimilarFinder(
            pymodule.source_code, pymodule.get_ast(), self._does_match)
        self.pymodule = pymodule
        if wildcards is None:
            self.wildcards = {}
            for wildcard in [rope.refactor.wildcards.
                             DefaultWildcard(pymodule.pycore.project)]:
                self.wildcards[wildcard.get_name()] = wildcard
        else:
            self.wildcards = wildcards

    def get_matches(self, code, args=None, start=0, end=None):
        """Yield matches of `code` between the `start` and `end` offsets.

        `args` maps wildcard names to their arguments; the special ''
        entry may carry a 'skip' key with a (resource, region) pair
        naming a region of this module to exclude.
        """
        # Fixed: the default used to be a shared mutable `{}`; the dict
        # is stored on self (read later by _does_match), so sharing one
        # instance across all calls is unsafe.
        if args is None:
            args = {}
        self.args = args
        if end is None:
            end = len(self.source)
        skip_region = None
        if 'skip' in args.get('', {}):
            resource, region = args['']['skip']
            # Only honor the skip region when it belongs to this module.
            if resource == self.pymodule.get_resource():
                skip_region = region
        return self.raw_finder.get_matches(code, start=start, end=end,
                                           skip=skip_region)

    def get_match_regions(self, *args, **kwds):
        """Yield (start, end) offsets for each match."""
        for match in self.get_matches(*args, **kwds):
            yield match.get_region()

    def _does_match(self, node, name):
        # A wildcard argument may be a plain value (default kind) or a
        # (kind, value) pair selecting a named wildcard.
        arg = self.args.get(name, '')
        kind = 'default'
        if isinstance(arg, (tuple, list)):
            kind = arg[0]
            arg = arg[1]
        suspect = wildcards.Suspect(self.pymodule, node, name)
        return self.wildcards[kind].matches(suspect, arg)
+
+
class RawSimilarFinder(object):
    """A class for finding similar expressions and statements"""

    def __init__(self, source, node=None, does_match=None):
        # `node` lets callers reuse an already parsed (and possibly
        # already patched) AST; `does_match` customizes wildcard checks.
        if node is None:
            node = ast.parse(source)
        if does_match is None:
            self.does_match = self._simple_does_match
        else:
            self.does_match = does_match
        self._init_using_ast(node, source)

    def _simple_does_match(self, node, name):
        # Default wildcard check: any expression or name node matches.
        return isinstance(node, (ast.expr, ast.Name))

    def _init_using_ast(self, node, source):
        self.source = source
        # Cache mapping pattern code -> list of matches.
        self._matched_asts = {}
        if not hasattr(node, 'region'):
            # patch_ast annotates nodes with source regions, which
            # Match.get_region() relies on; skip if already patched.
            patchedast.patch_ast(node, source)
        self.ast = node

    def get_matches(self, code, start=0, end=None, skip=None):
        """Search for `code` in source and yield `Match` objects

        `code` can contain wildcards.  ``${name}`` matches normal
        names and ``${?name}`` can match any expression.  You can use
        `Match.get_ast()` for getting the node that has matched a
        given pattern.

        Only matches inside the [start, end] offset range are yielded;
        a match overlapping the `skip` (start, end) region is dropped.
        """
        if end is None:
            end = len(self.source)
        for match in self._get_matched_asts(code):
            match_start, match_end = match.get_region()
            if start <= match_start and match_end <= end:
                if skip is not None and (skip[0] < match_end and
                                         skip[1] > match_start):
                    continue
                yield match

    def _get_matched_asts(self, code):
        # Memoize the match list for each pattern string.
        if code not in self._matched_asts:
            wanted = self._create_pattern(code)
            matches = _ASTMatcher(self.ast, wanted,
                                  self.does_match).find_matches()
            self._matched_asts[code] = matches
        return self._matched_asts[code]

    def _create_pattern(self, expression):
        """Parse `expression` into the AST pattern to search for."""
        expression = self._replace_wildcards(expression)
        node = ast.parse(expression)
        # Getting Module.Stmt.nodes
        nodes = node.body
        if len(nodes) == 1 and isinstance(nodes[0], ast.Expr):
            # A single expression statement: match the bare expression.
            # Getting Discard.expr
            wanted = nodes[0].value
        else:
            # Otherwise match the whole statement sequence.
            wanted = nodes
        return wanted

    def _replace_wildcards(self, expression):
        # Rewrite ${name} placeholders into parseable identifiers.
        ropevar = _RopeVariable()
        template = CodeTemplate(expression)
        mapping = {}
        for name in template.get_names():
            mapping[name] = ropevar.get_var(name)
        return template.substitute(mapping)
+
+
class _ASTMatcher(object):

    def __init__(self, body, pattern, does_match):
        """Searches the given pattern in the body AST.

        body is an AST node and pattern can be either an AST node or
        a list of ASTs nodes
        """
        self.body = body
        self.pattern = pattern
        self.matches = None
        self.ropevar = _RopeVariable()
        self.matches_callback = does_match

    def find_matches(self):
        """Return all matches; computed once and cached."""
        if self.matches is None:
            self.matches = []
            ast.call_for_nodes(self.body, self._check_node, recursive=True)
        return self.matches

    def _check_node(self, node):
        # A list pattern means "sequence of statements"; otherwise the
        # pattern is a single expression.
        if isinstance(self.pattern, list):
            self._check_statements(node)
        else:
            self._check_expression(node)

    def _check_expression(self, node):
        mapping = {}
        if self._match_nodes(self.pattern, node, mapping):
            self.matches.append(ExpressionMatch(node, mapping))

    def _check_statements(self, node):
        # Statement sequences live in list/tuple children (e.g. body,
        # orelse); check each such sequence for a window match.
        for child in ast.get_children(node):
            if isinstance(child, (list, tuple)):
                self.__check_stmt_list(child)

    def __check_stmt_list(self, nodes):
        # Slide a window of len(self.pattern) statements over the list.
        for index in range(len(nodes)):
            if len(nodes) - index >= len(self.pattern):
                current_stmts = nodes[index:index + len(self.pattern)]
                mapping = {}
                if self._match_stmts(current_stmts, mapping):
                    self.matches.append(StatementMatch(current_stmts, mapping))

    def _match_nodes(self, expected, node, mapping):
        """Structurally compare pattern node `expected` against `node`.

        Wildcard names in the pattern capture nodes into `mapping`.
        """
        if isinstance(expected, ast.Name):
            if self.ropevar.is_var(expected.id):
                return self._match_wildcard(expected, node, mapping)
        if not isinstance(expected, ast.AST):
            # Leaf values (identifiers, constants) compare by equality.
            return expected == node
        if expected.__class__ != node.__class__:
            return False

        children1 = self._get_children(expected)
        children2 = self._get_children(node)
        if len(children1) != len(children2):
            return False
        for child1, child2 in zip(children1, children2):
            if isinstance(child1, ast.AST):
                if not self._match_nodes(child1, child2, mapping):
                    return False
            elif isinstance(child1, (list, tuple)):
                if not isinstance(child2, (list, tuple)) or \
                   len(child1) != len(child2):
                    return False
                for c1, c2 in zip(child1, child2):
                    if not self._match_nodes(c1, c2, mapping):
                        return False
            else:
                if child1 != child2:
                    return False
        return True

    def _get_children(self, node):
        """Return not `ast.expr_context` children of `node`"""
        # Load/Store contexts are ignored so the same name pattern can
        # match in both reading and writing positions.
        children = ast.get_children(node)
        return [child for child in children
                if not isinstance(child, ast.expr_context)]

    def _match_stmts(self, current_stmts, mapping):
        if len(current_stmts) != len(self.pattern):
            return False
        for stmt, expected in zip(current_stmts, self.pattern):
            if not self._match_nodes(expected, stmt, mapping):
                return False
        return True

    def _match_wildcard(self, node1, node2, mapping):
        name = self.ropevar.get_base(node1.id)
        if name not in mapping:
            # First occurrence: ask the callback whether node2 is an
            # acceptable binding for this wildcard.
            if self.matches_callback(node2, name):
                mapping[name] = node2
                return True
            return False
        else:
            # Repeated occurrence: must structurally equal the first.
            return self._match_nodes(mapping[name], node2, {})
+
+
class Match(object):
    """Base class for a single pattern match.

    Stores the mapping from wildcard names to the AST nodes they
    captured.
    """

    def __init__(self, mapping):
        self.mapping = mapping

    def get_region(self):
        """Returns match region"""

    def get_ast(self, name):
        """Return the ast node that has matched rope variables"""
        if name in self.mapping:
            return self.mapping[name]
        return None
+
+
class ExpressionMatch(Match):
    """A match whose pattern was a single expression."""

    def __init__(self, ast, mapping):
        super(ExpressionMatch, self).__init__(mapping)
        # The matched expression node (patched with a `region` attr).
        self.ast = ast

    def get_region(self):
        """Return the (start, end) source offsets of the expression."""
        return self.ast.region
+
+
class StatementMatch(Match):
    """A match whose pattern was a sequence of statements."""

    def __init__(self, ast_list, mapping):
        super(StatementMatch, self).__init__(mapping)
        # Consecutive statement nodes that matched, in source order.
        self.ast_list = ast_list

    def get_region(self):
        """Return offsets spanning the first through last statement."""
        return self.ast_list[0].region[0], self.ast_list[-1].region[1]
+
+
class CodeTemplate(object):
    """A piece of code with ``${name}`` placeholders."""

    def __init__(self, template):
        self.template = template
        self._find_names()

    def _find_names(self):
        # Maps placeholder name -> list of (start, end) spans; the
        # pattern's comment/string alternatives make the scan skip
        # placeholders inside comments and string literals.
        self.names = {}
        for match in CodeTemplate._get_pattern().finditer(self.template):
            if 'name' in match.groupdict() and \
               match.group('name') is not None:
                start, end = match.span('name')
                # Strip the surrounding "${" and "}".
                name = self.template[start + 2:end - 1]
                if name not in self.names:
                    self.names[name] = []
                self.names[name].append((start, end))

    def get_names(self):
        """Return the placeholder names found in the template."""
        return self.names.keys()

    def substitute(self, mapping):
        """Return the template with each placeholder replaced per `mapping`."""
        collector = codeanalyze.ChangeCollector(self.template)
        for name, occurrences in self.names.items():
            for region in occurrences:
                collector.add_change(region[0], region[1], mapping[name])
        result = collector.get_changed()
        if result is None:
            # No placeholders: the template is returned unchanged.
            return self.template
        return result

    # Lazily compiled regex shared by all instances.
    _match_pattern = None

    @classmethod
    def _get_pattern(cls):
        if cls._match_pattern is None:
            pattern = codeanalyze.get_comment_pattern() + '|' + \
                      codeanalyze.get_string_pattern() + '|' + \
                      r'(?P<name>\$\{[^\s\$\}]*\})'
            cls._match_pattern = re.compile(pattern)
        return cls._match_pattern
+
+
+class _RopeVariable(object):
+ """Transform and identify rope inserted wildcards"""
+
+ _normal_prefix = '__rope__variable_normal_'
+ _any_prefix = '__rope__variable_any_'
+
+ def get_var(self, name):
+ if name.startswith('?'):
+ return self._get_any(name)
+ else:
+ return self._get_normal(name)
+
+ def is_var(self, name):
+ return self._is_normal(name) or self._is_var(name)
+
+ def get_base(self, name):
+ if self._is_normal(name):
+ return name[len(self._normal_prefix):]
+ if self._is_var(name):
+ return '?' + name[len(self._any_prefix):]
+
+ def _get_normal(self, name):
+ return self._normal_prefix + name
+
+ def _get_any(self, name):
+ return self._any_prefix + name[1:]
+
+ def _is_normal(self, name):
+ return name.startswith(self._normal_prefix)
+
+ def _is_var(self, name):
+ return name.startswith(self._any_prefix)
+
+
def make_pattern(code, variables):
    """Rewrite `code` into a restructuring pattern.

    Every occurrence of a name listed in `variables` is replaced by the
    corresponding ``${name}`` wildcard.
    """
    variables = set(variables)
    collector = codeanalyze.ChangeCollector(code)
    def does_match(node, name):
        # Only plain name nodes spelled exactly like the variable count.
        return isinstance(node, ast.Name) and node.id == name
    finder = RawSimilarFinder(code, does_match=does_match)
    for variable in variables:
        for match in finder.get_matches('${%s}' % variable):
            start, end = match.get_region()
            collector.add_change(start, end, '${%s}' % variable)
    result = collector.get_changed()
    # get_changed() is None when nothing was replaced.
    return result if result is not None else code
+
+
def _pydefined_to_str(pydefined):
    """Return a dotted-path string locating `pydefined` (def/class)."""
    address = []
    if isinstance(pydefined, (builtins.BuiltinClass, builtins.BuiltinFunction)):
        return '__builtins__.' + pydefined.get_name()
    else:
        # Walk up the parent chain collecting names, then prefix with
        # the dotted module path of the enclosing module.
        while pydefined.parent is not None:
            address.insert(0, pydefined.get_name())
            pydefined = pydefined.parent
        module_name = pydefined.pycore.modname(pydefined.resource)
        return '.'.join(module_name.split('.') + address)
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/sourceutils.py b/.vim/bundle/python-mode/pylibs/rope/refactor/sourceutils.py
@@ -0,0 +1,92 @@
+from rope.base import ast, codeanalyze
+
+
def get_indents(lines, lineno):
    """Return the indentation width of line `lineno` in `lines`."""
    return codeanalyze.count_line_indents(lines.get_line(lineno))
+
+
def find_minimum_indents(source_code):
    """Return the smallest indentation among non-blank lines.

    The result is capped at 80 columns, which is also the value
    returned for an all-blank input.
    """
    result = 80
    for line in source_code.split('\n'):
        if not line.strip():
            continue
        result = min(result, codeanalyze.count_line_indents(line))
    return result
+
+
def indent_lines(source_code, amount):
    """Shift every non-blank line of `source_code` by `amount` columns.

    A positive `amount` prepends spaces; a negative one strips up to
    that many columns of existing indentation.  Blank lines are
    normalized to a bare newline.
    """
    if amount == 0:
        return source_code
    result = []
    for line in source_code.splitlines(True):
        if not line.strip():
            result.append('\n')
        elif amount > 0:
            result.append(' ' * amount + line)
        else:
            indents = codeanalyze.count_line_indents(line)
            result.append(' ' * max(0, indents + amount) + line.lstrip())
    return ''.join(result)
+
+
def fix_indentation(code, new_indents):
    """Change the indentation of `code` to `new_indents`

    The block is shifted so its least-indented line ends up at column
    `new_indents`, preserving relative indentation.
    """
    min_indents = find_minimum_indents(code)
    return indent_lines(code, new_indents - min_indents)
+
+
def add_methods(pymodule, class_scope, methods_sources):
    """Return the module source with `methods_sources` added to a class.

    The methods are inserted after the class's last child scope (or at
    the class end), indented one level deeper than the class itself.
    """
    source_code = pymodule.source_code
    lines = pymodule.lines
    insertion_line = class_scope.get_end()
    if class_scope.get_scopes():
        # Insert right after the last method rather than after any
        # trailing non-scope lines of the class.
        insertion_line = class_scope.get_scopes()[-1].get_end()
    insertion_offset = lines.get_line_end(insertion_line)
    methods = '\n\n' + '\n\n'.join(methods_sources)
    indented_methods = fix_indentation(
        methods, get_indents(lines, class_scope.get_start()) +
        get_indent(pymodule.pycore))
    result = []
    result.append(source_code[:insertion_offset])
    result.append(indented_methods)
    result.append(source_code[insertion_offset:])
    return ''.join(result)
+
+
def get_body(pyfunction):
    """Return unindented function body

    The body text is taken from `get_body_region` and shifted to
    zero indentation.
    """
    # Fixed: a `scope` local was computed here but never used.
    pymodule = pyfunction.get_module()
    start, end = get_body_region(pyfunction)
    return fix_indentation(pymodule.source_code[start:end], 0)
+
+
def get_body_region(defined):
    """Return the start and end offsets of function body"""
    scope = defined.get_scope()
    pymodule = defined.get_module()
    lines = pymodule.lines
    node = defined.get_ast()
    start_line = node.lineno
    if defined.get_doc() is None:
        start_line = node.body[0].lineno
    elif len(node.body) > 1:
        # Skip the docstring statement.
        start_line = node.body[1].lineno
    start = lines.get_line_start(start_line)
    scope_start = pymodule.logical_lines.logical_line_in(scope.start)
    if scope_start[1] >= start_line:
        # a one-liner!
        # XXX: what if colon appears in a string
        start = pymodule.source_code.index(':', start) + 1
        while pymodule.source_code[start].isspace():
            start += 1
    end = min(lines.get_line_end(scope.end) + 1, len(pymodule.source_code))
    return start, end
+
+
def get_indent(pycore):
    """Return the project's configured indent size (defaults to 4)."""
    return pycore.project.prefs.get('indent_size', 4)
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/suites.py b/.vim/bundle/python-mode/pylibs/rope/refactor/suites.py
@@ -0,0 +1,142 @@
+from rope.base import ast
+
+
def find_visible(node, lines):
    """Return the line which is visible from all `lines`"""
    # Build the suite tree for the module AST, then reduce the lines.
    root = ast_suite_tree(node)
    return find_visible_for_suite(root, lines)
+
+
def find_visible_for_suite(root, lines):
    """Reduce `lines` pairwise to one line visible from all of them.

    Returns None when a line falls into an ignored suite (a nested
    function/class body) or cannot be placed in a suite at all.
    """
    if len(lines) == 1:
        return lines[0]
    line1 = lines[0]
    line2 = find_visible_for_suite(root, lines[1:])
    suite1 = root.find_suite(line1)
    suite2 = root.find_suite(line2)
    def valid(suite):
        return suite is not None and not suite.ignored
    if valid(suite1) and not valid(suite2):
        return line1
    if not valid(suite1) and valid(suite2):
        return line2
    if not valid(suite1) and not valid(suite2):
        return None
    # Climb both suites toward a common parent, replacing each line by
    # the start line of the suite being abandoned.
    while suite1 != suite2 and suite1.parent != suite2.parent:
        if suite1._get_level() < suite2._get_level():
            line2 = suite2.get_start()
            suite2 = suite2.parent
        elif suite1._get_level() > suite2._get_level():
            line1 = suite1.get_start()
            suite1 = suite1.parent
        else:
            line1 = suite1.get_start()
            line2 = suite2.get_start()
            suite1 = suite1.parent
            suite2 = suite2.parent
    if suite1 == suite2:
        return min(line1, line2)
    return min(suite1.get_start(), suite2.get_start())
+
+
def ast_suite_tree(node):
    """Wrap `node`'s body in a root `Suite`.

    Nodes without a `lineno` (e.g. a Module) are rooted at line 1.
    """
    return Suite(node.body, getattr(node, 'lineno', 1))
+
+
class Suite(object):
    """A node in a tree of statement suites (bodies of blocks)."""

    def __init__(self, child_nodes, lineno, parent=None, ignored=False):
        self.parent = parent
        self.lineno = lineno
        self.child_nodes = child_nodes
        # Lazily built list of sub-suites (see get_children).
        self._children = None
        # Ignored suites are nested function/class bodies.
        self.ignored = ignored

    def get_start(self):
        if self.parent is None:
            # The root suite starts at its first statement, or line 1
            # when the body is empty.
            if self.child_nodes:
                return self.local_start()
            else:
                return 1
        return self.lineno

    def get_children(self):
        if self._children is None:
            walker = _SuiteWalker(self)
            for child in self.child_nodes:
                ast.walk(child, walker)
            self._children = walker.suites
        return self._children

    def local_start(self):
        # Line of the first statement inside this suite.
        return self.child_nodes[0].lineno

    def local_end(self):
        # Last line covered by this suite, including nested suites.
        end = self.child_nodes[-1].lineno
        if self.get_children():
            end = max(end, self.get_children()[-1].local_end())
        return end

    def find_suite(self, line):
        """Return the innermost suite containing `line` (self if none)."""
        if line is None:
            return None
        for child in self.get_children():
            if child.local_start() <= line <= child.local_end():
                return child.find_suite(line)
        return self

    def _get_level(self):
        # Depth of this suite below the root (root is 0).
        if self.parent is None:
            return 0
        return self.parent._get_level() + 1
+
+
class _SuiteWalker(object):
    """AST visitor collecting the immediate sub-suites of one suite."""

    def __init__(self, suite):
        self.suite = suite
        self.suites = []

    def _If(self, node):
        self._add_if_like_node(node)

    def _For(self, node):
        self._add_if_like_node(node)

    def _While(self, node):
        self._add_if_like_node(node)

    def _With(self, node):
        self.suites.append(Suite(node.body, node.lineno, self.suite))

    def _TryFinally(self, node):
        # try/except/finally parses as TryFinally wrapping a TryExcept;
        # unwrap it so each handler gets its own suite.
        if len(node.finalbody) == 1 and \
           isinstance(node.body[0], ast.TryExcept):
            self._TryExcept(node.body[0])
        else:
            self.suites.append(Suite(node.body, node.lineno, self.suite))
        self.suites.append(Suite(node.finalbody, node.lineno, self.suite))

    def _TryExcept(self, node):
        self.suites.append(Suite(node.body, node.lineno, self.suite))
        for handler in node.handlers:
            self.suites.append(Suite(handler.body, node.lineno, self.suite))
        if node.orelse:
            self.suites.append(Suite(node.orelse, node.lineno, self.suite))

    def _add_if_like_node(self, node):
        # if/for/while: a body suite plus an optional else suite.
        self.suites.append(Suite(node.body, node.lineno, self.suite))
        if node.orelse:
            self.suites.append(Suite(node.orelse, node.lineno, self.suite))

    def _FunctionDef(self, node):
        # Nested definitions are recorded but marked ignored.
        self.suites.append(Suite(node.body, node.lineno,
                                 self.suite, ignored=True))

    def _ClassDef(self, node):
        self.suites.append(Suite(node.body, node.lineno,
                                 self.suite, ignored=True))
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/topackage.py b/.vim/bundle/python-mode/pylibs/rope/refactor/topackage.py
@@ -0,0 +1,32 @@
+import rope.refactor.importutils
+from rope.base.change import ChangeSet, ChangeContents, MoveResource, CreateFolder
+
+
class ModuleToPackage(object):
    """Transform a module (``x.py``) into a package (``x/__init__.py``)."""

    def __init__(self, project, resource):
        self.project = project
        self.pycore = project.pycore
        self.resource = resource

    def get_changes(self):
        """Return the ChangeSet that performs the transformation."""
        changes = ChangeSet('Transform <%s> module to package' %
                            self.resource.path)
        # Relative imports are rewritten to absolute ones first, since
        # the module is about to move one level deeper.
        new_content = self._transform_relatives_to_absolute(self.resource)
        if new_content is not None:
            changes.add_change(ChangeContents(self.resource, new_content))
        parent = self.resource.parent
        name = self.resource.name[:-3]  # strip the ".py" suffix
        changes.add_change(CreateFolder(parent, name))
        parent_path = parent.path + '/'
        if not parent.path:
            # Root folder: avoid a leading slash in the new path.
            parent_path = ''
        new_path = parent_path + '%s/__init__.py' % name
        if self.resource.project == self.project:
            changes.add_change(MoveResource(self.resource, new_path))
        return changes

    def _transform_relatives_to_absolute(self, resource):
        pymodule = self.pycore.resource_to_pyobject(resource)
        import_tools = rope.refactor.importutils.ImportTools(self.pycore)
        return import_tools.relatives_to_absolutes(pymodule)
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/usefunction.py b/.vim/bundle/python-mode/pylibs/rope/refactor/usefunction.py
@@ -0,0 +1,171 @@
+from rope.base import (change, taskhandle, evaluate,
+ exceptions, pyobjects, pynames, ast)
+from rope.refactor import restructure, sourceutils, similarfinder, importutils
+
+
class UseFunction(object):
    """Try to use a function wherever possible"""

    def __init__(self, project, resource, offset):
        self.project = project
        self.offset = offset
        this_pymodule = project.pycore.resource_to_pyobject(resource)
        pyname = evaluate.eval_location(this_pymodule, offset)
        if pyname is None:
            raise exceptions.RefactoringError('Unresolvable name selected')
        self.pyfunction = pyname.get_object()
        if not isinstance(self.pyfunction, pyobjects.PyFunction) or \
           not isinstance(self.pyfunction.parent, pyobjects.PyModule):
            raise exceptions.RefactoringError(
                'Use function works for global functions, only.')
        self.resource = self.pyfunction.get_module().get_resource()
        self._check_returns()

    def _check_returns(self):
        """Reject generators and functions with multiple/misplaced returns."""
        node = self.pyfunction.get_ast()
        if _yield_count(node):
            raise exceptions.RefactoringError('Use function should not '
                                              'be used on generators.')
        returns = _return_count(node)
        if returns > 1:
            raise exceptions.RefactoringError('usefunction: Function has more '
                                              'than one return statement.')
        if returns == 1 and not _returns_last(node):
            raise exceptions.RefactoringError('usefunction: return should '
                                              'be the last statement.')

    def get_changes(self, resources=None,
                    task_handle=taskhandle.NullTaskHandle()):
        """Return a ChangeSet replacing occurrences of the function body.

        Other modules are restructured with an added import of the
        defining module; the defining module itself is restructured
        last and without one.
        """
        if resources is None:
            resources = self.project.pycore.get_python_files()
        changes = change.ChangeSet('Using function <%s>' %
                                   self.pyfunction.get_name())
        # Bug fix: `newresources` used to be assigned only inside the
        # `if`, raising NameError whenever the defining resource was
        # not part of `resources`.
        newresources = list(resources)
        if self.resource in newresources:
            newresources.remove(self.resource)
        for c in self._restructure(newresources, task_handle).changes:
            changes.add_change(c)
        if self.resource in resources:
            for c in self._restructure([self.resource], task_handle,
                                       others=False).changes:
                changes.add_change(c)
        return changes

    def get_function_name(self):
        """Return the name of the function being used."""
        return self.pyfunction.get_name()

    def _restructure(self, resources, task_handle, others=True):
        # Build a restructuring whose pattern is the function body and
        # whose goal is a call of the function.
        body = self._get_body()
        pattern = self._make_pattern()
        goal = self._make_goal(import_=others)
        imports = None
        if others:
            imports = ['import %s' % self._module_name()]

        # Never rewrite the function's own body (it trivially matches).
        body_region = sourceutils.get_body_region(self.pyfunction)
        args_value = {'skip': (self.resource, body_region)}
        args = {'': args_value}

        restructuring = restructure.Restructure(
            self.project, pattern, goal, args=args, imports=imports)
        return restructuring.get_changes(resources=resources,
                                         task_handle=task_handle)

    def _find_temps(self):
        return find_temps(self.project, self._get_body())

    def _module_name(self):
        return self.project.pycore.modname(self.resource)

    def _make_pattern(self):
        # Parameters and local temporaries become wildcards; a trailing
        # `return expr` is rewritten so the expression is captured.
        params = self.pyfunction.get_param_names()
        body = self._get_body()
        body = restructure.replace(body, 'return', 'pass')
        wildcards = list(params)
        wildcards.extend(self._find_temps())
        if self._does_return():
            if self._is_expression():
                replacement = '${%s}' % self._rope_returned
            else:
                replacement = '%s = ${%s}' % (self._rope_result,
                                              self._rope_returned)
            body = restructure.replace(
                body, 'return ${%s}' % self._rope_returned,
                replacement)
            wildcards.append(self._rope_result)
        return similarfinder.make_pattern(body, wildcards)

    def _get_body(self):
        return sourceutils.get_body(self.pyfunction)

    def _make_goal(self, import_=False):
        # The goal is `[module.]func(${p1}, ...)`, optionally assigned
        # to the captured result wildcard.
        params = self.pyfunction.get_param_names()
        function_name = self.pyfunction.get_name()
        if import_:
            function_name = self._module_name() + '.' + function_name
        goal = '%s(%s)' % (function_name,
                           ', ' .join(('${%s}' % p) for p in params))
        if self._does_return() and not self._is_expression():
            goal = '${%s} = %s' % (self._rope_result, goal)
        return goal

    def _does_return(self):
        # If stripping `return ${result}` changes the body, it returns.
        body = self._get_body()
        removed_return = restructure.replace(body, 'return ${result}', '')
        return removed_return != body

    def _is_expression(self):
        # A single-statement body is treated as a bare expression.
        return len(self.pyfunction.get_ast().body) == 1

    # Internal wildcard names used inside patterns and goals.
    _rope_result = '_rope__result'
    _rope_returned = '_rope__returned'
+
+
def find_temps(project, code):
    """Return names assigned inside `code` (treated as a function body)."""
    # Wrap the body in a dummy function so a scope can be built for it.
    code = 'def f():\n' + sourceutils.indent_lines(code, 4)
    pymodule = project.pycore.get_string_module(code)
    result = []
    function_scope = pymodule.get_scope().get_scopes()[0]
    for name, pyname in function_scope.get_names().items():
        if isinstance(pyname, pynames.AssignedName):
            result.append(name)
    return result
+
+
+def _returns_last(node):
+ return node.body and isinstance(node.body[-1], ast.Return)
+
def _yield_count(node):
    """Number of yields in `node`, ignoring nested defs/classes."""
    visitor = _ReturnOrYieldFinder()
    visitor.start_walking(node)
    return visitor.yields
+
def _return_count(node):
    """Number of returns in `node`, ignoring nested defs/classes."""
    visitor = _ReturnOrYieldFinder()
    visitor.start_walking(node)
    return visitor.returns
+
class _ReturnOrYieldFinder(object):
    """Count return/yield statements, skipping nested functions/classes."""

    def __init__(self):
        self.returns = 0
        self.yields = 0

    def _Return(self, node):
        self.returns += 1

    def _Yield(self, node):
        self.yields += 1

    def _FunctionDef(self, node):
        # Do not descend: returns/yields of nested functions don't count.
        pass

    def _ClassDef(self, node):
        pass

    def start_walking(self, node):
        nodes = [node]
        if isinstance(node, ast.FunctionDef):
            # Walk the function's children directly, so the _FunctionDef
            # handler above does not immediately stop the walk.
            nodes = ast.get_child_nodes(node)
        for child in nodes:
            ast.walk(child, self)
diff --git a/.vim/bundle/python-mode/pylibs/rope/refactor/wildcards.py b/.vim/bundle/python-mode/pylibs/rope/refactor/wildcards.py
@@ -0,0 +1,176 @@
+from rope.base import ast, evaluate, builtins, pyobjects
+from rope.refactor import patchedast, occurrences
+
+
class Wildcard(object):
    """Interface implemented by restructuring wildcards."""

    def get_name(self):
        """Return the name of this wildcard"""

    def matches(self, suspect, arg):
        """Return `True` if `suspect` matches this wildcard"""
+
+
class Suspect(object):
    """A candidate occurrence: AST `node` being checked against the
    wildcard called `name` within `pymodule`."""

    def __init__(self, pymodule, node, name):
        self.pymodule = pymodule
        self.node = node
        self.name = name
+
+
class DefaultWildcard(object):
    """The default restructuring wildcard

    The argument passed to this wildcard is in the
    ``key1=value1,key2=value2,...`` format. Possible keys are:

    * name - for checking the reference
    * type - for checking the type
    * object - for checking the object
    * instance - for checking types but similar to builtin isinstance
    * exact - matching only occurrences with the same name as the wildcard
    * unsure - matching unsure occurrences

    """

    def __init__(self, project):
        self.project = project

    def get_name(self):
        return 'default'

    def matches(self, suspect, arg=''):
        """Return True if `suspect` passes the checks parsed from `arg`."""
        args = parse_arg(arg)

        if not self._check_exact(args, suspect):
            return False
        if not self._check_object(args, suspect):
            return False
        return True

    def _check_object(self, args, suspect):
        # Only one of name/object/type/instance is applied; the last
        # one present in the list below wins.
        kind = None
        expected = None
        unsure = args.get('unsure', False)
        for check in ['name', 'object', 'type', 'instance']:
            if check in args:
                kind = check
                expected = args[check]
        if expected is not None:
            checker = _CheckObject(self.project, expected,
                                   kind, unsure=unsure)
            return checker(suspect.pymodule, suspect.node)
        return True

    def _check_exact(self, args, suspect):
        # With `exact`, only Name nodes spelled like the wildcard match;
        # otherwise any expression node is acceptable.
        node = suspect.node
        if args.get('exact'):
            if not isinstance(node, ast.Name) or not node.id == suspect.name:
                return False
        else:
            if not isinstance(node, ast.expr):
                return False
        return True
+
+
def parse_arg(arg):
    """Parse a ``key1=value1,key2`` wildcard argument string into a dict.

    A dict argument passes through unchanged; bare tokens map to True.
    """
    if isinstance(arg, dict):
        return arg
    result = {}
    for token in arg.split(','):
        key, sep, value = token.partition('=')
        if sep:
            result[key.strip()] = value.strip()
        else:
            result[token.strip()] = True
    return result
+
+
class _CheckObject(object):
    """Callable checking a node against an expected name/object/type."""

    def __init__(self, project, expected, kind='object', unsure=False):
        self.project = project
        self.kind = kind
        self.unsure = unsure
        self.expected = self._evaluate(expected)

    def __call__(self, pymodule, node):
        pyname = self._evaluate_node(pymodule, node)
        if pyname is None or self.expected is None:
            # One side could not be resolved; accept only in unsure mode.
            return self.unsure
        if self._unsure_pyname(pyname, unbound=self.kind=='name'):
            return True
        if self.kind == 'name':
            return self._same_pyname(self.expected, pyname)
        else:
            pyobject = pyname.get_object()
            if self.kind == 'object':
                objects = [pyobject]
            if self.kind == 'type':
                objects = [pyobject.get_type()]
            if self.kind == 'instance':
                # isinstance-like: the object plus the superclass chains
                # of both the object and its type are all acceptable.
                objects = [pyobject]
                objects.extend(self._get_super_classes(pyobject))
                objects.extend(self._get_super_classes(pyobject.get_type()))
            for pyobject in objects:
                if self._same_pyobject(self.expected.get_object(), pyobject):
                    return True
            return False

    def _get_super_classes(self, pyobject):
        # Recursively collect the whole superclass chain.
        result = []
        if isinstance(pyobject, pyobjects.AbstractClass):
            for superclass in pyobject.get_superclasses():
                result.append(superclass)
                result.extend(self._get_super_classes(superclass))
        return result

    def _same_pyobject(self, expected, pyobject):
        return expected == pyobject

    def _same_pyname(self, expected, pyname):
        return occurrences.same_pyname(expected, pyname)

    def _unsure_pyname(self, pyname, unbound=True):
        return self.unsure and occurrences.unsure_pyname(pyname, unbound)

    def _split_name(self, name):
        # "expr.kind" -> (expr, kind); a bare name defaults to 'name'.
        parts = name.split('.')
        expression, kind = parts[0], parts[-1]
        if len(parts) == 1:
            kind = 'name'
        return expression, kind

    def _evaluate_node(self, pymodule, node):
        scope = pymodule.get_scope().get_inner_scope_for_line(node.lineno)
        expression = node
        if isinstance(expression, ast.Name) and \
           isinstance(expression.ctx, ast.Store):
            # Store-context names cannot be evaluated directly, so
            # evaluate their source text instead.
            start, end = patchedast.node_region(expression)
            text = pymodule.source_code[start:end]
            return evaluate.eval_str(scope, text)
        else:
            return evaluate.eval_node(scope, expression)

    def _evaluate(self, code):
        """Resolve dotted `code` (e.g. 'mod.Class.attr') to a pyname."""
        attributes = code.split('.')
        pyname = None
        if attributes[0] in ('__builtin__', '__builtins__'):
            # Stub standing in for the builtins "module".
            class _BuiltinsStub(object):
                def get_attribute(self, name):
                    return builtins.builtins[name]
                def __getitem__(self, name):
                    return builtins.builtins[name]
                def __contains__(self, name):
                    return name in builtins.builtins
            pyobject = _BuiltinsStub()
        else:
            pyobject = self.project.pycore.get_module(attributes[0])
        for attribute in attributes[1:]:
            pyname = pyobject[attribute]
            if pyname is None:
                # Attribute chain broke; no object to check against.
                return None
            pyobject = pyname.get_object()
        return pyname
diff --git a/.vim/bundle/python-mode/pylibs/ropemode/__init__.py b/.vim/bundle/python-mode/pylibs/ropemode/__init__.py
@@ -0,0 +1,16 @@
+"""ropemode, a helper for using rope refactoring library in IDEs"""
+
+INFO = __doc__
+VERSION = '0.2'
+COPYRIGHT = """\
+Copyright (C) 2007-2012 Ali Gholami Rudi
+
+This program is free software; you can redistribute it and/or modify it
+under the terms of GNU General Public License as published by the
+Free Software Foundation; either version 2 of the license, or (at your
+opinion) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details."""
diff --git a/.vim/bundle/python-mode/pylibs/ropemode/decorators.py b/.vim/bundle/python-mode/pylibs/ropemode/decorators.py
@@ -0,0 +1,98 @@
+import traceback
+
+from rope.base import exceptions
+
+
class Logger(object):
    """Message sink for ropemode.

    `message` may be set to a callable that displays a string; when it
    is None messages fall back to `print`.  With `only_short` set, the
    long form is suppressed whenever a short form is supplied.
    """

    message = None
    only_short = False

    def __call__(self, message, short=None):
        if short is None or not self.only_short:
            self._show(message)
        if short is not None:
            self._show(short)

    def _show(self, message):
        if self.message is None:
            print message
        else:
            self.message(message)

# Shared module-level logger instance.
logger = Logger()
+
+
def lisphook(func):
    """Decorator for editor hooks: log exceptions instead of raising.

    The wrapped function's return value is discarded.
    """
    def newfunc(*args, **kwds):
        try:
            func(*args, **kwds)
        except Exception, e:
            trace = str(traceback.format_exc())
            short = 'Ignored an exception in ropemode hook: %s' % \
                    _exception_message(e)
            logger(trace, short)
    # Mark as lisp-callable and keep the original identity visible.
    newfunc.lisp = None
    newfunc.__name__ = func.__name__
    newfunc.__doc__ = func.__doc__
    return newfunc
+
+
def lispfunction(func):
    """Mark `func` as callable from the editor (lisp) side."""
    func.lisp = None
    return func
+
+
+input_exceptions = (exceptions.RefactoringError,
+ exceptions.ModuleSyntaxError,
+ exceptions.BadIdentifierError)
+
def _exception_handler(func):
    """Decorator logging rope errors; user-input errors get a short form."""
    def newfunc(*args, **kwds):
        try:
            return func(*args, **kwds)
        except exceptions.RopeError, e:
            short = None
            if isinstance(e, input_exceptions):
                short = _exception_message(e)
            logger(str(traceback.format_exc()), short)
    newfunc.__name__ = func.__name__
    newfunc.__doc__ = func.__doc__
    return newfunc
+
+def _exception_message(e):
+ return '%s: %s' % (e.__class__.__name__, str(e))
+
def rope_hook(hook):
    """Decorator factory registering `func` as the ropemode hook `hook`."""
    def decorator(func):
        func = lisphook(func)
        func.name = func.__name__
        func.kind = 'hook'
        func.hook = hook
        return func
    return decorator
+
+
def local_command(key=None, prefix=False, shortcut=None, name=None):
    """Decorator factory for buffer-local commands.

    `key`/`shortcut` are keybindings; `name` overrides the command name
    (it defaults to the function's own name).
    """
    def decorator(func, name=name):
        func = _exception_handler(func)
        func.kind = 'local'
        func.prefix = prefix
        func.local_key = key
        func.shortcut_key = shortcut
        if name is None:
            name = func.__name__
        func.name = name
        return func
    return decorator
+
+
def global_command(key=None, prefix=False):
    """Decorator factory for global (any-buffer) commands."""
    def decorator(func):
        func = _exception_handler(func)
        func.kind = 'global'
        func.prefix = prefix
        func.global_key = key
        func.name = func.__name__
        return func
    return decorator
diff --git a/.vim/bundle/python-mode/pylibs/ropemode/dialog.py b/.vim/bundle/python-mode/pylibs/ropemode/dialog.py
@@ -0,0 +1,97 @@
class Data(object):
    """One question asked in a ropemode dialog.

    `prompt`, `default`, `values` and `kind` describe how the question
    is presented; `decode` optionally post-processes the raw answer.
    """

    def __init__(self, prompt=None, default=None, values=None,
                 kind=None, decode=None):
        self.prompt = prompt
        self.default = default
        self.values = values
        self.kind = kind
        self._decode = decode

    def decode(self, value):
        """Return `value` run through the custom decoder, if any."""
        return self._decode(value) if self._decode else value
+
+
class Boolean(Data):
    """Yes/no dialog question; decodes the answer to a Python bool."""

    def __init__(self, prompt=None, default=False):
        yes, no = self._encode(True), self._encode(False)
        Data.__init__(self, prompt, yes if default else no, [yes, no])

    def _encode(self, value):
        """Map a truth value onto the literal offered to the user."""
        return 'yes' if value else 'no'

    def decode(self, value):
        """Accept several spellings of truth; anything else is False."""
        return value.lower() in ('yes', '1', 'true')
+
+
def show_dialog(askdata, actions, confs={}, optionals={}, initial_asking=True):
    """Run a question/answer dialog and return ``(action, values)``.

    `askdata` is a callable asking a single `Data` question.  `actions`
    lists the verbs that end the dialog (the first is the default);
    `confs` holds required questions, `optionals` optional ones.  A
    'batchset' pseudo-action lets the user set several values at once
    using the mini-language parsed by `_parse_batchset`.

    Fix: the caller's `actions` list is no longer mutated (previously
    'batchset' was appended to it, accumulating on repeated calls).
    `confs`/`optionals` are only read, never modified.
    """
    result = {}
    if initial_asking:
        for name, conf in confs.items():
            result[name] = askdata(conf)
    # Build the list of accepted answers without touching the arguments.
    names = list(actions)
    names.append('batchset')
    names.extend(optionals.keys())
    names.extend(confs.keys())
    base_question = Data('Choose what to do: ',
                         default=actions[0], values=names)
    batchset_question = Data('Batch sets: ')
    while True:
        response = askdata(base_question)
        if response == '':
            # Empty answer selects the default action on the next round.
            response = base_question.default
        elif response == 'batchset':
            sets = askdata(batchset_question)
            for key, value in _parse_batchset(sets).items():
                if key.endswith(':'):
                    key = key[:-1]
                if key in names:
                    result[key] = value
        elif response in actions:
            break
        else:
            # The answer names a single question: ask it individually.
            if response in confs:
                conf = confs[response]
            else:
                conf = optionals[response]
            oldvalue = result.get(response, None)
            result[response] = askdata(conf, starting=oldvalue)
    # Decode every known question, falling back to its default.
    decoded = {}
    all_confs = dict(confs)
    all_confs.update(optionals)
    for key in all_confs:
        conf = all_confs.get(key)
        if key in result:
            decoded[key] = conf.decode(result[key])
        else:
            decoded[key] = conf.decode(conf.default)
    return response, decoded
+
+
+def _parse_batchset(sets):
+ result = []
+ multiline = False
+ for line in sets.splitlines(True):
+ if line[0].isspace():
+ if multiline:
+ result[-1][1] += line[1:]
+ else:
+ if not line.strip():
+ continue
+ multiline= False
+ tokens = line.split(None, 1)
+ value = ''
+ if len(tokens) > 1:
+ result.append([tokens[0], tokens[1].rstrip('\r\n')])
+ else:
+ multiline = True
+ result.append([tokens[0], ''])
+ return dict(result)
diff --git a/.vim/bundle/python-mode/pylibs/ropemode/environment.py b/.vim/bundle/python-mode/pylibs/ropemode/environment.py
@@ -0,0 +1,104 @@
class Environment(object):
    """Abstract adapter between ropemode and a host editor.

    Concrete subclasses (e.g. for Vim or Emacs) override these methods
    to ask questions, inspect and edit buffers, and register commands.
    Every method here is a no-op placeholder returning ``None``.
    """

    # -- asking the user ---------------------------------------------------

    def ask(self, prompt, default=None, starting=None):
        """Ask for a free-form string."""

    def ask_values(self, prompt, values, default=None, starting=None):
        """Ask the user to pick one of `values`."""

    def ask_directory(self, prompt, default=None, starting=None):
        """Ask for a directory path."""

    def ask_completion(self, prompt, values, starting=None):
        """Ask with completion over `values`."""

    def message(self, message):
        """Show a one-line message to the user."""

    def yes_or_no(self, prompt):
        """Ask a full-word yes/no question."""

    def y_or_n(self, prompt):
        """Ask a single-key yes/no question."""

    def get(self, name, default=None):
        """Read an editor-side configuration value."""

    # -- inspecting the current buffer -------------------------------------

    def get_offset(self):
        """Cursor offset within the buffer text."""

    def get_text(self):
        """Full text of the current buffer."""

    def get_region(self):
        """Offsets of the current selection."""

    def filename(self):
        """Path of the file in the current buffer."""

    def is_modified(self):
        """Whether the current buffer has unsaved changes."""

    # -- editing buffers ---------------------------------------------------

    def goto_line(self, lineno):
        """Move the cursor to `lineno`."""

    def insert_line(self, line, lineno):
        """Insert `line` before `lineno`."""

    def insert(self, text):
        """Insert `text` at the cursor."""

    def delete(self, start, end):
        """Delete the text between offsets `start` and `end`."""

    # -- file management ---------------------------------------------------

    def filenames(self):
        """Paths of all open buffers."""

    def save_files(self, filenames):
        """Save the listed buffers."""

    def reload_files(self, filenames, moves={}):
        """Revert buffers; `moves` maps old paths to new ones."""

    def find_file(self, filename, readonly=False, other=False):
        """Open `filename`, optionally read-only or in another window."""

    # -- miscellaneous UI --------------------------------------------------

    def create_progress(self, name):
        """Create a progress reporter labelled `name`."""

    def current_word(self):
        """Word under the cursor."""

    def push_mark(self):
        """Remember the current location for later `pop_mark`."""

    def pop_mark(self):
        """Jump back to the most recently pushed location."""

    def prefix_value(self, prefix):
        """Numeric value of a command prefix argument."""

    def show_occurrences(self, locations):
        """Display a list of `Location` results."""

    def show_doc(self, docs, altview=False):
        """Display documentation text."""

    def preview_changes(self, diffs):
        """Show a diff preview; truthy result means 'apply'."""

    # -- command registration ----------------------------------------------

    def local_command(self, name, callback, key=None, prefix=False):
        """Register a buffer-local command."""

    def global_command(self, name, callback, key=None, prefix=False):
        """Register a global command."""

    def add_hook(self, name, callback, hook):
        """Register `callback` for editor event `hook`."""

    # -- completion helpers ------------------------------------------------

    def _completion_text(self, proposal):
        """Plain-text form of a completion proposal."""
        return proposal.name

    def _completion_data(self, proposal):
        """Datum handed to the editor; defaults to the plain text."""
        return self._completion_text(proposal)
+
diff --git a/.vim/bundle/python-mode/pylibs/ropemode/filter.py b/.vim/bundle/python-mode/pylibs/ropemode/filter.py
@@ -0,0 +1,39 @@
+from rope.base import exceptions
+
+
def resources(project, rules):
    """Find python files in the `project` matching `rules`

    `rules` is a multi-line `str`; each line starts with either a '+'
    or '-'. Each '+' means include the file (or its children if it's
    a folder) that comes after it. '-' has the same meaning for
    exclusion.  Lines with any other prefix, and blank lines, are
    ignored, as are paths that do not resolve to a resource.  With no
    applicable rules every python file in the project is returned.

    Fixes: no longer shadows the builtin ``all``; uses a generator
    expression instead of ``filter`` + ``lambda``.
    """
    all_files = set(project.pycore.get_python_files())
    files = None
    for line in rules.splitlines():
        if not line.strip():
            continue
        first, path = line[0], line[1:]
        if first not in '+-':
            continue
        try:
            resource = project.get_resource(path.strip())
        except exceptions.ResourceNotFoundError:
            continue
        if resource.is_folder():
            # A folder rule matches every python file beneath it.
            matches = set(item for item in all_files if resource.contains(item))
        else:
            matches = set([resource])
        if first == '+':
            if files is None:
                files = set()
            files.update(matches)
        if first == '-':
            # A leading exclusion starts from the full file set.
            if files is None:
                files = set(all_files)
            files -= matches
    if files is None:
        return all_files
    return files
diff --git a/.vim/bundle/python-mode/pylibs/ropemode/interface.py b/.vim/bundle/python-mode/pylibs/ropemode/interface.py
@@ -0,0 +1,706 @@
+import os
+
+import rope.base.change
+from rope.base import libutils, utils, exceptions
+from rope.contrib import codeassist, generate, autoimport, findit
+
+from ropemode import refactor, decorators, dialog
+
+
class RopeMode(object):
    """Editor-independent front end for rope refactorings.

    Holds the open rope `Project`, wires every decorated method (see
    `decorators`) into the host editor through `env`, and exposes the
    commands/hooks that drive code assist, navigation and refactoring.
    `env` is an `Environment` subclass for the concrete editor.
    """

    def __init__(self, env):
        self.project = None
        self.old_content = None  # buffer text captured before a save
        self.env = env
        self._assist = None

        self._prepare_refactorings()
        self.autoimport = None

        # Register every method tagged by the decorators module with
        # the host editor, keyed by its declared kind.
        for attrname in dir(self):
            attr = getattr(self, attrname)
            if not callable(attr):
                continue
            kind = getattr(attr, 'kind', None)
            if kind == 'local':
                key = getattr(attr, 'local_key', None)
                prefix = getattr(attr, 'prefix', None)
                self.env.local_command(attrname, attr, key, prefix)
            if kind == 'global':
                key = getattr(attr, 'global_key', None)
                prefix = getattr(attr, 'prefix', None)
                self.env.global_command(attrname, attr, key, prefix)
            if kind == 'hook':
                hook = getattr(attr, 'hook', None)
                self.env.add_hook(attrname, attr, hook)

    def _prepare_refactorings(self):
        # Create one local command per Refactoring subclass found in
        # the refactor module; defaults are bound via keyword arguments
        # so each closure keeps its own refactoring class.
        for name in dir(refactor):
            if not name.startswith('_') and name != 'Refactoring':
                attr = getattr(refactor, name)
                if isinstance(attr, type) and \
                   issubclass(attr, refactor.Refactoring):
                    refname = self._refactoring_name(attr)

                    @decorators.local_command(attr.key, 'P', None, refname)
                    def do_refactor(prefix, self=self, refactoring=attr):
                        initial_asking = prefix is None
                        refactoring(self, self.env).show(initial_asking=initial_asking)
                    setattr(self, refname, do_refactor)

    @staticmethod
    def _refactoring_name(refactoring):
        return refactor.refactoring_name(refactoring)

    @decorators.rope_hook('before_save')
    def before_save_actions(self):
        # Snapshot the on-disk content so after_save_actions can report
        # the change to rope for incremental analysis.
        if self.project is not None:
            if not self._is_python_file(self.env.filename()):
                return
            resource = self._get_resource()
            if resource.exists():
                self.old_content = resource.read()
            else:
                self.old_content = ''

    @decorators.rope_hook('after_save')
    def after_save_actions(self):
        if self.project is not None and self.old_content is not None:
            libutils.report_change(self.project, self.env.filename(),
                                   self.old_content)
            self.old_content = None

    @decorators.rope_hook('exit')
    def exiting_actions(self):
        if self.project is not None:
            self.close_project()

    @decorators.global_command('o')
    def open_project(self, root=None):
        if not root:
            # NOTE(review): `get_cur_dir` is not part of the base
            # Environment interface above -- presumably provided by the
            # concrete editor subclass; confirm.
            if self.env.get('auto_project'):
                root = self.env.get_cur_dir()
            else:
                root = self.env.ask_directory('Rope project root folder: ')
        if self.project is not None:
            self.close_project()
        # NOTE(review): relies on `rope.base.project` being importable
        # via the `rope.base` package; only `rope.base.change` is
        # imported explicitly at the top of this module.
        address = rope.base.project._realpath(os.path.join(root,
                                              '.ropeproject'))
        if not os.path.exists(address) and not self.env.get('auto_project'):
            if not self.env.y_or_n('Project not exists in %s, create one?' % root):
                self.env.message("Project creation aborted")
                return
        progress = self.env.create_progress('Opening [%s] project' % root)
        self.project = rope.base.project.Project(root)
        if self.env.get('enable_autoimport'):
            underlined = self.env.get('autoimport_underlineds')
            self.autoimport = autoimport.AutoImport(self.project,
                                                    underlined=underlined)
        progress.done()

    @decorators.global_command('k')
    def close_project(self):
        if self.project is not None:
            progress = self.env.create_progress('Closing [%s] project' %
                                                self.project.address)
            self.project.close()
            self.project = None
            progress.done()

    @decorators.global_command()
    def write_project(self):
        # Flush rope's project data (object DB, history) to disk.
        if self.project is not None:
            progress = self.env.create_progress(
                'Writing [%s] project data to disk' % self.project.address)
            self.project.sync()
            progress.done()

    @decorators.global_command('u')
    def undo(self):
        self._check_project()
        change = self.project.history.tobe_undone
        if change is None:
            self.env.message('Nothing to undo!')
            return
        if self.env.y_or_n('Undo [%s]? ' % str(change)):
            def undo(handle):
                for changes in self.project.history.undo(task_handle=handle):
                    self._reload_buffers(changes, undo=True)
            refactor.runtask(self.env, undo, 'Undo refactoring',
                             interrupts=False)

    @decorators.global_command('r')
    def redo(self):
        self._check_project()
        change = self.project.history.tobe_redone
        if change is None:
            self.env.message('Nothing to redo!')
            return
        if self.env.y_or_n('Redo [%s]? ' % str(change)):
            def redo(handle):
                for changes in self.project.history.redo(task_handle=handle):
                    self._reload_buffers(changes)
            refactor.runtask(self.env, redo, 'Redo refactoring',
                             interrupts=False)

    @decorators.local_command('a g', shortcut='C-c g')
    def goto_definition(self):
        # Push the current position so the user can return via pop_mark.
        definition = self._base_definition_location()
        if definition:
            self.env.push_mark()
            self._goto_location(definition[0], definition[1])
        else:
            self.env.message('Cannot find the definition!')

    @decorators.local_command()
    def pop_mark(self):
        self.env.pop_mark()

    @decorators.local_command()
    def definition_location(self):
        """Return ``(path, lineno)`` of the definition, or None."""
        definition = self._base_definition_location()
        if definition:
            return str(definition[0].real_path), definition[1]
        return None

    def _base_definition_location(self):
        self._check_project()
        resource, offset = self._get_location()
        maxfixes = self.env.get('codeassist_maxfixes')
        try:
            definition = codeassist.get_definition_location(
                self.project, self._get_text(), offset, resource, maxfixes)
        except exceptions.BadIdentifierError:
            return None
        if tuple(definition) != (None, None):
            return definition
        return None

    @decorators.local_command('a d', 'P', 'C-c d')
    def show_doc(self, prefix):
        self._check_project()
        self._base_show_doc(prefix, self._base_get_doc(codeassist.get_doc))

    @decorators.local_command()
    def get_calltip(self):
        self._check_project()
        def _get_doc(project, text, offset, *args, **kwds):
            # Back up to just before the opening paren so rope resolves
            # the called name rather than the argument under the cursor.
            try:
                offset = text.rindex('(', 0, offset) - 1
            except ValueError:
                return None
            return codeassist.get_calltip(project, text, offset, *args, **kwds)
        return self._base_get_doc(_get_doc)

    @decorators.local_command('a c', 'P')
    def show_calltip(self, prefix):
        self._base_show_doc(prefix, self.get_calltip())

    def _base_show_doc(self, prefix, docs):
        if docs:
            self.env.show_doc(docs, prefix)
        else:
            self.env.message('No docs available!')

    @decorators.local_command()
    def get_doc(self):
        self._check_project()
        return self._base_get_doc(codeassist.get_doc)

    def _base_get_doc(self, get_doc):
        maxfixes = self.env.get('codeassist_maxfixes')
        text = self._get_text()
        offset = self.env.get_offset()
        try:
            return get_doc(self.project, text, offset,
                           self.resource, maxfixes)
        except exceptions.BadIdentifierError:
            return None

    def _get_text(self):
        # Prefer the saved file contents when the buffer is unmodified.
        resource = self.resource
        if not self.env.is_modified() and resource is not None:
            return resource.read()
        return self.env.get_text()

    def _base_findit(self, do_find, optionals, get_kwds):
        # Shared driver for find_occurrences / find_implementations.
        self._check_project()
        self._save_buffers()
        resource, offset = self._get_location()

        action, values = dialog.show_dialog(
            self._askdata, ['search', 'cancel'], optionals=optionals)
        if action == 'search':
            kwds = get_kwds(values)
            def calculate(handle):
                resources = refactor._resources(self.project,
                                                values.get('resources'))
                return do_find(self.project, resource, offset,
                               resources=resources, task_handle=handle, **kwds)
            result = refactor.runtask(self.env, calculate, 'Find Occurrences')
            locations = [Location(location) for location in result]
            self.env.show_occurrences(locations)

    @decorators.local_command('a f', shortcut='C-c f')
    def find_occurrences(self):
        optionals = {
            'unsure': dialog.Data('Find uncertain occurrences: ',
                                  default='no', values=['yes', 'no']),
            'resources': dialog.Data('Files to search: '),
            'in_hierarchy': dialog.Data(
                'Rename methods in class hierarchy: ',
                default='no', values=['yes', 'no'])}
        def get_kwds(values):
            return {'unsure': values.get('unsure') == 'yes',
                    'in_hierarchy': values.get('in_hierarchy') == 'yes'}
        self._base_findit(findit.find_occurrences, optionals, get_kwds)

    @decorators.local_command('a i')
    def find_implementations(self):
        optionals = {'resources': dialog.Data('Files to search: ')}
        def get_kwds(values):
            return {}
        self._base_findit(findit.find_implementations, optionals, get_kwds)

    @decorators.local_command('a /', 'P', 'M-/')
    def code_assist(self, prefix):
        _CodeAssist(self, self.env).code_assist(prefix)

    @decorators.local_command('a ?', 'P', 'M-?')
    def lucky_assist(self, prefix):
        _CodeAssist(self, self.env).lucky_assist(prefix)

    @decorators.local_command(prefix='P')
    def omni_complete(self, prefix):
        # Uses the _CodeAssist instance prepared by _find_start.
        self._assist.omni_complete(prefix)

    def _find_start(self):
        # Vim omni-completion protocol: first call reports the column
        # where completion starts, a later call supplies the matches.
        self._assist = _CodeAssist(self, self.env)
        start = (self.env.cursor[1] - self.env.get_offset()
                 + self._assist.starting_offset)
        self.env._command('let g:pymode_offset = %s' % start)

    @decorators.local_command('a')
    def auto_import(self):
        _CodeAssist(self, self.env).auto_import()

    @decorators.local_command()
    def completions(self):
        return _CodeAssist(self, self.env).completions()

    @decorators.local_command()
    def extended_completions(self):
        return _CodeAssist(self, self.env).extended_completions()

    def _check_autoimport(self):
        """Return True when autoimport is available, else warn user."""
        self._check_project()
        if self.autoimport is None:
            self.env.message('autoimport is disabled; '
                             'see `enable_autoimport\' variable')
            return False
        return True

    @decorators.global_command('g')
    def generate_autoimport_cache(self):
        if not self._check_autoimport():
            return
        modules = self.env.get('autoimport_modules')
        # Editor values may be wrapped objects; unwrap non-strings.
        # (`basestring` -- this module targets Python 2.)
        modules = [ m if isinstance(m, basestring) else m.value() for m in modules ]

        def generate(handle):
            self.autoimport.generate_cache(task_handle=handle)
            self.autoimport.generate_modules_cache(modules, task_handle=handle)

        refactor.runtask(self.env, generate, 'Generate autoimport cache')
        self.write_project()

    @decorators.global_command('f', 'P')
    def find_file(self, prefix):
        file = self._base_find_file(prefix)
        if file is not None:
            self.env.find_file(file.real_path)

    @decorators.global_command('4 f', 'P')
    def find_file_other_window(self, prefix):
        file = self._base_find_file(prefix)
        if file is not None:
            self.env.find_file(file.real_path, other=True)

    def _base_find_file(self, prefix):
        # With a prefix argument only python files are offered.
        self._check_project()
        if prefix:
            files = self.project.pycore.get_python_files()
        else:
            files = self.project.get_files()
        return self._ask_file(files)

    def _ask_file(self, files):
        # Present paths innermost-component-first ('name<dir<...') so
        # typing the file name narrows the choice quickly.
        names = []
        for file in files:
            names.append('<'.join(reversed(file.path.split('/'))))
        result = self.env.ask_values('Rope Find File: ', names)
        if result is not None:
            path = '/'.join(reversed(result.split('<')))
            file = self.project.get_file(path)
            return file
        self.env.message('No file selected')

    @decorators.local_command('a j')
    def jump_to_global(self):
        if not self._check_autoimport():
            return
        all_names = list(self.autoimport.get_all_names())
        name = self.env.ask_values('Global name: ', all_names)
        result = dict(self.autoimport.get_name_locations(name))
        if len(result) == 1:
            resource = list(result.keys())[0]
        else:
            resource = self._ask_file(result.keys())
        if resource:
            self._goto_location(resource, result[resource])

    @decorators.global_command('c')
    def project_config(self):
        self._check_project()
        if self.project.ropefolder is not None:
            config = self.project.ropefolder.get_child('config.py')
            self.env.find_file(config.real_path)
        else:
            self.env.message('No rope project folder found')

    @decorators.global_command('n m')
    def create_module(self):
        def callback(sourcefolder, name):
            return generate.create_module(self.project, name, sourcefolder)
        self._create('module', callback)

    @decorators.global_command('n p')
    def create_package(self):
        def callback(sourcefolder, name):
            folder = generate.create_package(self.project, name, sourcefolder)
            return folder.get_child('__init__.py')
        self._create('package', callback)

    @decorators.global_command('n f')
    def create_file(self):
        def callback(parent, name):
            return parent.create_file(name)
        self._create('file', callback, 'parent')

    @decorators.global_command('n d')
    def create_directory(self):
        def callback(parent, name):
            parent.create_folder(name)
        self._create('directory', callback, 'parent')

    @decorators.local_command()
    def analyze_module(self):
        """Perform static object analysis on this module"""
        self._check_project()
        self.project.pycore.analyze_module(self.resource)

    @decorators.global_command()
    def analyze_modules(self):
        """Perform static object analysis on all project modules"""
        self._check_project()
        def _analyze_modules(handle):
            libutils.analyze_modules(self.project, task_handle=handle)
        refactor.runtask(self.env, _analyze_modules, 'Analyze project modules')

    @decorators.local_command()
    def run_module(self):
        """Run and perform dynamic object analysis on this module"""
        self._check_project()
        process = self.project.pycore.run_module(self.resource)
        try:
            process.wait_process()
        finally:
            process.kill_process()

    def _create(self, name, callback, parentname='source'):
        # Shared dialog driver for create_module/package/file/directory.
        self._check_project()
        confs = {'name': dialog.Data(name.title() + ' name: ')}
        parentname = parentname + 'folder'
        optionals = {parentname: dialog.Data(
            parentname.title() + ' Folder: ',
            default=self.project.address, kind='directory')}
        action, values = dialog.show_dialog(
            self._askdata, ['perform', 'cancel'], confs, optionals)
        if action == 'perform':
            parent = libutils.path_to_resource(
                self.project, values.get(parentname, self.project.address))
            resource = callback(parent, values['name'])
            if resource:
                self.env.find_file(resource.real_path)

    def _goto_location(self, resource, lineno):
        if resource:
            self.env.find_file(str(resource.real_path),
                               other=self.env.get('goto_def_newwin'))
        if lineno:
            self.env.goto_line(lineno)

    def _get_location(self):
        offset = self.env.get_offset()
        return self.resource, offset

    def _get_resource(self, filename=None):
        # Map an editor filename onto a rope resource; returns None
        # when no file or no project is available.
        if filename is None:
            filename = self.env.filename()
        if filename is None or self.project is None:
            return
        resource = libutils.path_to_resource(self.project, filename, 'file')
        return resource

    @property
    def resource(self):
        """the current resource

        Returns `None` when file does not exist.
        """
        resource = self._get_resource()
        if resource and resource.exists():
            return resource

    @decorators.global_command()
    def get_project_root(self):
        if self.project is not None:
            return self.project.root.real_path
        else:
            return None

    def _check_project(self):
        # Lazily open (or guess) a project, and revalidate an open one
        # so rope notices external file changes.
        if self.project is None:
            if self.env.get('guess_project'):
                self.open_project(self._guess_project())
            else:
                self.open_project()
        else:
            self.project.validate(self.project.root)

    def _guess_project(self):
        # Walk up from the current file looking for a '.ropeproject'
        # folder; returns None when the filesystem root is reached.
        cwd = self.env.filename()
        if cwd is not None:
            while True:
                ropefolder = os.path.join(cwd, '.ropeproject')
                if os.path.exists(ropefolder) and os.path.isdir(ropefolder):
                    return cwd
                newcwd = os.path.dirname(cwd)
                if newcwd == cwd:
                    break
                cwd = newcwd

    def _reload_buffers(self, changes, undo=False):
        self._reload_buffers_for_changes(
            changes.get_changed_resources(),
            self._get_moved_resources(changes, undo))

    def _reload_buffers_for_changes(self, changed, moved={}):
        filenames = [resource.real_path for resource in changed]
        moved = dict([(resource.real_path, moved[resource].real_path)
                      for resource in moved])
        self.env.reload_files(filenames, moved)

    def _get_moved_resources(self, changes, undo=False):
        # Collect resource renames out of a (possibly nested) ChangeSet;
        # for an undo the mapping direction is reversed.
        result = {}
        if isinstance(changes, rope.base.change.ChangeSet):
            for change in changes.changes:
                result.update(self._get_moved_resources(change))
        if isinstance(changes, rope.base.change.MoveResource):
            result[changes.resource] = changes.new_resource
        if undo:
            return dict([(value, key) for key, value in result.items()])
        return result

    def _save_buffers(self, only_current=False):
        # Save python buffers so rope refactors the on-disk content.
        if only_current:
            filenames = [self.env.filename()]
        else:
            filenames = self.env.filenames()
        pythons = []
        for filename in filenames:
            if self._is_python_file(filename):
                pythons.append(filename)
        self.env.save_files(pythons)

    def _is_python_file(self, path):
        resource = self._get_resource(path)
        return (resource is not None and
                resource.project == self.project and
                self.project.pycore.is_python_file(resource))

    def _askdata(self, data, starting=None):
        # Route a dialog.Data question to the right Environment method
        # based on whether it has fixed values or asks for a directory.
        ask_func = self.env.ask
        ask_args = {'prompt': data.prompt, 'starting': starting,
                    'default': data.default}
        if data.values:
            ask_func = self.env.ask_values
            ask_args['values'] = data.values
        elif data.kind == 'directory':
            ask_func = self.env.ask_directory
        return ask_func(**ask_args)
+
+
class Location(object):
    """Editor-friendly view of a rope `findit` occurrence."""

    def __init__(self, location):
        self.location = location
        self.filename = location.resource.real_path
        self.offset = location.offset
        # Uncertain matches are flagged with a '?' in listings.
        self.note = '?' if location.unsure else ''

    @property
    def lineno(self):
        """Line of the match; derived from the offset when absent."""
        try:
            return self.location.lineno
        except AttributeError:
            text = self.location.resource.read()
            return text.count('\n', 0, self.offset) + 1
+
+
class _CodeAssist(object):
    """One completion session at the current cursor position.

    Wraps rope's `codeassist` for the buffer text and offset supplied
    by `env`; `interface` is the owning `RopeMode`.  The cached
    properties below are computed once per instance, so an instance is
    only valid for a single cursor position.
    """

    def __init__(self, interface, env):
        self.interface = interface
        self.env = env

    def code_assist(self, prefix):
        # With a prefix argument, first insert the longest common prefix
        # of the top `prefix` proposals (0 means all of them).
        proposals = self._calculate_proposals()
        if prefix is not None:
            arg = self.env.prefix_value(prefix)
            if arg == 0:
                arg = len(proposals)
            common_start = self._calculate_prefix(proposals[:arg])
            self.env.insert(common_start[self.offset - self.starting_offset:])
            self._starting = common_start
            self._offset = self.starting_offset + len(common_start)
        prompt = 'Completion for %s: ' % self.expression
        # NOTE(review): relies on Python 2 `map` returning a list.
        proposals = map(self.env._completion_data, proposals)
        result = self.env.ask_completion(prompt, proposals, self.starting)
        if result is not None:
            self._apply_assist(result)

    def omni_complete(self, prefix):
        # NOTE(review): `_update_proposals`/`_command` are not part of
        # the base Environment -- presumably Vim-specific extensions;
        # confirm against the editor adapter.
        proposals = self._calculate_proposals()
        proposals = self.env._update_proposals(proposals)
        command = u'let g:pythoncomplete_completions = [%s]' % proposals
        self.env._command(command, encode=True)

    def lucky_assist(self, prefix):
        # Apply the `selected`-th proposal directly (default: first).
        proposals = self._calculate_proposals()
        selected = 0
        if prefix is not None:
            selected = self.env.prefix_value(prefix)
        if 0 <= selected < len(proposals):
            result = self.env._completion_text(proposals[selected])
        else:
            self.env.message('Not enough proposals!')
            return
        self._apply_assist(result)

    def auto_import(self):
        if not self.interface._check_autoimport():
            return

        # Build the cache on first use if the user enabled generation.
        if not self.autoimport.names and self.env.get('autoimport_generate'):
            self.interface.generate_autoimport_cache()

        name = self.env.current_word()
        modules = self.autoimport.get_modules(name)
        if modules:
            if len(modules) == 1:
                module = modules[0]
            else:
                module = self.env.ask_values(
                    'Which module to import: ', modules)
            self._insert_import(name, module)
        else:
            self.env.message('Global name %s not found!' % name)

    def completions(self):
        # Return completion texts with the already-typed part stripped.
        proposals = self._calculate_proposals()
        prefix = self.offset - self.starting_offset
        return [self.env._completion_text(proposal)[prefix:]
                for proposal in proposals]

    def extended_completions(self):
        # Like `completions` but with documentation and proposal type.
        proposals = self._calculate_proposals()
        prefix = self.offset - self.starting_offset
        return [[proposal.name[prefix:], proposal.get_doc(),
                 proposal.type] for proposal in proposals]

    def _apply_assist(self, assist):
        # Autoimport proposals are encoded as 'name : module' (see
        # _calculate_proposals); they also insert an import line.
        if ' : ' in assist:
            name, module = assist.rsplit(' : ', 1)
            self.env.delete(self.starting_offset + 1, self.offset + 1)
            self.env.insert(name)
            self._insert_import(name, module)
        else:
            self.env.delete(self.starting_offset + 1, self.offset + 1)
            self.env.insert(assist)

    def _calculate_proposals(self):
        self.interface._check_project()
        resource = self.interface.resource
        maxfixes = self.env.get('codeassist_maxfixes')
        proposals = codeassist.code_assist(
            self.interface.project, self.source, self.offset,
            resource, maxfixes=maxfixes)
        if self.env.get('sorted_completions', True):
            proposals = codeassist.sorted_proposals(proposals)
        # Append autoimport suggestions for plain (undotted) names.
        if self.autoimport is not None:
            if self.starting.strip() and '.' not in self.expression:
                import_assists = self.autoimport.import_assist(self.starting)
                for assist in import_assists:
                    p = codeassist.CompletionProposal(' : '.join(assist),
                                                      'autoimport')
                    proposals.append(p)
        return proposals

    def _insert_import(self, name, module):
        lineno = self.autoimport.find_insertion_line(self.source)
        line = 'from %s import %s' % (module, name)
        self.env.insert_line(line, lineno)

    def _calculate_prefix(self, proposals):
        # Longest common prefix of the proposal texts, stopping at the
        # first space (spaces separate the autoimport marker).
        if not proposals:
            return ''
        prefix = self.env._completion_text(proposals[0])
        for proposal in proposals:
            common = 0
            name = self.env._completion_text(proposal)
            for c1, c2 in zip(prefix, name):
                if c1 != c2 or ' ' in (c1, c2):
                    break
                common += 1
            prefix = prefix[:common]
        return prefix

    @property
    @utils.cacheit
    def offset(self):
        # Cursor offset, cached for the session.
        return self.env.get_offset()

    @property
    @utils.cacheit
    def source(self):
        # Buffer (or saved file) text, cached for the session.
        return self.interface._get_text()

    @property
    @utils.cacheit
    def starting_offset(self):
        # Offset where the identifier being completed starts.
        return codeassist.starting_offset(self.source, self.offset)

    @property
    @utils.cacheit
    def starting(self):
        # The partial identifier already typed by the user.
        return self.source[self.starting_offset:self.offset]

    @property
    @utils.cacheit
    def expression(self):
        # The full (possibly dotted) expression under completion.
        return codeassist.starting_expression(self.source, self.offset)

    @property
    def autoimport(self):
        return self.interface.autoimport
diff --git a/.vim/bundle/python-mode/pylibs/ropemode/refactor.py b/.vim/bundle/python-mode/pylibs/ropemode/refactor.py
@@ -0,0 +1,494 @@
+import re
+
+import rope.base.change
+import rope.contrib.generate
+import rope.refactor.change_signature
+import rope.refactor.extract
+import rope.refactor.inline
+import rope.refactor.introduce_factory
+import rope.refactor.method_object
+import rope.refactor.move
+import rope.refactor.rename
+import rope.refactor.restructure
+import rope.refactor.usefunction
+from rope.base import taskhandle
+
+from ropemode import dialog, filter as file_filter
+
+
class Refactoring(object):
    """Base class for all ropemode refactorings.

    Subclasses set ``key`` (keybinding suffix), ``confs`` (required dialog
    fields), ``optionals`` (optional dialog fields) and ``saveall``
    (whether all buffers are saved first), and override the
    ``_create_refactoring`` / ``_calculate_changes`` hooks.
    """

    key = None          # keybinding suffix registered by the interface
    confs = {}          # required dialog.Data fields
    optionals = {}      # optional dialog.Data fields
    saveall = True      # save all buffers (not just current) before running

    def __init__(self, interface, env):
        self.interface = interface
        self.env = env

    def show(self, initial_asking=True):
        """Drive the full refactoring dialog: ask, calculate, apply.

        Shows the config dialog, then calculates changes as a
        progress-reporting task and either performs them, previews the
        diff first, or cancels.
        """
        self.interface._check_project()
        self.interface._save_buffers(only_current=not self.saveall)
        self._create_refactoring()
        action, result = dialog.show_dialog(
            self.interface._askdata, ['perform', 'preview', 'cancel'],
            self._get_confs(), self._get_optionals(),
            initial_asking=initial_asking)
        if action == 'cancel':
            self.env.message('Cancelled!')
            return

        def calculate(handle):
            return self._calculate_changes(result, handle)

        name = 'Calculating %s changes' % self.name
        changes = runtask(self.env, calculate, name=name)
        if action == 'perform':
            self._perform(changes)
        if action == 'preview':
            if changes is not None:
                diffs = changes.get_description()
                if self.env.preview_changes(diffs):
                    self._perform(changes)
                else:
                    self.env.message('Thrown away!')
            else:
                self.env.message('No changes!')

    @property
    def project(self):
        return self.interface.project

    @property
    def resource(self):
        # Resource (file) the refactoring operates on.
        return self.interface._get_resource()

    @property
    def offset(self):
        # Cursor offset; subclasses may override with None to act on the
        # whole module instead of the name under the cursor.
        return self.env.get_offset()

    @property
    def region(self):
        # (start, end) offsets of the current selection.
        return self.env.get_region()

    @property
    def name(self):
        # Human-readable snake_case name derived from the class name.
        return refactoring_name(self.__class__)

    def _calculate_changes(self, option_values, task_handle):
        """Hook: return a rope change set for the given dialog values."""
        pass

    def _create_refactoring(self):
        """Hook: construct the underlying rope refactoring object."""
        pass

    def _done(self):
        """Hook: called after changes were performed."""
        pass

    def _perform(self, changes):
        if changes is None:
            self.env.message('No changes!')
            return

        def perform(handle, self=self, changes=changes):
            self.project.do(changes, task_handle=handle)
            self.interface._reload_buffers(changes)
            self._done()

        # interrupts=False: applying changes must not be aborted halfway.
        runtask(self.env, perform, 'Making %s changes' % self.name,
                interrupts=False)
        self.env.message(str(changes.description) + ' finished')

    def _get_confs(self):
        return self.confs

    def _get_optionals(self):
        return self.optionals

    @property
    def resources_option(self):
        # Lets the user restrict which files the refactoring touches.
        return dialog.Data('Files to apply this refactoring on: ',
                           decode=self._decode_resources)

    def _decode_resources(self, value):
        return _resources(self.project, value)
+
+
class Rename(Refactoring):
    """Rename the name at the cursor across the project."""

    key = 'r'
    saveall = True

    def _create_refactoring(self):
        self.renamer = rope.refactor.rename.Rename(
            self.project, self.resource, self.offset)

    def _calculate_changes(self, values, task_handle):
        return self.renamer.get_changes(task_handle=task_handle, **values)

    def _get_optionals(self):
        optionals = {}
        optionals['docs'] = dialog.Boolean('Search comments and docs: ', True)
        if self.renamer.is_method():
            # Only meaningful for methods.
            optionals['in_hierarchy'] = dialog.Boolean('Rename methods in '
                                                       'class hierarchy: ')
        optionals['resources'] = self.resources_option
        optionals['unsure'] = dialog.Data('Unsure occurrences: ',
                                          decode=self._decode_unsure,
                                          values=['ignore', 'match'],
                                          default='ignore')
        return optionals

    def _get_confs(self):
        current = str(self.renamer.get_old_name())
        return {'new_name': dialog.Data('New name: ', default=current)}

    def _decode_unsure(self, value):
        # Returns a predicate rope calls per unsure occurrence: "match"
        # treats them all as matches, anything else ignores them.
        matches = value == 'match'
        return lambda occurrence: matches
+
+
class RenameCurrentModule(Rename):
    """Rename the module open in the current buffer."""
    key = '1 r'
    # offset None makes rope act on the module itself rather than the
    # name under the cursor (see Refactoring.offset).
    offset = None
+
+
class Restructure(Refactoring):
    """Apply a rope restructuring given a pattern and a goal."""

    key = 'x'
    confs = {'pattern': dialog.Data('Restructuring pattern: '),
             'goal': dialog.Data('Restructuring goal: ')}

    def _calculate_changes(self, values, task_handle):
        restructuring = rope.refactor.restructure.Restructure(
            self.project, values['pattern'], values['goal'],
            args=values['args'], imports=values['imports'])
        return restructuring.get_changes(resources=values['resources'],
                                         task_handle=task_handle)

    def _get_optionals(self):
        return {
            'args': dialog.Data('Arguments: ', decode=self._decode_args),
            'imports': dialog.Data('Imports: ', decode=self._decode_imports),
            'resources': self.resources_option}

    def _decode_args(self, value):
        # Parse "name: expression" lines into a dict; None when empty.
        if not value:
            return None
        parsed = {}
        for entry in value.split('\n'):
            if entry:
                name, expression = entry.split(':', 1)
                parsed[name.strip()] = expression.strip()
        return parsed

    def _decode_imports(self, value):
        # One import statement per line; None when empty.
        if not value:
            return None
        return [entry.strip() for entry in value.split('\n')]
+
+
class UseFunction(Refactoring):
    """Replace code that matches the selected function with calls to it."""

    key = 'u'

    def _create_refactoring(self):
        self.user = rope.refactor.usefunction.UseFunction(
            self.project, self.resource, self.offset)

    def _calculate_changes(self, values, task_handle):
        changes = self.user.get_changes(task_handle=task_handle, **values)
        return changes

    def _get_optionals(self):
        return {'resources': self.resources_option}
+
+
class Move(Refactoring):
    """Move the definition at the cursor to another destination.

    rope picks the concrete mover (global, module or method) from what
    the offset points at; the destination prompt and the way changes are
    calculated follow from the mover's type.
    """

    key = 'v'

    def _create_refactoring(self):
        self.mover = rope.refactor.move.create_move(self.project,
                                                    self.resource,
                                                    self.offset)

    def _calculate_changes(self, values, task_handle):
        destination = values['destination']
        resources = values.get('resources', None)
        if isinstance(self.mover, rope.refactor.move.MoveGlobal):
            return self._move_global(destination, resources, task_handle)
        if isinstance(self.mover, rope.refactor.move.MoveModule):
            return self._move_module(destination, resources, task_handle)
        if isinstance(self.mover, rope.refactor.move.MoveMethod):
            return self._move_method(destination, resources, task_handle)

    def _move_to_module(self, dest, resources, handle):
        # Shared by global and module moves: both resolve the textual
        # destination to a module/package resource first.
        destination = self.project.pycore.find_module(dest)
        return self.mover.get_changes(
            destination, resources=resources, task_handle=handle)

    # _move_global and _move_module had identical, duplicated bodies;
    # both now delegate to the shared helper above.
    def _move_global(self, dest, resources, handle):
        return self._move_to_module(dest, resources, handle)

    def _move_module(self, dest, resources, handle):
        return self._move_to_module(dest, resources, handle)

    def _move_method(self, dest, resources, handle):
        # Destination is the attribute on self to move the method to.
        return self.mover.get_changes(
            dest, self.mover.get_method_name(),
            resources=resources, task_handle=handle)

    def _get_confs(self):
        # The mover types are mutually exclusive, so elif is safe and
        # avoids the extra isinstance checks of the original if-chain.
        if isinstance(self.mover, rope.refactor.move.MoveGlobal):
            prompt = 'Destination module: '
        elif isinstance(self.mover, rope.refactor.move.MoveModule):
            prompt = 'Destination package: '
        elif isinstance(self.mover, rope.refactor.move.MoveMethod):
            prompt = 'Destination attribute: '
        return {'destination': dialog.Data(prompt)}

    def _get_optionals(self):
        return {'resources': self.resources_option}
+
+
class MoveCurrentModule(Move):
    """Move the module open in the current buffer."""
    key = '1 v'
    # offset None makes rope act on the module itself rather than the
    # name under the cursor (see Refactoring.offset).
    offset = None
+
+
class ModuleToPackage(Refactoring):
    """Convert the current module into a package."""

    key = '1 p'
    saveall = False

    def _create_refactoring(self):
        factory = rope.refactor.ModuleToPackage
        self.packager = factory(self.project, self.resource)

    def _calculate_changes(self, values, task_handle):
        # This refactoring takes no options and reports no progress.
        return self.packager.get_changes()
+
+
class Inline(Refactoring):
    """Inline the variable, method or parameter at the cursor."""

    key = 'i'

    def _create_refactoring(self):
        self.inliner = rope.refactor.inline.create_inline(
            self.project, self.resource, self.offset)

    def _calculate_changes(self, values, task_handle):
        return self.inliner.get_changes(task_handle=task_handle, **values)

    def _get_optionals(self):
        optionals = {'resources': self.resources_option}
        if self.inliner.get_kind() == 'parameter':
            optionals['in_hierarchy'] = dialog.Boolean(
                'Apply on all matching methods in class hierarchy: ', False)
        else:
            optionals['remove'] = dialog.Boolean('Remove the definition: ',
                                                 True)
            optionals['only_current'] = dialog.Boolean('Inline this '
                                                       'occurrence only: ')
        return optionals
+
+
class _Extract(Refactoring):
    """Common machinery for the extract-variable/extract-method pair.

    Subclasses set ``kind`` (for the prompt text) and ``constructor``
    (the rope extract class to use).
    """

    saveall = False
    optionals = {'similar': dialog.Boolean('Extract similar pieces: ', True),
                 'global_': dialog.Boolean('Make global: ')}
    kind = None
    constructor = rope.refactor.extract.ExtractVariable

    def __init__(self, *args):
        super(_Extract, self).__init__(*args)
        self.extractor = None

    def _create_refactoring(self):
        start, end = self.region
        self.extractor = self.constructor(self.project,
                                          self.resource, start, end)

    def _calculate_changes(self, values, task_handle):
        return self.extractor.get_changes(values['name'],
                                          similar=values.get('similar'),
                                          global_=values.get('global_'))

    def _get_confs(self):
        prompt = 'Extracted %s name: ' % self.kind
        return {'name': dialog.Data(prompt)}
+
+
class ExtractVariable(_Extract):
    """Extract the selected expression into a variable."""
    key = 'l'
    kind = 'variable'
    constructor = rope.refactor.extract.ExtractVariable


class ExtractMethod(_Extract):
    """Extract the selected code into a method/function."""
    key = 'm'
    kind = 'method'
    constructor = rope.refactor.extract.ExtractMethod
+
+
class OrganizeImports(Refactoring):
    """Organize the import block of the current module."""

    key = 'o'
    saveall = False

    def _create_refactoring(self):
        self.organizer = rope.refactor.ImportOrganizer(self.project)

    def _calculate_changes(self, values, task_handle):
        changes = self.organizer.organize_imports(self.resource)
        return changes
+
+
class MethodObject(Refactoring):
    """Replace the function at the cursor with a callable class
    (the "method object" refactoring)."""

    saveall = False
    confs = {'classname': dialog.Data('New class name: ',
                                      default='_ExtractedClass')}

    def _create_refactoring(self):
        self.objecter = rope.refactor.method_object.MethodObject(
            self.project, self.resource, self.offset)

    def _calculate_changes(self, values, task_handle):
        return self.objecter.get_changes(values.get('classname'))
+
+
class IntroduceFactory(Refactoring):
    """Introduce a factory function for the selected class."""

    saveall = True
    key = 'f'

    def _create_refactoring(self):
        self.factory = rope.refactor.introduce_factory.IntroduceFactory(
            self.project, self.resource, self.offset)

    def _calculate_changes(self, values, task_handle):
        return self.factory.get_changes(task_handle=task_handle, **values)

    def _get_confs(self):
        # Suggest a name derived from the class being wrapped.
        suggestion = 'create_%s' % self.factory.old_name.lower()
        return {'factory_name': dialog.Data('Factory name: ', suggestion)}

    def _get_optionals(self):
        return {'global_factory': dialog.Boolean('Make global: ', True),
                'resources': self.resources_option}
+
+
class ChangeSignature(Refactoring):
    """Change the signature of the function/method at the cursor.

    The user edits the textual signature; the diff between the old and
    new argument lists is translated into rope's ArgumentRemover /
    ArgumentAdder / ArgumentReorderer changers.
    """

    saveall = True
    key = 's'

    def _create_refactoring(self):
        self.changer = rope.refactor.change_signature.ChangeSignature(
            self.project, self.resource, self.offset)

    def _calculate_changes(self, values, task_handle):
        signature = values.get('signature')
        # Strip whitespace and parentheses, leaving the comma-separated
        # argument names of the new signature.
        args = re.sub(r'[\s\(\)]+', '', signature).split(',')
        olds = [arg[0] for arg in self._get_args()]

        changers = []
        # Remove every old argument that is absent from the new list.
        for arg in list(olds):
            if arg in args:
                continue
            changers.append(rope.refactor.change_signature.
                            ArgumentRemover(olds.index(arg)))
            olds.remove(arg)

        # Add new arguments, then reorder everything to match the new
        # signature's order.
        order = []
        for index, arg in enumerate(args):
            if arg not in olds:
                changers.append(rope.refactor.change_signature.
                                ArgumentAdder(index, arg))
                olds.insert(index, arg)
            order.append(olds.index(arg))
        changers.append(rope.refactor.change_signature.
                        ArgumentReorderer(order, autodef='None'))

        # 'signature' is consumed here; remaining values (resources,
        # in_hierarchy) are forwarded to rope.
        del values['signature']
        return self.changer.get_changes(changers, task_handle=task_handle,
                                        **values)

    def _get_args(self):
        # Some rope versions expose get_args(); otherwise derive the
        # (name, default) pairs from the definition info.
        if hasattr(self.changer, 'get_args'):
            return self.changer.get_args()
        return self.changer.get_definition_info().args_with_defaults

    def _get_confs(self):
        # Present the current signature as the editable default.
        args = []
        for arg, default in self._get_args():
            args.append(arg)
        signature = '(' + ', '.join(args) + ')'
        return {'signature': dialog.Data('Change the signature: ',
                                         default=signature)}

    def _get_optionals(self):
        opts = {'resources': self.resources_option}
        if self.changer.is_method():
            opts['in_hierarchy'] = dialog.Boolean('Rename methods in '
                                                  'class hierarchy: ')
        return opts
+
+
class _GenerateElement(Refactoring):
    """Base for the generate-* refactorings; the element kind comes from
    the subclass name (e.g. GenerateClass -> "class")."""

    def _create_refactoring(self):
        # self.name is e.g. "generate_function"; its last underscore
        # component selects the rope generator.
        kind = self.name.rsplit('_', 1)[-1]
        self.generator = rope.contrib.generate.create_generate(
            kind, self.project, self.resource, self.offset)

    def _calculate_changes(self, values, task_handle):
        return self.generator.get_changes()

    def _done(self):
        # Jump to where the new element was inserted.
        resource, lineno = self.generator.get_location()
        self.interface._goto_location(resource, lineno)
+
+
class GenerateVariable(_GenerateElement):
    """Generate a variable at the cursor (see _GenerateElement)."""
    key = 'n v'


class GenerateFunction(_GenerateElement):
    """Generate a function at the cursor (see _GenerateElement)."""
    key = 'n f'


class GenerateClass(_GenerateElement):
    """Generate a class at the cursor (see _GenerateElement)."""
    key = 'n c'


class GenerateModule(_GenerateElement):
    """Generate a module (see _GenerateElement)."""
    key = 'n m'


class GeneratePackage(_GenerateElement):
    """Generate a package (see _GenerateElement)."""
    key = 'n p'
+
+
def refactoring_name(refactoring):
    """Return the snake_case command name of a refactoring class.

    E.g. ``ExtractMethod`` -> ``"extract_method"``.
    """
    classname = refactoring.__name__
    pieces = []
    for index, char in enumerate(classname):
        # Underscore before every uppercase letter except at position 0.
        if index and char.isupper():
            pieces.append('_')
        pieces.append(char.lower())
    return ''.join(pieces)
+
+def _resources(project, text):
+ if text is None or text.strip() == '':
+ return None
+ return file_filter.resources(project, text)
+
+
def runtask(env, command, name, interrupts=True):
    # Convenience wrapper: build and immediately run a RunTask.
    return RunTask(env, command, name, interrupts)()
+
class RunTask(object):
    """Run a task function with a progress indicator.

    The task receives a rope ``TaskHandle``; jobset progress reported
    through the handle is forwarded to the environment's display.
    """

    def __init__(self, env, task, name, interrupts=True):
        self.env = env
        self.task = task
        self.name = name
        self.interrupts = interrupts

    def __call__(self):
        handle = taskhandle.TaskHandle(name=self.name)
        progress = self.env.create_progress(self.name)

        def forward_progress():
            jobset = handle.current_jobset()
            if not jobset:
                return
            percent = jobset.get_percent_done()
            if percent is not None:
                progress.update(percent)

        handle.add_observer(forward_progress)
        result = self.task(handle)
        progress.done()
        return result
diff --git a/.vim/bundle/python-mode/pylibs/ropemode/tests/__init__.py b/.vim/bundle/python-mode/pylibs/ropemode/tests/__init__.py
diff --git a/.vim/bundle/python-mode/pylibs/ropemode/tests/decorators_test.py b/.vim/bundle/python-mode/pylibs/ropemode/tests/decorators_test.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+import os
+import sys
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
+
+import unittest
+
+from ropemode.decorators import Logger
+
+
class LoggerTests(unittest.TestCase):
    # Logger must be usable before any message handler is attached.

    def test_Logger_called_with_no_args_doesnt_raise_TypeError(self):
        """
        When not initialized with a message display method, Logger
        prints the message to stdout without raising an exception.
        """
        logger = Logger()
        try:
            logger("a message")
        except TypeError:
            self.fail("logger raised TypeError unexpectedly")
+
+
class LoggerMessageHandlerTests(unittest.TestCase):
    # Exercises Logger with a message handler attached; handled messages
    # are accumulated into self.message.

    def setUp(self):
        self.message = ""
        self.logger = Logger()
        self.logger.message = self._echo

    def _echo(self, message):
        # Handler that appends instead of displaying.
        self.message += message

    def test_message_handler_with_no_short_message(self):
        """Test that message handler is called"""
        self.logger("a message")
        self.assertEqual(self.message, "a message")

    def test_only_short_True(self):
        """Test that only_short=True prints only the short message"""
        self.logger.only_short = True
        self.logger("a long message", "a short message")
        self.assertEqual(self.message, "a short message")

    def test_only_short_False(self):
        """Test that only_short=False prints both messages"""
        self.logger.only_short = False
        self.logger("a long message", "a short message")
        self.assertEqual(self.message, "a long messagea short message")
diff --git a/.vim/bundle/python-mode/pylibs/ropevim.py b/.vim/bundle/python-mode/pylibs/ropevim.py
@@ -0,0 +1,453 @@
+"""ropevim, a vim mode for using rope refactoring library"""
+import glob
+import os
+import tempfile
+import re
+
+from ropemode import decorators
+from ropemode import environment
+from ropemode import interface
+
+import vim
+
+# Global flag used to silence ropevim status output
+_rope_quiet = False
+
+
class VimUtils(environment.Environment):
    """ropemode Environment implementation backed by vim's python API.

    Note: this module targets Python 2 (``unicode``, ``basestring``,
    ``iteritems`` and print statements appear below).
    """

    def __init__(self, *args, **kwargs):
        super(VimUtils, self).__init__(*args, **kwargs)
        self.completeopt = vim.eval('&completeopt')
        # Whether the completion preview window is enabled.
        self.preview = 'preview' in self.completeopt

    def ask(self, prompt, default=None, starting=None):
        """Prompt for a free-form string; an empty answer yields default."""
        if starting is None:
            starting = ''
        if default is not None:
            prompt = prompt + '[{0}] '.format(default)
        result = call('input("{0}", "{1}")'.format(prompt, starting))
        if default is not None and result == '':
            return default
        return result

    def ask_values(self, prompt, values, default=None,
                   starting=None, show_values=None):
        """Prompt for one of ``values``; a numeric answer picks by index."""
        # Show the candidate list unless it is long (>= 14 entries).
        if show_values or (show_values is None and len(values) < 14):
            self._print_values(values)
        if default is not None:
            prompt = prompt + '[{0}] '.format(default)
        starting = starting or ''
        # Stash candidates for the RopeValueCompleter vim function.
        _completer.values = values
        answer = call(
            'input("{0}", "{1}", "customlist,RopeValueCompleter")'.format(
                prompt, starting
            )
        )
        if answer is None:
            if 'cancel' in values:
                return 'cancel'
            return
        if default is not None and not answer:
            return default
        if answer.isdigit() and 0 <= int(answer) < len(values):
            return values[int(answer)]
        return answer

    def _print_values(self, values):
        # Echo a numbered list of candidate values.
        numbered = []
        for index, value in enumerate(values):
            numbered.append('%s. %s' % (index, str(value)))
        echo('\n'.join(numbered) + '\n')

    def ask_directory(self, prompt, default=None, starting=None):
        return call('input("{0}", ".", "dir")'.format(prompt))

    def _update_proposals(self, values):
        """Render completion proposals as the body of a vimscript list."""
        self.completeopt = vim.eval('&completeopt')
        self.preview = 'preview' in self.completeopt

        if not self.get('extended_complete'):
            # Plain mode: quoted strings only.
            return u','.join(u"'{0}'".format(self._completion_text(proposal))
                             for proposal in values)

        # Extended mode: vim completion-item dictionaries.
        return u','.join(self._extended_completion(proposal)
                         for proposal in values)

    def _command(self, command, encode=False):
        if encode:
            command = command.encode(self._get_encoding())
        vim.command(command)

    def ask_completion(self, prompt, values, starting=None):
        """Use vim's native insert-mode completion when enabled."""
        if self.get('vim_completion') and 'i' in call('mode()'):
            proposals = self._update_proposals(values)
            col = int(call('col(".")'))
            if starting:
                # Completion replaces from the start of the typed word.
                col -= len(starting)
            self._command(u'call complete({0}, [{1}])'.format(col, proposals),
                          encode=True)
            return None

        return self.ask_values(prompt, values, starting=starting,
                               show_values=False)

    def message(self, message):
        echo(message)

    def yes_or_no(self, prompt):
        return self.ask_values(prompt, ['yes', 'no']) == 'yes'

    def y_or_n(self, prompt):
        return self.yes_or_no(prompt)

    def get(self, name, default=None):
        """Read a g:pymode_rope_<name> vim variable (digits -> int)."""
        vimname = 'g:pymode_rope_{0}'.format(name)
        if str(vim.eval('exists("{0}")'.format(vimname))) == '0':
            return default
        result = vim.eval(vimname)
        if isinstance(result, str) and result.isdigit():
            return int(result)
        return result

    def get_offset(self):
        # Offset of the cursor in the whole buffer text.
        result = self._position_to_offset(*self.cursor)
        return result

    @staticmethod
    def _get_encoding():
        return vim.eval('&encoding') or 'utf-8'

    def _encode_line(self, line):
        return line.encode(self._get_encoding())

    def _decode_line(self, line):
        return line.decode(self._get_encoding())

    def _position_to_offset(self, lineno, colno):
        """Convert a 1-based (line, col) position to a text offset."""
        # Clamp the column to the line length (+1 for the newline).
        result = min(colno, len(self.buffer[lineno - 1]) + 1)
        for line in self.buffer[:lineno - 1]:
            line = self._decode_line(line)
            result += len(line) + 1
        return result

    def get_text(self):
        return self._decode_line('\n'.join(self.buffer)) + u'\n'

    def get_region(self):
        # Offsets of the last visual selection ('< and '> marks).
        start = self._position_to_offset(*self.buffer.mark('<'))
        end = self._position_to_offset(*self.buffer.mark('>'))
        return start, end

    @property
    def buffer(self):
        return vim.current.buffer

    def _get_cursor(self):
        # Convert vim's byte column to a character column.
        lineno, col = vim.current.window.cursor
        line = self._decode_line(vim.current.line[:col])
        col = len(line)
        return (lineno, col)

    def _set_cursor(self, cursor):
        # Convert the character column back to vim's byte column.
        lineno, col = cursor
        line = self._decode_line(vim.current.line)
        line = self._encode_line(line[:col])
        col = len(line)
        vim.current.window.cursor = (lineno, col)

    cursor = property(_get_cursor, _set_cursor)

    @staticmethod
    def get_cur_dir():
        return vim.eval('getcwd()')

    def filename(self):
        return self.buffer.name

    def is_modified(self):
        return vim.eval('&modified')

    def goto_line(self, lineno):
        self.cursor = (lineno, 0)

    def insert_line(self, line, lineno):
        self.buffer[lineno - 1:lineno - 1] = [line]

    def insert(self, text):
        lineno, colno = self.cursor
        line = self.buffer[lineno - 1]
        self.buffer[lineno - 1] = line[:colno] + text + line[colno:]
        self.cursor = (lineno, colno + len(text))

    def delete(self, start, end):
        # NOTE(review): only deletions within a single line are applied;
        # a range spanning lines is silently ignored -- confirm intended.
        lineno1, colno1 = self._offset_to_position(start - 1)
        lineno2, colno2 = self._offset_to_position(end - 1)
        lineno, colno = self.cursor
        if lineno1 == lineno2:
            line = self.buffer[lineno1 - 1]
            self.buffer[lineno1 - 1] = line[:colno1] + line[colno2:]
            if lineno == lineno1 and colno >= colno1:
                # Keep the cursor over the same character after deletion.
                diff = colno2 - colno1
                self.cursor = (lineno, max(0, colno - diff))

    def _offset_to_position(self, offset):
        """Convert a text offset to a 1-based (line, col) position."""
        text = self.get_text()
        lineno = text.count('\n', 0, offset) + 1
        try:
            colno = offset - text.rindex('\n', 0, offset) - 1
        except ValueError:
            # No newline before the offset: first line.
            colno = offset
        return lineno, colno

    def filenames(self):
        result = []
        for buffer in vim.buffers:
            if buffer.name:
                result.append(buffer.name)
        return result

    def save_files(self, filenames):
        # No per-file save here; write all modified buffers.
        vim.command('wall')

    def reload_files(self, filenames, moves={}):
        initial = self.filename()
        for filename in filenames:
            self.find_file(moves.get(filename, filename), force=True)
        if initial:
            # Return to the buffer that was active before reloading.
            self.find_file(initial)

    def find_file(self, filename, readonly=False, other=False, force=False):
        if filename != self.filename() or force:
            if other:
                vim.command(other)
            # Escape spaces for the :e command line.
            filename = '\\ '.join(s.rstrip() for s in filename.split())
            vim.command('e %s' % filename)
            if readonly:
                vim.command('set nomodifiable')

    def create_progress(self, name):
        return VimProgress(name)

    def current_word(self):
        return vim.eval('expand("<cword>")')

    def push_mark(self):
        vim.command('mark `')

    def prefix_value(self, prefix):
        return prefix

    def show_occurrences(self, locations):
        self._quickfixdefs(locations)
        vim.command('cwindow')

    def _quickfixdefs(self, locations):
        """Load occurrence locations into vim's quickfix list."""
        # NOTE(review): tempfile.mktemp() is race-prone and already
        # returns a full path, so joining with gettempdir() is redundant
        # -- consider NamedTemporaryFile(delete=False).
        filename = os.path.join(tempfile.gettempdir(), tempfile.mktemp())
        try:
            self._writedefs(locations, filename)
            # Save and restore the user's errorfile/errorformat settings.
            vim.command('let old_errorfile = &errorfile')
            vim.command('let old_errorformat = &errorformat')
            vim.command('set errorformat=%f:%l:\ %m')
            vim.command('cfile ' + filename)
            vim.command('let &errorformat = old_errorformat')
            vim.command('let &errorfile = old_errorfile')
        finally:
            os.remove(filename)

    def _writedefs(self, locations, filename):
        # One "file:line: - note" record per occurrence.
        tofile = open(filename, 'w')
        try:
            for location in locations:
                err = '%s:%d: - %s\n' % (location.filename,
                                         location.lineno, location.note)
                echo(err)
                tofile.write(err)
        finally:
            tofile.close()

    def show_doc(self, docs, altview=False):
        if docs:
            vim.command(
                'call pymode#ShowStr("{0}")'.format(docs.replace('"', '\\"'))
            )

    def preview_changes(self, diffs):
        echo(diffs)
        return self.y_or_n('Do the changes? ')

    def local_command(self, name, callback, key=None, prefix=False):
        self._add_command(name, callback, key, prefix,
                          prekey=self.get('local_prefix'))

    def global_command(self, name, callback, key=None, prefix=False):
        self._add_command(name, callback, key, prefix,
                          prekey=self.get('global_prefix'))

    def add_hook(self, name, callback, hook):
        """Run callback on the given editing event via an autocmd."""
        mapping = {'before_save': 'FileWritePre,BufWritePre',
                   'after_save': 'FileWritePost,BufWritePost',
                   'exit': 'VimLeave'}
        self._add_function(name, callback)
        vim.command(
            'autocmd {0} *.py call {1}()'.format(
                mapping[hook], _vim_name(name)
            )
        )

    def _add_command(self, name, callback, key, prefix, prekey):
        """Expose callback as an ex command plus an optional mapping."""
        self._add_function(name, callback, prefix)
        vim.command(
            'command! -range {0} call {1}()'.format(
                _vim_name(name), _vim_name(name)
            )
        )
        if key is not None:
            key = prekey + key.replace(' ', '')
            vim.command(
                'noremap {0} :call {1}()<cr>'.format(key, _vim_name(name))
            )

    def _add_function(self, name, callback, prefix=False):
        # The callback is published in this module's globals so the
        # generated vim function can reach it as ropevim.<name>.
        globals()[name] = callback
        arg = 'None' if prefix else ''
        vim.command(
            'function! {0}()\n'
            'python ropevim.{1}({2})\n'
            'endfunction\n'.format(_vim_name(name), name, arg)
        )

    def _completion_data(self, proposal):
        return proposal

    # Matches the first non-blank line of a docstring.
    _docstring_re = re.compile('^[\s\t\n]*([^\n]*)')

    def _extended_completion(self, proposal):
        # we are using extended complete and return dicts instead of strings.
        # `ci` means "completion item". see `:help complete-items`
        word, _, menu = map(lambda x: x.strip(), proposal.name.partition(':'))
        # Abbreviate the proposal type by dropping vowels.  NOTE(review):
        # the set 'aeyuo' omits 'i' -- confirm whether that is deliberate.
        ci = dict(
            word=word,
            info='',
            kind=''.join(
                s if s not in 'aeyuo' else '' for s in proposal.type)[:3],
            menu=menu or '')

        if proposal.scope == 'parameter_keyword':
            default = proposal.get_default()
            ci["menu"] += '*' if default is None else '= {0}'.format(default)

        if self.preview and not ci['menu']:
            doc = proposal.get_doc()
            ci['info'] = self._docstring_re.match(doc).group(1) if doc else ''

        return self._conv(ci)

    def _conv(self, obj):
        # Serialize a dict/value to vimscript literal syntax
        # (Python 2: iteritems).
        if isinstance(obj, dict):
            return u'{' + u','.join([
                u"{0}:{1}".format(self._conv(key), self._conv(value))
                for key, value in obj.iteritems()]) + u'}'
        return u'"{0}"'.format(str(obj).replace(u'"', u'\\"'))
+
+
+def _vim_name(name):
+ tokens = name.split('_')
+ newtokens = ['Rope'] + [token.title() for token in tokens]
+ return ''.join(newtokens)
+
+
class VimProgress(object):
    """Progress reporter that echoes status into vim's message area."""

    def __init__(self, name):
        self.name = name
        self.last = 0
        status('{0} ... '.format(self.name))

    def update(self, percent):
        """Show progress; raise KeyboardInterrupt if vim reports an error."""
        try:
            # Non-blocking key poll; vim.error here (presumably a Ctrl-C
            # style interrupt -- confirm) aborts the running task.
            vim.eval('getchar(0)')
        except vim.error:
            raise KeyboardInterrupt(
                'Task {0} was interrupted!'.format(self.name)
            )
        # Redraw only every ~5% to limit redraw cost.
        if percent > self.last + 4:
            status('{0} ... {1}%'.format(self.name, percent))
            self.last = percent

    def done(self):
        status('{0} ... done'.format(self.name))
+
+
def echo(message):
    """Print message, encoded for vim's &encoding (Python 2)."""
    if isinstance(message, unicode):
        message = message.encode(vim.eval('&encoding'))
    print message
+
+
def status(message):
    """Show message on vim's status line unless quiet mode is enabled."""
    if _rope_quiet:
        return

    if isinstance(message, unicode):
        message = message.encode(vim.eval('&encoding'))
    vim.command('redraw | echon "{0}"'.format(message))
+
+
def call(command):
    # Thin wrapper around evaluating a vim expression.
    return vim.eval(command)
+
+
class _ValueCompleter(object):
    """Command-line completer used by ask_values prompts.

    Registers the RopeValueCompleter vim function once; the candidate
    list is stashed in ``self.values`` before each prompt.
    """

    def __init__(self):
        self.values = []
        vim.command('python import vim')
        vim.command('function! RopeValueCompleter(A, L, P)\n'
                    'python args = [vim.eval("a:" + p) for p in "ALP"]\n'
                    'python ropevim._completer(*args)\n'
                    'return s:completions\n'
                    'endfunction\n')

    def __call__(self, arg_lead, cmd_line, cursor_pos):
        # don't know if self.values can be empty but better safe than sorry
        if self.values:
            if not isinstance(self.values[0], basestring):
                # Completion proposals: match on their .name attribute.
                result = [proposal.name for proposal in self.values
                          if proposal.name.startswith(arg_lead)]
            else:
                result = [proposal for proposal in self.values
                          if proposal.startswith(arg_lead)]
            # Hand the matches back to the vim function via s:completions.
            vim.command('let s:completions = {0}'.format(result))
+
+
class RopeMode(interface.RopeMode):
    """RopeMode variant that also sources .ropeproject/*.vim files."""

    @decorators.global_command('o')
    def open_project(self, root=None, quiet=False):
        # quiet suppresses status() output for the whole session.
        global _rope_quiet
        _rope_quiet = quiet

        super(RopeMode, self).open_project(root=root)
        rope_project_dir = os.path.join(self.project.address, '.ropeproject')
        vimfiles = glob.glob(os.path.join(rope_project_dir, '*.vim'))

        if not vimfiles:
            return

        # Source project-local vim files with a progress indicator.
        txt = 'Sourcing vim files under \'.ropeproject/\''
        progress = self.env.create_progress(txt)
        for idx, vimfile in enumerate(sorted(vimfiles)):
            progress.name = txt + ' ({0})'.format(os.path.basename(vimfile))
            vim.command(':silent source {0}'.format(vimfile))
            progress.update(idx * 100 / len(vimfiles))

        progress.name = txt
        progress.done()
+
+decorators.logger.message = echo
+decorators.logger.only_short = True
+
+_completer = _ValueCompleter()
+
+_env = VimUtils()
+_interface = RopeMode(env=_env)
diff --git a/.vim/bundle/python-mode/pylint.ini b/.vim/bundle/python-mode/pylint.ini
@@ -0,0 +1,25 @@
+[MESSAGES CONTROL]
+# Disable the message(s) with the given id(s).
+# http://pylint-messages.wikidot.com/all-codes
+#
+# C0103: Invalid name "%s" (should match %s)
+# C0111: Missing docstring
+# E1101: %s %r has no %r member
+# R0901: Too many ancestors (%s/%s)
+# R0902: Too many instance attributes (%s/%s)
+# R0903: Too few public methods (%s/%s)
+# R0904: Too many public methods (%s/%s)
+# R0913: Too many arguments (%s/%s)
+# R0915: Too many statements (%s/%s)
+# W0141: Used builtin function %r
+# W0142: Used * or ** magic
+# W0221: Arguments number differs from %s method
+# W0232: Class has no __init__ method
+# W0401: Wildcard import %s
+# W0613: Unused argument %r
+# W0631: Using possibly undefined loop variable %r
+#
+disable = C0103,C0111,E1101,R0901,R0902,R0903,R0904,R0913,R0915,W0141,W0142,W0221,W0232,W0401,W0613,W0631
+
+[TYPECHECK]
+generated-members = REQUEST,acl_users,aq_parent,objects,DoesNotExist,_meta,status_code,content,context
diff --git a/.vim/bundle/python-mode/syntax/pyrex.vim b/.vim/bundle/python-mode/syntax/pyrex.vim
@@ -0,0 +1,71 @@
" Vim syntax file
" Language: Pyrex
" Maintainer: John Tyree
" Last Change: 2012 Nov 06

" For version 5.x: Clear all syntax items
" For version 6.x: Quit when a syntax file was already loaded
if version < 600
  syntax clear
elseif exists("b:current_syntax")
  finish
endif

" Read the Python syntax to start with
if version < 600
  so <sfile>:p:h/python.vim
else
  runtime! syntax/python.vim
  unlet b:current_syntax
endif

" Pyrex extensions
syn keyword pyrexStatement nogil inline typedef ctypedef sizeof
syn keyword pyrexType Py_ssize_t int long short float double char object void
" Here we want slightly different behavior depending on whether we're declaring
" variables or functions. c[p]def should work on the top level as a keyword, but
" should ALSO work to identify functions and classes.
syn match pyrexStatement "\<cp\?def\>"
syn match pyrexStatement "\<cp\?def\>[^=]*(\@=" contains=pythonStatement,pyrexStatement,pythonFunction,pyrexType skipwhite
syn keyword pyrexType signed unsigned
syn keyword pyrexStructure struct union enum
syn keyword pyrexInclude include cimport
syn keyword pyrexAccess public private property readonly extern
" If someone wants Python's built-ins highlighted, they probably also
" want Pyrex's built-ins highlighted
if exists("python_highlight_builtins") || exists("pyrex_highlight_builtins")
  syn keyword pyrexBuiltin NULL
endif

" This deletes "from" from the keywords and re-adds it as a
" match with lower priority than pyrexForFrom
syn clear pythonInclude
syn keyword pythonInclude import
syn match pythonInclude "\<from\>"

" With "for[^:]*\zsfrom" VIM does not match "for" anymore, so
" I used the slower "\@<=" form
syn match pyrexForFrom "\(\<for\>[^:]*\)\@<=\<from\>"

" Default highlighting
if version >= 508 || !exists("did_pyrex_syntax_inits")
  if version < 508
    let did_pyrex_syntax_inits = 1
    command -nargs=+ HiLink hi link <args>
  else
    command -nargs=+ HiLink hi def link <args>
  endif
  HiLink pyrexStatement Statement
  HiLink pyrexType Type
  HiLink pyrexStructure Structure
  HiLink pyrexInclude PreCondit
  HiLink pyrexAccess pyrexStatement
  if exists("python_highlight_builtins") || exists("pyrex_highlight_builtins")
    HiLink pyrexBuiltin Function
  endif
  HiLink pyrexForFrom Statement

  delcommand HiLink
endif

let b:current_syntax = "pyrex"
diff --git a/.vim/bundle/python-mode/syntax/python.vim b/.vim/bundle/python-mode/syntax/python.vim
@@ -0,0 +1,298 @@
+" vim: ft=vim:fdm=marker
+"
+runtime ftplugin/python/init-pymode.vim
+
+" DESC: Skip loading when pymode syntax is disabled or another syntax is already active
+if !pymode#Option('syntax') || pymode#Default('b:current_syntax', 'python')
+ finish
+endif
+
+" For version 5.x: Clear all syntax items
+if version < 600
+ syntax clear
+endif
+
+" Highlight all
+call pymode#Default('g:pymode_syntax_all', 1)
+
+" Keywords {{{
+" ============
+
+ syn keyword pythonStatement break continue del
+ syn keyword pythonStatement exec return
+ syn keyword pythonStatement pass raise
+ syn keyword pythonStatement global assert
+ syn keyword pythonStatement lambda yield
+ syn keyword pythonStatement with as
+ syn keyword pythonStatement def class nextgroup=pythonFunction skipwhite
+ syn match pythonFunction "[a-zA-Z_][a-zA-Z0-9_]*" display contained
+ syn keyword pythonRepeat for while
+ syn keyword pythonConditional if elif else
+ syn keyword pythonInclude import from
+ syn keyword pythonException try except finally
+ syn keyword pythonOperator and in is not or
+
+ if !pymode#Default("g:pymode_syntax_print_as_function", 0) || !g:pymode_syntax_print_as_function
+ syn keyword pythonStatement print
+ endif
+
+" }}}
+
+
+" Decorators {{{
+" ==============
+
+ syn match pythonDecorator "@" display nextgroup=pythonDottedName skipwhite
+ syn match pythonDottedName "[a-zA-Z_][a-zA-Z0-9_]*\(\.[a-zA-Z_][a-zA-Z0-9_]*\)*" display contained
+ syn match pythonDot "\." display containedin=pythonDottedName
+
+" }}}
+
+
+" Comments {{{
+" ============
+
+ syn match pythonComment "#.*$" display contains=pythonTodo,@Spell
+ syn match pythonRun "\%^#!.*$"
+ syn match pythonCoding "\%^.*\(\n.*\)\?#.*coding[:=]\s*[0-9A-Za-z-_.]\+.*$"
+ syn keyword pythonTodo TODO FIXME XXX contained
+
+" }}}
+
+
+" Errors {{{
+" ==========
+
+ syn match pythonError "\<\d\+\D\+\>" display
+ syn match pythonError "[$?]" display
+ syn match pythonError "[&|]\{2,}" display
+ syn match pythonError "[=]\{3,}" display
+
+ " Indent errors (mix space and tabs)
+ if !pymode#Default('g:pymode_syntax_indent_errors', g:pymode_syntax_all) || g:pymode_syntax_indent_errors
+ syn match pythonIndentError "^\s*\( \t\|\t \)\s*\S"me=e-1 display
+ endif
+
+ " Trailing space errors
+ if !pymode#Default('g:pymode_syntax_space_errors', g:pymode_syntax_all) || g:pymode_syntax_space_errors
+ syn match pythonSpaceError "\s\+$" display
+ endif
+
+" }}}
+
+
+" Strings {{{
+" ===========
+
+ syn region pythonString start=+[bB]\='+ skip=+\\\\\|\\'\|\\$+ excludenl end=+'+ end=+$+ keepend contains=pythonEscape,pythonEscapeError,@Spell
+ syn region pythonString start=+[bB]\="+ skip=+\\\\\|\\"\|\\$+ excludenl end=+"+ end=+$+ keepend contains=pythonEscape,pythonEscapeError,@Spell
+ syn region pythonString start=+[bB]\="""+ end=+"""+ keepend contains=pythonEscape,pythonEscapeError,pythonDocTest2,pythonSpaceError,@Spell
+ syn region pythonString start=+[bB]\='''+ end=+'''+ keepend contains=pythonEscape,pythonEscapeError,pythonDocTest,pythonSpaceError,@Spell
+
+ syn match pythonEscape +\\[abfnrtv'"\\]+ display contained
+ syn match pythonEscape "\\\o\o\=\o\=" display contained
+ syn match pythonEscapeError "\\\o\{,2}[89]" display contained
+ syn match pythonEscape "\\x\x\{2}" display contained
+ syn match pythonEscapeError "\\x\x\=\X" display contained
+ syn match pythonEscape "\\$"
+
+ " Unicode
+ syn region pythonUniString start=+[uU]'+ skip=+\\\\\|\\'\|\\$+ excludenl end=+'+ end=+$+ keepend contains=pythonEscape,pythonUniEscape,pythonEscapeError,pythonUniEscapeError,@Spell
+ syn region pythonUniString start=+[uU]"+ skip=+\\\\\|\\"\|\\$+ excludenl end=+"+ end=+$+ keepend contains=pythonEscape,pythonUniEscape,pythonEscapeError,pythonUniEscapeError,@Spell
+ syn region pythonUniString start=+[uU]"""+ end=+"""+ keepend contains=pythonEscape,pythonUniEscape,pythonEscapeError,pythonUniEscapeError,pythonDocTest2,pythonSpaceError,@Spell
+ syn region pythonUniString start=+[uU]'''+ end=+'''+ keepend contains=pythonEscape,pythonUniEscape,pythonEscapeError,pythonUniEscapeError,pythonDocTest,pythonSpaceError,@Spell
+
+ syn match pythonUniEscape "\\u\x\{4}" display contained
+ syn match pythonUniEscapeError "\\u\x\{,3}\X" display contained
+ syn match pythonUniEscape "\\U\x\{8}" display contained
+ syn match pythonUniEscapeError "\\U\x\{,7}\X" display contained
+ syn match pythonUniEscape "\\N{[A-Z ]\+}" display contained
+ syn match pythonUniEscapeError "\\N{[^A-Z ]\+}" display contained
+
+ " Raw strings
+ syn region pythonRawString start=+[rR]'+ skip=+\\\\\|\\'\|\\$+ excludenl end=+'+ end=+$+ keepend contains=pythonRawEscape,@Spell
+ syn region pythonRawString start=+[rR]"+ skip=+\\\\\|\\"\|\\$+ excludenl end=+"+ end=+$+ keepend contains=pythonRawEscape,@Spell
+ syn region pythonRawString start=+[rR]"""+ end=+"""+ keepend contains=pythonDocTest2,pythonSpaceError,@Spell
+ syn region pythonRawString start=+[rR]'''+ end=+'''+ keepend contains=pythonDocTest,pythonSpaceError,@Spell
+
+ syn match pythonRawEscape +\\['"]+ display transparent contained
+
+ " Unicode raw strings
+ syn region pythonUniRawString start=+[uU][rR]'+ skip=+\\\\\|\\'\|\\$+ excludenl end=+'+ end=+$+ keepend contains=pythonRawEscape,pythonUniRawEscape,pythonUniRawEscapeError,@Spell
+ syn region pythonUniRawString start=+[uU][rR]"+ skip=+\\\\\|\\"\|\\$+ excludenl end=+"+ end=+$+ keepend contains=pythonRawEscape,pythonUniRawEscape,pythonUniRawEscapeError,@Spell
+ syn region pythonUniRawString start=+[uU][rR]"""+ end=+"""+ keepend contains=pythonUniRawEscape,pythonUniRawEscapeError,pythonDocTest2,pythonSpaceError,@Spell
+ syn region pythonUniRawString start=+[uU][rR]'''+ end=+'''+ keepend contains=pythonUniRawEscape,pythonUniRawEscapeError,pythonDocTest,pythonSpaceError,@Spell
+
+ syn match pythonUniRawEscape "\([^\\]\(\\\\\)*\)\@<=\\u\x\{4}" display contained
+ syn match pythonUniRawEscapeError "\([^\\]\(\\\\\)*\)\@<=\\u\x\{,3}\X" display contained
+
+ " String formatting
+ if !pymode#Default('g:pymode_syntax_string_formatting', g:pymode_syntax_all) || g:pymode_syntax_string_formatting
+ syn match pythonStrFormatting "%\(([^)]\+)\)\=[-#0 +]*\d*\(\.\d\+\)\=[hlL]\=[diouxXeEfFgGcrs%]" contained containedin=pythonString,pythonUniString,pythonRawString,pythonUniRawString
+ syn match pythonStrFormatting "%[-#0 +]*\(\*\|\d\+\)\=\(\.\(\*\|\d\+\)\)\=[hlL]\=[diouxXeEfFgGcrs%]" contained containedin=pythonString,pythonUniString,pythonRawString,pythonUniRawString
+ endif
+
+ " Str.format syntax
+ if !pymode#Default('g:pymode_syntax_string_format', g:pymode_syntax_all) || g:pymode_syntax_string_format
+ syn match pythonStrFormat "{{\|}}" contained containedin=pythonString,pythonUniString,pythonRawString,pythonUniRawString
+ syn match pythonStrFormat "{\([a-zA-Z_][a-zA-Z0-9_]*\|\d\+\)\(\.[a-zA-Z_][a-zA-Z0-9_]*\|\[\(\d\+\|[^!:\}]\+\)\]\)*\(![rs]\)\=\(:\({\([a-zA-Z_][a-zA-Z0-9_]*\|\d\+\)}\|\([^}]\=[<>=^]\)\=[ +-]\=#\=0\=\d*\(\.\d\+\)\=[bcdeEfFgGnoxX%]\=\)\=\)\=}" contained containedin=pythonString,pythonUniString,pythonRawString,pythonUniRawString
+ endif
+
+ " String templates
+ if !pymode#Default('g:pymode_syntax_string_templates', g:pymode_syntax_all) || g:pymode_syntax_string_templates
+ syn match pythonStrTemplate "\$\$" contained containedin=pythonString,pythonUniString,pythonRawString,pythonUniRawString
+ syn match pythonStrTemplate "\${[a-zA-Z_][a-zA-Z0-9_]*}" contained containedin=pythonString,pythonUniString,pythonRawString,pythonUniRawString
+ syn match pythonStrTemplate "\$[a-zA-Z_][a-zA-Z0-9_]*" contained containedin=pythonString,pythonUniString,pythonRawString,pythonUniRawString
+ endif
+
+ " DocTests
+ if !pymode#Default('g:pymode_syntax_doctests', g:pymode_syntax_all) || g:pymode_syntax_doctests
+ syn region pythonDocTest start="^\s*>>>" end=+'''+he=s-1 end="^\s*$" contained
+ syn region pythonDocTest2 start="^\s*>>>" end=+"""+he=s-1 end="^\s*$" contained
+ endif
+
+" }}}
+
+" Numbers {{{
+" ===========
+
+ syn match pythonHexError "\<0[xX]\x*[g-zG-Z]\x*[lL]\=\>" display
+ syn match pythonHexNumber "\<0[xX]\x\+[lL]\=\>" display
+ syn match pythonOctNumber "\<0[oO]\o\+[lL]\=\>" display
+ syn match pythonBinNumber "\<0[bB][01]\+[lL]\=\>" display
+ syn match pythonNumber "\<\d\+[lLjJ]\=\>" display
+ syn match pythonFloat "\.\d\+\([eE][+-]\=\d\+\)\=[jJ]\=\>" display
+ syn match pythonFloat "\<\d\+[eE][+-]\=\d\+[jJ]\=\>" display
+ syn match pythonFloat "\<\d\+\.\d*\([eE][+-]\=\d\+\)\=[jJ]\=" display
+ syn match pythonOctError "\<0[oO]\=\o*[8-9]\d*[lL]\=\>" display
+ syn match pythonBinError "\<0[bB][01]*[2-9]\d*[lL]\=\>" display
+
+" }}}
+
+" Builtins {{{
+" ============
+
+ " Builtin objects and types
+ if !pymode#Default('g:pymode_syntax_builtin_objs', g:pymode_syntax_all) || g:pymode_syntax_builtin_objs
+ syn keyword pythonBuiltinObj True False Ellipsis None NotImplemented
+ syn keyword pythonBuiltinObj __debug__ __doc__ __file__ __name__ __package__
+ syn keyword pythonBuiltinObj self
+ endif
+
+ " Builtin functions
+ if !pymode#Default('g:pymode_syntax_builtin_funcs', g:pymode_syntax_all) || g:pymode_syntax_builtin_funcs
+ syn keyword pythonBuiltinFunc __import__ abs all any apply
+ syn keyword pythonBuiltinFunc basestring bin bool buffer bytearray bytes callable
+ syn keyword pythonBuiltinFunc chr classmethod cmp coerce compile complex
+ syn keyword pythonBuiltinFunc delattr dict dir divmod enumerate eval
+ syn keyword pythonBuiltinFunc execfile file filter float format frozenset getattr
+ syn keyword pythonBuiltinFunc globals hasattr hash help hex id
+ syn keyword pythonBuiltinFunc input int intern isinstance
+ syn keyword pythonBuiltinFunc issubclass iter len list locals long map max
+ syn keyword pythonBuiltinFunc min next object oct open ord
+ syn keyword pythonBuiltinFunc pow property range
+ syn keyword pythonBuiltinFunc raw_input reduce reload repr
+ syn keyword pythonBuiltinFunc reversed round set setattr
+ syn keyword pythonBuiltinFunc slice sorted staticmethod str sum super tuple
+ syn keyword pythonBuiltinFunc type unichr unicode vars xrange zip
+
+ if pymode#Default('g:pymode_syntax_print_as_function', 0) && g:pymode_syntax_print_as_function
+ syn keyword pythonBuiltinFunc print
+ endif
+
+ endif
+
+    " Builtin exceptions and warnings.
+    " Fix: 'PendingDepricationWarning' was misspelled; the real Python class is
+    " PendingDeprecationWarning, so the keyword could never match actual code.
+    if !pymode#Default('g:pymode_syntax_highlight_exceptions', g:pymode_syntax_all) || g:pymode_syntax_highlight_exceptions
+        syn keyword pythonExClass BaseException
+        syn keyword pythonExClass Exception StandardError ArithmeticError
+        syn keyword pythonExClass LookupError EnvironmentError
+        syn keyword pythonExClass AssertionError AttributeError BufferError EOFError
+        syn keyword pythonExClass FloatingPointError GeneratorExit IOError
+        syn keyword pythonExClass ImportError IndexError KeyError
+        syn keyword pythonExClass KeyboardInterrupt MemoryError NameError
+        syn keyword pythonExClass NotImplementedError OSError OverflowError
+        syn keyword pythonExClass ReferenceError RuntimeError StopIteration
+        syn keyword pythonExClass SyntaxError IndentationError TabError
+        syn keyword pythonExClass SystemError SystemExit TypeError
+        syn keyword pythonExClass UnboundLocalError UnicodeError
+        syn keyword pythonExClass UnicodeEncodeError UnicodeDecodeError
+        syn keyword pythonExClass UnicodeTranslateError ValueError VMSError
+        syn keyword pythonExClass WindowsError ZeroDivisionError
+        syn keyword pythonExClass Warning UserWarning BytesWarning DeprecationWarning
+        syn keyword pythonExClass PendingDeprecationWarning SyntaxWarning
+        syn keyword pythonExClass RuntimeWarning FutureWarning
+        syn keyword pythonExClass ImportWarning UnicodeWarning
+    endif
+
+" }}}
+
+
+if !pymode#Default('g:pymode_syntax_slow_sync', 0) || g:pymode_syntax_slow_sync
+ syn sync minlines=2000
+else
+ " This is fast but code inside triple quoted strings screws it up. It
+ " is impossible to fix because the only way to know if you are inside a
+ " triple quoted string is to start from the beginning of the file.
+ syn sync match pythonSync grouphere NONE "):$"
+ syn sync maxlines=200
+endif
+
+" Highlight {{{
+" =============
+
+ hi def link pythonStatement Statement
+ hi def link pythonInclude Include
+ hi def link pythonFunction Function
+ hi def link pythonConditional Conditional
+ hi def link pythonRepeat Repeat
+ hi def link pythonException Exception
+ hi def link pythonOperator Operator
+
+ hi def link pythonDecorator Define
+ hi def link pythonDottedName Function
+ hi def link pythonDot Normal
+
+ hi def link pythonComment Comment
+ hi def link pythonCoding Special
+ hi def link pythonRun Special
+ hi def link pythonTodo Todo
+
+ hi def link pythonError Error
+ hi def link pythonIndentError Error
+ hi def link pythonSpaceError Error
+
+ hi def link pythonString String
+ hi def link pythonUniString String
+ hi def link pythonRawString String
+ hi def link pythonUniRawString String
+
+ hi def link pythonEscape Special
+ hi def link pythonEscapeError Error
+ hi def link pythonUniEscape Special
+ hi def link pythonUniEscapeError Error
+ hi def link pythonUniRawEscape Special
+ hi def link pythonUniRawEscapeError Error
+
+ hi def link pythonStrFormatting Special
+ hi def link pythonStrFormat Special
+ hi def link pythonStrTemplate Special
+
+ hi def link pythonDocTest Special
+ hi def link pythonDocTest2 Special
+
+ hi def link pythonNumber Number
+ hi def link pythonHexNumber Number
+ hi def link pythonOctNumber Number
+ hi def link pythonBinNumber Number
+ hi def link pythonFloat Float
+ hi def link pythonOctError Error
+ hi def link pythonHexError Error
+ hi def link pythonBinError Error
+
+ hi def link pythonBuiltinObj Structure
+ hi def link pythonBuiltinFunc Function
+
+ hi def link pythonExClass Structure
+
+" }}}